#!/usr/bin/python
# -*- coding: utf-8 -*-

# Hive Appier Framework
# Copyright (c) 2008-2024 Hive Solutions Lda.
#
# This file is part of Hive Appier Framework.
#
# Hive Appier Framework is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by the Apache
# Foundation, either version 2.0 of the License, or (at your option) any
# later version.
#
# Hive Appier Framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License along with
# Hive Appier Framework. If not, see <http://www.apache.org/licenses/>.

__author__ = "João Magalhães <joamag@hive.pt>"
""" The author(s) of the module """

__copyright__ = "Copyright (c) 2008-2024 Hive Solutions Lda."
""" The copyright for the module """

__license__ = "Apache License, Version 2.0"
""" The license for the module """

import copy

from . import common
from . import legacy
from . import exceptions


class ModelAsync(object):

    @classmethod
    async def get_a(cls, *args, **kwargs):
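        """
        Retrieves a single model instance from the data source using the
        provided keyword arguments as the query filter, raising a not found
        error in case no model is found and the `raise_e` flag is set.

        Illustrative usage sketch, assuming a hypothetical concrete `Person`
        model (not defined in this module) bound to a configured data source:

            person = await Person.get_a(name="john", raise_e=False)
        """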
        (
            fields,
            eager,
            eager_l,
            map,
            rules,
            meta,
            build,
            fill,
            resolve_a,
            skip,
            limit,
            sort,
            raise_e,
        ) = cls._get_attrs(
            kwargs,
            (
                ("fields", None),
                ("eager", None),
                ("eager_l", None),
                ("map", False),
                ("rules", True),
                ("meta", False),
                ("build", True),
                ("fill", True),
                ("resolve_a", None),
                ("skip", 0),
                ("limit", 0),
                ("sort", None),
                ("raise_e", True),
            ),
        )

        # in case there's a sort field and the safe search mode is enabled
        # we must add sorting by the `_id` field so that the retrieval is
        # considered to be deterministic, otherwise some DB implementations
        # will not respect the same sorting sequence across different calls
        if sort and (skip or limit):
            if not isinstance(sort, list):
                sort = list(sort)
            sort.append(["_id", 1])

        if eager_l == None:
            eager_l = map
        if resolve_a == None:
            resolve_a = map
        if eager_l:
            eager = cls._eager_b(eager)

        fields = cls._sniff(fields, rules=rules)
        collection = cls._collection_a()
        model = await collection.find_one(
            kwargs, fields, skip=skip, limit=limit, sort=sort
        )

        if not model and raise_e:
            is_devel = common.is_devel()
            if is_devel:
                message = "%s not found for %s" % (cls.__name__, str(kwargs))
            else:
                message = "%s not found" % cls.__name__
            raise exceptions.NotFoundError(message=message)
        if not model and not raise_e:
            return model

        cls.types(model)
        if fill:
            cls.fill(model, safe=True)
        if build:
            cls.build(model, map=map, rules=rules, meta=meta)
        if eager:
            model = cls._eager(model, eager, map=map)
        if resolve_a:
            model = cls._resolve_all(model, resolve=False)
        return model if map else cls.old(model=model, safe=False)

    @classmethod
    async def find_a(cls, *args, **kwargs):
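        """
        Retrieves the complete set of model instances that match the provided
        keyword arguments, applying the requested pagination, sorting and
        building operations to the resulting models.

        Illustrative usage sketch, assuming the same hypothetical `Person`
        model referred to above:

            people = await Person.find_a(age=18, sort=[("name", 1)], limit=10)
        """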
        (
            fields,
            eager,
            eager_l,
            map,
            rules,
            meta,
            build,
            fill,
            resolve_a,
            skip,
            limit,
            sort,
            raise_e,
        ) = cls._get_attrs(
            kwargs,
            (
                ("fields", None),
                ("eager", None),
                ("eager_l", False),
                ("map", False),
                ("rules", True),
                ("meta", False),
                ("build", True),
                ("fill", True),
                ("resolve_a", None),
                ("skip", 0),
                ("limit", 0),
                ("sort", None),
                ("raise_e", False),
            ),
        )

        # in case there's a sort field and the safe search mode is enabled
        # we must add sorting by the `_id` field so that the search is
        # considered to be deterministic, otherwise some DB implementations
        # will not respect the same sorting sequence across different calls
        if sort and (skip or limit):
            if not isinstance(sort, list):
                sort = list(sort)
            sort.append(["_id", 1])

        if resolve_a == None:
            resolve_a = map
        if eager_l:
            eager = cls._eager_b(eager)

        cls._find_s(kwargs)
        cls._find_d(kwargs)

        fields = cls._sniff(fields, rules=rules)
        collection = cls._collection_a()
        models = collection.find(kwargs, fields, skip=skip, limit=limit, sort=sort)
        if not models and raise_e:
            is_devel = common.is_devel()
            if is_devel:
                message = "%s not found for %s" % (cls.__name__, str(kwargs))
            else:
                message = "%s not found" % cls.__name__
            raise exceptions.NotFoundError(message=message)

        models = [cls.types(model) async for model in models]
        if fill:
            models = [cls.fill(model, safe=True) for model in models]
        if build:
            [cls.build(model, map=map, rules=rules, meta=meta) for model in models]
        if eager:
            models = cls._eager(models, eager, map=map)
        if resolve_a:
            models = [cls._resolve_all(model, resolve=False) for model in models]
        models = (
            models if map else [cls.old(model=model, safe=False) for model in models]
        )
        return models

    @classmethod
    async def _increment_a(cls, name):
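        """
        Retrieves the next value in the sequence (counter) associated with the
        provided name, atomically incrementing it in the "counters" collection.

        Illustrative usage sketch, assuming a hypothetical auto increment
        field named "id" on the hypothetical `Person` model:

            next_id = await Person._increment_a("id")
        """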
        _name = cls._name() + ":" + name
        store = cls._collection_a(name="counters")
        value = await store.find_and_modify(
            {"_id": _name}, {"$inc": {"seq": 1}}, new=True, upsert=True
        )
        value = value or await store.find_one({"_id": _name})
        return value["seq"]

    async def save_a(
        self,
        validate=True,
        verify=True,
        is_new=None,
        increment_a=None,
        immutables_a=None,
        pre_validate=True,
        pre_save=True,
        pre_create=True,
        pre_update=True,
        post_validate=True,
        post_save=True,
        post_create=True,
        post_update=True,
        before_callbacks=[],
        after_callbacks=[],
    ):
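        """
        Persists the current model instance to the data source, inserting it
        in case it's a new instance or updating the existing record otherwise,
        running validation and the pre/post event handlers according to the
        provided flags.

        Illustrative usage sketch, assuming the same hypothetical `Person`
        model:

            person = Person(name="john")
            await person.save_a()
        """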
        # ensures that the current instance is associated with
        # a concrete model, ready to be persisted in database
        if verify:
            self.assert_is_concrete()

        # checks if the instance to be saved is a new instance
        # or if this is an update operation and then determines
        # a series of default values taking that into account
        if is_new == None:
            is_new = self.is_new()
        if increment_a == None:
            increment_a = is_new
        if immutables_a == None:
            immutables_a = not is_new

        # runs the validation process in the current model, this
        # should ensure that the model is ready to be saved in the
        # data source, without corruption of it, only run this process
        # in case the validate flag is correctly set
        validate and self._validate(
            pre_validate=pre_validate, post_validate=post_validate
        )

        # calls the complete set of event handlers for the current
        # save operation, this should trigger changes in the model
        if pre_save:
            self.pre_save()
        if pre_create and is_new:
            self.pre_create()
        if pre_update and not is_new:
            self.pre_update()

        # filters the values that are present in the current model
        # so that only the valid ones are stored, invalid values
        # are going to be removed, note that if the operation is an
        # update operation the "immutable rules" also apply, the
        # returned value is normalized meaning that for instance if
        # any relation is loaded the reference value is returned instead
        # of the loaded relation values (required for persistence)
        model = await self._filter_a(
            increment_a=increment_a, immutables_a=immutables_a, normalize=True
        )

        # in case the current model is not new must create a new
        # model instance and remove the main identifier from it
        if not is_new:
            _model = copy.copy(model)
            del _model["_id"]

        # calls the complete set of callbacks that should be called
        # before the concrete data store save operation
        for callback in before_callbacks:
            callback(self, model)

        # retrieves the reference to the store object to be used and
        # uses it to store the current model data
        store = self._get_store_a()
        if is_new:
            await store.insert(model)
            self.apply(model, safe_a=False)
        else:
            await store.update({"_id": model["_id"]}, {"$set": _model})

        # calls the complete set of callbacks that should be called
        # after the concrete data store save operation
        for callback in after_callbacks:
            callback(self, model)

        # calls the post save event handlers in order to be able to
        # execute appropriate post operations
        if post_save:
            self.post_save()
        if post_create and is_new:
            self.post_create()
        if post_update and not is_new:
            self.post_update()

        # returns the instance that has just been used for the save
        # operation, this may be used for chaining operations
        return self

    async def delete_a(
        self,
        verify=True,
        pre_delete=True,
        post_delete=True,
        before_callbacks=[],
        after_callbacks=[],
    ):
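        """
        Removes the current model instance from the data source, running the
        pre/post delete event handlers and the provided callbacks around the
        concrete removal operation.

        Illustrative usage sketch, assuming a previously retrieved `person`
        instance of the hypothetical `Person` model:

            await person.delete_a()
        """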
        # ensures that the current instance is associated with
        # a concrete model, ready to be persisted in database
        if verify:
            self.assert_is_concrete()

        # calls the complete set of event handlers for the current
        # delete operation, this should trigger changes in the model
        if pre_delete:
            self.pre_delete()

        # calls the complete set of callbacks that should be called
        # before the concrete data store delete operation
        for callback in before_callbacks:
            callback(self)

        # retrieves the reference to the store object to be able to
        # execute the removal command for the current model
        store = self._get_store_a()
        await store.remove({"_id": self._id})

        # calls the underlying delete handler that may be used to extend
        # the default delete functionality
        self._delete()

        # calls the complete set of callbacks that should be called
        # after the concrete data store delete operation
        for callback in after_callbacks:
            callback(self)

        # calls the complete set of event handlers for the current
        # delete operation, this should trigger changes in the model
        if post_delete:
            self.post_delete()

    async def reload_a(self, *args, **kwargs):
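        """
        Reloads the current model instance from the data source, returning a
        fresh instance retrieved by its identifier, an operational error is
        raised for entities that have not yet been persisted.

        Illustrative usage sketch, assuming a previously saved `person`
        instance:

            person = await person.reload_a()
        """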
        is_new = self.is_new()
        if is_new:
            raise exceptions.OperationalError(
                message="Can't reload a new model entity", code=412
            )
        cls = self.__class__
        return await cls.get_a(_id=self._id, *args, **kwargs)

    async def _filter_a(
        self,
        increment_a=True,
        immutables_a=False,
        normalize=False,
        resolve=False,
        all=False,
        evaluator="json_v",
    ):
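        """
        Builds a map (dictionary) with the items of the current model that
        conform to the class definition, optionally normalizing references,
        resolving values and generating the auto increment fields, this is
        the structure that is sent to the data store on save operations.

        Illustrative usage sketch (mirrors the call made by `save_a`):

            model = await person._filter_a(increment_a=True, normalize=True)
        """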
        # creates the model that will hold the "filtered" model
        # with all the items that conform with the class specification
        model = {}

        # retrieves the class associated with the current instance
        # to be able to retrieve the correct definition methods
        cls = self.__class__

        # retrieves the (schema) definition for the current model
        # to be "filtered" it's going to be used to retrieve the
        # various definitions for the model fields
        definition = cls.definition()

        # retrieves the complete list of fields that are meant to be
        # automatically incremented for every save operation
        increments = cls.increments()

        # gathers the set of elements that are considered immutable and
        # that are not meant to be changed if the current operation to
        # apply the filter is not a new operation (update operation)
        immutables = cls.immutables()

        # iterates over all the increment fields and increments their
        # values so that a new value is set on the model, note that if
        # the increment apply is unset the increment operation is ignored
        for name in increments:
            if not increment_a:
                continue
            if name in self.model:
                model[name] = cls._ensure_min(name, self.model[name])
            else:
                model[name] = await cls._increment_a(name)

        # iterates over all the model items to filter the ones
        # that are not valid for the current class context
        for name, value in legacy.eager(self.model.items()):
            if not name in definition:
                continue
            if immutables_a and name in immutables:
                continue
            value = self._evaluate(name, value, evaluator=evaluator)
            model[name] = value

        # in case the normalize flag is set must iterate over all
        # items to try to normalize the values by calling the reference
        # value method, this will return the reference index value instead
        # of the normal value that would prevent normalization
        if normalize:
            for name, value in legacy.eager(self.model.items()):
                if not name in definition:
                    continue
                if not hasattr(value, "ref_v"):
                    continue
                model[name] = value.ref_v()

        # in case the resolution flag is set, it means that a recursive
        # approach must be performed for the resolution of values that
        # implement the map value (recursive resolution) method, this is
        # a complex (and possibly computationally expensive) process that
        # may imply access to the base data source
        if resolve:
            for name, value in legacy.eager(self.model.items()):
                if not name in definition:
                    continue
                model[name] = cls._resolve(name, value)

        # in case the all flag is set the extra fields (not present
        # in definition) must also be used to populate the resulting
        # (filtered) map so that it contains the complete set of values
        # present in the base map of the current instance
        if all:
            for name, value in legacy.eager(self.model.items()):
                if name in model:
                    continue
                model[name] = value

        # returns the model containing the "filtered" items resulting
        # from the validation of the items against the model class
        return model