| prompt (string · lengths 45–17.8k) | completion (string · lengths 6–107) | api (string · lengths 12–42) |
|---|---|---|
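Each row below pairs a `prompt` (Python source truncated at a library call site), a `completion` (the expression that fills the gap), and an `api` (the fully qualified name of the call being completed). Sketched as a Python literal, with values taken from one of the SQLModel tutorial rows further down:

```python
# Shape of one record (field names from the table header above).
row = {
    "prompt": "... engine = ",                             # code truncated at the call site
    "completion": "create_engine(sqlite_url, echo=True)",  # expression that fills the gap
    "api": "sqlmodel.create_engine",                        # fully qualified API being exercised
}
```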
"""Implementing balance and fiscal_note_items tables
Revision ID: 6099ed2a58e0
Revises: <KEY>
Create Date: 2021-10-11 14:52:44.126077
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "6099ed2a58e0"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"balance",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("value", sa.Float(), nullable=False),
sa.Column("type", sa.Enum("DEBT", "CREDIT", name="balancetype"), nullable=False),
sa.Column(
"operation",
sa.Enum(
"PAYMENT_OF_EMPLOYEES",
"PAYMENT_OF_SUPPLIERS",
"ANOTHER_PAYMENTS",
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
name="operationtype",
),
nullable=False,
),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["owner_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_balance_created_at"), "balance", ["created_at"], unique=False)
op.create_index(op.f("ix_balance_operation"), "balance", ["operation"], unique=False)
op.create_index(op.f("ix_balance_owner_id"), "balance", ["owner_id"], unique=False)
op.create_index(op.f("ix_balance_type"), "balance", ["type"], unique=False)
op.create_table(
"fiscal_note_items",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("buy_value", sa.Float(), nullable=False),
sa.Column("sugested_sell_value", sa.Float(), nullable=False),
sa.Column("owner_id", | sqlmodel.sql.sqltypes.GUID() | sqlmodel.sql.sqltypes.GUID |
"""Implementing balance and fiscal_note_items tables
Revision ID: 6099ed2a58e0
Revises: <KEY>
Create Date: 2021-10-11 14:52:44.126077
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "6099ed2a58e0"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"balance",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("value", sa.Float(), nullable=False),
sa.Column("type", sa.Enum("DEBT", "CREDIT", name="balancetype"), nullable=False),
sa.Column(
"operation",
sa.Enum(
"PAYMENT_OF_EMPLOYEES",
"PAYMENT_OF_SUPPLIERS",
"ANOTHER_PAYMENTS",
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
name="operationtype",
),
nullable=False,
),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["owner_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_balance_created_at"), "balance", ["created_at"], unique=False)
op.create_index(op.f("ix_balance_operation"), "balance", ["operation"], unique=False)
op.create_index(op.f("ix_balance_owner_id"), "balance", ["owner_id"], unique=False)
op.create_index(op.f("ix_balance_type"), "balance", ["type"], unique=False)
op.create_table(
"fiscal_note_items",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("buy_value", sa.Float(), nullable=False),
sa.Column("sugested_sell_value", sa.Float(), nullable=False),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("fiscal_note_id", | sqlmodel.sql.sqltypes.GUID() | sqlmodel.sql.sqltypes.GUID |
"""Implementing balance and fiscal_note_items tables
Revision ID: 6099ed2a58e0
Revises: <KEY>
Create Date: 2021-10-11 14:52:44.126077
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "6099ed2a58e0"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"balance",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("value", sa.Float(), nullable=False),
sa.Column("type", sa.Enum("DEBT", "CREDIT", name="balancetype"), nullable=False),
sa.Column(
"operation",
sa.Enum(
"PAYMENT_OF_EMPLOYEES",
"PAYMENT_OF_SUPPLIERS",
"ANOTHER_PAYMENTS",
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
name="operationtype",
),
nullable=False,
),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["owner_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_balance_created_at"), "balance", ["created_at"], unique=False)
op.create_index(op.f("ix_balance_operation"), "balance", ["operation"], unique=False)
op.create_index(op.f("ix_balance_owner_id"), "balance", ["owner_id"], unique=False)
op.create_index(op.f("ix_balance_type"), "balance", ["type"], unique=False)
op.create_table(
"fiscal_note_items",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("buy_value", sa.Float(), nullable=False),
sa.Column("sugested_sell_value", sa.Float(), nullable=False),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("fiscal_note_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("item_id", | sqlmodel.sql.sqltypes.GUID() | sqlmodel.sql.sqltypes.GUID |
"""Implementing balance and fiscal_note_items tables
Revision ID: 6099ed2a58e0
Revises: <KEY>
Create Date: 2021-10-11 14:52:44.126077
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "6099ed2a58e0"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"balance",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("value", sa.Float(), nullable=False),
sa.Column("type", sa.Enum("DEBT", "CREDIT", name="balancetype"), nullable=False),
sa.Column(
"operation",
sa.Enum(
"PAYMENT_OF_EMPLOYEES",
"PAYMENT_OF_SUPPLIERS",
"ANOTHER_PAYMENTS",
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
name="operationtype",
),
nullable=False,
),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["owner_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_balance_created_at"), "balance", ["created_at"], unique=False)
op.create_index(op.f("ix_balance_operation"), "balance", ["operation"], unique=False)
op.create_index(op.f("ix_balance_owner_id"), "balance", ["owner_id"], unique=False)
op.create_index(op.f("ix_balance_type"), "balance", ["type"], unique=False)
op.create_table(
"fiscal_note_items",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("buy_value", sa.Float(), nullable=False),
sa.Column("sugested_sell_value", sa.Float(), nullable=False),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("fiscal_note_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("item_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("file_id", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
from sqlalchemy.orm import Session
from sqlmodel import select
from sqlalchemy.exc import SQLAlchemyError
from typing import Any
# import sys
#
# sys.path.append("..")
from app.db import models, pagination,session_scope
from app.util import passutil, schemas
from app.logs import fastapi_logger
from app.crud import get_user,get_user_password
class CRUDLogin:
def check_username_password(self, email: str, password: str) -> Any:
""" Verify Password"""
db_user_info = get_user_password(email=email)
return passutil.verify_password(str(password),
str(db_user_info.password))
def check_active_session(self, session_id: str):
""" check for active session """
try:
with session_scope() as db:
statement = | select(models.UsersLoginAttempt) | sqlmodel.select |
from sqlalchemy.orm import Session
from sqlmodel import select
from sqlalchemy.exc import SQLAlchemyError
from typing import Any
# import sys
#
# sys.path.append("..")
from app.db import models, pagination,session_scope
from app.util import passutil, schemas
from app.logs import fastapi_logger
from app.crud import get_user,get_user_password
class CRUDLogin:
def check_username_password(self, email: str, password: str) -> Any:
""" Verify Password"""
db_user_info = get_user_password(email=email)
return passutil.verify_password(str(password),
str(db_user_info.password))
def check_active_session(self, session_id: str):
""" check for active session """
try:
with session_scope() as db:
statement = select(models.UsersLoginAttempt).where(
models.UsersLoginAttempt.session_id == session_id)
results = db.exec(statement)
data = results.one()
return data
except SQLAlchemyError as e:
fastapi_logger.exception("check_active_session")
return None
def login_user(self, user: schemas.UserLogIn, session_id: str) -> Any:
""" Login Attempt Record """
try:
with session_scope() as db:
db_session = models.UsersLoginAttempt(
email=user.email,
session_id=session_id,
ip_address=user.ip_address,
browser=user.browser,
status="logged_in")
db.add(db_session)
db.commit()
db.refresh(db_session)
return db_session
except SQLAlchemyError as e:
fastapi_logger.exception("login_user")
return None
def active_user(self, session_id: str) -> Any:
""" check for active user"""
try:
with session_scope() as db:
statement = | select(models.UsersLoginAttempt) | sqlmodel.select |
from sqlalchemy.orm import Session
from sqlmodel import select
from sqlalchemy.exc import SQLAlchemyError
from typing import Any
# import sys
#
# sys.path.append("..")
from app.db import models, pagination,session_scope
from app.util import passutil, schemas
from app.logs import fastapi_logger
from app.crud import get_user,get_user_password
class CRUDLogin:
def check_username_password(self, email: str, password: str) -> Any:
""" Verify Password"""
db_user_info = get_user_password(email=email)
return passutil.verify_password(str(password),
str(db_user_info.password))
def check_active_session(self, session_id: str):
""" check for active session """
try:
with session_scope() as db:
statement = select(models.UsersLoginAttempt).where(
models.UsersLoginAttempt.session_id == session_id)
results = db.exec(statement)
data = results.one()
return data
except SQLAlchemyError as e:
fastapi_logger.exception("check_active_session")
return None
def login_user(self, user: schemas.UserLogIn, session_id: str) -> Any:
""" Login Attempt Record """
try:
with session_scope() as db:
db_session = models.UsersLoginAttempt(
email=user.email,
session_id=session_id,
ip_address=user.ip_address,
browser=user.browser,
status="logged_in")
db.add(db_session)
db.commit()
db.refresh(db_session)
return db_session
except SQLAlchemyError as e:
fastapi_logger.exception("login_user")
return None
def active_user(self, session_id: str) -> Any:
""" check for active user"""
try:
with session_scope() as db:
statement = select(models.UsersLoginAttempt).where(
models.UsersLoginAttempt.session_id == session_id)
results = db.exec(statement)
db_session = results.one()
db_session.status = "active"
db.add(db_session)
db.commit()
db.refresh(db_session)
return db_session
except SQLAlchemyError as e:
fastapi_logger.exception("active_user")
return None
def logoff_user(self, session_id: str) -> Any:
""" Logging off Record"""
try:
with session_scope() as db:
statement = | select(models.UsersLoginAttempt) | sqlmodel.select |
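The last row above truncates `logoff_user` at its `select(...)`. Following the `active_user` pattern shown just before it, a plausible completion looks like the sketch below (the `"logged_off"` status string is an assumption, not taken from the source):

```python
class CRUDLogin:
    # ...other methods as in the rows above...
    def logoff_user(self, session_id: str) -> Any:
        """Mark the login-attempt record for this session as logged off (sketch)."""
        try:
            with session_scope() as db:
                statement = select(models.UsersLoginAttempt).where(
                    models.UsersLoginAttempt.session_id == session_id)
                results = db.exec(statement)
                db_session = results.one()
                db_session.status = "logged_off"  # assumed status value
                db.add(db_session)
                db.commit()
                db.refresh(db_session)
                return db_session
        except SQLAlchemyError:
            fastapi_logger.exception("logoff_user")
            return None
```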
from typing import Optional # (1)
from sqlmodel import Field, SQLModel, create_engine # (2)
class Hero(SQLModel, table=True): # (3)
id: Optional[int] = Field(default=None, primary_key=True) # (4)
name: str # (5)
secret_name: str # (6)
age: Optional[int] = None # (7)
sqlite_file_name = "database.db" # (8)
sqlite_url = f"sqlite:///{sqlite_file_name}" # (9)
engine = | create_engine(sqlite_url, echo=True) | sqlmodel.create_engine |
from typing import Optional # (1)
from sqlmodel import Field, SQLModel, create_engine # (2)
class Hero(SQLModel, table=True): # (3)
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from typing import Optional # (1)
from sqlmodel import Field, SQLModel, create_engine # (2)
class Hero(SQLModel, table=True): # (3)
id: Optional[int] = Field(default=None, primary_key=True) # (4)
name: str # (5)
secret_name: str # (6)
age: Optional[int] = None # (7)
sqlite_file_name = "database.db" # (8)
sqlite_url = f"sqlite:///{sqlite_file_name}" # (9)
engine = create_engine(sqlite_url, echo=True) # (10)
def create_db_and_tables(): # (11)
| SQLModel.metadata.create_all(engine) | sqlmodel.SQLModel.metadata.create_all |
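Once the engine and `create_db_and_tables()` above are defined, the usual SQLModel tutorial pattern is to call them from a main guard and open a `Session` to insert rows. A minimal sketch (the sample hero values are illustrative):

```python
from sqlmodel import Session  # extra import, not shown in the row above

def create_hero() -> None:
    # Insert one sample row using the engine defined above.
    with Session(engine) as session:
        session.add(Hero(name="Deadpond", secret_name="Dive Wilson"))
        session.commit()

if __name__ == "__main__":
    create_db_and_tables()  # emits CREATE TABLE for Hero against database.db
    create_hero()
```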
from datetime import datetime
from decimal import Decimal
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class Guardian(SQLModel, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from datetime import datetime
from decimal import Decimal
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class Guardian(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
email: str
email_verified_at: Optional[datetime] = None
password: str
remember_token: str
first_name_thai: str
last_name_thai: str
first_name_english: str
last_name_english: str
occupation_id: Optional[int] = None
gender: str
is_thai_address: bool
address_house_number: str
address_moo: str
address_soi: str
address_road: str
address_tambon_id: Optional[int] = None
address_amphoe_id: Optional[int] = None
address_province_id: Optional[int] = None
address_other: str
latitude: Decimal
longitude: Decimal
latitude_custom: Decimal
longitude_custom: Decimal
alive: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianPhone(SQLModel, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from datetime import datetime
from decimal import Decimal
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class Guardian(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
email: str
email_verified_at: Optional[datetime] = None
password: str
remember_token: str
first_name_thai: str
last_name_thai: str
first_name_english: str
last_name_english: str
occupation_id: Optional[int] = None
gender: str
is_thai_address: bool
address_house_number: str
address_moo: str
address_soi: str
address_road: str
address_tambon_id: Optional[int] = None
address_amphoe_id: Optional[int] = None
address_province_id: Optional[int] = None
address_other: str
latitude: Decimal
longitude: Decimal
latitude_custom: Decimal
longitude_custom: Decimal
alive: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianPhone(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
number: str
detail: str
receive_sms: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianIdNumber(SQLModel, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from datetime import datetime
from decimal import Decimal
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class Guardian(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
email: str
email_verified_at: Optional[datetime] = None
password: str
remember_token: str
first_name_thai: str
last_name_thai: str
first_name_english: str
last_name_english: str
occupation_id: Optional[int] = None
gender: str
is_thai_address: bool
address_house_number: str
address_moo: str
address_soi: str
address_road: str
address_tambon_id: Optional[int] = None
address_amphoe_id: Optional[int] = None
address_province_id: Optional[int] = None
address_other: str
latitude: Decimal
longitude: Decimal
latitude_custom: Decimal
longitude_custom: Decimal
alive: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianPhone(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
number: str
detail: str
receive_sms: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianIdNumber(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
id_type_id: int
number: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianPatientMap(SQLModel, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from datetime import datetime
from decimal import Decimal
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class Guardian(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
email: str
email_verified_at: Optional[datetime] = None
password: str
remember_token: str
first_name_thai: str
last_name_thai: str
first_name_english: str
last_name_english: str
occupation_id: Optional[int] = None
gender: str
is_thai_address: bool
address_house_number: str
address_moo: str
address_soi: str
address_road: str
address_tambon_id: Optional[int] = None
address_amphoe_id: Optional[int] = None
address_province_id: Optional[int] = None
address_other: str
latitude: Decimal
longitude: Decimal
latitude_custom: Decimal
longitude_custom: Decimal
alive: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianPhone(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
number: str
detail: str
receive_sms: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianIdNumber(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
id_type_id: int
number: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianPatientMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
patient_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianNotification(SQLModel, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from datetime import date, datetime
from typing import Optional
from pydantic import BaseModel, validator
from sqlmodel import Field, SQLModel
# Simple classes for access control tokens
class Token(BaseModel):
access_token: str
token_type: str
expiry: datetime
class TokenData(BaseModel):
username: Optional[str] = None
# Default user class, this is the one to interact with.
class User(SQLModel):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from datetime import date, datetime
from typing import Optional
from pydantic import BaseModel, validator
from sqlmodel import Field, SQLModel
# Simple classes for access control tokens
class Token(BaseModel):
access_token: str
token_type: str
expiry: datetime
class TokenData(BaseModel):
username: Optional[str] = None
# Default user class, this is the one to interact with.
class User(SQLModel):
id: Optional[int] = Field(default=None, primary_key=True)
full_name: str
username: str
email: str
disabled: Optional[bool] = | Field(default=False) | sqlmodel.Field |
from datetime import date, datetime
from typing import Optional
from pydantic import BaseModel, validator
from sqlmodel import Field, SQLModel
# Simple classes for access control tokens
class Token(BaseModel):
access_token: str
token_type: str
expiry: datetime
class TokenData(BaseModel):
username: Optional[str] = None
# Default user class, this is the one to interact with.
class User(SQLModel):
id: Optional[int] = Field(default=None, primary_key=True)
full_name: str
username: str
email: str
disabled: Optional[bool] = Field(default=False)
roles: Optional[str] = | Field(default="appuser") | sqlmodel.Field |
from datetime import date, datetime
from typing import Optional
from pydantic import BaseModel, validator
from sqlmodel import Field, SQLModel
# Simple classes for access control tokens
class Token(BaseModel):
access_token: str
token_type: str
expiry: datetime
class TokenData(BaseModel):
username: Optional[str] = None
# Default user class, this is the one to interact with.
class User(SQLModel):
id: Optional[int] = Field(default=None, primary_key=True)
full_name: str
username: str
email: str
disabled: Optional[bool] = Field(default=False)
roles: Optional[str] = Field(default="appuser")
created: Optional[datetime] = Field(default=datetime.utcnow())
# Don't ever return FullUser instances - ALWAYS return 'User' at maximum, since FullUser includes hashedpassword.
# FullUser is only need during creation or resetting of password.
class FullUser(User, table=True):
__tablename__ = "Users"
hashedpassword: str
# Observation class is used for both storage and retrieval operations.
class Observation(SQLModel, table=True):
__tablename__ = "Observations"
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
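The comment above insists on returning `User` rather than `FullUser` so the password hash never leaves the API layer. A hedged FastAPI sketch of that pattern (the route, engine URL, and query are illustrative, not from the source):

```python
from fastapi import FastAPI
from sqlmodel import Session, create_engine, select

app = FastAPI()
engine = create_engine("sqlite:///users.db")  # illustrative URL

@app.get("/users/{username}", response_model=User)
def read_user(username: str):
    # The row fetched is a FullUser, but response_model=User filters the
    # response so hashedpassword is never serialized.
    with Session(engine) as session:
        return session.exec(
            select(FullUser).where(FullUser.username == username)
        ).one()
```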
from datetime import datetime, timezone
from typing import Generic, Optional, Type, TypeVar
from fastapi_users.authentication.strategy.db import AccessTokenDatabase
from fastapi_users.authentication.strategy.db.models import BaseAccessToken
from pydantic import UUID4
from sqlalchemy import Column, types
from sqlalchemy.ext.asyncio import AsyncSession
from sqlmodel import Field, Session, SQLModel, select
def now_utc():
return datetime.now(timezone.utc)
class SQLModelBaseAccessToken(BaseAccessToken, SQLModel):
__tablename__ = "accesstoken"
token: str = Field(
sa_column=Column("token", types.String(length=43), primary_key=True)
)
created_at: datetime = Field(
default_factory=now_utc,
sa_column=Column(
"created_at", types.DateTime(timezone=True), nullable=False, index=True
),
)
user_id: UUID4 = | Field(foreign_key="user.id", nullable=False) | sqlmodel.Field |
from datetime import datetime, timezone
from typing import Generic, Optional, Type, TypeVar
from fastapi_users.authentication.strategy.db import AccessTokenDatabase
from fastapi_users.authentication.strategy.db.models import BaseAccessToken
from pydantic import UUID4
from sqlalchemy import Column, types
from sqlalchemy.ext.asyncio import AsyncSession
from sqlmodel import Field, Session, SQLModel, select
def now_utc():
return datetime.now(timezone.utc)
class SQLModelBaseAccessToken(BaseAccessToken, SQLModel):
__tablename__ = "accesstoken"
token: str = Field(
sa_column=Column("token", types.String(length=43), primary_key=True)
)
created_at: datetime = Field(
default_factory=now_utc,
sa_column=Column(
"created_at", types.DateTime(timezone=True), nullable=False, index=True
),
)
user_id: UUID4 = Field(foreign_key="user.id", nullable=False)
class Config:
orm_mode = True
A = TypeVar("A", bound=SQLModelBaseAccessToken)
class SQLModelAccessTokenDatabase(Generic[A], AccessTokenDatabase[A]):
"""
Access token database adapter for SQLModel.
:param user_db_model: SQLModel model of a DB representation of an access token.
:param session: SQLAlchemy session.
"""
def __init__(self, access_token_model: Type[A], session: Session):
self.access_token_model = access_token_model
self.session = session
async def get_by_token(
self, token: str, max_age: Optional[datetime] = None
) -> Optional[A]:
statement = | select(self.access_token_model) | sqlmodel.select |
from datetime import datetime, timezone
from typing import Generic, Optional, Type, TypeVar
from fastapi_users.authentication.strategy.db import AccessTokenDatabase
from fastapi_users.authentication.strategy.db.models import BaseAccessToken
from pydantic import UUID4
from sqlalchemy import Column, types
from sqlalchemy.ext.asyncio import AsyncSession
from sqlmodel import Field, Session, SQLModel, select
def now_utc():
return datetime.now(timezone.utc)
class SQLModelBaseAccessToken(BaseAccessToken, SQLModel):
__tablename__ = "accesstoken"
token: str = Field(
sa_column=Column("token", types.String(length=43), primary_key=True)
)
created_at: datetime = Field(
default_factory=now_utc,
sa_column=Column(
"created_at", types.DateTime(timezone=True), nullable=False, index=True
),
)
user_id: UUID4 = Field(foreign_key="user.id", nullable=False)
class Config:
orm_mode = True
A = TypeVar("A", bound=SQLModelBaseAccessToken)
class SQLModelAccessTokenDatabase(Generic[A], AccessTokenDatabase[A]):
"""
Access token database adapter for SQLModel.
:param user_db_model: SQLModel model of a DB representation of an access token.
:param session: SQLAlchemy session.
"""
def __init__(self, access_token_model: Type[A], session: Session):
self.access_token_model = access_token_model
self.session = session
async def get_by_token(
self, token: str, max_age: Optional[datetime] = None
) -> Optional[A]:
statement = select(self.access_token_model).where(
self.access_token_model.token == token
)
if max_age is not None:
statement = statement.where(self.access_token_model.created_at >= max_age)
results = self.session.exec(statement)
return results.first()
async def create(self, access_token: A) -> A:
self.session.add(access_token)
self.session.commit()
self.session.refresh(access_token)
return access_token
async def update(self, access_token: A) -> A:
self.session.add(access_token)
self.session.commit()
self.session.refresh(access_token)
return access_token
async def delete(self, access_token: A) -> None:
self.session.delete(access_token)
self.session.commit()
class SQLModelAccessTokenDatabaseAsync(Generic[A], AccessTokenDatabase[A]):
"""
Access token database adapter for SQLModel working purely asynchronously.
:param user_db_model: SQLModel model of a DB representation of an access token.
:param session: SQLAlchemy async session.
"""
def __init__(self, access_token_model: Type[A], session: AsyncSession):
self.access_token_model = access_token_model
self.session = session
async def get_by_token(
self, token: str, max_age: Optional[datetime] = None
) -> Optional[A]:
statement = | select(self.access_token_model) | sqlmodel.select |
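The async adapter's `get_by_token` is truncated at the same `select(...)` as the sync version. Under SQLModel's `AsyncSession`, the rest of the method would plausibly mirror the sync adapter with awaited execution; a sketch, not the library's actual code:

```python
class SQLModelAccessTokenDatabaseAsync(Generic[A], AccessTokenDatabase[A]):
    # ...constructor as in the rows above...
    async def get_by_token(
        self, token: str, max_age: Optional[datetime] = None
    ) -> Optional[A]:
        # Same filtering as the sync adapter, but executed on the async session.
        statement = select(self.access_token_model).where(
            self.access_token_model.token == token
        )
        if max_age is not None:
            statement = statement.where(self.access_token_model.created_at >= max_age)
        results = await self.session.exec(statement)  # assumes AsyncSession.exec is awaitable
        return results.first()
```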
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship, Column, DateTime
from app.models.links import LinkGroupUser
from typing import List, Optional
from pydantic import EmailStr
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class UserBase(SQLModel):
first_name: str
last_name: str
email: EmailStr = | Field(nullable=True, index=True, sa_column_kwargs={"unique": True}) | sqlmodel.Field |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship, Column, DateTime
from app.models.links import LinkGroupUser
from typing import List, Optional
from pydantic import EmailStr
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class UserBase(SQLModel):
first_name: str
last_name: str
email: EmailStr = Field(nullable=True, index=True, sa_column_kwargs={"unique": True})
is_active: bool = | Field(default=True) | sqlmodel.Field |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship, Column, DateTime
from app.models.links import LinkGroupUser
from typing import List, Optional
from pydantic import EmailStr
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class UserBase(SQLModel):
first_name: str
last_name: str
email: EmailStr = Field(nullable=True, index=True, sa_column_kwargs={"unique": True})
is_active: bool = Field(default=True)
is_superuser: bool = | Field(default=False) | sqlmodel.Field |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship, Column, DateTime
from app.models.links import LinkGroupUser
from typing import List, Optional
from pydantic import EmailStr
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class UserBase(SQLModel):
first_name: str
last_name: str
email: EmailStr = Field(nullable=True, index=True, sa_column_kwargs={"unique": True})
is_active: bool = Field(default=True)
is_superuser: bool = Field(default=False)
birthdate: Optional[datetime] = Field(sa_column=Column(DateTime(timezone=True), nullable=True)) #birthday with timezone
phone: Optional[str]
state: Optional[str]
country: Optional[str]
address: Optional[str]
class User(BaseUUIDModel, UserBase, table=True):
hashed_password: str = Field(
nullable=False, index=True
)
role_id: Optional[UUID] = | Field(default=None, foreign_key="role.id") | sqlmodel.Field |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship, Column, DateTime
from app.models.links import LinkGroupUser
from typing import List, Optional
from pydantic import EmailStr
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class UserBase(SQLModel):
first_name: str
last_name: str
email: EmailStr = Field(nullable=True, index=True, sa_column_kwargs={"unique": True})
is_active: bool = Field(default=True)
is_superuser: bool = Field(default=False)
birthdate: Optional[datetime] = Field(sa_column=Column(DateTime(timezone=True), nullable=True)) #birthday with timezone
phone: Optional[str]
state: Optional[str]
country: Optional[str]
address: Optional[str]
class User(BaseUUIDModel, UserBase, table=True):
hashed_password: str = Field(
nullable=False, index=True
)
role_id: Optional[UUID] = Field(default=None, foreign_key="role.id")
role: Optional["Role"] = | Relationship(back_populates="users", sa_relationship_kwargs={"lazy": "selectin"}) | sqlmodel.Relationship |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship, Column, DateTime
from app.models.links import LinkGroupUser
from typing import List, Optional
from pydantic import EmailStr
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class UserBase(SQLModel):
first_name: str
last_name: str
email: EmailStr = Field(nullable=True, index=True, sa_column_kwargs={"unique": True})
is_active: bool = Field(default=True)
is_superuser: bool = Field(default=False)
birthdate: Optional[datetime] = Field(sa_column=Column(DateTime(timezone=True), nullable=True)) #birthday with timezone
phone: Optional[str]
state: Optional[str]
country: Optional[str]
address: Optional[str]
class User(BaseUUIDModel, UserBase, table=True):
hashed_password: str = Field(
nullable=False, index=True
)
role_id: Optional[UUID] = Field(default=None, foreign_key="role.id")
role: Optional["Role"] = Relationship(back_populates="users", sa_relationship_kwargs={"lazy": "selectin"})
groups: List["Group"] = | Relationship(back_populates="users", link_model=LinkGroupUser, sa_relationship_kwargs={"lazy": "selectin"}) | sqlmodel.Relationship |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship, Column, DateTime
from app.models.links import LinkGroupUser
from typing import List, Optional
from pydantic import EmailStr
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class UserBase(SQLModel):
first_name: str
last_name: str
email: EmailStr = Field(nullable=True, index=True, sa_column_kwargs={"unique": True})
is_active: bool = Field(default=True)
is_superuser: bool = Field(default=False)
birthdate: Optional[datetime] = Field(sa_column=Column( | DateTime(timezone=True) | sqlmodel.DateTime |
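The `role` and `groups` relationships above use `back_populates`, so the linked models need matching attributes. A minimal sketch of the implied `Role` side, reusing the same imports; every field except `users` is an assumption:

```python
class Role(BaseUUIDModel, table=True):
    # Sketch of the counterpart implied by User.role (back_populates="users").
    name: str = Field(index=True)       # assumed field
    description: Optional[str] = None   # assumed field
    users: List["User"] = Relationship(
        back_populates="role", sa_relationship_kwargs={"lazy": "selectin"}
    )
```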
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = | Field(foreign_key="tenant.id", index=True) | sqlmodel.Field |
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = Field(foreign_key="tenant.id", index=True)
contact_id: uuid.UUID = | Field(foreign_key="contact.contact_id", index=True) | sqlmodel.Field |
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = Field(foreign_key="tenant.id", index=True)
contact_id: uuid.UUID = Field(foreign_key="contact.contact_id", index=True)
status: str = | Field(nullable=False) | sqlmodel.Field |
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = Field(foreign_key="tenant.id", index=True)
contact_id: uuid.UUID = Field(foreign_key="contact.contact_id", index=True)
status: str = Field(nullable=False)
role: str = | Field(nullable=False) | sqlmodel.Field |
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = Field(foreign_key="tenant.id", index=True)
contact_id: uuid.UUID = Field(foreign_key="contact.contact_id", index=True)
status: str = Field(nullable=False)
role: str = Field(nullable=False)
deleted: bool = | Field(nullable=False, default=False) | sqlmodel.Field |
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = Field(foreign_key="tenant.id", index=True)
contact_id: uuid.UUID = Field(foreign_key="contact.contact_id", index=True)
status: str = Field(nullable=False)
role: str = Field(nullable=False)
deleted: bool = Field(nullable=False, default=False)
tags: List[str] = Field(sa_column=Column(ARRAY(String)))
content: str = | Field(nullable=True) | sqlmodel.Field |
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = Field(foreign_key="tenant.id", index=True)
contact_id: uuid.UUID = Field(foreign_key="contact.contact_id", index=True)
status: str = Field(nullable=False)
role: str = Field(nullable=False)
deleted: bool = Field(nullable=False, default=False)
tags: List[str] = Field(sa_column=Column(ARRAY(String)))
content: str = Field(nullable=True)
revocation_comment: str = | Field(nullable=True) | sqlmodel.Field |
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = Field(foreign_key="tenant.id", index=True)
contact_id: uuid.UUID = Field(foreign_key="contact.contact_id", index=True)
status: str = Field(nullable=False)
role: str = Field(nullable=False)
deleted: bool = Field(nullable=False, default=False)
tags: List[str] = Field(sa_column=Column(ARRAY(String)))
content: str = Field(nullable=True)
revocation_comment: str = Field(nullable=True)
# acapy data ---
state: str = | Field(nullable=False) | sqlmodel.Field |
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = Field(foreign_key="tenant.id", index=True)
contact_id: uuid.UUID = Field(foreign_key="contact.contact_id", index=True)
status: str = Field(nullable=False)
role: str = Field(nullable=False)
deleted: bool = Field(nullable=False, default=False)
tags: List[str] = Field(sa_column=Column(ARRAY(String)))
content: str = Field(nullable=True)
revocation_comment: str = Field(nullable=True)
# acapy data ---
state: str = Field(nullable=False)
sent_time: datetime = Field(sa_column=Column(TIMESTAMP, nullable=True))
# --- acapy data
# relationships ---
contact: Optional[Contact] = | Relationship(back_populates="messages") | sqlmodel.Relationship |
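`Relationship(back_populates="messages")` implies the imported `Contact` model declares the reverse collection. A sketch of that side only; the real model lives in `api.db.models.v1.contact`, and everything here beyond the `messages` attribute is assumed:

```python
class Contact(BaseModel, table=True):
    # Assumed primary key, mirroring the Message table's UUID pattern.
    contact_id: uuid.UUID = Field(
        sa_column=Column(
            UUID(as_uuid=True), primary_key=True, server_default=text("gen_random_uuid()")
        )
    )
    messages: List["Message"] = Relationship(back_populates="contact")
```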
from typing import Union
from fastapi import FastAPI
from pydantic import BaseSettings
from ...utils import get_settings
try:
from sqlalchemy.engine import Engine
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.ext.asyncio.session import AsyncSession
except ImportError:
raise RuntimeError(
"SQLModel is not installed. Please install it with `pip install sqlmodel pyhumps`"
)
class Database:
"""
A class to wrap the sqlalchemy engine and open a connection session to the db.
"""
def __init__(self, engine: Union[Engine, AsyncEngine], is_async: bool = False):
self.engine = engine
self.is_async = is_async
def open(self) -> Union[Session, AsyncSession]:
if self.is_async:
return AsyncSession(self.engine)
else:
return Session(self.engine)
def setup(app: FastAPI, settings: BaseSettings = None) -> Database:
"""
Install the sqlmodel plugin to the app.
This will attach 1 attribute to `app.state` i.e:
* `db` - `popol.sqlmodel.Database` instance object to open db connection.
Args:
app: FastAPI app.
settings: The settings (can be pydantic.BaseSettings).
Returns:
Database: The database.
"""
settings = get_settings(app, settings)
prefix = "SQLALCHEMY_"
db_uri = getattr(settings, f"{prefix}DATABASE_URI", None)
if not db_uri:
raise RuntimeError(f"{prefix}DATABASE_URI is not set")
async_mode = getattr(settings, f"{prefix}ASYNC_MODE", False)
options = getattr(settings, f"{prefix}OPTIONS", {})
if async_mode:
engine = create_async_engine(db_uri, **options)
else:
engine = | create_engine(db_uri, **options) | sqlmodel.create_engine |
from typing import Union
from fastapi import FastAPI
from pydantic import BaseSettings
from ...utils import get_settings
try:
from sqlalchemy.engine import Engine
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.ext.asyncio.session import AsyncSession
except ImportError:
raise RuntimeError(
"SQLModel is not installed. Please install it with `pip install sqlmodel pyhumps`"
)
class Database:
"""
A class to wrap the sqlalchemy engine and open a connection session to the db.
"""
def __init__(self, engine: Union[Engine, AsyncEngine], is_async: bool = False):
self.engine = engine
self.is_async = is_async
def open(self) -> Union[Session, AsyncSession]:
if self.is_async:
return | AsyncSession(self.engine) | sqlmodel.ext.asyncio.session.AsyncSession |
from typing import Union
from fastapi import FastAPI
from pydantic import BaseSettings
from ...utils import get_settings
try:
from sqlalchemy.engine import Engine
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.ext.asyncio.session import AsyncSession
except ImportError:
raise RuntimeError(
"SQLModel is not installed. Please install it with `pip install sqlmodel pyhumps`"
)
class Database:
"""
A class to wrap the sqlalchemy engine and open a connection session to the db.
"""
def __init__(self, engine: Union[Engine, AsyncEngine], is_async: bool = False):
self.engine = engine
self.is_async = is_async
def open(self) -> Union[Session, AsyncSession]:
if self.is_async:
return AsyncSession(self.engine)
else:
return | Session(self.engine) | sqlmodel.Session |
from typing import Union
from fastapi import FastAPI
from pydantic import BaseSettings
from ...utils import get_settings
try:
from sqlalchemy.engine import Engine
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.ext.asyncio.session import AsyncSession
except ImportError:
raise RuntimeError(
"SQLModel is not installed. Please install it with `pip install sqlmodel pyhumps`"
)
class Database:
"""
A class to wrap the sqlalchemy engine and open a connection session to the db.
"""
def __init__(self, engine: Union[Engine, AsyncEngine], is_async: bool = False):
self.engine = engine
self.is_async = is_async
def open(self) -> Union[Session, AsyncSession]:
if self.is_async:
return AsyncSession(self.engine)
else:
return Session(self.engine)
def setup(app: FastAPI, settings: BaseSettings = None) -> Database:
"""
Install the sqlmodel plugin to the app.
This will attach 1 attribute to `app.state` i.e:
* `db` - `popol.sqlmodel.Database` instance object to open db connection.
Args:
app: FastAPI app.
settings: The settings (can be pydantic.BaseSettings).
Returns:
Database: The database.
"""
settings = get_settings(app, settings)
prefix = "SQLALCHEMY_"
db_uri = getattr(settings, f"{prefix}DATABASE_URI", None)
if not db_uri:
raise RuntimeError(f"{prefix}DATABASE_URI is not set")
async_mode = getattr(settings, f"{prefix}ASYNC_MODE", False)
options = getattr(settings, f"{prefix}OPTIONS", {})
if async_mode:
engine = create_async_engine(db_uri, **options)
else:
engine = create_engine(db_uri, **options)
db = Database(engine, async_mode)
app.state.db = db
async def startup():
# reference: https://github.com/tiangolo/sqlmodel/issues/54#issue-981884262
if async_mode:
async with engine.begin() as conn:
await conn.run_sync(SQLModel.metadata.create_all)
else:
| SQLModel.metadata.create_all(engine) | sqlmodel.SQLModel.metadata.create_all |
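Wiring the `setup()` plugin above into an application only needs a settings object exposing the `SQLALCHEMY_`-prefixed attributes it reads. A minimal sketch with placeholder values:

```python
from fastapi import FastAPI
from pydantic import BaseSettings

class Settings(BaseSettings):
    SQLALCHEMY_DATABASE_URI: str = "sqlite:///app.db"  # placeholder URI
    SQLALCHEMY_ASYNC_MODE: bool = False
    SQLALCHEMY_OPTIONS: dict = {"echo": True}

app = FastAPI()
db = setup(app, Settings())  # attaches app.state.db; the snippet's startup hook creates tables

# A session can then be opened from the wrapped engine:
# with db.open() as session:
#     ...
```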
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from datetime import datetime
from functools import partial
from io import StringIO
from typing import (
Any,
Callable,
ClassVar,
Dict,
Iterable,
List,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
overload,
)
from uuid import UUID
from sqlalchemy import Column, DateTime
from sqlalchemy.orm import registry
from sqlalchemy.sql import func
from sqlalchemy.sql.base import ImmutableColumnCollection
from sqlalchemy.sql.schema import Table
from sqlmodel.main import Field, FieldInfo, SQLModel, SQLModelMetaclass
from dbgen.core.args import ArgLike, Const
from dbgen.core.attribute import Attribute
from dbgen.core.base import Base, BaseMeta
from dbgen.core.node.load import Load, LoadEntity
from dbgen.core.type_registry import column_registry
from dbgen.exceptions import DBgenInvalidArgument, DBgenMissingInfo
def inherit_field(
bases, field_name: str, initial_value=set(), joiner=lambda x, y: x.union(y), type_check: bool = True
):
field_val = initial_value
for base in reversed(bases):
curr_id = getattr(base, field_name, initial_value)
if curr_id is not None:
if type_check and not isinstance(curr_id, type(initial_value)):
raise TypeError(f"Invalid {field_name} val: {curr_id}")
field_val = joiner(field_val, curr_id)
return field_val
overwrite_parent = partial(inherit_field, initial_value="", joiner=lambda x, y: y)
DEFAULT_ENTITY_REGISTRY = registry()
logger = logging.getLogger('dbgen.core.entity')
_T = TypeVar("_T")
def __dataclass_transform__(
*,
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]:
return lambda a: a
@__dataclass_transform__(
kw_only_default=True,
field_descriptors=(
Field,
FieldInfo,
Attribute,
),
)
class EntityMetaclass(SQLModelMetaclass, BaseMeta):
def __new__(mcs, name, bases, attrs, **kwargs):
# Join the keys from all parents for __identifying__, _hashinclude_, and _hashexclude_
new_attrs = attrs.copy()
for value in ("__identifying__", "_hashexclude_", "_hashinclude_"):
starting = new_attrs.get(value, set())
if isinstance(starting, list):
starting = set(starting)
new_attrs[value] = starting.union(inherit_field(bases, value))
if kwargs.get('all_id', False):
assert (
"__identifying__" not in attrs
), f"Error with Entity {name}. Can't supply both all_id kwarg and __identifying__ attr"
new_attrs['__identifying__'] = new_attrs['__identifying__'].union(
{key for key in attrs.get('__annotations__', {})}
)
# Automatically add identifying attributes to the hashinclude
new_attrs["_hashinclude_"].update(new_attrs.get("__identifying__"))
# Set the default registry to be the default_registry
if "registry" not in kwargs:
kwargs["registry"] = DEFAULT_ENTITY_REGISTRY
# Call SQLModelMetaclass.__new__
cls = super().__new__(mcs, name, bases, new_attrs, **kwargs)
# Validate that we don't have table=True on current class and a base
current_cls_is_table = getattr(cls.__config__, "table", False) and kwargs.get("table")
setattr(cls, "_is_table", current_cls_is_table)
if current_cls_is_table:
base_is_table = False
for base in bases:
config = getattr(base, "__config__", None)
if config and getattr(config, "table", False):
base_is_table = True
offending_base_name = base.__name__
break
if base_is_table:
raise ValueError(
"Can't use table=True when inheriting from another table.\n"
f"Both {offending_base_name} and {name} have table=True set.\n"
"Create a common ancestor with table=False and mutaually inherit from that."
)
# Need to look into parents to find schema, only using most recent
schema_key = "__schema__"
schema = getattr(cls, schema_key, "") or overwrite_parent(bases, schema_key)
table_args = getattr(cls, "__table_args__", None) or dict().copy()
if not schema:
schema = "public"
if schema:
setattr(cls, schema_key, schema)
table_args = table_args.copy()
table_args.update({"schema": schema})
setattr(cls, "__table_args__", table_args)
setattr(
cls,
"__fulltablename__",
f"{schema}.{cls.__tablename__}" if schema else cls.__tablename__,
)
# Validate __identifying__ by making sure all identifying attributes exist on the Entity
unknown_ids = list(
filter(
lambda x: x not in cls.__fields__,
new_attrs["__identifying__"],
)
)
if unknown_ids:
raise ValueError(
f"Invalid Entity Class Definition. Identifying attributes not found on class: {unknown_ids}"
)
return cls
def __init__(cls, name, bases, attrs, **kwargs):
if cls._is_table:
registry = cls._sa_registry
if cls.__fulltablename__ in registry.metadata.tables:
raise ValueError(
f"The Class {attrs.get('__module__','')}.{name}'s __table_name__ {cls.__tablename__!r} already present in the registry's metadata.\n"
"This can occur if two Entity sub-classes share a case-insensitive name or if the same table has been added to the registry twice.\n"
"To address this you can set a different __tablename__ attribute for one or to clear the registry, you can call Entity.clear_registry() prior to declaring this class."
)
super().__init__(name, bases, attrs, **kwargs)
class BaseEntity(Base, SQLModel, metaclass=EntityMetaclass):
__identifying__: ClassVar[Set[str]]
__fulltablename__: ClassVar[str]
__schema__: ClassVar[str]
__table__: ClassVar[Table]
_is_table: ClassVar[bool]
_sa_registry: ClassVar[registry]
class Config:
"""Pydantic Config"""
force_validation = True
@classmethod
def _columns(cls) -> ImmutableColumnCollection:
if isinstance(cls.__fulltablename__, str):
table = cls.metadata.tables.get(cls.__fulltablename__)
if table is not None:
return table.c
raise ValueError(
f"{cls.__fulltablename__} not in metadata, is table=True set? {cls.metadata.tables}"
)
raise ValueError(f"Can't read __fulltablename__ {cls.__fulltablename__}")
@classmethod
def _get_load_entity(cls) -> LoadEntity:
"""Returns a LoadEntity which has the bare-minimum needed to load into this table."""
# Check that entity is a table
if not cls._is_table:
raise ValueError(f"{cls.__qualname__} is not a table. Can't get LoadEntity of a non-table Entity")
columns = cls._columns()
# Search for primary key name
primary_keys = [x.name for x in cls.__table__.primary_key]
if len(primary_keys) > 1:
raise NotImplementedError(f"Multiple primary_keys found: {primary_keys}")
elif not primary_keys:
raise ValueError(f"No primary key found on {cls.__name__}'s columns:\n{columns}")
primary_key_name = primary_keys[0]
all_attrs = {col.name: col for col in columns if not col.foreign_keys}
all_fks = {col.name: col for col in columns if col.foreign_keys}
# Create the attribute dict which maps attribute name to column type
attributes = {}
for col_name, col in columns.items():
try:
dt = column_registry[col.type]
attributes[col_name] = (
f"{dt.type_name}[]" if getattr(col.type, '_is_array', False) else dt.type_name
)
except KeyError:
raise TypeError(
f"Cannot parse column {col_name} on table {cls.__tablename__} due to its unknown type {type(col.type)}"
)
foreign_keys = set(all_fks.keys())
identifying_attributes = {x for x in all_attrs if x in cls.__identifying__}
identifying_fks = [x for x in all_fks if x in cls.__identifying__]
return LoadEntity(
name=cls.__tablename__ or cls.__name__,
schema_=cls.__schema__,
entity_class_str=f"{cls.__module__}.{cls.__qualname__}",
primary_key_name=primary_key_name,
attributes=attributes,
foreign_keys=foreign_keys,
identifying_attributes=identifying_attributes,
identifying_foreign_keys=identifying_fks,
)
@classmethod
def load(cls, insert: bool = False, validation: Optional[str] = None, **kwargs) -> Load[UUID]:
name = cls.__tablename__
assert isinstance(name, str)
# TODO check if we need this anymore
key_filter = lambda keyval: keyval[0] != "insert" and not isinstance(keyval[1], (ArgLike, Load))
invalid_args = list(filter(key_filter, kwargs.items()))
JSONAble = (str, int, float, dict, tuple)
for arg_name, invalid_arg in invalid_args:
# Check Invalid args to see if a const block would be appropriate
if isinstance(invalid_arg, JSONAble):
kwargs[arg_name] = Const(invalid_arg)
else:
raise ValueError(f"Non-jsonable constant value found: {arg_name}\n{invalid_arg}")
# get PK
pk = kwargs.pop(name, None)
# if we don't have a PK reference check for missing ID info
if not pk:
missing = cls.__identifying__ - set(kwargs)
if missing:
err = (
"Cannot refer to a row in {} without a PK or essential data."
" Missing essential data: {}"
)
raise DBgenMissingInfo(err.format(name, missing))
# Iterate through the columns to ensure we have no unknown kwargs
class_columns: List[Column] = list(cls._columns()) or []
all_attrs = {col.name: col for col in class_columns if not col.foreign_keys}
all_fks = {col.name: col for col in class_columns if col.foreign_keys}
attrs = {key: val for key, val in kwargs.items() if key in all_attrs}
fks = {key: col for key, col in kwargs.items() if key not in attrs}
for fk in fks:
if fk not in all_fks:
raise DBgenInvalidArgument(f'unknown "{fk}" kwarg in Load of {name}')
for k, v in fks.items():
if isinstance(v, Load):
fks[k] = v[v.outputs[0]]
return Load(
load_entity=cls._get_load_entity(),
primary_key=pk,
inputs={**attrs, **fks},
insert=insert,
validation=validation,
)
@classmethod
def _quick_load(cls, connection, rows: Iterable[Iterable[Any]], column_names: List[str]) -> None:
"""Bulk load many rows into entity"""
from dbgen.templates import jinja_env
# Assemble rows into stringio for copy_from statement
io_obj = StringIO()
for row in rows:
io_obj.write("\t".join(map(str, row)) + "\n")
io_obj.seek(0)
# Temporary table to copy data into
# Set name to be hash of input rows to ensure uniqueness for parallelization
temp_table_name = f"{cls.__tablename__}_temp_load_table"
load_entity = cls._get_load_entity()
# Need to create a temp table to copy data into
# Add an auto_inc column so that data can be ordered by its insert location
drop_temp_table = f"DROP TABLE IF EXISTS {temp_table_name};"
create_temp_table = """
CREATE TEMPORARY TABLE {temp_table_name} AS
TABLE {schema}.{obj}
WITH NO DATA;
ALTER TABLE {temp_table_name}
ADD COLUMN auto_inc SERIAL NOT NULL;
""".format(
obj=load_entity.name,
schema=load_entity.schema_,
temp_table_name=temp_table_name,
)
insert_template = jinja_env.get_template("insert.sql.jinja")
template_args = dict(
obj=load_entity.name,
obj_pk_name=load_entity.primary_key_name,
temp_table_name=temp_table_name,
all_column_names=column_names,
schema=load_entity.schema_,
first=False,
update=True,
)
insert_statement = insert_template.render(**template_args)
with connection.cursor() as curs:
curs.execute(drop_temp_table)
connection.commit()
with connection.cursor() as curs:
curs.execute(create_temp_table)
curs.copy_from(io_obj, temp_table_name, null="None", columns=column_names)
curs.execute(insert_statement)
connection.commit()
with connection.cursor() as curs:
curs.execute(drop_temp_table)
connection.commit()
@classmethod
def clear_registry(cls):
"""Removes all Entity classes from the Entity registry"""
cls.metadata.clear()
cls._sa_registry.dispose()
@classmethod
def foreign_key(cls, primary_key: bool = False):
"""Removes all Entity classes from the Entity registry"""
load_entity = cls._get_load_entity()
return Field(
None,
foreign_key=f"{cls.__fulltablename__}.{load_entity.primary_key_name}",
primary_key=primary_key,
)
id_field = Field(
default=None,
primary_key=True,
sa_column_kwargs={"autoincrement": False, "unique": True},
)
gen_id_field = Field(
default=None,
)
get_created_at_field = lambda: Field(
None, sa_column=Column(DateTime(timezone=True), server_default=func.now())
)
class Entity(BaseEntity):
id: Optional[UUID] = id_field
gen_id: Optional[UUID]
created_at: Optional[datetime] = get_created_at_field()
Model = TypeVar("Model", bound="BaseEntity")
@overload
def create_entity(
model_name: str,
field_definitions: Dict[str, Union[Tuple[type, Any], type, Tuple[type, ...]]],
base: None = None,
identifying: Set[str] = None,
schema: Optional[str] = None,
__module__: str = __name__,
**kwargs,
) -> Type[BaseEntity]:
...
@overload
def create_entity(
model_name: str,
field_definitions: Dict[str, Union[Tuple[type, Any], type, Tuple[type, ...]]],
base: Type[Model],
identifying: Set[str] = None,
schema: Optional[str] = None,
__module__: str = __name__,
**kwargs,
) -> Type[Model]:
...
def create_entity(
model_name: str,
field_definitions: Dict[str, Union[Tuple[type, Any], type, Tuple[type, ...]]] = None,
base: Optional[Type[Model]] = None,
identifying: Set[str] = None,
schema: Optional[str] = None,
__module__: str = __name__,
**kwargs,
) -> Type[Model]:
"""
Dynamically create a model, similar to the Pydantic `create_model()` method
:param model_name: name of the created model
:param field_definitions: data fields of the created model
:param base: base to inherit from
:param __module__: module of the created model
:param **kwargs: Other keyword arguments to pass to the metaclass constructor, e.g. table=True
"""
if base is None:
base = cast(Type["Model"], BaseEntity)
field_definitions = field_definitions or {}
fields = {}
annotations = {}
identifying = identifying or set()
for f_name, f_def in field_definitions.items():
if f_name.startswith("_"):
raise ValueError("Field names may not start with an underscore")
try:
if isinstance(f_def, tuple) and len(f_def) > 1:
f_annotation, f_value = f_def
elif isinstance(f_def, tuple):
f_annotation, f_value = f_def[0], | Field(nullable=False) | sqlmodel.main.Field |
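# --- Illustrative sketch (assumption, not from the source): using create_entity() above to build a
# table-backed Entity dynamically, analogous to pydantic's create_model(). The "Sample" entity and its
# fields are hypothetical.
# Sample = create_entity(
#     "Sample",
#     {"label": (str, Field(nullable=False)), "temperature": (float, 0.0)},
#     base=Entity,
#     identifying={"label"},
#     schema="public",
#     table=True,
# )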
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from datetime import datetime
from functools import partial
from io import StringIO
from typing import (
Any,
Callable,
ClassVar,
Dict,
Iterable,
List,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
overload,
)
from uuid import UUID
from sqlalchemy import Column, DateTime
from sqlalchemy.orm import registry
from sqlalchemy.sql import func
from sqlalchemy.sql.base import ImmutableColumnCollection
from sqlalchemy.sql.schema import Table
from sqlmodel.main import Field, FieldInfo, SQLModel, SQLModelMetaclass
from dbgen.core.args import ArgLike, Const
from dbgen.core.attribute import Attribute
from dbgen.core.base import Base, BaseMeta
from dbgen.core.node.load import Load, LoadEntity
from dbgen.core.type_registry import column_registry
from dbgen.exceptions import DBgenInvalidArgument, DBgenMissingInfo
def inherit_field(
bases, field_name: str, initial_value=set(), joiner=lambda x, y: x.union(y), type_check: bool = True
):
field_val = initial_value
for base in reversed(bases):
curr_id = getattr(base, field_name, initial_value)
if curr_id is not None:
if type_check and not isinstance(curr_id, type(initial_value)):
raise TypeError(f"Invalid {field_name} val: {curr_id}")
field_val = joiner(field_val, curr_id)
return field_val
overwrite_parent = partial(inherit_field, initial_value="", joiner=lambda x, y: y)
DEFAULT_ENTITY_REGISTRY = registry()
logger = logging.getLogger('dbgen.core.entity')
_T = TypeVar("_T")
def __dataclass_transform__(
*,
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]:
return lambda a: a
@__dataclass_transform__(
kw_only_default=True,
field_descriptors=(
Field,
FieldInfo,
Attribute,
),
)
class EntityMetaclass(SQLModelMetaclass, BaseMeta):
def __new__(mcs, name, bases, attrs, **kwargs):
# Join the keys from all parents for __identifying__, _hashinclude_, and _hashexclude_
new_attrs = attrs.copy()
for value in ("__identifying__", "_hashexclude_", "_hashinclude_"):
starting = new_attrs.get(value, set())
if isinstance(starting, list):
starting = set(starting)
new_attrs[value] = starting.union(inherit_field(bases, value))
if kwargs.get('all_id', False):
assert (
"__identifying__" not in attrs
), f"Error with Entity {name}. Can't supply both all_id kwarg and __identifying__ attr"
new_attrs['__identifying__'] = new_attrs['__identifying__'].union(
{key for key in attrs.get('__annotations__', {})}
)
# Automatically add identifying attributes to the hashinclude
new_attrs["_hashinclude_"].update(new_attrs.get("__identifying__"))
# Set the default registry to be the default_registry
if "registry" not in kwargs:
kwargs["registry"] = DEFAULT_ENTITY_REGISTRY
# Call SQLModelMetaclass.__new__
cls = super().__new__(mcs, name, bases, new_attrs, **kwargs)
# Validate that we don't have table=True on current class and a base
current_cls_is_table = getattr(cls.__config__, "table", False) and kwargs.get("table")
setattr(cls, "_is_table", current_cls_is_table)
if current_cls_is_table:
base_is_table = False
for base in bases:
config = getattr(base, "__config__", None)
if config and getattr(config, "table", False):
base_is_table = True
offending_base_name = base.__name__
break
if base_is_table:
raise ValueError(
"Can't use table=True when inheriting from another table.\n"
f"Both {offending_base_name} and {name} have table=True set.\n"
"Create a common ancestor with table=False and mutaually inherit from that."
)
# Need to look into parents to find schema, only using most recent
schema_key = "__schema__"
schema = getattr(cls, schema_key, "") or overwrite_parent(bases, schema_key)
table_args = getattr(cls, "__table_args__", None) or dict().copy()
if not schema:
schema = "public"
if schema:
setattr(cls, schema_key, schema)
table_args = table_args.copy()
table_args.update({"schema": schema})
setattr(cls, "__table_args__", table_args)
setattr(
cls,
"__fulltablename__",
f"{schema}.{cls.__tablename__}" if schema else cls.__tablename__,
)
# Validate __identifying__ by making sure all identifying attributes exist on the Entity
unknown_ids = list(
filter(
lambda x: x not in cls.__fields__,
new_attrs["__identifying__"],
)
)
if unknown_ids:
raise ValueError(
f"Invalid Entity Class Definition. Identifying attributes not found on class: {unknown_ids}"
)
return cls
def __init__(cls, name, bases, attrs, **kwargs):
if cls._is_table:
registry = cls._sa_registry
if cls.__fulltablename__ in registry.metadata.tables:
raise ValueError(
f"The Class {attrs.get('__module__','')}.{name}'s __table_name__ {cls.__tablename__!r} already present in the registry's metadata.\n"
"This can occur if two Entity sub-classes share a case-insensitive name or if the same table has been added to the registry twice.\n"
"To address this you can set a different __tablename__ attribute for one or to clear the registry, you can call Entity.clear_registry() prior to declaring this class."
)
super().__init__(name, bases, attrs, **kwargs)
class BaseEntity(Base, SQLModel, metaclass=EntityMetaclass):
__identifying__: ClassVar[Set[str]]
__fulltablename__: ClassVar[str]
__schema__: ClassVar[str]
__table__: ClassVar[Table]
_is_table: ClassVar[bool]
_sa_registry: ClassVar[registry]
class Config:
"""Pydantic Config"""
force_validation = True
@classmethod
def _columns(cls) -> ImmutableColumnCollection:
if isinstance(cls.__fulltablename__, str):
table = cls.metadata.tables.get(cls.__fulltablename__)
if table is not None:
return table.c
raise ValueError(
f"{cls.__fulltablename__} not in metadata, is table=True set? {cls.metadata.tables}"
)
raise ValueError(f"Can't read __fulltablename__ {cls.__fulltablename__}")
@classmethod
def _get_load_entity(cls) -> LoadEntity:
"""Returns a LoadEntity which has the bare-minimum needed to load into this table."""
# Check that entity is a table
if not cls._is_table:
raise ValueError(f"{cls.__qualname__} is not a table. Can't get LoadEntity of a non-table Entity")
columns = cls._columns()
# Search for primary key name
primary_keys = [x.name for x in cls.__table__.primary_key]
if len(primary_keys) > 1:
raise NotImplementedError(f"Multiple primary_keys found: {primary_keys}")
elif not primary_keys:
raise ValueError(f"No primary key found on {cls.__name__}'s columns:\n{columns}")
primary_key_name = primary_keys[0]
all_attrs = {col.name: col for col in columns if not col.foreign_keys}
all_fks = {col.name: col for col in columns if col.foreign_keys}
# Create the attribute dict which maps attribute name to column type
attributes = {}
for col_name, col in columns.items():
try:
dt = column_registry[col.type]
attributes[col_name] = (
f"{dt.type_name}[]" if getattr(col.type, '_is_array', False) else dt.type_name
)
except KeyError:
raise TypeError(
f"Cannot parse column {col_name} on table {cls.__tablename__} due to its unknown type {type(col.type)}"
)
foreign_keys = set(all_fks.keys())
identifying_attributes = {x for x in all_attrs if x in cls.__identifying__}
identifying_fks = [x for x in all_fks if x in cls.__identifying__]
return LoadEntity(
name=cls.__tablename__ or cls.__name__,
schema_=cls.__schema__,
entity_class_str=f"{cls.__module__}.{cls.__qualname__}",
primary_key_name=primary_key_name,
attributes=attributes,
foreign_keys=foreign_keys,
identifying_attributes=identifying_attributes,
identifying_foreign_keys=identifying_fks,
)
@classmethod
def load(cls, insert: bool = False, validation: Optional[str] = None, **kwargs) -> Load[UUID]:
name = cls.__tablename__
assert isinstance(name, str)
# TODO check if we need this anymore
key_filter = lambda keyval: keyval[0] != "insert" and not isinstance(keyval[1], (ArgLike, Load))
invalid_args = list(filter(key_filter, kwargs.items()))
JSONAble = (str, int, float, dict, tuple)
for arg_name, invalid_arg in invalid_args:
# Check Invalid args to see if a const block would be appropriate
if isinstance(invalid_arg, JSONAble):
kwargs[arg_name] = Const(invalid_arg)
else:
raise ValueError(f"Non-jsonable constant value found: {arg_name}\n{invalid_arg}")
# get PK
pk = kwargs.pop(name, None)
# if we don't have a PK reference check for missing ID info
if not pk:
missing = cls.__identifying__ - set(kwargs)
if missing:
err = (
"Cannot refer to a row in {} without a PK or essential data."
" Missing essential data: {}"
)
raise DBgenMissingInfo(err.format(name, missing))
# Iterate through the columns to ensure we have no unknown kwargs
class_columns: List[Column] = list(cls._columns()) or []
all_attrs = {col.name: col for col in class_columns if not col.foreign_keys}
all_fks = {col.name: col for col in class_columns if col.foreign_keys}
attrs = {key: val for key, val in kwargs.items() if key in all_attrs}
fks = {key: col for key, col in kwargs.items() if key not in attrs}
for fk in fks:
if fk not in all_fks:
raise DBgenInvalidArgument(f'unknown "{fk}" kwarg in Load of {name}')
for k, v in fks.items():
if isinstance(v, Load):
fks[k] = v[v.outputs[0]]
return Load(
load_entity=cls._get_load_entity(),
primary_key=pk,
inputs={**attrs, **fks},
insert=insert,
validation=validation,
)
@classmethod
def _quick_load(cls, connection, rows: Iterable[Iterable[Any]], column_names: List[str]) -> None:
"""Bulk load many rows into entity"""
from dbgen.templates import jinja_env
# Assemble rows into stringio for copy_from statement
io_obj = StringIO()
for row in rows:
io_obj.write("\t".join(map(str, row)) + "\n")
io_obj.seek(0)
# Temporary table to copy data into
# Set name to be hash of input rows to ensure uniqueness for parallelization
temp_table_name = f"{cls.__tablename__}_temp_load_table"
load_entity = cls._get_load_entity()
# Need to create a temp table to copy data into
# Add an auto_inc column so that data can be ordered by its insert location
drop_temp_table = f"DROP TABLE IF EXISTS {temp_table_name};"
create_temp_table = """
CREATE TEMPORARY TABLE {temp_table_name} AS
TABLE {schema}.{obj}
WITH NO DATA;
ALTER TABLE {temp_table_name}
ADD COLUMN auto_inc SERIAL NOT NULL;
""".format(
obj=load_entity.name,
schema=load_entity.schema_,
temp_table_name=temp_table_name,
)
insert_template = jinja_env.get_template("insert.sql.jinja")
template_args = dict(
obj=load_entity.name,
obj_pk_name=load_entity.primary_key_name,
temp_table_name=temp_table_name,
all_column_names=column_names,
schema=load_entity.schema_,
first=False,
update=True,
)
insert_statement = insert_template.render(**template_args)
with connection.cursor() as curs:
curs.execute(drop_temp_table)
connection.commit()
with connection.cursor() as curs:
curs.execute(create_temp_table)
curs.copy_from(io_obj, temp_table_name, null="None", columns=column_names)
curs.execute(insert_statement)
connection.commit()
with connection.cursor() as curs:
curs.execute(drop_temp_table)
connection.commit()
@classmethod
def clear_registry(cls):
"""Removes all Entity classes from the Entity registry"""
cls.metadata.clear()
cls._sa_registry.dispose()
@classmethod
def foreign_key(cls, primary_key: bool = False):
"""Removes all Entity classes from the Entity registry"""
load_entity = cls._get_load_entity()
return Field(
None,
foreign_key=f"{cls.__fulltablename__}.{load_entity.primary_key_name}",
primary_key=primary_key,
)
id_field = Field(
default=None,
primary_key=True,
sa_column_kwargs={"autoincrement": False, "unique": True},
)
gen_id_field = Field(
default=None,
)
get_created_at_field = lambda: Field(
None, sa_column=Column(DateTime(timezone=True), server_default=func.now())
)
class Entity(BaseEntity):
id: Optional[UUID] = id_field
gen_id: Optional[UUID]
created_at: Optional[datetime] = get_created_at_field()
Model = TypeVar("Model", bound="BaseEntity")
@overload
def create_entity(
model_name: str,
field_definitions: Dict[str, Union[Tuple[type, Any], type, Tuple[type, ...]]],
base: None = None,
identifying: Set[str] = None,
schema: Optional[str] = None,
__module__: str = __name__,
**kwargs,
) -> Type[BaseEntity]:
...
@overload
def create_entity(
model_name: str,
field_definitions: Dict[str, Union[Tuple[type, Any], type, Tuple[type, ...]]],
base: Type[Model],
identifying: Set[str] = None,
schema: Optional[str] = None,
__module__: str = __name__,
**kwargs,
) -> Type[Model]:
...
def create_entity(
model_name: str,
field_definitions: Dict[str, Union[Tuple[type, Any], type, Tuple[type, ...]]] = None,
base: Optional[Type[Model]] = None,
identifying: Set[str] = None,
schema: Optional[str] = None,
__module__: str = __name__,
**kwargs,
) -> Type[Model]:
"""
Dynamically create a model, similar to the Pydantic `create_model()` method
:param model_name: name of the created model
:param field_definitions: data fields of the created model
:param base: base to inherit from
:param __module__: module of the created model
:param **kwargs: Other keyword arguments to pass to the metaclass constructor, e.g. table=True
"""
if base is None:
base = cast(Type["Model"], BaseEntity)
field_definitions = field_definitions or {}
fields = {}
annotations = {}
identifying = identifying or set()
for f_name, f_def in field_definitions.items():
if f_name.startswith("_"):
raise ValueError("Field names may not start with an underscore")
try:
if isinstance(f_def, tuple) and len(f_def) > 1:
f_annotation, f_value = f_def
elif isinstance(f_def, tuple):
f_annotation, f_value = f_def[0], Field(nullable=False)
else:
f_annotation, f_value = f_def, | Field(nullable=False) | sqlmodel.main.Field |
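# --- Illustrative sketch (assumption, not from the source): a declarative Entity subclass showing how
# EntityMetaclass above consumes __identifying__ and __schema__. The "Sample" entity and its fields are
# hypothetical.
# class Sample(Entity, table=True):
#     __identifying__ = {"label"}
#     __schema__ = "public"
#     label: str
#     temperature: Optional[float] = None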
from datetime import datetime, date
from typing import Optional
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistoryVpi(SQLModel, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
"""initial2
Revision ID: 9d9a<PASSWORD>dbfd7
Revises: <PASSWORD>
Create Date: 2021-11-01 04:28:38.426261
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '9d9a746db<PASSWORD>'
down_revision = 'a<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('images',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('url', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""
Node related APIs.
"""
import logging
from datetime import datetime
from typing import List, Optional
from fastapi import APIRouter, Depends
from sqlmodel import Session, SQLModel, select
from datajunction.models.column import ColumnType
from datajunction.models.node import Node, NodeType
from datajunction.utils import get_session
_logger = logging.getLogger(__name__)
router = APIRouter()
class SimpleColumn(SQLModel):
"""
A simplified column schema, without ID or dimensions.
"""
name: str
type: ColumnType
class NodeMetadata(SQLModel):
"""
A node with information about columns and if it is a metric.
"""
id: int
name: str
description: str = ""
created_at: datetime
updated_at: datetime
type: NodeType
expression: Optional[str] = None
columns: List[SimpleColumn]
@router.get("/nodes/", response_model=List[NodeMetadata])
def read_nodes(*, session: Session = Depends(get_session)) -> List[NodeMetadata]:
"""
List the available nodes.
"""
return session.exec( | select(Node) | sqlmodel.select |
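# --- Illustrative continuation (assumption): the select() statement above is normally executed and
# materialised before FastAPI serialises it against response_model, e.g.
#     return session.exec(select(Node)).all()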
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = | Session(engine) | sqlmodel.Session |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = Session(engine)
# Post new role
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
statement = select(Role).where(Role.id == role.id)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(role)
session.commit()
session.refresh(role)
return role
# Get list of all roles
@router.get("/")
async def read_roles(session: Session = Depends(get_session)):
statement = | select(Role) | sqlmodel.select |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = Session(engine)
# Post new role
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
statement = | select(Role) | sqlmodel.select |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = Session(engine)
# Post new role
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
statement = select(Role).where(Role.id == role.id)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(role)
session.commit()
session.refresh(role)
return role
# Get list of all roles
@router.get("/")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role)
results = session.exec(statement).all()
return results
# Get list of active roles
@router.get("/active")
async def read_roles(session: Session = Depends(get_session)):
statement = | select(Role) | sqlmodel.select |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = Session(engine)
# Post new role
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
statement = select(Role).where(Role.id == role.id)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(role)
session.commit()
session.refresh(role)
return role
# Get list of all roles
@router.get("/")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role)
results = session.exec(statement).all()
return results
# Get list of active roles
@router.get("/active")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role).where(Role.is_active == True)
results = session.exec(statement).all()
return results
@router.put("/{role_id}/activate")
async def activate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = | select(Role) | sqlmodel.select |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = Session(engine)
# Post new role
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
statement = select(Role).where(Role.id == role.id)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(role)
session.commit()
session.refresh(role)
return role
# Get list of all roles
@router.get("/")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role)
results = session.exec(statement).all()
return results
# Get list of active roles
@router.get("/active")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role).where(Role.is_active == True)
results = session.exec(statement).all()
return results
@router.put("/{role_id}/activate")
async def activate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = select(Role).where(Role.id == role_id)
role_to_activate = session.exec(statement).one()
role_to_activate.is_active = True
role_to_activate.updated_at = datetime.now()
session.add(role_to_activate)
session.commit()
session.refresh(role_to_activate)
return role_to_activate
# Deactivate role
@router.put("/{role_id}/deactivate")
async def deactivate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = | select(Role) | sqlmodel.select |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = Session(engine)
# Post new role
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
statement = select(Role).where(Role.id == role.id)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(role)
session.commit()
session.refresh(role)
return role
# Get list of all roles
@router.get("/")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role)
results = session.exec(statement).all()
return results
# Get list of active roles
@router.get("/active")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role).where(Role.is_active == True)
results = session.exec(statement).all()
return results
@router.put("/{role_id}/activate")
async def activate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = select(Role).where(Role.id == role_id)
role_to_activate = session.exec(statement).one()
role_to_activate.is_active = True
role_to_activate.updated_at = datetime.now()
session.add(role_to_activate)
session.commit()
session.refresh(role_to_activate)
return role_to_activate
# Deactivate role
@router.put("/{role_id}/deactivate")
async def deactivate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = select(Role).where(Role.id == role_id)
role_to_deactivate = session.exec(statement).one()
role_to_deactivate.is_active = False
role_to_deactivate.updated_at = datetime.now()
session.add(role_to_deactivate)
session.commit()
session.refresh(role_to_deactivate)
return role_to_deactivate
# Update role
@router.put("/")
async def update_role(
id: str = None,
new_name: str = None,
new_short_name: str = None,
is_active: bool = None,
session: Session = Depends(get_session),
):
statement = | select(Role.is_active) | sqlmodel.select |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = Session(engine)
# Post new role
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
statement = select(Role).where(Role.id == role.id)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(role)
session.commit()
session.refresh(role)
return role
# Get list of all roles
@router.get("/")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role)
results = session.exec(statement).all()
return results
# Get list of active roles
@router.get("/active")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role).where(Role.is_active == True)
results = session.exec(statement).all()
return results
@router.put("/{role_id}/activate")
async def activate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = select(Role).where(Role.id == role_id)
role_to_activate = session.exec(statement).one()
role_to_activate.is_active = True
role_to_activate.updated_at = datetime.now()
session.add(role_to_activate)
session.commit()
session.refresh(role_to_activate)
return role_to_activate
# Deactivate role
@router.put("/{role_id}/deactivate")
async def deactivate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = select(Role).where(Role.id == role_id)
role_to_deactivate = session.exec(statement).one()
role_to_deactivate.is_active = False
role_to_deactivate.updated_at = datetime.now()
session.add(role_to_deactivate)
session.commit()
session.refresh(role_to_deactivate)
return role_to_deactivate
# Update role
@router.put("/")
async def update_role(
id: str = None,
new_name: str = None,
new_short_name: str = None,
is_active: bool = None,
session: Session = Depends(get_session),
):
statement = select(Role.is_active).where(Role.id == id)
result = session.exec(statement).first()
if result == True:
statement = | select(Role) | sqlmodel.select |
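# --- Illustrative usage sketch (assumption, not from the source): exercising the role router above with
# FastAPI's TestClient; the app wiring and the Role payload fields are hypothetical.
# from fastapi.testclient import TestClient
# client = TestClient(app)  # app is assumed to have called app.include_router(router)
# client.post("/api/roles/", json={"id": "dev", "name": "Developer", "is_active": True})
# client.get("/api/roles/active")
# client.put("/api/roles/dev/deactivate")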
from sqlmodel import Session, select
from config.Database import Database
class UserDatabase(Database):
def __init__(self) -> None:
super(UserDatabase, self).__init__()
async def get_by_params(self, object: object, email: str):
with | Session(self._engine) | sqlmodel.Session |
from sqlmodel import Session, select
from config.Database import Database
class UserDatabase(Database):
def __init__(self) -> None:
super(UserDatabase, self).__init__()
async def get_by_params(self, object: object, email: str):
with Session(self._engine) as session:
statement = | select(object) | sqlmodel.select |
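# --- Illustrative continuation (assumption): the statement above would typically be filtered on the
# email argument and executed, e.g.
#     return session.exec(select(object).where(object.email == email)).first()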
"""empty message
Revision ID: de316f0831f9
Revises: 6<PASSWORD>1e462e9
Create Date: 2021-11-19 23:38:39.754126
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "de316f0831f9"
down_revision = "60f151e462e9"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
sale_type = postgresql.ENUM(
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
"SALE_IN_TRANSFER",
"SALE_IN_BILLET",
"SALE_OTHERS",
name="saletype",
)
sale_type.create(op.get_bind())
op.add_column(
"orders",
sa.Column(
"sale_type",
sa.Enum(
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
"SALE_IN_TRANSFER",
"SALE_IN_BILLET",
"SALE_OTHERS",
name="saletype",
),
nullable=True,
),
)
op.drop_index("ix_balance_operation", table_name="balance")
op.drop_index("ix_balance_type", table_name="balance")
op.drop_column("balance", "type")
op.alter_column("clients", "email", existing_type=sa.VARCHAR(), nullable=False)
op.alter_column("clients", "phone", existing_type=sa.VARCHAR(), nullable=False)
op.alter_column("clients", "owner_id", existing_type=postgresql.UUID(), nullable=False)
op.alter_column("orders", "owner_id", existing_type=postgresql.UUID(), nullable=False)
op.add_column("clients", sa.Column("zip_code", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""empty message
Revision ID: de316f0831f9
Revises: 6<PASSWORD>1e462e9
Create Date: 2021-11-19 23:38:39.754126
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "de316f0831f9"
down_revision = "60f151e462e9"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
sale_type = postgresql.ENUM(
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
"SALE_IN_TRANSFER",
"SALE_IN_BILLET",
"SALE_OTHERS",
name="saletype",
)
sale_type.create(op.get_bind())
op.add_column(
"orders",
sa.Column(
"sale_type",
sa.Enum(
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
"SALE_IN_TRANSFER",
"SALE_IN_BILLET",
"SALE_OTHERS",
name="saletype",
),
nullable=True,
),
)
op.drop_index("ix_balance_operation", table_name="balance")
op.drop_index("ix_balance_type", table_name="balance")
op.drop_column("balance", "type")
op.alter_column("clients", "email", existing_type=sa.VARCHAR(), nullable=False)
op.alter_column("clients", "phone", existing_type=sa.VARCHAR(), nullable=False)
op.alter_column("clients", "owner_id", existing_type=postgresql.UUID(), nullable=False)
op.alter_column("orders", "owner_id", existing_type=postgresql.UUID(), nullable=False)
op.add_column("clients", sa.Column("zip_code", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
op.add_column("clients", sa.Column("address", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
import scrapy
from imdb_rating.dependencies.models import Movie
from pydantic import ValidationError
from scrapy.crawler import CrawlerProcess
from sqlmodel import Session, select
class IMDBSpider(scrapy.Spider):
name = "imdb"
custom_settings = {"FEED_EXPORT_ENCODING": "utf-8"}
def start_requests(self):
"""
This method is called by Scrapy to start the crawl.
"""
self.start = self.start.strftime("%Y-%m-%d")
self.end = self.end.strftime("%Y-%m-%d")
yield scrapy.Request(
url=f"https://www.imdb.com/search/title/?title_type=feature&year={self.start},{self.end}&start=1",
callback=self.parse,
)
def parse(self, response):
"""
This method is called by Scrapy to parse the response.
Parameters
----------
response : scrapy.http.Response
The response from the server.
Yields
------
scrapy.http.Request
The next request to be crawled.
"""
for film in response.xpath('//*[@id="main"]/div/div[3]/div/div'):
try:
title = film.xpath(".//div[3]/h3/a/text()").get()
except:
title = None
try:
year = (
film.xpath(".//div[3]/h3/span[2]/text()")
.get()
.split(" ")[-1]
.replace("(", "")
.replace(")", "")
)
except:
year = None
try:
rating = film.xpath(".//div[3]/div/div/strong/text()").get()
except:
rating = None
try:
duration = film.css("span.runtime::text").get().replace(" min", "")
except:
duration = None
try:
votes = film.css(".//div[3]/p[4]/span[2]/@data-value").get()
except:
votes = None
try:
genres = film.css("span.genre::text").get().split(", ")
genres = [genre.strip() for genre in genres]
genres.extend([None for _ in range(3 - len(genres))])
genre1, genre2, genre3 = genres[:3]
except:
genre1, genre2, genre3 = None, None, None
try:
certificate = film.css("span.certificate::text").get()
except:
certificate = None
try:
synopsis = film.xpath(".//div[3]/p[2]/text()").get().strip()
except:
synopsis = None
try:
image = film.xpath(".//div[2]/a/img/@loadlate").get().split("._V1_")[0]
except:
image = None
try:
cast = film.xpath(".//div[3]/p[3]/*/text()").getall()
split = cast.index("|")
directors = cast[:split]
directors.extend([None for _ in range(3 - len(directors))])
director1, director2, director3 = directors[:3]
actors = cast[split + 1 :]
actors.extend([None for _ in range(3 - len(actors))])
actor1, actor2, actor3 = actors[:3]
except:
actor1, actor2, actor3 = None, None, None
director1, director2, director3 = None, None, None
try:
movie = Movie.validate(
dict(
title=title,
year=year,
actual_rating=rating,
votes=votes,
duration=duration,
certificate=certificate,
synopsis=synopsis,
image=image,
actor1=actor1,
actor2=actor2,
actor3=actor3,
director1=director1,
director2=director2,
director3=director3,
genre1=genre1,
genre2=genre2,
genre3=genre3,
)
)
with | Session(self.engine) | sqlmodel.Session |
import scrapy
from imdb_rating.dependencies.models import Movie
from pydantic import ValidationError
from scrapy.crawler import CrawlerProcess
from sqlmodel import Session, select
class IMDBSpider(scrapy.Spider):
name = "imdb"
custom_settings = {"FEED_EXPORT_ENCODING": "utf-8"}
def start_requests(self):
"""
This method is called by Scrapy to start the crawl.
"""
self.start = self.start.strftime("%Y-%m-%d")
self.end = self.end.strftime("%Y-%m-%d")
yield scrapy.Request(
url=f"https://www.imdb.com/search/title/?title_type=feature&year={self.start},{self.end}&start=1",
callback=self.parse,
)
def parse(self, response):
"""
This method is called by Scrapy to parse the response.
Parameters
----------
response : scrapy.http.Response
The response from the server.
Yields
------
scrapy.http.Request
The next request to be crawled.
"""
for film in response.xpath('//*[@id="main"]/div/div[3]/div/div'):
try:
title = film.xpath(".//div[3]/h3/a/text()").get()
except:
title = None
try:
year = (
film.xpath(".//div[3]/h3/span[2]/text()")
.get()
.split(" ")[-1]
.replace("(", "")
.replace(")", "")
)
except:
year = None
try:
rating = film.xpath(".//div[3]/div/div/strong/text()").get()
except:
rating = None
try:
duration = film.css("span.runtime::text").get().replace(" min", "")
except:
duration = None
try:
votes = film.css(".//div[3]/p[4]/span[2]/@data-value").get()
except:
votes = None
try:
genres = film.css("span.genre::text").get().split(", ")
genres = [genre.strip() for genre in genres]
genres.extend([None for _ in range(3 - len(genres))])
genre1, genre2, genre3 = genres[:3]
except:
genre1, genre2, genre3 = None, None, None
try:
certificate = film.css("span.certificate::text").get()
except:
certificate = None
try:
synopsis = film.xpath(".//div[3]/p[2]/text()").get().strip()
except:
synopsis = None
try:
image = film.xpath(".//div[2]/a/img/@loadlate").get().split("._V1_")[0]
except:
image = None
try:
cast = film.xpath(".//div[3]/p[3]/*/text()").getall()
split = cast.index("|")
directors = cast[:split]
directors.extend([None for _ in range(3 - len(directors))])
director1, director2, director3 = directors[:3]
actors = cast[split + 1 :]
actors.extend([None for _ in range(3 - len(actors))])
actor1, actor2, actor3 = actors[:3]
except:
actor1, actor2, actor3 = None, None, None
director1, director2, director3 = None, None, None
try:
movie = Movie.validate(
dict(
title=title,
year=year,
actual_rating=rating,
votes=votes,
duration=duration,
certificate=certificate,
synopsis=synopsis,
image=image,
actor1=actor1,
actor2=actor2,
actor3=actor3,
director1=director1,
director2=director2,
director3=director3,
genre1=genre1,
genre2=genre2,
genre3=genre3,
)
)
with Session(self.engine) as session:
statement = | select(Movie) | sqlmodel.select |
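# --- Illustrative usage sketch (assumption, not from the source): the CrawlerProcess import above
# suggests the spider is driven programmatically, roughly like this (dates and engine are hypothetical):
# from datetime import date
# process = CrawlerProcess()
# process.crawl(IMDBSpider, start=date(2021, 1, 1), end=date(2021, 12, 31), engine=engine)
# process.start()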
from fastapi import Depends, Response
from fastapi.routing import APIRouter
from pydantic import BaseModel # pylint: disable=E0611
from sqlmodel import Session, select
from starlette.responses import JSONResponse
from fastapi_server.database.database import get_session
from fastapi_server.models.user import User
login_router = APIRouter()
class LoginModel(BaseModel):
email: str
password: str
# TODO: Replace /login endpoint when Response is available in strawberry query info-context
@login_router.post('/login')
async def login(login_data: LoginModel, session: Session = Depends(get_session)) -> Response:
statement = | select(User) | sqlmodel.select |
from sqlmodel import Session
from sfm.database import engine
from sfm.config import get_settings
from sfm.utils import verify_api_auth_token
from fastapi import Depends, HTTPException
from fastapi.security import HTTPBearer, HTTPBasicCredentials
from passlib.context import CryptContext
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
app_settings = get_settings()
security = HTTPBearer()
def get_db(): # pragma: no cover
db = | Session(engine) | sqlmodel.Session |
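# --- Illustrative continuation (assumption): a session dependency like get_db() typically yields the
# session and closes it when the request is done, e.g.
#     try:
#         yield db
#     finally:
#         db.close()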
import asyncio
import logging
import os
import time
from datetime import datetime
from sqlmodel import Session, SQLModel, select
from starlette.concurrency import run_in_threadpool
from ..datatypes import ArtmuseumTimeLabel
from ..scraping.artmuseum import scrap_artmuseum
from ..scraping.philharmonia import scrap_philharmonia
from .models import ArtmuseumExhibition, PhilharmoniaConcert
def refresh_data(engine):
"""
Scrape all the data sources for up-to-date info. Drop local values and replace them with the new data.
We are trying to be an exact mirror of our data sources.
The easiest way to achieve this is to regularly throw out all the data we have and scrape up-to-date info.
The cost of this approach in performance/resources is negligible, and it is much preferred over the complications
brought by trying to maintain a local copy by continuously patching it up with UPDATEs.
(there can be edits in the source info, URLs can change, etc. - it's not worth it to consider all such corner cases)
"""
logging.info("Started scraping up-to-date info.")
known_addrs = {}
with Session(engine) as session:
stmt = select(ArtmuseumExhibition.url, ArtmuseumExhibition.address).where(
ArtmuseumExhibition.address != None
)
known_addrs = dict(session.exec(stmt).all())
exhibitions = scrap_artmuseum(known_addrs)
concerts = scrap_philharmonia()
logging.info("Finished scraping up-to-date info.")
logging.info("Started updating the database.")
with Session(engine) as session:
session.query(PhilharmoniaConcert).delete()
session.query(ArtmuseumExhibition).delete()
session.bulk_save_objects(concerts)
session.bulk_save_objects(exhibitions)
session.commit()
logging.info("Finished updating the database.")
async def loop_refreshing_data(engine, update_interval, initial_sleep_time: int = 0):
if initial_sleep_time > 0:
await asyncio.sleep(initial_sleep_time)
while True:
await run_in_threadpool(refresh_data, engine)
await asyncio.sleep(update_interval)
def init_db(engine):
update_interval = 60 * 60 * 8 # 8 hours
initial_sleep_time = 0
if os.path.isfile(engine.url.database):
last_modified = os.path.getmtime(engine.url.database)
dt = time.time() - last_modified
if dt <= update_interval:
initial_sleep_time = update_interval - dt
last_update = datetime.fromtimestamp(last_modified).replace(microsecond=0)
logging.info(
f"Last database update - {last_update}, the next one is scheduled in ...[N]h [N]m.... (at h:m)"
)
| SQLModel.metadata.create_all(engine) | sqlmodel.SQLModel.metadata.create_all |
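# --- Illustrative usage sketch (assumption, not from the source): loop_refreshing_data() above is an
# async loop, so it would typically be scheduled as a background task at application startup, e.g.
# async def on_startup():
#     asyncio.create_task(
#         loop_refreshing_data(engine, update_interval=60 * 60 * 8, initial_sleep_time=0)
#     )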
import asyncio
import logging
import os
import time
from datetime import datetime
from sqlmodel import Session, SQLModel, select
from starlette.concurrency import run_in_threadpool
from ..datatypes import ArtmuseumTimeLabel
from ..scraping.artmuseum import scrap_artmuseum
from ..scraping.philharmonia import scrap_philharmonia
from .models import ArtmuseumExhibition, PhilharmoniaConcert
def refresh_data(engine):
"""
Scrape all the data sources for up-to-date info. Drop local values and replace them with the new data.
We are trying to be an exact mirror of our data sources.
The easiest way to achieve this is to regularly throw out all the data we have and scrape up-to-date info.
The cost of this approach in performance/resources is negligible, and it is much preferred over the complications
brought by trying to maintain a local copy by continuously patching it up with UPDATEs.
(there can be edits in the source info, URLs can change, etc. - it's not worth it to consider all such corner cases)
"""
logging.info("Started scraping up-to-date info.")
known_addrs = {}
with | Session(engine) | sqlmodel.Session |
import asyncio
import logging
import os
import time
from datetime import datetime
from sqlmodel import Session, SQLModel, select
from starlette.concurrency import run_in_threadpool
from ..datatypes import ArtmuseumTimeLabel
from ..scraping.artmuseum import scrap_artmuseum
from ..scraping.philharmonia import scrap_philharmonia
from .models import ArtmuseumExhibition, PhilharmoniaConcert
def refresh_data(engine):
"""
Scrape all the data sources for up-to-date info. Drop local values and replace them with the new data.
We are trying to be an exact mirror of our data sources.
The easiest way to achieve this is to regularly throw out all the data we have and scrape up-to-date info.
The cost of this approach in performance/resources is negligible, and it is much preferred over the complications
brought by trying to maintain a local copy by continuously patching it up with UPDATEs.
(there can be edits in the source info, URLs can change, etc. - it's not worth it to consider all such corner cases)
"""
logging.info("Started scraping up-to-date info.")
known_addrs = {}
with Session(engine) as session:
stmt = select(ArtmuseumExhibition.url, ArtmuseumExhibition.address).where(
ArtmuseumExhibition.address != None
)
known_addrs = dict(session.exec(stmt).all())
exhibitions = scrap_artmuseum(known_addrs)
concerts = scrap_philharmonia()
logging.info("Finished scraping up-to-date info.")
logging.info("Started updating the database.")
with | Session(engine) | sqlmodel.Session |
import asyncio
import logging
import os
import time
from datetime import datetime
from sqlmodel import Session, SQLModel, select
from starlette.concurrency import run_in_threadpool
from ..datatypes import ArtmuseumTimeLabel
from ..scraping.artmuseum import scrap_artmuseum
from ..scraping.philharmonia import scrap_philharmonia
from .models import ArtmuseumExhibition, PhilharmoniaConcert
def refresh_data(engine):
"""
Scrape all the data sources for up-to-date info. Drop local values and replace them with the new data.
We are trying to be an exact mirror of our data sources.
The easiest way to achieve this is to regularly throw out all the data we have and scrape up-to-date info.
The cost of this approach in performance/resources is negligible, and it is much preferred over the complications
brought by trying to maintain a local copy by continuously patching it up with UPDATEs.
(there can be edits in the source info, URLs can change, etc. - it's not worth it to consider all such corner cases)
"""
logging.info("Started scraping up-to-date info.")
known_addrs = {}
with Session(engine) as session:
stmt = | select(ArtmuseumExhibition.url, ArtmuseumExhibition.address) | sqlmodel.select |
import pytest
from fastapi.testclient import TestClient
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.pool import StaticPool
from api.main import app, get_session
from api.models import Measurement, Observer
@pytest.fixture(name="session")
def session_fixture():
engine = create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
)
| SQLModel.metadata.create_all(engine) | sqlmodel.SQLModel.metadata.create_all |
import pytest
from fastapi.testclient import TestClient
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.pool import StaticPool
from api.main import app, get_session
from api.models import Measurement, Observer
@pytest.fixture(name="session")
def session_fixture():
engine = create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
)
SQLModel.metadata.create_all(engine)
with | Session(engine) | sqlmodel.Session |
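# --- Illustrative sketch (assumption, not from the source): session fixtures like the one above are
# usually paired with a client fixture that overrides the app's get_session dependency.
@pytest.fixture(name="client")
def client_fixture(session: Session):
    app.dependency_overrides[get_session] = lambda: session
    client = TestClient(app)
    yield client
    app.dependency_overrides.clear()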
"""Add countries
Revision ID: <KEY>
Revises: 423e059e8b64
Create Date: 2022-02-12 07:51:13.003045+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "423e059e8b64"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"countries",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = | Field() | sqlmodel.Field |
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = Field()
access_token: str = | Field() | sqlmodel.Field |
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = Field()
access_token: str = Field()
refresh_token: Optional[str] = | Field(None, nullable=True) | sqlmodel.Field |
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = Field()
access_token: str = Field()
refresh_token: Optional[str] = Field(None, nullable=True)
expires_at: Optional[int] = | Field(None, nullable=True) | sqlmodel.Field |
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = Field()
access_token: str = Field()
refresh_token: Optional[str] = Field(None, nullable=True)
expires_at: Optional[int] = Field(None, nullable=True)
account_id: str = | Field(index=True) | sqlmodel.Field |
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = Field()
access_token: str = Field()
refresh_token: Optional[str] = Field(None, nullable=True)
expires_at: Optional[int] = Field(None, nullable=True)
account_id: str = Field(index=True)
account_name: Optional[str] = | Field(None, index=True, nullable=True) | sqlmodel.Field |
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = Field()
access_token: str = Field()
refresh_token: Optional[str] = Field(None, nullable=True)
expires_at: Optional[int] = Field(None, nullable=True)
account_id: str = Field(index=True)
account_name: Optional[str] = Field(None, index=True, nullable=True)
account_email: str = | Field(index=True) | sqlmodel.Field |
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = Field()
access_token: str = Field()
refresh_token: Optional[str] = Field(None, nullable=True)
expires_at: Optional[int] = Field(None, nullable=True)
account_id: str = Field(index=True)
account_name: Optional[str] = Field(None, index=True, nullable=True)
account_email: str = Field(index=True)
user_id: Optional[UUID] = Field(
sa_column=Column(GUID, ForeignKey("users.id", ondelete="CASCADE"))
)
user: Optional["User"] = | Relationship(back_populates="oauth_accounts") | sqlmodel.Relationship |
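The Relationship above declares back_populates="oauth_accounts", but the matching attribute on the User model never appears in these rows. A sketch of what that counterpart typically looks like; the User body shown here is an assumption, not the project's actual model:

from typing import List  # not imported in the row above; needed only for this sketch

class User(BaseORMModel, table=True):  # illustrative counterpart only
    __tablename__ = "users"
    oauth_accounts: List["UserOAuthAccount"] = Relationship(back_populates="user")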
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = Field()
access_token: str = Field()
refresh_token: Optional[str] = Field(None, nullable=True)
expires_at: Optional[int] = Field(None, nullable=True)
account_id: str = Field(index=True)
account_name: Optional[str] = Field(None, index=True, nullable=True)
account_email: str = Field(index=True)
user_id: Optional[UUID] = Field(
sa_column=Column(GUID, ForeignKey("users.id", ondelete="CASCADE"))
)
user: Optional["User"] = Relationship(back_populates="oauth_accounts")
@staticmethod
async def create_or_update(
oauth_name: str, token: Dict[str, Any], profile: OAuth2Profile
) -> "UserOAuthAccount":
access_token = token["access_token"]
refresh_token = token.get("refresh_token", None)
expires_at = token.get("expires_at", None)
async with db_session() as session:
statement = (
| select(UserOAuthAccount) | sqlmodel.select |
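The create_or_update row breaks off inside the select call. One plausible continuation of the lookup-then-upsert flow is sketched here; the .where filters, the OAuth2Profile attribute names, and the commit sequence are assumptions layered on the visible signature, not the project's actual code:

            statement = (
                select(UserOAuthAccount)
                .where(UserOAuthAccount.oauth_name == oauth_name)
                .where(UserOAuthAccount.account_id == profile.account_id)  # profile fields assumed
            )
            result = await session.execute(statement)
            oauth_account = result.scalars().first()
            if oauth_account is None:
                # No existing link for this provider/account: create one.
                oauth_account = UserOAuthAccount(
                    oauth_name=oauth_name,
                    account_id=profile.account_id,
                    account_email=profile.account_email,  # assumed attribute
                )
            oauth_account.access_token = access_token
            oauth_account.refresh_token = refresh_token
            oauth_account.expires_at = expires_at
            session.add(oauth_account)
            await session.commit()
            return oauth_account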
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from ..models.capacity import Capacity
from sqlmodel import Session, select, SQLModel, and_
from sqlalchemy.exc import NoResultFound
from ..models.user import User
from ..models.team import Team
router = APIRouter(prefix="/api/capacities", tags=["capacity"])
session = | Session(engine) | sqlmodel.Session |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from ..models.capacity import Capacity
from sqlmodel import Session, select, SQLModel, and_
from sqlalchemy.exc import NoResultFound
from ..models.user import User
from ..models.team import Team
router = APIRouter(prefix="/api/capacities", tags=["capacity"])
session = Session(engine)
@router.post("/")
async def post_capacity(*, capacity: Capacity, session: Session = Depends(get_session)):
"""
Post new capacity.
Parameters
----------
capacity : Capacity
Capacity that is to be added to the database.
session : Session
SQL session that is to be used to add the capacity.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Capacity).where(
and_(
Capacity.user_id == capacity.user_id,
Capacity.team_id == capacity.team_id,
            Capacity.year == capacity.year,
Capacity.month == capacity.month,
)
)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(capacity)
session.commit()
session.refresh(capacity)
return capacity
@router.get("/")
async def get_capacities(
session: Session = Depends(get_session),
is_locked: bool = None,
user_id: int = None,
team_id: int = None,
month: int = None,
year: int = None,
):
"""
Get list of all capacities.
Parameters
----------
session : Session
        SQL session that is to be used to get a list of the capacities.
Defaults to creating a dependency on the running SQL model session.
is_locked : bool
        Whether or not the capacity is locked.
user_id : int
User id of the user in question.
team_id : int
Team id of the user's team.
month : int
Month of capacity in question.
year : int
Year of capacity in question.
"""
statement = | select(Capacity) | sqlmodel.select |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from ..models.capacity import Capacity
from sqlmodel import Session, select, SQLModel, and_
from sqlalchemy.exc import NoResultFound
from ..models.user import User
from ..models.team import Team
router = APIRouter(prefix="/api/capacities", tags=["capacity"])
session = Session(engine)
@router.post("/")
async def post_capacity(*, capacity: Capacity, session: Session = Depends(get_session)):
"""
Post new capacity.
Parameters
----------
capacity : Capacity
Capacity that is to be added to the database.
session : Session
SQL session that is to be used to add the capacity.
Defaults to creating a dependency on the running SQL model session.
"""
statement = | select(Capacity) | sqlmodel.select |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from ..models.capacity import Capacity
from sqlmodel import Session, select, SQLModel, and_
from sqlalchemy.exc import NoResultFound
from ..models.user import User
from ..models.team import Team
router = APIRouter(prefix="/api/capacities", tags=["capacity"])
session = Session(engine)
@router.post("/")
async def post_capacity(*, capacity: Capacity, session: Session = Depends(get_session)):
"""
Post new capacity.
Parameters
----------
capacity : Capacity
Capacity that is to be added to the database.
session : Session
SQL session that is to be used to add the capacity.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Capacity).where(
and_(
Capacity.user_id == capacity.user_id,
Capacity.team_id == capacity.team_id,
            Capacity.year == capacity.year,
Capacity.month == capacity.month,
)
)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(capacity)
session.commit()
session.refresh(capacity)
return capacity
@router.get("/")
async def get_capacities(
session: Session = Depends(get_session),
is_locked: bool = None,
user_id: int = None,
team_id: int = None,
month: int = None,
year: int = None,
):
"""
Get list of all capacities.
Parameters
----------
session : Session
        SQL session that is to be used to get a list of the capacities.
Defaults to creating a dependency on the running SQL model session.
is_locked : bool
        Whether or not the capacity is locked.
user_id : int
User id of the user in question.
team_id : int
Team id of the user's team.
month : int
Month of capacity in question.
year : int
Year of capacity in question.
"""
statement = select(Capacity)
# Select capacity by user_id, team_id, month, year
    if None not in (user_id, team_id, month, year):
statement = (
select(
Capacity.id.label("capacity_id"),
User.short_name.label("user_short_name"),
Team.short_name.label("team_short_name"),
Capacity.year,
Capacity.month,
Capacity.days,
)
.select_from(Capacity)
.join(User, Capacity.user_id == User.id)
.join(Team, Capacity.team_id == Team.id)
.where(Capacity.user_id == user_id)
.where(Capacity.team_id == team_id)
.where(Capacity.month == month)
.where(Capacity.year == year)
)
result = session.exec(statement).all()
return result
@router.delete("/")
async def delete_capacities(
capacity_id: str = None,
session: Session = Depends(get_session),
):
"""
Delete a capacity
Parameters
----------
capacity_id : str
ID of the capacity that is to be removed from the database.
session : Session
SQL session that is to be used to delete the capacity.
Defaults to creating a dependency on the running SQL model session.
"""
statement = | select(Capacity) | sqlmodel.select |
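The delete endpoint is cut off right after the base select, and its body does not appear in any other row here. A plausible completion, offered only as a sketch (the Capacity.id filter, the .one() call, and the return value are assumptions):

    statement = select(Capacity).where(Capacity.id == capacity_id)
    capacity_to_delete = session.exec(statement).one()
    session.delete(capacity_to_delete)
    session.commit()
    return True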
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with | Session(engine) | sqlmodel.Session |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
                typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
                    f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
                    f'\ndue date must be greater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with | Session(engine) | sqlmodel.Session |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
                typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
                    f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
                    f'\ndue date must be greater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
task.project = new_project
session.add(task)
edit = typer.confirm(f"""Are you sure you want to edit:
{tasks}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_task(id: str):
"""Delete task"""
try:
with Session(engine) as session:
task = session.get(ToDo, id)
timers = session.exec(select(Timer).where(
Timer.id_todo == task.id)).all()
for timer in timers:
session.delete(timer)
session.delete(task)
edit = typer.confirm(f"""Are you sure you want to delete:
{task}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Deleting it!",
fg=typer.colors.RED)
session.commit()
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_project(project: str):
"""Delete all tasks from a project"""
with | Session(engine) | sqlmodel.Session |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
                typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
                    f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
                    f'\ndue date must be greater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
task.project = new_project
session.add(task)
edit = typer.confirm(f"""Are you sure you want to edit:
{tasks}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_task(id: str):
"""Delete task"""
try:
with Session(engine) as session:
task = session.get(ToDo, id)
timers = session.exec(select(Timer).where(
Timer.id_todo == task.id)).all()
for timer in timers:
session.delete(timer)
session.delete(task)
edit = typer.confirm(f"""Are you sure you want to delete:
{task}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Deleting it!",
fg=typer.colors.RED)
session.commit()
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_project(project: str):
"""Delete all tasks from a project"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
timers = session.exec(select(Timer).where(
Timer.id_todo == task.id)).all()
                for timer in timers:
                    session.delete(timer)
                session.delete(task)
edit = typer.confirm(f"""Are you sure you want to delete:
{tasks}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("deleting it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def timer(id: int,
end: datetime = typer.Option('', formats=['%Y-%m-%d %H:%M:%S'])):
"""Edit record from Timer"""
with | Session(engine) | sqlmodel.Session |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
                typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
                    f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
                    f'\ndue date must be greater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
task.project = new_project
session.add(task)
edit = typer.confirm(f"""Are you sure you want to edit:
{tasks}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_task(id: str):
"""Delete task"""
try:
with Session(engine) as session:
task = session.get(ToDo, id)
timers = session.exec(select(Timer).where(
Timer.id_todo == task.id)).all()
for timer in timers:
session.delete(timer)
session.delete(task)
edit = typer.confirm(f"""Are you sure you want to delete:
{task}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Deleting it!",
fg=typer.colors.RED)
session.commit()
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_project(project: str):
"""Delete all tasks from a project"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
timers = session.exec(select(Timer).where(
Timer.id_todo == task.id)).all()
                for timer in timers:
                    session.delete(timer)
                session.delete(task)
edit = typer.confirm(f"""Are you sure you want to delete:
{tasks}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("deleting it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def timer(id: int,
end: datetime = typer.Option('', formats=['%Y-%m-%d %H:%M:%S'])):
"""Edit record from Timer"""
with Session(engine) as session:
try:
query = session.get(Timer, id)
if end <= query.start:
typer.secho(
f'\nEnd must be >= {query.start}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if end >= datetime.now():
typer.secho(
f'\nEnd must be < {datetime.now()}'
)
raise typer.Exit(code=1)
query.end = end
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid timer id\n',
fg=typer.colors.RED)
@app.command()
def del_timer(id: int):
"""Delete record from Timer"""
with | Session(engine) | sqlmodel.Session |
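del_timer is cut at its completion point and its body is not visible in any of these rows. Mirroring the confirm-then-delete pattern of del_task above, a sketch of how it might continue; the exact messages and error handling are assumptions:

    with Session(engine) as session:
        try:
            timer = session.get(Timer, id)
            delete = typer.confirm(f"Are you sure you want to delete:\n{timer}")
            if not delete:
                typer.secho("Not deleting", fg=typer.colors.RED)
                raise typer.Abort()
            session.delete(timer)
            session.commit()
            typer.secho("Deleting it!", fg=typer.colors.RED)
        except UnmappedInstanceError:
            typer.secho('\nInvalid timer id\n', fg=typer.colors.RED)
            raise typer.Exit(code=1)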
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
                typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
                    f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
                    f'\ndue date must be greater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
task.project = new_project
session.add(task)
edit = typer.confirm(f"""Are you sure you want to edit:
{tasks}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_task(id: str):
"""Delete task"""
try:
with | Session(engine) | sqlmodel.Session |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
                typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
                    f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
                    f'\ndue date must be greater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with Session(engine) as session:
tasks = session.exec( | select(ToDo) | sqlmodel.select |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
                typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
                    f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
                    f'\ndue date must be greater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
task.project = new_project
session.add(task)
edit = typer.confirm(f"""Are you sure you want to edit:
{tasks}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_task(id: str):
"""Delete task"""
try:
with Session(engine) as session:
task = session.get(ToDo, id)
timers = session.exec(select(Timer).where(
Timer.id_todo == task.id)).all()
for timer in timers:
session.delete(timer)
session.delete(task)
edit = typer.confirm(f"""Are you sure you want to delete:
{task}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Deleting it!",
fg=typer.colors.RED)
session.commit()
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_project(project: str):
"""Delete all tasks from a project"""
with Session(engine) as session:
tasks = session.exec( | select(ToDo) | sqlmodel.select |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
                typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
                    f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
                    f'\ndue date must be greater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
task.project = new_project
session.add(task)
edit = typer.confirm(f"""Are you sure you want to edit:
{tasks}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_task(id: str):
"""Delete task"""
try:
with Session(engine) as session:
task = session.get(ToDo, id)
timers = session.exec( | select(Timer) | sqlmodel.select |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
                typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
                    f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
                    f'\ndue date must be greater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
task.project = new_project
session.add(task)
edit = typer.confirm(f"""Are you sure you want to edit:
{tasks}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_task(id: str):
"""Delete task"""
try:
with Session(engine) as session:
task = session.get(ToDo, id)
timers = session.exec(select(Timer).where(
Timer.id_todo == task.id)).all()
for timer in timers:
session.delete(timer)
session.delete(task)
edit = typer.confirm(f"""Are you sure you want to delete:
{task}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Deleting it!",
fg=typer.colors.RED)
session.commit()
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_project(project: str):
"""Delete all tasks from a project"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
timers = session.exec( | select(Timer) | sqlmodel.select |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec( | select(Timer) | sqlmodel.select |
from datetime import datetime
from sqlmodel import Session, SQLModel, create_engine, text
import sqlite3
database_loc = "backend/database.sqlite"
con_str = f"sqlite:///{database_loc}"
engine = | create_engine(con_str, echo=True) | sqlmodel.create_engine |
from datetime import datetime
from sqlmodel import Session, SQLModel, create_engine, text
import sqlite3
database_loc = "backend/database.sqlite"
con_str = f"sqlite:///{database_loc}"
engine = create_engine(con_str, echo=True)
sqlite3_engine = sqlite3.connect(f"{database_loc}")
def get_session():
session = Session(engine)
return session
def create_db():
SQLModel.metadata.create_all(engine)
def execute_sample_sql(session):
"""Read sample sql database and import it."""
with open("backend/tests/sample.sql") as f:
content = f.read()
queries = filter(None, content.split(";\n"))
queries = [text(query) for query in queries]
for query in queries:
session.exec(query)
session.commit()
session.expire_all()
session = | Session(engine) | sqlmodel.Session |
from datetime import datetime
from sqlmodel import Session, SQLModel, create_engine, text
import sqlite3
database_loc = "backend/database.sqlite"
con_str = f"sqlite:///{database_loc}"
engine = create_engine(con_str, echo=True)
sqlite3_engine = sqlite3.connect(f"{database_loc}")
def get_session():
session = | Session(engine) | sqlmodel.Session |
from datetime import datetime
from sqlmodel import Session, SQLModel, create_engine, text
import sqlite3
database_loc = "backend/database.sqlite"
con_str = f"sqlite:///{database_loc}"
engine = create_engine(con_str, echo=True)
sqlite3_engine = sqlite3.connect(f"{database_loc}")
def get_session():
session = Session(engine)
return session
def create_db():
| SQLModel.metadata.create_all(engine) | sqlmodel.SQLModel.metadata.create_all |
from datetime import datetime
from sqlmodel import Session, SQLModel, create_engine, text
import sqlite3
database_loc = "backend/database.sqlite"
con_str = f"sqlite:///{database_loc}"
engine = create_engine(con_str, echo=True)
sqlite3_engine = sqlite3.connect(f"{database_loc}")
def get_session():
session = Session(engine)
return session
def create_db():
SQLModel.metadata.create_all(engine)
def execute_sample_sql(session):
"""Read sample sql database and import it."""
with open("backend/tests/sample.sql") as f:
content = f.read()
queries = filter(None, content.split(";\n"))
queries = [ | text(query) | sqlmodel.text |
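An earlier row already shows the rest of this helper (executing each query and committing). As a usage note, a short sketch of how the pieces defined in this module might be driven together when seeding a local database; the call sequence is an assumption, not part of the source:

# Hypothetical usage: rebuild the schema, then seed it from the sample dump.
if __name__ == "__main__":
    create_db()
    with Session(engine) as session:
        execute_sample_sql(session)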
import os
import pathlib
from datetime import datetime, date, time
from decimal import Decimal
from typing import Optional, List
from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile
from fastapi.encoders import jsonable_encoder
from sqlmodel import Field, SQLModel
from ..db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
cert_path = '/uploads/user/{user_id}/cert/'
avatar_path = '/uploads/user/{user_id}/avatar/'
class User(SQLModel, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
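The final row stops at the primary-key field. Since this router pairs SQLModel tables with SQLAlchemy's async session, a compact sketch of how a read endpoint could be built from the imports shown above; the route path, the get_session behaviour, and the 404 handling are assumptions:

# Hypothetical read endpoint assembled from the row's imports; illustrative only.
@router.get("/users/{user_id}")
async def read_user(user_id: int, session: AsyncSession = Depends(get_session)):
    result = await session.execute(select(User).where(User.id == user_id))
    user = result.scalars().first()
    if user is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    return jsonable_encoder(user)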